package org.jboss.as.controller; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.IdentityHashMap; import java.util.Iterator; import java.util.Map; import java.util.Set; import java.util.TreeMap; import java.util.concurrent.atomic.AtomicInteger; import org.jboss.msc.service.AbstractServiceListener; import org.jboss.msc.service.ServiceController; import org.jboss.msc.service.ServiceName; import org.jboss.msc.service.ServiceRegistry; import static org.jboss.as.controller.ControllerLogger.ROOT_LOGGER; import static org.jboss.as.controller.ControllerMessages.MESSAGES; /** * @author <a href="mailto:david.lloyd@redhat.com">David M. Lloyd</a> */ public final class ContainerStateMonitor extends AbstractServiceListener<Object> { private final ServiceRegistry serviceRegistry; private final ServiceController<?> controllerController; private final AtomicInteger busyServiceCount = new AtomicInteger(); // protected by "this" /** Failed controllers pending tick reaching zero */ private final Map<ServiceController<?>, String> failedControllers = new IdentityHashMap<ServiceController<?>, String>(); /** Services with missing deps */ private final Set<ServiceController<?>> servicesWithMissingDeps = identitySet(); /** Services with missing deps as of the last time tick reached zero */ private Set<ServiceName> previousMissingDepSet = new HashSet<ServiceName>(); ContainerStateMonitor(final ServiceRegistry registry, final ServiceController<?> controller) { serviceRegistry = registry; controllerController = controller; } void acquire() { untick(); } void release() { tick(); } @Override public void listenerAdded(final ServiceController<?> controller) { if (controller == controllerController) { controller.removeListener(this); } else { untick(); } } @Override public void transition(final ServiceController<? extends Object> controller, final ServiceController.Transition transition) { switch (transition) { case STARTING_to_START_FAILED: { synchronized (this) { failedControllers.put(controller, controller.getStartException().toString()); } break; } case REMOVING_to_REMOVED: { synchronized (this) { failedControllers.remove(controller); servicesWithMissingDeps.remove(controller); } break; } case START_FAILED_to_DOWN: case START_FAILED_to_STARTING: { synchronized (this) { failedControllers.remove(controller); } break; } } final ServiceController.Substate before = transition.getBefore(); final ServiceController.Substate after = transition.getAfter(); if (before.isRestState() && ! after.isRestState()) { untick(); } else if (! 
before.isRestState() && after.isRestState()) { tick(); } } @Override public void immediateDependencyAvailable(final ServiceController<?> controller) { synchronized (this) { servicesWithMissingDeps.remove(controller); } } @Override public void immediateDependencyUnavailable(final ServiceController<?> controller) { synchronized (this) { servicesWithMissingDeps.add(controller); } } void awaitUninterruptibly(int count) { boolean intr = false; try { synchronized (this) { while (busyServiceCount.get() > count) { try { wait(); } catch (InterruptedException e) { intr = true; } } } } finally { if (intr) { Thread.currentThread().interrupt(); } } } void await(int count) throws InterruptedException { synchronized (this) { while (busyServiceCount.get() > count) { wait(); } } } ContainerStateChangeReport awaitContainerStateChangeReport(int count) throws InterruptedException { synchronized (this) { while (busyServiceCount.get() > count) { wait(); } return createContainerStateChangeReport(false); } } /** * Tick down the count, triggering a deployment status report when the count is zero. */ private void tick() { int tick = busyServiceCount.decrementAndGet(); synchronized (this) { notifyAll(); if (tick == 0) { ContainerStateChangeReport changeReport = createContainerStateChangeReport(true); if (changeReport != null) { final String msg = createChangeReportLogMessage(changeReport); ROOT_LOGGER.info(msg); } } } } private void untick() { busyServiceCount.incrementAndGet(); } /** * Creates a data structure reporting recent favorable and unfavorable changes in the state of installed services. * * @param resetHistory {@code true} if history tracking state used for detecting what has changed on the next * invocation of this method should be reset (meaning the next run will detect * more changes); {@code false} if the current history should be retained * (meaning the next run will act as if this run never happened) * * @return the report, or {@code null} if there is nothing noteworthy to report; i.e. no newly failed or missing * services and no newly corrected services */ private synchronized ContainerStateChangeReport createContainerStateChangeReport(boolean resetHistory) { final Map<ServiceName, Set<ServiceName>> missingDeps = new HashMap<ServiceName, Set<ServiceName>>(); for (ServiceController<?> controller : servicesWithMissingDeps) { for (ServiceName missing : controller.getImmediateUnavailableDependencies()) { Set<ServiceName> dependents = missingDeps.get(missing); if (dependents == null) { dependents = new HashSet<ServiceName>(); missingDeps.put(missing, dependents); } dependents.add(controller.getName()); } } final Set<ServiceName> previousMissing = previousMissingDepSet; // no longer missing deps... final Map<ServiceName, Boolean> noLongerMissingServices = new TreeMap<ServiceName, Boolean>(); for (ServiceName name : previousMissing) { if (! missingDeps.containsKey(name)) { ServiceController<?> controller = serviceRegistry.getService(name); noLongerMissingServices.put(name, controller == null); } } // newly missing deps final Map<ServiceName, MissingDependencyInfo> missingServices = new TreeMap<ServiceName, MissingDependencyInfo>(); for (Map.Entry<ServiceName, Set<ServiceName>> entry : missingDeps.entrySet()) { final ServiceName name = entry.getKey(); if (! 
previousMissing.contains(name)) { ServiceController<?> controller = serviceRegistry.getService(name); boolean unavailable = controller != null; missingServices.put(name, new MissingDependencyInfo(name, unavailable, entry.getValue())); } } final Map<ServiceController<?>, String> currentFailedControllers = new HashMap<ServiceController<?>, String>(failedControllers); if (resetHistory) { previousMissingDepSet = new HashSet<ServiceName>(missingDeps.keySet()); failedControllers.clear(); } boolean needReport = !missingServices.isEmpty() || !currentFailedControllers.isEmpty() || !noLongerMissingServices.isEmpty(); return needReport ? new ContainerStateChangeReport(missingServices, currentFailedControllers, noLongerMissingServices) : null; } private synchronized String createChangeReportLogMessage(ContainerStateChangeReport changeReport) { final StringBuilder msg = new StringBuilder(); msg.append(MESSAGES.serviceStatusReportHeader()); if (!changeReport.getMissingServices().isEmpty()) { msg.append(MESSAGES.serviceStatusReportDependencies()); for (Map.Entry<ServiceName, MissingDependencyInfo> entry : changeReport.getMissingServices().entrySet()) { if (!entry.getValue().isUnavailable()) { msg.append(MESSAGES.serviceStatusReportMissing(entry.getKey(), createDependentsString(entry.getValue().getDependents()))); } else { msg.append(MESSAGES.serviceStatusReportUnavailable(entry.getKey(), createDependentsString(entry.getValue().getDependents()))); } } } if (!changeReport.getNoLongerMissingServices().isEmpty()) { msg.append(MESSAGES.serviceStatusReportCorrected()); for (Map.Entry<ServiceName, Boolean> entry : changeReport.getNoLongerMissingServices().entrySet()) { if (!entry.getValue()) { msg.append(MESSAGES.serviceStatusReportNoLongerRequired(entry.getKey())); } else { msg.append(MESSAGES.serviceStatusReportAvailable(entry.getKey())); } } } if (!changeReport.getFailedControllers().isEmpty()) { msg.append(MESSAGES.serviceStatusReportFailed()); for (Map.Entry<ServiceController<?>, String> entry : changeReport.getFailedControllers().entrySet()) { msg.append(" ").append(entry.getKey().getName()).append(": ").append(entry.getValue()).append('\n'); } } return msg.toString(); } public static class ContainerStateChangeReport { private final Map<ServiceName, MissingDependencyInfo> missingServices; private final Map<ServiceController<?>, String> failedControllers; private final Map<ServiceName, Boolean> noLongerMissingServices; private ContainerStateChangeReport(final Map<ServiceName, MissingDependencyInfo> missingServices, final Map<ServiceController<?>, String> failedControllers, final Map<ServiceName, Boolean> noLongerMissingServices) { this.missingServices = missingServices; this.failedControllers = failedControllers; this.noLongerMissingServices = noLongerMissingServices; } public final Map<ServiceController<?>, String> getFailedControllers() { return failedControllers; } public Map<ServiceName, MissingDependencyInfo> getMissingServices() { return missingServices; } public Map<ServiceName, Boolean> getNoLongerMissingServices() { return noLongerMissingServices; } } private static String createDependentsString(final Set<ServiceName> serviceNames) { if(serviceNames.size() <= 4) { return serviceNames.toString(); } else { final StringBuilder ret = new StringBuilder("["); int count = 0; Iterator<ServiceName> it = serviceNames.iterator(); while (count < 4) { final ServiceName val = it.next(); ret.append(val); ret.append(", "); ++count; } ret.append(MESSAGES.andNMore(serviceNames.size() - 3)); ret.append(" ]"); return 
ret.toString(); } } public static class MissingDependencyInfo { private final ServiceName serviceName; private final boolean unavailable; private final Set<ServiceName> dependents; public MissingDependencyInfo(ServiceName serviceName, boolean unavailable, final Set<ServiceName> dependents) { this.serviceName = serviceName; this.unavailable = unavailable; this.dependents = dependents; } public ServiceName getServiceName() { return serviceName; } public boolean isUnavailable() { return unavailable; } public Set<ServiceName> getDependents() { return Collections.unmodifiableSet(dependents); } } private static <T> Set<T> identitySet() { return Collections.newSetFromMap(new IdentityHashMap<T, Boolean>()); } }
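/*
 * A minimal, self-contained sketch of the tick/untick busy-count pattern that ContainerStateMonitor
 * uses above: callers block until the number of in-flight services drops to a threshold. The
 * BusyCounter class and its method names are hypothetical stand-ins, not the JBoss MSC API.
 */
import java.util.concurrent.atomic.AtomicInteger;

final class BusyCounter {

    private final AtomicInteger busy = new AtomicInteger();

    /** Corresponds to untick(): a service became busy. Waiters never wait for increases. */
    void enter() {
        busy.incrementAndGet();
    }

    /** Corresponds to tick(): a service reached a rest state; wake waiters so they re-check. */
    synchronized void leave() {
        busy.decrementAndGet();
        notifyAll();
    }

    /** Corresponds to await(int): block until at most {@code count} services remain busy. */
    synchronized void awaitAtMost(int count) throws InterruptedException {
        while (busy.get() > count) {
            wait();
        }
    }
}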
package de.bwaldvogel.mongo.backend; import static de.bwaldvogel.mongo.backend.Constants.ID_FIELD; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; import java.util.Objects; import java.util.Set; import java.util.TreeSet; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; import java.util.stream.Stream; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import de.bwaldvogel.mongo.MongoCollection; import de.bwaldvogel.mongo.MongoDatabase; import de.bwaldvogel.mongo.backend.projection.ProjectingIterable; import de.bwaldvogel.mongo.backend.projection.Projection; import de.bwaldvogel.mongo.bson.Document; import de.bwaldvogel.mongo.bson.ObjectId; import de.bwaldvogel.mongo.exception.BadValueException; import de.bwaldvogel.mongo.exception.ConflictingUpdateOperatorsException; import de.bwaldvogel.mongo.exception.CursorNotFoundException; import de.bwaldvogel.mongo.exception.FailedToParseException; import de.bwaldvogel.mongo.exception.ImmutableFieldException; import de.bwaldvogel.mongo.exception.IndexOptionsConflictException; import de.bwaldvogel.mongo.exception.MongoServerError; import de.bwaldvogel.mongo.exception.MongoServerException; import de.bwaldvogel.mongo.wire.message.MongoKillCursors; public abstract class AbstractMongoCollection<P> implements MongoCollection<P> { private static final Logger log = LoggerFactory.getLogger(AbstractMongoCollection.class); private MongoDatabase database; private String collectionName; private final List<Index<P>> indexes = new ArrayList<>(); private final QueryMatcher matcher = new DefaultQueryMatcher(); protected final String idField; protected final ConcurrentMap<Long, Cursor> cursors = new ConcurrentHashMap<>(); protected AbstractMongoCollection(MongoDatabase database, String collectionName, String idField) { this.database = database; this.collectionName = collectionName; this.idField = idField; } protected boolean documentMatchesQuery(Document document, Document query) { return matcher.matches(document, query); } private QueryResult queryDocuments(Document query, Document orderBy, int numberToSkip, int numberToReturn) { synchronized (indexes) { for (Index<P> index : indexes) { if (index.canHandle(query)) { Iterable<P> positions = index.getPositions(query); return matchDocuments(query, positions, orderBy, numberToSkip, numberToReturn); } } } return matchDocuments(query, orderBy, numberToSkip, numberToReturn); } protected void sortDocumentsInMemory(List<Document> documents, Document orderBy) { DocumentComparator documentComparator = deriveComparator(orderBy); if (documentComparator != null) { documents.sort(documentComparator); } else if (isNaturalDescending(orderBy)) { Collections.reverse(documents); } } protected abstract QueryResult matchDocuments(Document query, Document orderBy, int numberToSkip, int numberToReturn); protected QueryResult matchDocuments(Document query, Iterable<P> positions, Document orderBy, int numberToSkip, int numberToReturn) { List<Document> matchedDocuments = new ArrayList<>(); for (P position : positions) { Document document = getDocument(position); if (documentMatchesQuery(document, query)) { matchedDocuments.add(document); } } sortDocumentsInMemory(matchedDocuments, orderBy); if (numberToSkip > 0) { matchedDocuments = matchedDocuments.subList(numberToSkip, 
matchedDocuments.size()); } if (numberToReturn > 0 && matchedDocuments.size() > numberToReturn) { matchedDocuments = matchedDocuments.subList(0, numberToReturn); } return new QueryResult(matchedDocuments); } protected static boolean isNaturalDescending(Document orderBy) { if (orderBy != null && !orderBy.keySet().isEmpty()) { if (orderBy.keySet().iterator().next().equals("$natural")) { Number sortValue = (Number) orderBy.get("$natural"); if (sortValue.intValue() == -1) { return true; } if (sortValue.intValue() != 1) { throw new IllegalArgumentException("Illegal sort value: " + sortValue); } } } return false; } protected static DocumentComparator deriveComparator(Document orderBy) { if (orderBy != null && !orderBy.keySet().isEmpty()) { if (orderBy.keySet().iterator().next().equals("$natural")) { // already sorted } else { return new DocumentComparator(orderBy); } } return null; } protected abstract Document getDocument(P position); protected abstract void updateDataSize(int sizeDelta); protected abstract int getDataSize(); protected abstract P addDocumentInternal(Document document); @Override public synchronized void addDocument(Document document) { if (document.get(ID_FIELD) instanceof Collection) { throw new BadValueException("can't use an array for _id"); } if (!document.containsKey(ID_FIELD) && !isSystemCollection()) { ObjectId generatedObjectId = new ObjectId(); log.debug("Generated {} for {} in {}", generatedObjectId, document, this); document.put(ID_FIELD, generatedObjectId); } for (Index<P> index : indexes) { index.checkAdd(document, this); } P position = addDocumentInternal(document); for (Index<P> index : indexes) { index.add(document, position, this); } updateDataSize(Utils.calculateSize(document)); } @Override public MongoDatabase getDatabase() { return database; } @Override public String getCollectionName() { return collectionName; } @Override public String toString() { return getClass().getSimpleName() + "(" + getFullName() + ")"; } @Override public void addIndex(Index<P> index) { Index<P> existingIndex = findByName(index.getName()); if (existingIndex != null) { if (!existingIndex.hasSameOptions(index)) { throw new IndexOptionsConflictException(existingIndex); } log.debug("Index with name '{}' already exists", index.getName()); return; } if (index.isEmpty()) { streamAllDocumentsWithPosition().forEach(documentWithPosition -> { Document document = documentWithPosition.getDocument(); index.checkAdd(document, this); }); streamAllDocumentsWithPosition().forEach(documentWithPosition -> { Document document = documentWithPosition.getDocument(); P position = documentWithPosition.getPosition(); index.add(document, position, this); }); } else { log.debug("Index is not empty"); } indexes.add(index); } private Index<P> findByName(String indexName) { return indexes.stream() .filter(index -> index.getName().equals(indexName)) .findFirst() .orElse(null); } @Override public void drop() { log.debug("Dropping collection {}", getFullName()); Assert.isEmpty(indexes); } @Override public void dropIndex(String indexName) { log.debug("Dropping index '{}'", indexName); List<Index<P>> indexesToDrop = indexes.stream() .filter(index -> index.getName().equals(indexName)) .collect(Collectors.toList()); if (indexesToDrop.isEmpty()) { return; } Index<P> indexToDrop = CollectionUtils.getSingleElement(indexesToDrop); indexToDrop.drop(); indexes.remove(indexToDrop); } private void modifyField(Document document, String modifier, Document update, ArrayFilters arrayFilters, Integer matchPos, boolean isUpsert) { 
Document change = (Document) update.get(modifier); UpdateOperator updateOperator = getUpdateOperator(modifier, change); FieldUpdates updates = new FieldUpdates(document, updateOperator, idField, isUpsert, matchPos, arrayFilters); updates.apply(change, modifier); } private UpdateOperator getUpdateOperator(String modifier, Document change) { final UpdateOperator op; try { op = UpdateOperator.fromValue(modifier); } catch (IllegalArgumentException e) { throw new FailedToParseException("Unknown modifier: " + modifier + ". Expected a valid update modifier or pipeline-style update specified as an array"); } return op; } private void applyUpdate(Document oldDocument, Document newDocument) { if (newDocument.equals(oldDocument)) { return; } Object oldId = oldDocument.get(idField); Object newId = newDocument.get(idField); if (newId != null && oldId != null && !Utils.nullAwareEquals(oldId, newId)) { throw new ImmutableFieldException("After applying the update, the (immutable) field '_id' was found to have been altered to _id: " + newId); } if (newId == null && oldId != null) { newDocument.put(idField, oldId); } cloneInto(oldDocument, newDocument); } Object deriveDocumentId(Document selector) { Object value = selector.get(idField); if (value != null) { if (!Utils.containsQueryExpression(value)) { return value; } else { return deriveIdFromExpression(value); } } return new ObjectId(); } private Object deriveIdFromExpression(Object value) { Document expression = (Document) value; for (String key : expression.keySet()) { Object expressionValue = expression.get(key); if (key.equals("$in")) { Collection<?> list = (Collection<?>) expressionValue; if (!list.isEmpty()) { return list.iterator().next(); } } } // fallback to random object id return new ObjectId(); } private Document calculateUpdateDocument(Document oldDocument, Document update, ArrayFilters arrayFilters, Integer matchPos, boolean isUpsert) { int numStartsWithDollar = 0; for (String key : update.keySet()) { if (key.startsWith("$")) { numStartsWithDollar++; } } Document newDocument = new Document(); if (idField != null) { newDocument.put(idField, oldDocument.get(idField)); } if (numStartsWithDollar == update.keySet().size()) { validateUpdateQuery(update); cloneInto(newDocument, oldDocument); for (String key : update.keySet()) { modifyField(newDocument, key, update, arrayFilters, matchPos, isUpsert); } } else if (numStartsWithDollar == 0) { applyUpdate(newDocument, update); } else { throw new MongoServerException("illegal update: " + update); } Utils.validateFieldNames(newDocument); return newDocument; } static void validateUpdateQuery(Document update) { Set<String> allModifiedPaths = new LinkedHashSet<>(); for (Object value : update.values()) { Document modification = (Document) value; for (String path : modification.keySet()) { for (String otherPath : allModifiedPaths) { String commonPathPrefix = Utils.getShorterPathIfPrefix(path, otherPath); if (commonPathPrefix != null) { throw new ConflictingUpdateOperatorsException(path, commonPathPrefix); } } allModifiedPaths.add(path); } } } @Override public synchronized Document findAndModify(Document query) { boolean returnNew = Utils.isTrue(query.get("new")); if (!query.containsKey("remove") && !query.containsKey("update")) { throw new FailedToParseException("Either an update or remove=true must be specified"); } Document queryObject = new Document(); if (query.containsKey("query")) { queryObject.put("query", query.get("query")); } else { queryObject.put("query", new Document()); } if 
(query.containsKey("sort")) { queryObject.put("orderby", query.get("sort")); } Document lastErrorObject = null; Document returnDocument = null; int num = 0; for (Document document : handleQuery(queryObject, 0, 1)) { num++; if (Utils.isTrue(query.get("remove"))) { removeDocument(document); returnDocument = document; } else if (query.get("update") != null) { Document updateQuery = (Document) query.get("update"); Integer matchPos = matcher.matchPosition(document, (Document) queryObject.get("query")); ArrayFilters arrayFilters = ArrayFilters.parse(query, updateQuery); Document oldDocument = updateDocument(document, updateQuery, arrayFilters, matchPos); if (returnNew) { returnDocument = document; } else { returnDocument = oldDocument; } lastErrorObject = new Document("updatedExisting", Boolean.TRUE); lastErrorObject.put("n", Integer.valueOf(1)); } } if (num == 0 && Utils.isTrue(query.get("upsert"))) { Document selector = (Document) query.get("query"); Document updateQuery = (Document) query.get("update"); ArrayFilters arrayFilters = ArrayFilters.parse(query, updateQuery); Document newDocument = handleUpsert(updateQuery, selector, arrayFilters); if (returnNew) { returnDocument = newDocument; } else { returnDocument = new Document(); } num++; } if (query.get("fields") != null) { Document fields = (Document) query.get("fields"); returnDocument = Projection.projectDocument(returnDocument, fields, idField); } Document result = new Document(); if (lastErrorObject != null) { result.put("lastErrorObject", lastErrorObject); } result.put("value", returnDocument); Utils.markOkay(result); return result; } @Override public synchronized QueryResult handleQuery(Document queryObject, int numberToSkip, int numberToReturn, Document fieldSelector) { final Document query; final Document orderBy; if (numberToReturn < 0) { // actually: request to close cursor automatically numberToReturn = -numberToReturn; } if (queryObject.containsKey("query")) { query = (Document) queryObject.get("query"); orderBy = (Document) queryObject.get("orderby"); } else if (queryObject.containsKey("$query")) { query = (Document) queryObject.get("$query"); orderBy = (Document) queryObject.get("$orderby"); } else { query = queryObject; orderBy = null; } QueryResult objs = queryDocuments(query, orderBy, numberToSkip, numberToReturn); if (fieldSelector != null && !fieldSelector.keySet().isEmpty()) { return new QueryResult(new ProjectingIterable(objs, fieldSelector, idField), 0); } return objs; } @Override public synchronized QueryResult handleGetMore(long cursorId, int numberToReturn) { if (!cursors.containsKey(cursorId)) { throw new CursorNotFoundException(String.format("Cursor id %d does not exists in collection %s", cursorId, collectionName)); } Cursor cursor = cursors.get(cursorId); List<Document> documents = new ArrayList<>(); while (!cursor.isEmpty() && documents.size() < numberToReturn) { documents.add(cursor.getDocuments().poll()); } if (cursor.isEmpty()) { cursors.remove(cursor.getCursorId()); } return new QueryResult(documents, cursor.isEmpty() ? 
0 : cursorId); } @Override public synchronized void handleKillCursors(MongoKillCursors killCursors) { killCursors.getCursorIds().forEach(cursors::remove); } @Override public synchronized Document handleDistinct(Document query) { String key = (String) query.get("key"); Document filter = (Document) query.getOrDefault("query", new Document()); Set<Object> values = new TreeSet<>(ValueComparator.ascWithoutListHandling().withDefaultComparatorForUuids()); for (Document document : queryDocuments(filter, null, 0, 0)) { Object value = Utils.getSubdocumentValueCollectionAware(document, key); if (!(value instanceof Missing)) { if (value instanceof Collection) { values.addAll((Collection<?>) value); } else { values.add(value); } } } Document response = new Document("values", values); Utils.markOkay(response); return response; } @Override public synchronized void insertDocuments(List<Document> documents) { MongoCollection.super.insertDocuments(documents); } @Override public synchronized int deleteDocuments(Document selector, int limit) { int n = 0; for (Document document : handleQuery(selector, 0, limit)) { if (limit > 0 && n >= limit) { throw new MongoServerException("internal error: too many elements (" + n + " >= " + limit + ")"); } removeDocument(document); n++; } return n; } @Override public synchronized Document updateDocuments(Document selector, Document updateQuery, ArrayFilters arrayFilters, boolean isMulti, boolean isUpsert) { if (isMulti) { for (String key : updateQuery.keySet()) { if (!key.startsWith("$")) { throw new MongoServerError(10158, "multi update only works with $ operators"); } } } int nMatched = 0; int nModified = 0; for (Document document : queryDocuments(selector, null, 0, 0)) { Integer matchPos = matcher.matchPosition(document, selector); Document oldDocument = updateDocument(document, updateQuery, arrayFilters, matchPos); if (!Utils.nullAwareEquals(oldDocument, document)) { nModified++; } nMatched++; if (!isMulti) { break; } } Document result = new Document(); // insert? if (nMatched == 0 && isUpsert) { Document newDocument = handleUpsert(updateQuery, selector, arrayFilters); result.put("upserted", newDocument.get(idField)); } result.put("n", Integer.valueOf(nMatched)); result.put("nModified", Integer.valueOf(nModified)); return result; } private Document updateDocument(Document document, Document updateQuery, ArrayFilters arrayFilters, Integer matchPos) { // copy document Document oldDocument = new Document(); cloneInto(oldDocument, document); Document newDocument = calculateUpdateDocument(document, updateQuery, arrayFilters, matchPos, false); if (!newDocument.equals(oldDocument)) { for (Index<P> index : indexes) { index.checkUpdate(oldDocument, newDocument, this); } P position = getSinglePosition(oldDocument); for (Index<P> index : indexes) { index.updateInPlace(oldDocument, newDocument, position, this); } int oldSize = Utils.calculateSize(oldDocument); int newSize = Utils.calculateSize(newDocument); updateDataSize(newSize - oldSize); // only keep fields that are also in the updated document Set<String> fields = new LinkedHashSet<>(document.keySet()); fields.removeAll(newDocument.keySet()); for (String key : fields) { document.remove(key); } // update the fields for (String key : newDocument.keySet()) { if (key.contains(".")) { throw new MongoServerException( "illegal field name. 
must not happen as it must be caught by the driver"); } document.put(key, newDocument.get(key)); } handleUpdate(position, oldDocument, document); } return oldDocument; } private P getSinglePosition(Document document) { if (indexes.isEmpty()) { return findDocumentPosition(document); } Set<P> positions = indexes.stream() .map(index -> index.getPosition(document)) .filter(Objects::nonNull) .collect(Collectors.toSet()); return CollectionUtils.getSingleElement(positions); } protected abstract void handleUpdate(P position, Document oldDocument, Document newDocument); private void cloneInto(Document targetDocument, Document sourceDocument) { for (String key : sourceDocument.keySet()) { targetDocument.put(key, cloneValue(sourceDocument.get(key))); } } private Object cloneValue(Object value) { if (value instanceof Document) { Document newValue = new Document(); cloneInto(newValue, (Document) value); return newValue; } else if (value instanceof List<?>) { @SuppressWarnings("unchecked") List<Object> list = (List<Object>) value; List<Object> newValue = new ArrayList<>(); for (Object v : list) { newValue.add(cloneValue(v)); } return newValue; } else { return value; } } private Document handleUpsert(Document updateQuery, Document selector, ArrayFilters arrayFilters) { Document document = convertSelectorToDocument(selector); Document newDocument = calculateUpdateDocument(document, updateQuery, arrayFilters, null, true); if (newDocument.get(idField) == null) { newDocument.put(idField, deriveDocumentId(selector)); } addDocument(newDocument); return newDocument; } /** * convert selector used in an upsert statement into a document */ Document convertSelectorToDocument(Document selector) { Document document = new Document(); for (String key : selector.keySet()) { if (key.startsWith("$")) { continue; } Object value = selector.get(key); if (!Utils.containsQueryExpression(value)) { Utils.changeSubdocumentValue(document, key, value, (AtomicReference<Integer>) null); } } return document; } @Override public List<Index<P>> getIndexes() { return indexes; } @Override public int count(Document query, int skip, int limit) { if (query == null || query.keySet().isEmpty()) { int count = count(); if (skip > 0) { count = Math.max(0, count - skip); } if (limit > 0) { return Math.min(limit, count); } return count; } int numberToReturn = (limit >= 0) ? 
limit : 0; int count = 0; Iterator<?> it = queryDocuments(query, null, skip, numberToReturn).iterator(); while (it.hasNext()) { it.next(); count++; } return count; } @Override public Document getStats() { int dataSize = getDataSize(); int count = count(); Document response = new Document("ns", getFullName()); response.put("count", Integer.valueOf(count)); response.put("size", Integer.valueOf(dataSize)); int averageSize = 0; if (count > 0) { averageSize = dataSize / count; } response.put("avgObjSize", Integer.valueOf(averageSize)); response.put("storageSize", Integer.valueOf(0)); response.put("numExtents", Integer.valueOf(0)); response.put("nindexes", Integer.valueOf(indexes.size())); Document indexSizes = new Document(); for (Index<P> index : indexes) { indexSizes.put(index.getName(), Long.valueOf(index.getDataSize())); } response.put("indexSize", indexSizes); Utils.markOkay(response); return response; } @Override public synchronized void removeDocument(Document document) { P position = null; if (!indexes.isEmpty()) { for (Index<P> index : indexes) { P indexPosition = index.remove(document); if (indexPosition == null) { if (index.isSparse()) { continue; } else { throw new IllegalStateException("Found no position for " + document + " in " + index); } } if (position != null) { Assert.equals(position, indexPosition, () -> "Got different positions for " + document); } position = indexPosition; } } else { position = findDocumentPosition(document); } if (position == null) { // not found return; } updateDataSize(-Utils.calculateSize(document)); removeDocument(position); } @Override public Document validate() { Document response = new Document("ns", getFullName()); response.put("extentCount", Integer.valueOf(0)); response.put("datasize", Long.valueOf(getDataSize())); response.put("nrecords", Integer.valueOf(count())); response.put("nIndexes", Integer.valueOf(indexes.size())); Document keysPerIndex = new Document(); for (Index<P> index : indexes) { keysPerIndex.put(index.getName(), Long.valueOf(index.getCount())); } response.put("keysPerIndex", keysPerIndex); response.put("valid", Boolean.TRUE); response.put("errors", Collections.emptyList()); Utils.markOkay(response); return response; } @Override public void renameTo(MongoDatabase newDatabase, String newCollectionName) { this.database = newDatabase; this.collectionName = newCollectionName; } protected abstract void removeDocument(P position); protected static Iterable<Document> applySkipAndLimit(Collection<Document> documents, int numberToSkip, int numberToReturn) { return applySkipAndLimit(new ArrayList<>(documents), numberToSkip, numberToReturn); } protected static Iterable<Document> applySkipAndLimit(List<Document> documents, int numberToSkip, int numberToReturn) { if (numberToSkip > 0) { if (numberToSkip < documents.size()) { documents = documents.subList(numberToSkip, documents.size()); } else { return Collections.emptyList(); } } if (numberToReturn > 0 && documents.size() > numberToReturn) { documents = documents.subList(0, numberToReturn); } return documents; } protected P findDocumentPosition(Document document) { return streamAllDocumentsWithPosition() .filter(match -> documentMatchesQuery(match.getDocument(), document)) .map(DocumentWithPosition::getPosition) .findFirst() .orElse(null); } protected abstract Stream<DocumentWithPosition<P>> streamAllDocumentsWithPosition(); private boolean isSystemCollection() { return AbstractMongoDatabase.isSystemCollection(getCollectionName()); } protected Cursor createCursor(Collection<Document> 
matchedDocuments, int numberToSkip, int numberToReturn) { Cursor cursor = new Cursor(collectionName); if (numberToReturn > 0 && matchedDocuments.size() > numberToReturn) { cursor = new Cursor(matchedDocuments.stream().skip(numberToSkip + numberToReturn).collect(Collectors.toList()), getCollectionName()); if (cursor.getCursorId() > 0) { cursors.put(cursor.getCursorId(), cursor); } } return cursor; } }
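/*
 * A minimal sketch of the dispatch used by queryDocuments() above: ask each index whether it can
 * answer the query and take its candidate positions, otherwise fall back to scanning every
 * document. SimpleIndex and the Map-based query are simplified, hypothetical stand-ins for the
 * real Index and Document classes.
 */
import java.util.List;
import java.util.Map;

interface SimpleIndex<P> {
    /** Whether this index can answer the given query on its own. */
    boolean canHandle(Map<String, Object> query);

    /** Candidate positions for a query this index can handle. */
    Iterable<P> getPositions(Map<String, Object> query);
}

final class QueryDispatch<P> {

    /** Returns candidate positions: index-backed when possible, otherwise the full scan. */
    Iterable<P> candidates(List<SimpleIndex<P>> indexes,
                           Map<String, Object> query,
                           Iterable<P> allPositions) {
        for (SimpleIndex<P> index : indexes) {
            if (index.canHandle(query)) {
                return index.getPositions(query);
            }
        }
        return allPositions;
    }
}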
package org.hisp.dhis.android.core.datavalue; import android.database.Cursor; import android.database.sqlite.SQLiteStatement; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import com.gabrielittner.auto.value.cursor.ColumnAdapter; import com.gabrielittner.auto.value.cursor.ColumnName; import com.google.auto.value.AutoValue; import org.hisp.dhis.android.core.common.BaseModel; import org.hisp.dhis.android.core.common.ModelFactory; import org.hisp.dhis.android.core.common.UpdateWhereStatementBinder; import org.hisp.dhis.android.core.data.database.DbDateColumnAdapter; import org.hisp.dhis.android.core.utils.Utils; import java.util.Date; import static org.hisp.dhis.android.core.utils.StoreUtils.sqLiteBind; @AutoValue public abstract class DataValueModel extends BaseModel implements UpdateWhereStatementBinder { public static final String TABLE = "DataValue"; public static class Columns extends BaseModel.Columns { public static final String DATA_ELEMENT = "dataElement"; public static final String PERIOD = "period"; public static final String ORGANISATION_UNIT = "organisationUnit"; public static final String CATEGORY_OPTION_COMBO = "categoryOptionCombo"; public static final String ATTRIBUTE_OPTION_COMBO = "attributeOptionCombo"; public static final String VALUE = "value"; public static final String STORED_BY = "storedBy"; public static final String CREATED = "created"; public static final String LAST_UPDATED = "lastUpdated"; public static final String COMMENT = "comment"; public static final String FOLLOW_UP = "followUp"; private Columns() {} public static String[] all() { return Utils.appendInNewArray(BaseModel.Columns.all(), DATA_ELEMENT, PERIOD, ORGANISATION_UNIT, CATEGORY_OPTION_COMBO, ATTRIBUTE_OPTION_COMBO, VALUE, STORED_BY, CREATED, LAST_UPDATED, COMMENT, FOLLOW_UP); } public static String[] whereUpdate() { return new String[]{DATA_ELEMENT, PERIOD, ORGANISATION_UNIT, CATEGORY_OPTION_COMBO, ATTRIBUTE_OPTION_COMBO}; } } static DataValueModel create(Cursor cursor) { return AutoValue_DataValueModel.createFromCursor(cursor); } public static final ModelFactory<DataValueModel, DataValue> factory = new ModelFactory<DataValueModel, DataValue>() { @Override public DataValueModel fromCursor(Cursor cursor) { return create(cursor); } @Override public DataValueModel fromPojo(DataValue dataValue) { return DataValueModel.builder() .dataElement(dataValue.dataElement()) .period(dataValue.period()) .organisationUnit(dataValue.organisationUnit()) .categoryOptionCombo(dataValue.categoryOptionCombo()) .attributeOptionCombo(dataValue.attributeOptionCombo()) .value(dataValue.value()) .storedBy(dataValue.storedBy()) .created(dataValue.created()) .lastUpdated(dataValue.lastUpdated()) .comment(dataValue.comment()) .followUp(dataValue.followUp()) .build(); } }; public static Builder builder() { return new $AutoValue_DataValueModel.Builder(); } @Nullable @ColumnName(Columns.DATA_ELEMENT) public abstract String dataElement(); @Nullable @ColumnName(Columns.PERIOD) public abstract String period(); @Nullable @ColumnName(Columns.ORGANISATION_UNIT) public abstract String organisationUnit(); @Nullable @ColumnName(Columns.CATEGORY_OPTION_COMBO) public abstract String categoryOptionCombo(); @Nullable @ColumnName(Columns.ATTRIBUTE_OPTION_COMBO) public abstract String attributeOptionCombo(); @Nullable @ColumnName(Columns.VALUE) public abstract String value(); @Nullable @ColumnName(Columns.STORED_BY) public abstract String storedBy(); @Nullable @ColumnName(Columns.CREATED) 
@ColumnAdapter(DbDateColumnAdapter.class) public abstract Date created(); @Nullable @ColumnName(Columns.LAST_UPDATED) @ColumnAdapter(DbDateColumnAdapter.class) public abstract Date lastUpdated(); @Nullable @ColumnName(Columns.COMMENT) public abstract String comment(); @Nullable @ColumnName(Columns.FOLLOW_UP) public abstract Boolean followUp(); @Override public void bindToStatement(@NonNull SQLiteStatement sqLiteStatement) { sqLiteBind(sqLiteStatement, 1, dataElement()); sqLiteBind(sqLiteStatement, 2, period()); sqLiteBind(sqLiteStatement, 3, organisationUnit()); sqLiteBind(sqLiteStatement, 4, categoryOptionCombo()); sqLiteBind(sqLiteStatement, 5, attributeOptionCombo()); sqLiteBind(sqLiteStatement, 6, value()); sqLiteBind(sqLiteStatement, 7, storedBy()); sqLiteBind(sqLiteStatement, 8, created()); sqLiteBind(sqLiteStatement, 9, lastUpdated()); sqLiteBind(sqLiteStatement, 10, comment()); sqLiteBind(sqLiteStatement, 11, followUp()); } @Override public void bindToUpdateWhereStatement(@NonNull SQLiteStatement sqLiteStatement) { sqLiteBind(sqLiteStatement, 12, dataElement()); sqLiteBind(sqLiteStatement, 13, period()); sqLiteBind(sqLiteStatement, 14, organisationUnit()); sqLiteBind(sqLiteStatement, 15, categoryOptionCombo()); sqLiteBind(sqLiteStatement, 16, attributeOptionCombo()); } @AutoValue.Builder public static abstract class Builder extends BaseModel.Builder<Builder> { public abstract Builder dataElement(String dataElement); public abstract Builder period(String period); public abstract Builder organisationUnit(String organisationUnit); public abstract Builder categoryOptionCombo(String categoryOptionCombo); public abstract Builder attributeOptionCombo(String attributeOptionCombo); public abstract Builder value(String value); public abstract Builder storedBy(String storedBy); public abstract Builder created(Date created); public abstract Builder lastUpdated(Date lastUpdated); public abstract Builder comment(String comment); public abstract Builder followUp(Boolean followUp); public abstract DataValueModel build(); } }
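/*
 * A hedged usage sketch for the builder above: constructs an in-memory DataValueModel with
 * illustrative values. It assumes the AutoValue-generated classes are on the classpath; the UIDs
 * and values below are made up for illustration only.
 */
import java.util.Date;
import org.hisp.dhis.android.core.datavalue.DataValueModel;

final class DataValueModelExample {

    static DataValueModel exampleDataValue() {
        Date now = new Date();
        return DataValueModel.builder()
                .dataElement("dataElementUid")      // illustrative UIDs, not real metadata
                .period("201801")
                .organisationUnit("orgUnitUid")
                .categoryOptionCombo("cocUid")
                .attributeOptionCombo("aocUid")
                .value("42")
                .storedBy("admin")
                .created(now)
                .lastUpdated(now)
                .comment("example value")
                .followUp(Boolean.FALSE)
                .build();
    }
}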
package org.hisp.dhis.android.core.event; import android.support.annotation.NonNull; import org.hisp.dhis.android.core.common.D2CallException; import org.hisp.dhis.android.core.common.D2CallExecutor; import org.hisp.dhis.android.core.common.SyncCall; import org.hisp.dhis.android.core.data.api.OuMode; import org.hisp.dhis.android.core.data.database.DatabaseAdapter; import org.hisp.dhis.android.core.organisationunit.OrganisationUnitModel; import org.hisp.dhis.android.core.program.ProgramStore; import org.hisp.dhis.android.core.program.ProgramStoreInterface; import org.hisp.dhis.android.core.user.UserOrganisationUnitLinkModel; import org.hisp.dhis.android.core.user.UserOrganisationUnitLinkStore; import org.hisp.dhis.android.core.user.UserOrganisationUnitLinkStoreInterface; import org.hisp.dhis.android.core.utils.services.ApiPagingEngine; import org.hisp.dhis.android.core.utils.services.Paging; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.concurrent.Callable; import retrofit2.Retrofit; public final class EventWithLimitCall extends SyncCall<List<Event>> { private final int eventLimit; private final boolean limitByOrgUnit; private final DatabaseAdapter databaseAdapter; private final Retrofit retrofit; private final UserOrganisationUnitLinkStoreInterface userOrganisationUnitLinkStore; private final ProgramStoreInterface programStore; private EventWithLimitCall( @NonNull DatabaseAdapter databaseAdapter, @NonNull Retrofit retrofit, @NonNull UserOrganisationUnitLinkStoreInterface userOrganisationUnitLinkStore, @NonNull ProgramStoreInterface programStore, int eventLimit, boolean limitByOrgUnit) { this.databaseAdapter = databaseAdapter; this.retrofit = retrofit; this.userOrganisationUnitLinkStore = userOrganisationUnitLinkStore; this.programStore = programStore; this.eventLimit = eventLimit; this.limitByOrgUnit = limitByOrgUnit; } @Override public List<Event> call() throws D2CallException { setExecuted(); return new D2CallExecutor().executeD2CallTransactionally(databaseAdapter, new Callable<List<Event>>() { @Override public List<Event> call() throws Exception { return getEvents(); } }); } private List<Event> getEvents() throws D2CallException { Set<String> organisationUnitUids; Set<String> programUids = programStore.queryWithoutRegistrationProgramUids(); List<Event> events = new ArrayList<>(); EventQuery.Builder eventQueryBuilder = EventQuery.Builder.create(); int pageSize = eventQueryBuilder.build().getPageSize(); if (limitByOrgUnit) { organisationUnitUids = getOrgUnitUids(); } else { organisationUnitUids = userOrganisationUnitLinkStore.queryRootOrganisationUnitUids(); eventQueryBuilder.withOuMode(OuMode.DESCENDANTS); } for (String orgUnitUid : organisationUnitUids) { if (!limitByOrgUnit && events.size() == eventLimit) { break; } eventQueryBuilder.withOrgUnit(orgUnitUid); events.addAll(getEventsWithPaging(eventQueryBuilder, pageSize, programUids, events.size())); } return events; } private List<Event> getEventsWithPaging(EventQuery.Builder eventQueryBuilder, int pageSize, Set<String> programUids, int globalEventsSize) throws D2CallException { List<Event> events = new ArrayList<>(); D2CallExecutor executor = new D2CallExecutor(); for (String programUid : programUids) { eventQueryBuilder.withProgram(programUid); int eventLimitForProgram = limitByOrgUnit ? 
eventLimit -events.size() : eventLimit - globalEventsSize - events.size(); List<Paging> pagingList = ApiPagingEngine.getPaginationList(pageSize, eventLimitForProgram); for (Paging paging : pagingList) { eventQueryBuilder.withPageSize(paging.pageSize()); eventQueryBuilder.withPage(paging.page()); List<Event> pageEvents = executor.executeD2Call( EventEndpointCall.create(retrofit, eventQueryBuilder.build())); if (paging.isLastPage()) { int previousItemsToSkip = pageEvents.size() + paging.previousItemsToSkipCount() - paging.pageSize(); int toIndex = previousItemsToSkip < 0 ? pageEvents.size() : pageEvents.size() - previousItemsToSkip; events.addAll(pageEvents.subList(paging.previousItemsToSkipCount(), toIndex)); } else { events.addAll(pageEvents); } if (pageEvents.size() < paging.pageSize()) { break; } } if (events.size() == eventLimit) { break; } } executor.executeD2Call(EventPersistenceCall.create(databaseAdapter, retrofit, events)); return events; } private Set<String> getOrgUnitUids() { Set<UserOrganisationUnitLinkModel> userOrganisationUnitLinks = userOrganisationUnitLinkStore.selectAll( UserOrganisationUnitLinkModel.factory); Set<String> organisationUnitUids = new HashSet<>(); for (UserOrganisationUnitLinkModel linkModel: userOrganisationUnitLinks) { if (linkModel.organisationUnitScope().equals( OrganisationUnitModel.Scope.SCOPE_DATA_CAPTURE.name())) { organisationUnitUids.add(linkModel.organisationUnit()); } } return organisationUnitUids; } public static EventWithLimitCall create(DatabaseAdapter databaseAdapter, Retrofit retrofit, int eventLimit, boolean limitByOrgUnit) { return new EventWithLimitCall( databaseAdapter, retrofit, UserOrganisationUnitLinkStore.create(databaseAdapter), ProgramStore.create(databaseAdapter), eventLimit, limitByOrgUnit ); } }
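/*
 * A short usage sketch for the call above: downloads up to 500 events per organisation unit and
 * persists them transactionally. It assumes an already configured DatabaseAdapter and Retrofit
 * instance are in scope; the limit and the limitByOrgUnit flag are illustrative choices.
 */
import java.util.List;
import org.hisp.dhis.android.core.data.database.DatabaseAdapter;
import org.hisp.dhis.android.core.event.Event;
import org.hisp.dhis.android.core.event.EventWithLimitCall;
import retrofit2.Retrofit;

final class EventDownloadExample {

    static List<Event> downloadEvents(DatabaseAdapter databaseAdapter, Retrofit retrofit) throws Exception {
        EventWithLimitCall call = EventWithLimitCall.create(databaseAdapter, retrofit, 500, true);
        return call.call(); // runs the paged download and persistence inside a transaction
    }
}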
package org.mskcc.cbio.portal.scripts; import java.io.*; import java.net.URL; import java.net.URLConnection; import java.util.Date; import java.util.HashSet; import java.util.Set; import org.mskcc.cbio.portal.util.ConsoleUtil; import org.mskcc.cbio.portal.util.ProgressMonitor; /** * Fetches PFAM graphic data. * * @author Selcuk Onur Sumer */ public class FetchPfamGraphicsData { public static final String URL_PREFIX = "http://pfam.sanger.ac.uk/protein/"; public static final String URL_SUFFIX = "/graphic"; /** * Parses the given input file and creates an output with pfam graphics data * for each uniprot id. * * @param inputFilename name of the uniprot id mapping file * @param outputFilename name of the output pfam graphics file * @param incremental indicates incremental fetching * @return total number of errors */ public static int driver(String outputFilename, boolean incremental) throws IOException { BufferedWriter out = new BufferedWriter(new FileWriter(outputFilename)); int numErrors = 0; // TODO if incremental: // 1. open the file in append mode, do not overwrite // 2. check if a certain uniprot id is already mapped in the file // 3. populate key set if incremental option is selected Set<String> keySet = initKeySet(outputFilename, incremental); Set<String> uniprotAccs = ImportUniProtIdMapping.getSwissProtAccessionHuman(); ProgressMonitor pMonitor = new ProgressMonitor(); pMonitor.setConsoleMode(true); pMonitor.setMaxValue(uniprotAccs.size()); // read all for (String uniprotId : uniprotAccs) { pMonitor.incrementCurValue(); ConsoleUtil.showProgress(pMonitor); // avoid to add a duplicate entry if (keySet.contains(uniprotId)) { continue; } String pfamJson = fetch(uniprotId); keySet.add(uniprotId); // replace all tabs and new lines with a single space pfamJson = pfamJson.trim().replaceAll("\t", " ").replaceAll("\n", " "); // verify if it is really a JSON object // TODO this verification may not be safe... if (pfamJson.startsWith("[") || pfamJson.startsWith("{")) { out.write(uniprotId); out.write("\t"); out.write(pfamJson); out.write("\n"); } else { System.out.println("Invalid data for: " + uniprotId); numErrors++; } } out.close(); return numErrors; } private static Set<String> initKeySet(String outputFilename, boolean incremental) { HashSet<String> keySet = new HashSet<String>(); if (incremental) { // TODO populate keyset by processing output file } return keySet; } /** * Fetches the JSON data from the PFAM graphics service for the * specified uniprot accession. 
* * @param uniprotAcc a uniprot accession * @return pfam graphic data as a JSON string * @throws IOException */ private static String fetch(String uniprotAcc) throws IOException { URL url = new URL(URL_PREFIX + uniprotAcc + URL_SUFFIX); URLConnection pfamConn = url.openConnection(); BufferedReader in = new BufferedReader( new InputStreamReader(pfamConn.getInputStream())); String line; StringBuilder sb = new StringBuilder(); // read all while((line = in.readLine()) != null) { sb.append(line); } in.close(); return sb.toString(); } public static void main(String[] args) throws Exception { // default config params boolean noFetch = false; // skip fetching boolean incremental = false; // overwrite or append data // process program arguments int i; // this is for program arguments starting with a dash // these arguments must come before IO file names for (i = 0; i < args.length; i++) { if (args[i].startsWith("-")) { if (args[i].equalsIgnoreCase("-nofetch")) { noFetch = true; } else if (args[i].equalsIgnoreCase("-append")) { incremental = true; } } else { break; } } // check IO file name args if (args.length - i < 1) { System.out.println("command line usage: fetchPfamGraphicsData.sh <output_pfam_mapping_file>"); return; } String output = args[i]; if (noFetch) { // do nothing, just terminate System.out.println("-nofetch argument provided, terminating..."); return; } System.out.println("Fetching started..."); Date start = new Date(); int numErrors = driver(output, incremental); Date end = new Date(); System.out.println("Fetching finished."); double timeElapsed = (end.getTime() - start.getTime()) / 1000.0; System.out.println("\nTotal time elapsed: " + timeElapsed + " seconds"); if (numErrors > 0) { System.out.println("Total number of errors: " + numErrors); } } }
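/*
 * The fetch() helper above reads the whole HTTP response into one string but closes the reader
 * only on the success path. A minimal sketch of the same URL-to-String read with
 * try-with-resources, so the stream is closed even if readLine() throws; readUrl is a
 * hypothetical helper for illustration, not part of the original class.
 */
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URL;

final class UrlReadExample {

    static String readUrl(String urlString) throws IOException {
        StringBuilder sb = new StringBuilder();
        try (BufferedReader in = new BufferedReader(
                new InputStreamReader(new URL(urlString).openConnection().getInputStream()))) {
            String line;
            while ((line = in.readLine()) != null) {
                sb.append(line);
            }
        }
        return sb.toString();
    }
}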
package ai.h2o.automl; import ai.h2o.automl.UserFeedbackEvent.Stage; import ai.h2o.automl.utils.AutoMLUtils; import hex.Model; import hex.ModelBuilder; import hex.StackedEnsembleModel; import hex.deeplearning.DeepLearningModel; import hex.glm.GLMModel; import hex.grid.Grid; import hex.grid.GridSearch; import hex.grid.HyperSpaceSearchCriteria; import hex.splitframe.ShuffleSplitFrame; import hex.tree.SharedTreeModel; import hex.tree.drf.DRFModel; import hex.tree.gbm.GBMModel; import water.*; import water.api.schemas3.ImportFilesV3; import water.api.schemas3.KeyV3; import water.exceptions.H2OAbstractRuntimeException; import water.exceptions.H2OIllegalArgumentException; import water.fvec.Frame; import water.fvec.Vec; import water.nbhm.NonBlockingHashMap; import water.parser.ParseDataset; import water.parser.ParseSetup; import water.util.ArrayUtils; import water.util.IcedHashMapGeneric; import water.util.Log; import java.io.File; import java.text.SimpleDateFormat; import java.util.*; import java.util.concurrent.atomic.AtomicInteger; import static hex.deeplearning.DeepLearningModel.DeepLearningParameters.Activation.RectifierWithDropout; import static water.Key.make; /** * H2O AutoML * * AutoML is used for automating the machine learning workflow, which includes automatic training and * tuning of many models within a user-specified time-limit. Stacked Ensembles will be automatically * trained on collections of individual models to produce highly predictive ensemble models which, in most cases, * will be the top performing models in the AutoML Leaderboard. */ public final class AutoML extends Lockable<AutoML> implements TimedH2ORunnable { private final static boolean verifyImmutability = true; // check that trainingFrame hasn't been messed with private final static SimpleDateFormat fullTimestampFormat = new SimpleDateFormat("yyyy.MM.dd HH:mm:ss.S"); private final static SimpleDateFormat timestampFormatForKeys = new SimpleDateFormat("yyyyMMdd_HHmmss"); private AutoMLBuildSpec buildSpec; // all parameters for doing this AutoML build private Frame origTrainingFrame; // untouched original training frame private boolean didValidationSplit = false; private boolean didLeaderboardSplit = false; public AutoMLBuildSpec getBuildSpec() { return buildSpec; } public Frame getTrainingFrame() { return trainingFrame; } public Frame getValidationFrame() { return validationFrame; } public Frame getLeaderboardFrame() { return leaderboardFrame; } public Vec getResponseColumn() { return responseColumn; } public Vec getFoldColumn() { return foldColumn; } public Vec getWeightsColumn() { return weightsColumn; } // Disabling metadata collection for now as there is no use for it... // public FrameMetadata getFrameMetadata() { //// return frameMetadata; private Frame trainingFrame; // required training frame: can add and remove Vecs, but not mutate Vec data in place private Frame validationFrame; // optional validation frame; the training_frame is split automagically if it's not specified private Frame leaderboardFrame; // optional test frame used for leaderboard scoring; if not specified, leaderboard will use xval metrics private Vec responseColumn; private Vec foldColumn; private Vec weightsColumn; // Disabling metadata collection for now as there is no use for it... 
// private FrameMetadata frameMetadata; // metadata for trainingFrame private Key<Grid> gridKeys[] = new Key[0]; // Grid key for the GridSearches // private boolean isClassification; private Date startTime; private static Date lastStartTime; // protect against two runs with the same second in the timestamp; be careful about races private long stopTimeMs; private Job job; // the Job object for the build of this AutoML. TODO: can we have > 1? private transient ArrayList<Job> jobs; private transient ArrayList<Frame> tempFrames; private AtomicInteger modelCount = new AtomicInteger(); // prepare for concurrency private Leaderboard leaderboard; private UserFeedback userFeedback; // check that we haven't messed up the original Frame private Vec[] originalTrainingFrameVecs; private String[] originalTrainingFrameNames; private long[] originalTrainingFrameChecksums; private String[] skipAlgosList = new String[0]; private String sort_metric; // TODO: UGH: this should be dynamic, and it's easy to make it so // NOTE: make sure that this is in sync with the exclude option in AutoMLBuildSpecV99 public enum algo { GLM, DRF, GBM, DeepLearning, StackedEnsemble; //removed XGBoost until we add that } public AutoML() { super(null); } public AutoML(Key<AutoML> key, Date startTime, AutoMLBuildSpec buildSpec) { super(key); this.startTime = startTime; userFeedback = new UserFeedback(this); // Don't use until we set this.project_name this.buildSpec = buildSpec; userFeedback.info(Stage.Workflow, "AutoML job created: " + fullTimestampFormat.format(this.startTime)); handleDatafileParameters(buildSpec); if (null != buildSpec.input_spec.fold_column && 5 != buildSpec.build_control.nfolds) throw new H2OIllegalArgumentException("Cannot specify fold_column and a non-default nfolds value at the same time."); if (null != buildSpec.input_spec.fold_column) userFeedback.warn(Stage.Workflow, "Custom fold column, " + buildSpec.input_spec.fold_column + ", will be used. nfolds value will be ignored."); userFeedback.info(Stage.Workflow, "Build control seed: " + buildSpec.build_control.stopping_criteria.seed() + (buildSpec.build_control.stopping_criteria.seed() == -1 ? " (random)" : "")); // By default, stopping tolerance is adaptive to the training frame if (this.buildSpec.build_control.stopping_criteria._stopping_tolerance == -1) { this.buildSpec.build_control.stopping_criteria.set_default_stopping_tolerance_for_frame(this.trainingFrame); userFeedback.info(Stage.Workflow, "Setting stopping tolerance adaptively based on the training frame: " + this.buildSpec.build_control.stopping_criteria._stopping_tolerance); } else { userFeedback.info(Stage.Workflow, "Stopping tolerance set by the user: " + this.buildSpec.build_control.stopping_criteria._stopping_tolerance); double default_tolerance = HyperSpaceSearchCriteria.RandomDiscreteValueSearchCriteria.default_stopping_tolerance_for_frame(this.trainingFrame); if (this.buildSpec.build_control.stopping_criteria._stopping_tolerance < 0.7 * default_tolerance){ userFeedback.warn(Stage.Workflow, "Stopping tolerance set by the user is < 70% of the recommended default of " + default_tolerance + ", so models may take a long time to converge or may not converge at all."); } } userFeedback.info(Stage.Workflow, "Project: " + projectName()); // TODO: does this need to be updated? 
I think its okay to pass a null leaderboardFrame leaderboard = Leaderboard.getOrMakeLeaderboard(projectName(), userFeedback, this.leaderboardFrame, this.sort_metric); this.jobs = new ArrayList<>(); this.tempFrames = new ArrayList<>(); } /** * If the user hasn't specified validation data, split it off for them. * <p> * For nfolds > 1, the user can specify: <p> * 1. training only <p> * 2. training + leaderboard <p> * 3. training + validation <p> * 4. training + validation + leaderboard <p> * <p> * In the top two cases we auto-split: <p> * training -> training:validation 80:20 <p> * <p> * For nfolds = 0, we have different rules: <p> * 5. training only <p> * 6. training + leaderboard <p> * 7. training + validation <p> * 8. training + validation + leaderboard <p> * <p> * TODO: should the size of the splits adapt to origTrainingFrame.numRows()? */ private void optionallySplitDatasets() { // TODO: Maybe clean this up a bit -- use else if instead of nested if/else // If using cross-validation (via nfolds or fold_column), we can use CV metrics for the Leaderboard // therefore we don't need to auto-gen a leaderboard frame if (this.buildSpec.build_control.nfolds > 1 || null != this.buildSpec.input_spec.fold_column) { if (null == this.validationFrame) { // case 1 and 2: missing validation frame -- need to create validation frame Frame[] splits = ShuffleSplitFrame.shuffleSplitFrame(origTrainingFrame, new Key[] { Key.make("automl_training_" + origTrainingFrame._key), Key.make("automl_validation_" + origTrainingFrame._key)}, new double[] { 0.8, 0.2 }, buildSpec.build_control.stopping_criteria.seed()); this.trainingFrame = splits[0]; this.validationFrame = splits[1]; this.didValidationSplit = true; this.didLeaderboardSplit = false; userFeedback.info(Stage.DataImport, "Automatically split the training data into training and validation frames in the ratio 80/20"); } else { // case 3 and 4: nothing to do here userFeedback.info(Stage.DataImport, "Training and validation were both specified; no auto-splitting."); } if (null == this.leaderboardFrame) { // Extra logging for null leaderboard_frame (case 1 and 3) userFeedback.info(Stage.DataImport, "Leaderboard frame not provided by the user; leaderboard will use cross-validation metrics instead."); } } else { // If not using cross-validation, then we must auto-gen a leaderboard frame (and validation frame if missing) if (null == this.leaderboardFrame) { if (null == this.validationFrame) { // case 5: no CV, missing validation and leaderboard frames -- need to create them both from train Frame[] splits = ShuffleSplitFrame.shuffleSplitFrame(origTrainingFrame, new Key[] { Key.make("automl_training_" + origTrainingFrame._key), Key.make("automl_validation_" + origTrainingFrame._key), Key.make("automl_leaderboard_" + origTrainingFrame._key)}, new double[] { 0.8, 0.1, 0.1 }, buildSpec.build_control.stopping_criteria.seed()); this.trainingFrame = splits[0]; this.validationFrame = splits[1]; this.leaderboardFrame = splits[2]; this.didValidationSplit = true; this.didLeaderboardSplit = true; userFeedback.info(Stage.DataImport, "Automatically split the training data into training, validation and leaderboard frames in the ratio 80/10/10"); } else { // case 7: no CV, missing leaderboard frame but validation exists -- need to create leaderboard frame from valid Frame[] splits = ShuffleSplitFrame.shuffleSplitFrame(validationFrame, new Key[] { Key.make("automl_validation_" + origTrainingFrame._key), Key.make("automl_leaderboard_" + origTrainingFrame._key)}, new double[] { 0.5, 0.5 
}, buildSpec.build_control.stopping_criteria.seed()); this.validationFrame = splits[0]; this.leaderboardFrame = splits[1]; this.didValidationSplit = true; this.didLeaderboardSplit = true; userFeedback.info(Stage.DataImport, "Automatically split the validation data into validation and leaderboard frames in the ratio 50/50"); } } else { // leaderboard frame is there, so if missing valid, then we just need to do a 80/20 split, else do nothing if (null == this.validationFrame) { // case 6: no CV, missing validation -- need to create it from train Frame[] splits = ShuffleSplitFrame.shuffleSplitFrame(origTrainingFrame, new Key[] { Key.make("automl_training_" + origTrainingFrame._key), Key.make("automl_validation_" + origTrainingFrame._key)}, new double[] { 0.8, 0.2 }, buildSpec.build_control.stopping_criteria.seed()); this.trainingFrame = splits[0]; this.validationFrame = splits[1]; this.didValidationSplit = true; this.didLeaderboardSplit = false; userFeedback.info(Stage.DataImport, "Automatically split the training data into training and validation frames in the ratio 80/20"); } else { // case 8: all frames are there, no need to do anything userFeedback.info(Stage.DataImport, "Training, validation and leaderboard datasets were all specified; not auto-splitting."); } } } } private void handleDatafileParameters(AutoMLBuildSpec buildSpec) { this.origTrainingFrame = DKV.getGet(buildSpec.input_spec.training_frame); this.validationFrame = DKV.getGet(buildSpec.input_spec.validation_frame); this.leaderboardFrame = DKV.getGet(buildSpec.input_spec.leaderboard_frame); this.sort_metric = buildSpec.input_spec.sort_metric; if (this.origTrainingFrame.find(buildSpec.input_spec.response_column) == -1) { throw new H2OIllegalArgumentException("Response column '" + buildSpec.input_spec.response_column + "' is not in " + "the training frame."); } if (buildSpec.input_spec.fold_column != null && this.origTrainingFrame.find(buildSpec.input_spec.fold_column) == -1) { throw new H2OIllegalArgumentException("Fold column '" + buildSpec.input_spec.fold_column + "' is not in " + "the training frame."); } if (buildSpec.input_spec.weights_column != null && this.origTrainingFrame.find(buildSpec.input_spec.weights_column) == -1) { throw new H2OIllegalArgumentException("Weights column '" + buildSpec.input_spec.weights_column + "' is not in " + "the training frame."); } optionallySplitDatasets(); if (null == this.trainingFrame) { // we didn't need to split off the validation_frame or leaderboard_frame ourselves this.trainingFrame = new Frame(origTrainingFrame); DKV.put(this.trainingFrame); } this.responseColumn = trainingFrame.vec(buildSpec.input_spec.response_column); this.foldColumn = trainingFrame.vec(buildSpec.input_spec.fold_column); this.weightsColumn = trainingFrame.vec(buildSpec.input_spec.weights_column); this.userFeedback.info(Stage.DataImport, "training frame: " + this.trainingFrame.toString().replace("\n", " ") + " checksum: " + this.trainingFrame.checksum()); this.userFeedback.info(Stage.DataImport, "validation frame: " + this.validationFrame.toString().replace("\n", " ") + " checksum: " + this.validationFrame.checksum()); if (null != this.leaderboardFrame) { this.userFeedback.info(Stage.DataImport, "leaderboard frame: " + this.leaderboardFrame.toString().replace("\n", " ") + " checksum: " + this.leaderboardFrame.checksum()); } else { this.userFeedback.info(Stage.DataImport, "leaderboard frame: NULL"); } this.userFeedback.info(Stage.DataImport, "response column: " + buildSpec.input_spec.response_column); 
this.userFeedback.info(Stage.DataImport, "fold column: " + this.foldColumn); this.userFeedback.info(Stage.DataImport, "weights column: " + this.weightsColumn); if (verifyImmutability) { // check that we haven't messed up the original Frame originalTrainingFrameVecs = origTrainingFrame.vecs().clone(); originalTrainingFrameNames = origTrainingFrame.names().clone(); originalTrainingFrameChecksums = new long[originalTrainingFrameVecs.length]; for (int i = 0; i < originalTrainingFrameVecs.length; i++) originalTrainingFrameChecksums[i] = originalTrainingFrameVecs[i].checksum(); } DKV.put(this); } public static AutoML makeAutoML(Key<AutoML> key, Date startTime, AutoMLBuildSpec buildSpec) { AutoML autoML = new AutoML(key, startTime, buildSpec); if (null == autoML.trainingFrame) throw new H2OIllegalArgumentException("No training data has been specified, either as a path or a key."); return autoML; } private static Frame importParseFrame(ImportFilesV3.ImportFiles importFiles, ParseSetup userSetup) { ArrayList<String> files = new ArrayList(); ArrayList<String> keys = new ArrayList(); ArrayList<String> fails = new ArrayList(); ArrayList<String> dels = new ArrayList(); H2O.getPM().importFiles(importFiles.path, null, files, keys, fails, dels); importFiles.files = files.toArray(new String[0]); importFiles.destination_frames = keys.toArray(new String[0]); importFiles.fails = fails.toArray(new String[0]); importFiles.dels = dels.toArray(new String[0]); String datasetName = importFiles.path.split("\\.(?=[^\\.]+$)")[0]; String separatorRegex = (File.separator.equals("/") ? "/" : "\\"); String[] pathPieces = datasetName.split(separatorRegex); datasetName = pathPieces[pathPieces.length - 1]; Key[] realKeys = new Key[keys.size()]; for (int i = 0; i < keys.size(); i++) realKeys[i] = make(keys.get(i)); // TODO: we always have to tell guessSetup about single quotes?! ParseSetup guessedParseSetup = ParseSetup.guessSetup(realKeys, false, ParseSetup.GUESS_HEADER); return ParseDataset.parse(make(datasetName), realKeys, true, guessedParseSetup); } // used to launch the AutoML asynchronously @Override public void run() { stopTimeMs = System.currentTimeMillis() + Math.round(1000 * buildSpec.build_control.stopping_criteria.max_runtime_secs()); learn(); } @Override public void stop() { for (Frame f : tempFrames) f.delete(); tempFrames = null; if (null == jobs) return; // already stopped for (Job j : jobs) j.stop(); for (Job j : jobs) j.get(); // Hold until they all completely stop. jobs = null; // TODO: add a failsafe, if we haven't marked off as much work as we originally intended? // If we don't, we end up with an exceptional completion. 
} public long getStopTimeMs() { return stopTimeMs; } public long timeRemainingMs() { long remaining = getStopTimeMs() - System.currentTimeMillis(); return Math.max(0, remaining); } public int remainingModels() { if (buildSpec.build_control.stopping_criteria.max_models() == 0) return Integer.MAX_VALUE; return buildSpec.build_control.stopping_criteria.max_models() - modelCount.get(); } @Override public boolean keepRunning() { return timeRemainingMs() > 0 && remainingModels() > 0; } private enum JobType { Unknown, ModelBuild, HyperparamSearch } public void pollAndUpdateProgress(Stage stage, String name, long workContribution, Job parentJob, Job subJob, JobType subJobType) { if (null == subJob) { if (null != parentJob) { parentJob.update(workContribution, "SKIPPED: " + name); Log.info("AutoML skipping " + name); } return; } userFeedback.info(stage, name + " started"); jobs.add(subJob); long lastWorkedSoFar = 0; long cumulative = 0; int gridLastCount = 0; while (subJob.isRunning()) { if (null != parentJob) { if (parentJob.stop_requested()) { Log.info("Skipping " + name + " due to Job cancel"); subJob.stop(); } } long workedSoFar = Math.round(subJob.progress() * workContribution); cumulative += workedSoFar; if (null != parentJob) { parentJob.update(Math.round(workedSoFar - lastWorkedSoFar), name); } if (JobType.HyperparamSearch == subJobType) { Grid grid = (Grid)subJob._result.get(); int gridCount = grid.getModelCount(); if (gridCount > gridLastCount) { userFeedback.info(Stage.ModelTraining, "Built: " + gridCount + " models for search: " + name); this.addModels(grid.getModelKeys()); gridLastCount = gridCount; } } try { Thread.currentThread().sleep(1000); } catch (InterruptedException e) { // keep going } lastWorkedSoFar = workedSoFar; } // pick up any stragglers: if (JobType.HyperparamSearch == subJobType) { if (subJob.isCrashed()) { userFeedback.info(stage, name + " failed: " + subJob.ex().toString()); } else { Grid grid = (Grid) subJob._result.get(); int gridCount = grid.getModelCount(); if (gridCount > gridLastCount) { userFeedback.info(Stage.ModelTraining, "Built: " + gridCount + " models for search: " + name); this.addModels(grid.getModelKeys()); gridLastCount = gridCount; } userFeedback.info(stage, name + " complete"); } } else if (JobType.ModelBuild == subJobType) { if (subJob.isCrashed()) { userFeedback.info(stage, name + " failed: " + subJob.ex().toString()); } else { userFeedback.info(stage, name + " complete"); this.addModel((Model) subJob._result.get()); } } // add remaining work if (null != parentJob) { parentJob.update(workContribution - lastWorkedSoFar); } jobs.remove(subJob); } // These are per (possibly concurrent) AutoML run. // All created keys for a run use the unique AutoML // run timestamp, so we can't have name collisions. 
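  // The counters and key helpers below give every model and grid a name that is unique within
  // this run: model keys look like <algo>_<n>_AutoML_<run timestamp> and grid keys look like
  // <algo>_grid_<n>_AutoML_<run timestamp>, where <n> is a per-algo counter starting at 0.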
private int individualModelsTrained = 0; private NonBlockingHashMap<String, Integer> algoInstanceCounters = new NonBlockingHashMap<>(); private NonBlockingHashMap<String, Integer> gridInstanceCounters = new NonBlockingHashMap<>(); private int nextInstanceCounter(String algoName, NonBlockingHashMap<String, Integer> instanceCounters) { synchronized (instanceCounters) { int instanceNum = 0; if (instanceCounters.containsKey(algoName)) instanceNum = instanceCounters.get(algoName) + 1; instanceCounters.put(algoName, instanceNum); return instanceNum; } } private Key<Model> modelKey(String algoName) { return Key.make(algoName + "_" + nextInstanceCounter(algoName, algoInstanceCounters) + "_AutoML_" + timestampFormatForKeys.format(this.startTime)); } /** * Helper for hex.ModelBuilder. * @return */ public Job trainModel(Key<Model> key, String algoURLName, Model.Parameters parms) { String algoName = ModelBuilder.algoName(algoURLName); if (null == key) key = modelKey(algoName); Job job = new Job<>(key, ModelBuilder.javaName(algoURLName), algoName); ModelBuilder builder = ModelBuilder.make(algoURLName, job, key); Model.Parameters defaults = builder._parms; builder._parms = parms; setCommonModelBuilderParams(builder._parms); if (builder._parms._max_runtime_secs == 0) builder._parms._max_runtime_secs = Math.round(timeRemainingMs() / 1000.0); else builder._parms._max_runtime_secs = Math.min(builder._parms._max_runtime_secs, Math.round(timeRemainingMs() / 1000.0)); // If we have set a seed for the search and not for the individual model params // then use a sequence starting with the same seed given for the model build. // Don't use the same exact seed so that, e.g., if we build two GBMs they don't // do the same row and column sampling. if (builder._parms._seed == defaults._seed && buildSpec.build_control.stopping_criteria.seed() != -1) builder._parms._seed = buildSpec.build_control.stopping_criteria.seed() + individualModelsTrained++; // If the caller hasn't set ModelBuilder stopping criteria, set it from our global criteria. if (builder._parms._stopping_metric == defaults._stopping_metric) builder._parms._stopping_metric = buildSpec.build_control.stopping_criteria.stopping_metric(); if (builder._parms._stopping_rounds == defaults._stopping_rounds) builder._parms._stopping_rounds = buildSpec.build_control.stopping_criteria.stopping_rounds(); if (builder._parms._stopping_tolerance == defaults._stopping_tolerance) builder._parms._stopping_tolerance = buildSpec.build_control.stopping_criteria.stopping_tolerance(); builder.init(false); // validate parameters // TODO: handle error_count and messages return builder.trainModel(); } private Key<Grid> gridKey(String algoName) { return Key.make(algoName + "_grid_" + nextInstanceCounter(algoName, gridInstanceCounters) + "_AutoML_" + timestampFormatForKeys.format(this.startTime)); } private void addGridKey(Key<Grid> gridKey) { gridKeys = Arrays.copyOf(gridKeys, gridKeys.length + 1); gridKeys[gridKeys.length - 1] = gridKey; } /** * Do a random hyperparameter search. Caller must eventually do a <i>get()</i> * on the returned Job to ensure that it's complete. * @param algoName name of the algo, e.g. 
"GBM"; used for messages and for building the grid key * @param baseParms ModelBuilder parameter values that are common across all models in the search * @param searchParms hyperparameter search space * @return the started hyperparameter search job */ public Job<Grid> hyperparameterSearch(String algoName, Model.Parameters baseParms, Map<String, Object[]> searchParms) { return hyperparameterSearch(null, algoName, baseParms, searchParms); } /** * Do a random hyperparameter search. Caller must eventually do a <i>get()</i> * on the returned Job to ensure that it's complete. * @param gridKey optional grid key * @param algoName name of the algo, e.g. "GBM"; used for messages and for building the grid key if it's not specified * @param baseParms ModelBuilder parameter values that are common across all models in the search * @param searchParms hyperparameter search space * @return the started hyperparameter search job */ public Job<Grid> hyperparameterSearch(Key<Grid> gridKey, String algoName, Model.Parameters baseParms, Map<String, Object[]> searchParms) { if (ArrayUtils.contains(skipAlgosList, algoName)) { userFeedback.info(Stage.ModelTraining,"AutoML: skipping algo " + algoName + " hyperparameter search"); return null; } setCommonModelBuilderParams(baseParms); if (remainingModels() <= 0) { userFeedback.info(Stage.ModelTraining,"AutoML: hit the max_models limit; skipping " + algoName + " hyperparameter search"); return null; } HyperSpaceSearchCriteria.RandomDiscreteValueSearchCriteria searchCriteria = (HyperSpaceSearchCriteria.RandomDiscreteValueSearchCriteria)buildSpec.build_control.stopping_criteria.clone(); if (searchCriteria.max_runtime_secs() == 0) searchCriteria.set_max_runtime_secs(this.timeRemainingMs() / 1000.0); else searchCriteria.set_max_runtime_secs(Math.min(searchCriteria.max_runtime_secs(), timeRemainingMs() / 1000.0)); if (searchCriteria.max_runtime_secs() <= 0.001) { userFeedback.info(Stage.ModelTraining,"AutoML: out of time; skipping " + algoName + " hyperparameter search"); return null; } if (searchCriteria.max_models() == 0) searchCriteria.set_max_models(remainingModels()); else searchCriteria.set_max_models(Math.min(searchCriteria.max_models(), remainingModels())); userFeedback.info(Stage.ModelTraining, "AutoML: starting " + algoName + " hyperparameter search"); // If the caller hasn't set ModelBuilder stopping criteria, set it from our global criteria. Model.Parameters defaults; try { defaults = baseParms.getClass().newInstance(); } catch (Exception e) { userFeedback.warn(Stage.ModelTraining, "Internal error doing hyperparameter search"); throw new H2OIllegalArgumentException("Hyperparameter search can't create a new instance of Model.Parameters subclass: " + baseParms.getClass()); } if (baseParms._stopping_metric == defaults._stopping_metric) baseParms._stopping_metric = buildSpec.build_control.stopping_criteria.stopping_metric(); if (baseParms._stopping_rounds == defaults._stopping_rounds) baseParms._stopping_rounds = buildSpec.build_control.stopping_criteria.stopping_rounds(); if (baseParms._stopping_tolerance == defaults._stopping_tolerance) baseParms._stopping_tolerance = buildSpec.build_control.stopping_criteria.stopping_tolerance(); // NOTE: // RandomDiscrete Hyperparameter Search matches the logic used in #trainModel(): // If we have set a seed for the search and not for the individual model params // then use a sequence starting with the same seed given for the model build. 
// Don't use the same exact seed so that, e.g., if we build two GBMs they don't // do the same row and column sampling. if (null == gridKey) gridKey = gridKey(algoName); addGridKey(gridKey); Job<Grid> gridJob = GridSearch.startGridSearch(gridKey, baseParms, searchParms, new GridSearch.SimpleParametersBuilderFactory(), searchCriteria); return gridJob; } private void setCommonModelBuilderParams(Model.Parameters params) { params._train = trainingFrame._key; if (null != validationFrame) params._valid = validationFrame._key; params._response_column = buildSpec.input_spec.response_column; params._ignored_columns = buildSpec.input_spec.ignored_columns; params._seed = buildSpec.build_control.stopping_criteria.seed(); // currently required, for the base_models, for stacking: if (! (params instanceof StackedEnsembleModel.StackedEnsembleParameters)) { params._keep_cross_validation_predictions = true; // TODO: StackedEnsemble doesn't support weights yet in score0 params._fold_column = buildSpec.input_spec.fold_column; params._weights_column = buildSpec.input_spec.weights_column; if (buildSpec.input_spec.fold_column == null) { params._nfolds = buildSpec.build_control.nfolds; if (buildSpec.build_control.nfolds > 1) { // TODO: below allow the user to specify this (vs Modulo) // TODO: also, the docs currently say that we use Random folds... not Modulo params._fold_assignment = Model.Parameters.FoldAssignmentScheme.Modulo; } } if (buildSpec.build_control.balance_classes == true) { params._balance_classes = buildSpec.build_control.balance_classes; params._class_sampling_factors = buildSpec.build_control.class_sampling_factors; params._max_after_balance_size = buildSpec.build_control.max_after_balance_size; } //TODO: add a check that gives an error when class_sampling_factors, max_after_balance_size is set and balance_classes = false } } private boolean exceededSearchLimits(String whatWeAreSkipping) { if (ArrayUtils.contains(skipAlgosList, whatWeAreSkipping)) { userFeedback.info(Stage.ModelTraining,"AutoML: skipping algo " + whatWeAreSkipping + " build"); return true; } if (timeRemainingMs() <= 0.001) { userFeedback.info(Stage.ModelTraining, "AutoML: out of time; skipping " + whatWeAreSkipping); return true; } if (remainingModels() <= 0) { userFeedback.info(Stage.ModelTraining, "AutoML: hit the max_models limit; skipping " + whatWeAreSkipping); return true; } return false; } Job<DRFModel>defaultRandomForest() { if (exceededSearchLimits("DRF")) return null; DRFModel.DRFParameters drfParameters = new DRFModel.DRFParameters(); setCommonModelBuilderParams(drfParameters); drfParameters._stopping_tolerance = this.buildSpec.build_control.stopping_criteria.stopping_tolerance(); Job randomForestJob = trainModel(null, "drf", drfParameters); return randomForestJob; } Job<DRFModel>defaultExtremelyRandomTrees() { if (exceededSearchLimits("DRF (XRT)")) return null; DRFModel.DRFParameters drfParameters = new DRFModel.DRFParameters(); setCommonModelBuilderParams(drfParameters); drfParameters._histogram_type = SharedTreeModel.SharedTreeParameters.HistogramType.Random; drfParameters._stopping_tolerance = this.buildSpec.build_control.stopping_criteria.stopping_tolerance(); Job randomForestJob = trainModel(modelKey("XRT"), "drf", drfParameters); return randomForestJob; } /** * Build Arno's magical 5 default GBMs. 
* @param gridKey */ void defaultGBMs(Key<Grid> gridKey) { if (exceededSearchLimits("default GBMs")) return; GBMModel.GBMParameters gbmParameters = new GBMModel.GBMParameters(); setCommonModelBuilderParams(gbmParameters); gbmParameters._score_tree_interval = 5; gbmParameters._histogram_type = SharedTreeModel.SharedTreeParameters.HistogramType.AUTO; Map<String, Object[]> searchParams = new HashMap<>(); searchParams.put("_ntrees", new Integer[]{10000}); searchParams.put("_sample_rate", new Double[]{ 0.80 }); searchParams.put("_col_sample_rate", new Double[]{ 0.8 }); searchParams.put("_col_sample_rate_per_tree", new Double[]{ 0.8 }); // searchParams.put("_learn_rate", new Double[]{0.001, 0.005, 0.008, 0.01, 0.05, 0.08, 0.1, 0.5, 0.8}); // searchParams.put("_min_split_improvement", new Double[]{1e-4, 1e-5}); Job<Grid>gbmJob = null; // Default 1: searchParams.put("_max_depth", new Integer[]{ 6 }); searchParams.put("_min_rows", new Integer[]{ 1 }); gbmJob = hyperparameterSearch(gridKey, "GBM", gbmParameters, searchParams); pollAndUpdateProgress(Stage.ModelTraining, "GBM 1", 10, this.job(), gbmJob, JobType.HyperparamSearch); // Default 2: searchParams.put("_max_depth", new Integer[]{ 7 }); searchParams.put("_min_rows", new Integer[]{ 10 }); gbmJob = hyperparameterSearch(gridKey, "GBM", gbmParameters, searchParams); pollAndUpdateProgress(Stage.ModelTraining, "GBM 2", 10, this.job(), gbmJob, JobType.HyperparamSearch); // Default 3: searchParams.put("_max_depth", new Integer[]{ 8 }); searchParams.put("_min_rows", new Integer[]{ 10 }); gbmJob = hyperparameterSearch(gridKey, "GBM", gbmParameters, searchParams); pollAndUpdateProgress(Stage.ModelTraining, "GBM 3", 10, this.job(), gbmJob, JobType.HyperparamSearch); // Default 4: searchParams.put("_max_depth", new Integer[]{ 10 }); searchParams.put("_min_rows", new Integer[]{ 10 }); gbmJob = hyperparameterSearch(gridKey, "GBM", gbmParameters, searchParams); pollAndUpdateProgress(Stage.ModelTraining, "GBM 4", 10, this.job(), gbmJob, JobType.HyperparamSearch); // Default 5: searchParams.put("_max_depth", new Integer[]{ 15 }); searchParams.put("_min_rows", new Integer[]{ 100 }); gbmJob = hyperparameterSearch(gridKey, "GBM", gbmParameters, searchParams); pollAndUpdateProgress(Stage.ModelTraining, "GBM 5", 10, this.job(), gbmJob, JobType.HyperparamSearch); return; } Job<DeepLearningModel>defaultDeepLearning() { if (exceededSearchLimits("DeepLearning")) return null; DeepLearningModel.DeepLearningParameters deepLearningParameters = new DeepLearningModel.DeepLearningParameters(); setCommonModelBuilderParams(deepLearningParameters); deepLearningParameters._stopping_tolerance = this.buildSpec.build_control.stopping_criteria.stopping_tolerance(); deepLearningParameters._hidden = new int[]{ 10, 10, 10 }; Job deepLearningJob = trainModel(null, "deeplearning", deepLearningParameters); return deepLearningJob; } public Job<Grid> defaultSearchGLM() { // do a random hyperparameter search with GLM // TODO: convert to using the REST API Key<Grid> gridKey = Key.make("GLM_grid_default_" + this._key.toString()); HyperSpaceSearchCriteria.RandomDiscreteValueSearchCriteria searchCriteria = buildSpec.build_control.stopping_criteria; // TODO: put this into a Provider, which can return multiple searches GLMModel.GLMParameters glmParameters = new GLMModel.GLMParameters(); setCommonModelBuilderParams(glmParameters); glmParameters._lambda_search = true; glmParameters._family = getResponseColumn().isBinary() && !(getResponseColumn().isNumeric()) ? 
GLMModel.GLMParameters.Family.binomial : getResponseColumn().isCategorical() ? GLMModel.GLMParameters.Family.multinomial : GLMModel.GLMParameters.Family.gaussian; // TODO: other continuous distributions! Map<String, Object[]> searchParams = new HashMap<>(); glmParameters._alpha = new double[] {0.0, 0.2, 0.4, 0.6, 0.8, 1.0}; // Note: standard GLM parameter is an array; don't use searchParams! searchParams.put("_missing_values_handling", new DeepLearningModel.DeepLearningParameters.MissingValuesHandling[] {DeepLearningModel.DeepLearningParameters.MissingValuesHandling.MeanImputation /* , DeepLearningModel.DeepLearningParameters.MissingValuesHandling.Skip */}); Job<Grid>glmJob = hyperparameterSearch("GLM", glmParameters, searchParams); return glmJob; } public Job<Grid> defaultSearchGBM(Key<Grid> gridKey) { // do a random hyperparameter search with GBM HyperSpaceSearchCriteria.RandomDiscreteValueSearchCriteria searchCriteria = buildSpec.build_control.stopping_criteria; // TODO: put this into a Provider, which can return multiple searches GBMModel.GBMParameters gbmParameters = new GBMModel.GBMParameters(); setCommonModelBuilderParams(gbmParameters); gbmParameters._score_tree_interval = 5; gbmParameters._histogram_type = SharedTreeModel.SharedTreeParameters.HistogramType.AUTO; Map<String, Object[]> searchParams = new HashMap<>(); searchParams.put("_ntrees", new Integer[]{10000}); searchParams.put("_max_depth", new Integer[]{3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17}); searchParams.put("_min_rows", new Integer[]{1, 5, 10, 15, 30, 100}); searchParams.put("_learn_rate", new Double[]{0.001, 0.005, 0.008, 0.01, 0.05, 0.08, 0.1, 0.5, 0.8}); searchParams.put("_sample_rate", new Double[]{0.50, 0.60, 0.70, 0.80, 0.90, 1.00}); searchParams.put("_col_sample_rate", new Double[]{ 0.4, 0.7, 1.0}); searchParams.put("_col_sample_rate_per_tree", new Double[]{ 0.4, 0.7, 1.0}); searchParams.put("_min_split_improvement", new Double[]{1e-4, 1e-5}); Job<Grid>gbmJob = hyperparameterSearch(gridKey, "GBM", gbmParameters, searchParams); return gbmJob; } public Job<Grid> defaultSearchDL1(Key<Grid> gridKey) { // do a random hyperparameter search with DL HyperSpaceSearchCriteria.RandomDiscreteValueSearchCriteria searchCriteria = buildSpec.build_control.stopping_criteria; // TODO: put this into a Provider, which can return multiple searches DeepLearningModel.DeepLearningParameters dlParameters = new DeepLearningModel.DeepLearningParameters(); setCommonModelBuilderParams(dlParameters); dlParameters._epochs = 10000; // early stopping takes care of epochs - no need to tune! 
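    // defaultSearchDL1/2/3 share this base setup and the same common search ranges (_rho,
    // _epsilon, _input_dropout_ratio); they differ only in _hidden and _hidden_dropout_ratios,
    // covering 1-, 2- and 3-layer networks respectively.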
dlParameters._adaptive_rate = true; dlParameters._activation = RectifierWithDropout; Map<String, Object[]> searchParams = new HashMap<>(); // common: searchParams.put("_rho", new Double[] { 0.9, 0.95, 0.99 }); searchParams.put("_epsilon", new Double[] { 1e-6, 1e-7, 1e-8, 1e-9 }); searchParams.put("_input_dropout_ratio", new Double[] { 0.0, 0.05, 0.1, 0.15, 0.2 }); // unique: searchParams.put("_hidden", new Integer[][] { {50}, {200}, {500} }); searchParams.put("_hidden_dropout_ratios", new Double[][] { { 0.0 }, { 0.1 }, { 0.2 }, { 0.3 }, { 0.4 }, { 0.5 } }); Job<Grid>dlJob = hyperparameterSearch(gridKey, "DL", dlParameters, searchParams); return dlJob; } public Job<Grid> defaultSearchDL2(Key<Grid> gridKey) { // do a random hyperparameter search with DL HyperSpaceSearchCriteria.RandomDiscreteValueSearchCriteria searchCriteria = buildSpec.build_control.stopping_criteria; // TODO: put this into a Provider, which can return multiple searches DeepLearningModel.DeepLearningParameters dlParameters = new DeepLearningModel.DeepLearningParameters(); setCommonModelBuilderParams(dlParameters); dlParameters._epochs = 10000; // early stopping takes care of epochs - no need to tune! dlParameters._adaptive_rate = true; dlParameters._activation = RectifierWithDropout; Map<String, Object[]> searchParams = new HashMap<>(); // common: searchParams.put("_rho", new Double[] { 0.9, 0.95, 0.99 }); searchParams.put("_epsilon", new Double[] { 1e-6, 1e-7, 1e-8, 1e-9 }); searchParams.put("_input_dropout_ratio", new Double[] { 0.0, 0.05, 0.1, 0.15, 0.2 }); // unique: searchParams.put("_hidden", new Integer[][] { {50, 50}, {200, 200}, {500, 500} }); searchParams.put("_hidden_dropout_ratios", new Double[][] { { 0.0, 0.0 }, { 0.1, 0.1 }, { 0.2, 0.2 }, { 0.3, 0.3 }, { 0.4, 0.4 }, { 0.5, 0.5 } }); Job<Grid>dlJob = hyperparameterSearch(gridKey, "DL", dlParameters, searchParams); return dlJob; } public Job<Grid> defaultSearchDL3(Key<Grid> gridKey) { // do a random hyperparameter search with DL HyperSpaceSearchCriteria.RandomDiscreteValueSearchCriteria searchCriteria = buildSpec.build_control.stopping_criteria; // TODO: put this into a Provider, which can return multiple searches DeepLearningModel.DeepLearningParameters dlParameters = new DeepLearningModel.DeepLearningParameters(); setCommonModelBuilderParams(dlParameters); dlParameters._epochs = 10000; // early stopping takes care of epochs - no need to tune! dlParameters._adaptive_rate = true; dlParameters._activation = RectifierWithDropout; Map<String, Object[]> searchParams = new HashMap<>(); // common: searchParams.put("_rho", new Double[] { 0.9, 0.95, 0.99 }); searchParams.put("_epsilon", new Double[] { 1e-6, 1e-7, 1e-8, 1e-9 }); searchParams.put("_input_dropout_ratio", new Double[] { 0.0, 0.05, 0.1, 0.15, 0.2 }); // unique: searchParams.put("_hidden", new Integer[][] { {50, 50, 50}, {200, 200, 200}, {500, 500, 500} }); searchParams.put("_hidden_dropout_ratios", new Double[][] { { 0.0, 0.0, 0.0 }, { 0.1, 0.1, 0.1 }, { 0.2, 0.2, 0.2 }, { 0.3, 0.3, 0.3 }, { 0.4, 0.4, 0.4 }, { 0.5, 0.5, 0.5 } }); Job<Grid>dlJob = hyperparameterSearch(gridKey, "DL", dlParameters, searchParams); return dlJob; } Job<StackedEnsembleModel>stack(String modelName, Key<Model>[]... 
modelKeyArrays) { List<Key<Model>> allModelKeys = new ArrayList<>(); for (Key<Model>[] modelKeyArray : modelKeyArrays) allModelKeys.addAll(Arrays.asList(modelKeyArray)); // Set up Stacked Ensemble StackedEnsembleModel.StackedEnsembleParameters stackedEnsembleParameters = new StackedEnsembleModel.StackedEnsembleParameters(); stackedEnsembleParameters._base_models = allModelKeys.toArray(new Key[0]); stackedEnsembleParameters._valid = (getValidationFrame() == null ? null : getValidationFrame()._key); stackedEnsembleParameters._keep_levelone_frame = true; //TODO Why is this true? Can be optionally turned off // Add cross-validation args if (buildSpec.input_spec.fold_column != null) { stackedEnsembleParameters._metalearner_fold_column = buildSpec.input_spec.fold_column; stackedEnsembleParameters._metalearner_nfolds = 0; //if fold_column is used, set nfolds to 0 (default) } else { stackedEnsembleParameters._metalearner_nfolds = buildSpec.build_control.nfolds; } // TODO: Add fold_assignment Key modelKey = modelKey(modelName); Job ensembleJob = trainModel(modelKey, "stackedensemble", stackedEnsembleParameters); return ensembleJob; } public void learn() { userFeedback.info(Stage.Workflow, "AutoML build started: " + fullTimestampFormat.format(new Date())); if (buildSpec.build_models.exclude_algos != null) for (AutoML.algo algo : buildSpec.build_models.exclude_algos) skipAlgosList = ArrayUtils.append(skipAlgosList, algo.toString()); // This is useful during debugging. skipAlgosList = ArrayUtils.append(skipAlgosList, new String[] { /* "GLM", "DRF", "GBM", "DeepLearning", "StackedEnsemble" */ }); // Inform the user about skipped algos. // Note: to make the keys short we use "DL" for the "DeepLearning" searches: for (String skippedAlgo : skipAlgosList) userFeedback.info(Stage.ModelTraining, "Disabling algo: " + skippedAlgo + " as requested by the user."); if (ArrayUtils.contains(skipAlgosList, "DeepLearning")) skipAlgosList = ArrayUtils.append(skipAlgosList,"DL"); if (ArrayUtils.contains(skipAlgosList, "GBM")) skipAlgosList = ArrayUtils.append(skipAlgosList, "default GBMs"); if (ArrayUtils.contains(skipAlgosList, "DRF")) skipAlgosList = ArrayUtils.append(skipAlgosList, "DRF (XRT)"); // gather initial frame metadata and guess the problem type // Disabling metadata collection for now as there is no use for it... // // TODO: Nishant says sometimes frameMetadata is null, so maybe we need to wait for it? // // null FrameMetadata arises when delete() is called without waiting for start() to finish. // frameMetadata = new FrameMetadata(userFeedback, trainingFrame, // trainingFrame.find(buildSpec.input_spec.response_column), // trainingFrame._key.toString()).computeFrameMetaPass1(); // HashMap<String, Object> frameMeta = FrameMetadata.makeEmptyFrameMeta(); // frameMetadata.fillSimpleMeta(frameMeta); // giveDatasetFeedback(trainingFrame, userFeedback, frameMeta); // job.update(20, "Computed dataset metadata"); // isClassification = frameMetadata.isClassification(); // build a fast RF with default settings... Job<DRFModel>defaultRandomForestJob = defaultRandomForest(); pollAndUpdateProgress(Stage.ModelTraining, "Default Random Forest build", 50, this.job(), defaultRandomForestJob, JobType.ModelBuild); // ... 
and another with "XRT" / extratrees settings Job<DRFModel>defaultExtremelyRandomTreesJob = defaultExtremelyRandomTrees(); pollAndUpdateProgress(Stage.ModelTraining, "Extremely Randomized Trees (XRT) Random Forest build", 50, this.job(), defaultExtremelyRandomTreesJob, JobType.ModelBuild); // build GLMs with the default search parameters // TODO: run for only part of the remaining time? Job<Grid>glmJob = defaultSearchGLM(); pollAndUpdateProgress(Stage.ModelTraining, "GLM hyperparameter search", 50, this.job(), glmJob, JobType.HyperparamSearch); // build five GBMs with Arno's default settings, using 1-grid // Cartesian searches into the same grid object as the search // below. Key<Grid> gbmGridKey = gridKey("GBM"); defaultGBMs(gbmGridKey); // NOTE: does its own polling, 5 models with 10 work units each // build a fast DL model with almost default settings... Job<DeepLearningModel>defaultDeepLearningJob = defaultDeepLearning(); pollAndUpdateProgress(Stage.ModelTraining, "Default Deep Learning build", 20, this.job(), defaultDeepLearningJob, JobType.ModelBuild); // build GBMs with the default search parameters // TODO: run for only part of the remaining time? Job<Grid> gbmJob = defaultSearchGBM(gbmGridKey); pollAndUpdateProgress(Stage.ModelTraining, "GBM hyperparameter search", 80, this.job(), gbmJob, JobType.HyperparamSearch); // Build DL models Key<Grid> dlGridKey = gridKey("DeepLearning"); // build DL models with default search parameter set 1 // TODO: run for only part of the remaining time? Job<Grid>dlJob1 = defaultSearchDL1(dlGridKey); pollAndUpdateProgress(Stage.ModelTraining, "DeepLearning hyperparameter search 1", 150, this.job(), dlJob1, JobType.HyperparamSearch); // build DL models with default search parameter set 2 // TODO: run for only part of the remaining time? Job<Grid>dlJob2 = defaultSearchDL2(dlGridKey); pollAndUpdateProgress(Stage.ModelTraining, "DeepLearning hyperparameter search 2", 200, this.job(), dlJob2, JobType.HyperparamSearch); // build DL models with default search parameter set 3 // TODO: run for only part of the remaining time? Job<Grid>dlJob3 = defaultSearchDL3(dlGridKey); pollAndUpdateProgress(Stage.ModelTraining, "DeepLearning hyperparameter search 3", 300, this.job(), dlJob3, JobType.HyperparamSearch); // (optionally) build StackedEnsemble Model[] allModels = leaderboard().getModels(); if (allModels.length == 0) { this.job.update(50, "No models built; StackedEnsemble builds skipped"); userFeedback.info(Stage.ModelTraining, "No models were built, due to timeouts or the exclude_algos option. StackedEnsemble builds skipped."); } else if (allModels.length == 1) { this.job.update(50, "One model built; StackedEnsemble builds skipped"); userFeedback.info(Stage.ModelTraining, "StackedEnsemble builds skipped since there is only one model built"); } else if (ArrayUtils.contains(skipAlgosList, "StackedEnsemble")) { this.job.update(50, "StackedEnsemble builds skipped"); userFeedback.info(Stage.ModelTraining, "StackedEnsemble builds skipped due to the exclude_algos option."); } else { Model m = allModels[0]; // If nfolds == 0, then skip the Stacked Ensemble if (buildSpec.build_control.nfolds == 0) { this.job.update(50, "Cross-validation disabled by the user; StackedEnsemble build skipped"); userFeedback.info(Stage.ModelTraining,"Cross-validation disabled by the user; StackedEnsemble build skipped"); } else { // stack all models // Also stack models from other AutoML runs, by using the Leaderboard! 
(but don't stack stacks) int nonEnsembleCount = 0; for (Model aModel : allModels) if (!(aModel instanceof StackedEnsembleModel)) nonEnsembleCount++; Key<Model>[] notEnsembles = new Key[nonEnsembleCount]; int notEnsembleIndex = 0; for (Model aModel : allModels) if (!(aModel instanceof StackedEnsembleModel)) notEnsembles[notEnsembleIndex++] = aModel._key; Job<StackedEnsembleModel> ensembleJob = stack("StackedEnsemble_AllModels", notEnsembles); pollAndUpdateProgress(Stage.ModelTraining, "StackedEnsemble build using all AutoML models", 50, this.job(), ensembleJob, JobType.ModelBuild); // Set aside List<Model> for best models per model type. Meaning best GLM, GBM, DRF, XRT, and DL (5 models). // This will give another ensemble that is smaller than the original which takes all models into consideration. List<Model> bestModelsOfEachType = new ArrayList(); Set<String> typesOfGatheredModels = new HashSet(); for (Model aModel : allModels) { String type = getModelType(aModel); if (typesOfGatheredModels.contains(type)) continue; typesOfGatheredModels.add(type); bestModelsOfEachType.add(aModel); } Key<Model>[] bestModelKeys = new Key[bestModelsOfEachType.size()]; for (int i = 0; i < bestModelsOfEachType.size(); i++) bestModelKeys[i] = bestModelsOfEachType.get(i)._key; Job<StackedEnsembleModel> bestEnsembleJob = stack("StackedEnsemble_BestOfFamily", bestModelKeys); pollAndUpdateProgress(Stage.ModelTraining, "StackedEnsemble build using top model from each algorithm type", 50, this.job(), bestEnsembleJob, JobType.ModelBuild); } } userFeedback.info(Stage.Workflow, "AutoML: build done; built " + modelCount + " models"); Log.info(userFeedback.toString("User Feedback for AutoML Run " + this._key + ":")); for (UserFeedbackEvent event : userFeedback.feedbackEvents) Log.info(event); if (0 < this.leaderboard().getModelKeys().length) { //TODO Below should really be a parameter, but needs more thought... // We should not spend time computing train/valid leaderboards until we are ready to expose them to the user // Commenting this section out for now /* // Use a throwaway AutoML instance so the "New leader" message doesn't pollute our feedback AutoML dummyAutoML = new AutoML(); UserFeedback dummyUF = new UserFeedback(dummyAutoML); dummyAutoML.userFeedback = dummyUF; Leaderboard trainingLeaderboard = Leaderboard.getOrMakeLeaderboard(projectName() + "_training", dummyUF, this.trainingFrame); trainingLeaderboard.addModels(this.leaderboard().getModelKeys()); Log.info(trainingLeaderboard.toTwoDimTable("TRAINING FRAME Leaderboard for project " + projectName(), true).toString()); Log.info(); // Use a throwawayUserFeedback instance so the "New leader" message doesn't pollute our feedback Leaderboard validationLeaderboard = Leaderboard.getOrMakeLeaderboard(projectName() + "_validation", dummyUF, this.validationFrame); validationLeaderboard.addModels(this.leaderboard().getModelKeys()); Log.info(validationLeaderboard.toTwoDimTable("VALIDATION FRAME Leaderboard for project " + projectName(), true).toString()); Log.info(); */ Log.info(leaderboard().toTwoDimTable("Leaderboard for project " + projectName(), true).toString()); } possiblyVerifyImmutability(); if (!buildSpec.build_control.keep_cross_validation_predictions) { cleanUpModelsCVPreds(); } if (!buildSpec.build_control.keep_cross_validation_models) { cleanUpModelsCVModels(); } // gather more data? build more models? start applying transforms? what next ...? stop(); } // end of learn() /** * Instantiate an AutoML object and start it running. 
Progress can be tracked via its job(). * * @param buildSpec * @return */ public static AutoML startAutoML(AutoMLBuildSpec buildSpec) { Date startTime = new Date(); // this is the one and only startTime for this run synchronized (AutoML.class) { // protect against two runs whose startTime is the same second if (lastStartTime != null) { while (lastStartTime.getYear() == startTime.getYear() && lastStartTime.getMonth() == startTime.getMonth() && lastStartTime.getDate() == startTime.getDate() && lastStartTime.getHours() == startTime.getHours() && lastStartTime.getMinutes() == startTime.getMinutes() && lastStartTime.getSeconds() == startTime.getSeconds()) startTime = new Date(); } lastStartTime = startTime; } String keyString = buildSpec.build_control.project_name; AutoML aml = AutoML.makeAutoML(Key.<AutoML>make(keyString), startTime, buildSpec); DKV.put(aml); startAutoML(aml); return aml; } /** * Takes in an AutoML instance and starts running it. Progress can be tracked via its job(). * @param aml * @return */ public static void startAutoML(AutoML aml) { // Currently AutoML can only run one job at a time if (aml.job == null || !aml.job.isRunning()) { Job job = new /* Timed */ H2OJob(aml, aml._key, aml.timeRemainingMs()).start(); aml.job = job; job._work = 1000; DKV.put(aml); } } /** * Holds until AutoML's job is completed, if a job exists. */ public void get() { if (job != null) job.get(); } /** * Delete the AutoML-related objects, but leave the grids and models that it built. */ public void delete() { //if (frameMetadata != null) frameMetadata.delete(); //TODO: We shouldn't have to worry about FrameMetadata being null AutoMLUtils.cleanup_adapt(trainingFrame, origTrainingFrame); leaderboard.delete(); userFeedback.delete(); remove(); } /** * Same as delete() but also deletes all Objects made from this instance. */ public void deleteWithChildren() { leaderboard.deleteWithChildren(); // implicit: feedback.delete(); delete(); // is it safe to do leaderboard.delete() now? for (Key<Grid> gridKey : gridKeys) gridKey.remove(); // If the Frame was made here (e.g. buildspec contained a path, then it will be deleted if (buildSpec.input_spec.training_frame == null) { origTrainingFrame.delete(); } if (buildSpec.input_spec.validation_frame == null) { validationFrame.delete(); } } public Job job() { if (null == this.job) return null; return DKV.getGet(this.job._key); } public Leaderboard leaderboard() { return (leaderboard == null ? null : leaderboard._key.get()); } public Model leader() { return (leaderboard() == null ? null : leaderboard().getLeader()); } public UserFeedback userFeedback() { return userFeedback == null ? null : userFeedback._key.get(); } public String projectName() { return buildSpec == null ? null : buildSpec.project(); } // If we have multiple AutoML engines running on the same // project they will be updating the Leaderboard concurrently, // so always use leaderboard() instead of the raw field, to get // it from the DKV. 
// Also, the leaderboard will reject duplicate models, so use // the difference in Leaderboard length here: public void addModels(final Key<Model>[] newModels) { int before = leaderboard().getModelCount(); leaderboard().addModels(newModels); int after = leaderboard().getModelCount(); modelCount.addAndGet(after - before); } public void addModel(final Key<Model> newModel) { int before = leaderboard().getModelCount(); leaderboard().addModel(newModel); int after = leaderboard().getModelCount(); modelCount.addAndGet(after - before); } public void addModel(final Model newModel) { int before = leaderboard().getModelCount(); leaderboard().addModel(newModel); int after = leaderboard().getModelCount(); modelCount.addAndGet(after - before); } // satisfy typing for job return type... public static class AutoMLKeyV3 extends KeyV3<Iced, AutoMLKeyV3, AutoML> { public AutoMLKeyV3() { } public AutoMLKeyV3(Key<AutoML> key) { super(key); } } @Override public Class<AutoMLKeyV3> makeSchema() { return AutoMLKeyV3.class; } private class AutoMLDoneException extends H2OAbstractRuntimeException { public AutoMLDoneException() { this("done", "done"); } public AutoMLDoneException(String msg, String dev_msg) { super(msg, dev_msg, new IcedHashMapGeneric.IcedHashMapStringObject()); } } public boolean possiblyVerifyImmutability() { boolean warning = false; if (verifyImmutability) { // check that we haven't messed up the original Frame userFeedback.debug(Stage.Workflow, "Verifying training frame immutability. . ."); Vec[] vecsRightNow = origTrainingFrame.vecs(); String[] namesRightNow = origTrainingFrame.names(); if (originalTrainingFrameVecs.length != vecsRightNow.length) { Log.warn("Training frame vec count has changed from: " + originalTrainingFrameVecs.length + " to: " + vecsRightNow.length); warning = true; } if (originalTrainingFrameNames.length != namesRightNow.length) { Log.warn("Training frame vec count has changed from: " + originalTrainingFrameNames.length + " to: " + namesRightNow.length); warning = true; } for (int i = 0; i < originalTrainingFrameVecs.length; i++) { if (!originalTrainingFrameVecs[i].equals(vecsRightNow[i])) { Log.warn("Training frame vec number " + i + " has changed keys. Was: " + originalTrainingFrameVecs[i] + " , now: " + vecsRightNow[i]); warning = true; } if (!originalTrainingFrameNames[i].equals(namesRightNow[i])) { Log.warn("Training frame vec number " + i + " has changed names. Was: " + originalTrainingFrameNames[i] + " , now: " + namesRightNow[i]); warning = true; } if (originalTrainingFrameChecksums[i] != vecsRightNow[i].checksum()) { Log.warn("Training frame vec number " + i + " has changed checksum. Was: " + originalTrainingFrameChecksums[i] + " , now: " + vecsRightNow[i].checksum()); warning = true; } } if (warning) userFeedback.warn(Stage.Workflow, "Training frame was mutated! This indicates a bug in the AutoML software."); else userFeedback.debug(Stage.Workflow, "Training frame was not mutated (as expected)."); } else { userFeedback.debug(Stage.Workflow, "Not verifying training frame immutability. . . 
This is turned off for efficiency.");
    }
    return warning;
  }

  private void giveDatasetFeedback(Frame frame, UserFeedback userFeedback, HashMap<String, Object> frameMeta) {
    userFeedback.info(Stage.FeatureAnalysis, "Metadata for Frame: " + frame._key.toString());
    for (Map.Entry<String, Object> entry : frameMeta.entrySet()) {
      if (entry.getKey().startsWith("Dummy")) continue;
      Object val = entry.getValue();
      if (val instanceof Double || val instanceof Float)
        userFeedback.info(Stage.FeatureAnalysis, entry.getKey() + ": " + String.format("%.6f", val));
      else
        userFeedback.info(Stage.FeatureAnalysis, entry.getKey() + ": " + entry.getValue());
    }
  }

  private String getModelType(Model m) {
    return m._key.toString().startsWith("XRT_") ? "XRT" : m._parms.algoName();
  }

  private void cleanUpModelsCVPreds() {
    // Clear out all CV preds and CV models
    for (Model model : leaderboard().getModels()) {
      Log.info("Remove CV Preds for " + model._key.toString());
      model.deleteCrossValidationPreds();
    }
  }

  private void cleanUpModelsCVModels() {
    for (Model model : leaderboard().getModels()) {
      Log.info("Remove CV Models for " + model._key.toString());
      model.deleteCrossValidationModels();
    }
  }
}
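// ---------------------------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original source): one way a caller might drive the
// public API defined above. The frame key and response column name are assumptions made purely
// for this example.
//
//   AutoMLBuildSpec buildSpec = new AutoMLBuildSpec();
//   buildSpec.input_spec.training_frame = Key.make("my_training_frame"); // hypothetical frame key
//   buildSpec.input_spec.response_column = "response";                   // hypothetical column name
//   buildSpec.build_control.nfolds = 5;
//   AutoML aml = AutoML.startAutoML(buildSpec); // creates the AutoML instance and launches its job
//   aml.get();                                  // block until the run completes
//   Model leader = aml.leader();                // best model on the leaderboard
// ---------------------------------------------------------------------------------------------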
package edu.utah.sci.cyclist.neup.ui.views.inventory; import java.util.ArrayList; import java.util.List; import java.util.function.Consumer; import javafx.animation.Animation; import javafx.animation.RotateTransition; import javafx.beans.Observable; import javafx.beans.property.ListProperty; import javafx.beans.property.ReadOnlyObjectProperty; import javafx.beans.property.SimpleListProperty; import javafx.collections.FXCollections; import javafx.collections.ObservableList; import javafx.concurrent.Task; import javafx.scene.Node; import javafx.scene.control.ChoiceBox; import javafx.scene.control.Label; import javafx.scene.control.TextField; import javafx.scene.input.TransferMode; import javafx.scene.layout.BorderPane; import javafx.scene.layout.HBox; import javafx.scene.layout.Pane; import javafx.scene.layout.Priority; import javafx.scene.layout.VBox; import javafx.scene.paint.Color; import javafx.scene.text.Text; import javafx.util.Duration; import edu.utah.sci.cyclist.core.event.Pair; import edu.utah.sci.cyclist.core.event.dnd.DnD; import edu.utah.sci.cyclist.core.model.Configuration; import edu.utah.sci.cyclist.core.model.Field; import edu.utah.sci.cyclist.core.model.Simulation; import edu.utah.sci.cyclist.core.ui.components.CyclistViewBase; import edu.utah.sci.cyclist.core.ui.panels.TitledPanel; import edu.utah.sci.cyclist.core.util.AwesomeIcon; import edu.utah.sci.cyclist.core.util.GlyphRegistry; import edu.utah.sci.cyclist.neup.model.Inventory; import edu.utah.sci.cyclist.neup.model.proxy.SimulationProxy; public class InventoryView extends CyclistViewBase { public static final String ID = "inventory-view"; public static final String TITLE = "Inventory"; private static final String NET_CHART_LABEL = "Net"; private static final String COMMULATIVE_CHART_LABEL = "Commulative"; private ObservableList<AgentInfo> _agents = FXCollections.observableArrayList(); private List<String> _acceptableFields = new ArrayList<>(); private Simulation _currentSim = null; private SimulationProxy _simProxy = null; private InventoryChart _chart; class AgentInfo { public String field; public String value; public Color color; public ListProperty<Inventory> inventory = new SimpleListProperty<Inventory>(); public AgentInfo(String field, String value) { this.field = field; this.value = value; color = Configuration.getInstance().getColor(field);; } public String getName() { return field+"="+value; } } public InventoryView() { super(); init(); build(); } private void selectChartType(String value) { } @Override public void selectSimulation(Simulation sim, boolean active) { super.selectSimulation(sim, active); if (!active && sim != _currentSim) { return; // ignore } _currentSim = active? sim : null; _simProxy = _currentSim == null ? 
null : new SimulationProxy(_currentSim); //TODO: re-fetch inventories } private void init() { _acceptableFields.add("Implementation"); _acceptableFields.add("Prototype"); _acceptableFields.add("AgentID"); _acceptableFields.add("InstitutionID"); } private void build() { setTitle(TITLE); getStyleClass().add("inventory"); BorderPane pane = new BorderPane(); pane.setLeft(buildCtrl()); pane.setCenter(buildChart()); setContent(pane); } private Node buildCtrl() { VBox vbox = new VBox(); vbox.getStyleClass().add("ctrl"); vbox.getChildren().addAll( // buildChartCtrl(), buildAgentCtrl(), buildNuclideCtrl() ); return vbox; } private Node buildChartCtrl() { VBox vbox = new VBox(); vbox.getStyleClass().add("ctrl"); ChoiceBox<String> type = new ChoiceBox<>(); type.getStyleClass().add("choice"); type.getItems().addAll(COMMULATIVE_CHART_LABEL, NET_CHART_LABEL); type.valueProperty().addListener(e->{ selectChartType(type.getValue()); }); type.setValue(COMMULATIVE_CHART_LABEL); vbox.getChildren().add(type); return vbox; } public Node buildAgentCtrl() { TitledPanel panel = new TitledPanel("Agents", GlyphRegistry.get(AwesomeIcon.BUILDING)); Node pane = panel.getPane(); panel.setFillWidth(true); pane.setOnDragOver(e->{ DnD.LocalClipboard clipboard = getLocalClipboard(); if (clipboard.hasContent(DnD.VALUE_FORMAT)) { Field field = clipboard.get(DnD.FIELD_FORMAT, Field.class); if (_acceptableFields.contains(field.getName())) { e.acceptTransferModes(TransferMode.COPY); e.consume(); } } }); pane.setOnDragDropped(e->{ DnD.LocalClipboard clipboard = getLocalClipboard(); String value = clipboard.get(DnD.VALUE_FORMAT, Object.class).toString(); String field = clipboard.get(DnD.FIELD_FORMAT, Field.class).getName(); // ensure we don't already have this field for (AgentInfo agent : _agents) { if (agent.field.equals(field) && agent.value.equals(value)) { e.consume(); return; } } AgentInfo info = new AgentInfo(field, value); AgentEntry entry = new AgentEntry(info); addAgent(entry); panel.getContent().getChildren().add(entry); // entry.setOnClose(item->{ // _agents.remove(item.info); // panel.getContent().getChildren().remove(item); e.setDropCompleted(true); e.consume(); }); return panel; } public Node buildNuclideCtrl() { VBox vbox = new VBox(); vbox.getStyleClass().add("infobar"); Text title = new Text("Nuclide"); title.getStyleClass().add("title"); TextField entry = new TextField(); entry.getStyleClass().add("nuclide"); entry.setPromptText("filter"); vbox.getChildren().addAll( title, entry ); //entry.setOnAction(e->isoFilterChanged(entry.getText())); return vbox; } private void addAgent(final AgentEntry entry) { _agents.add(entry.info); entry.info.inventory.addListener((Observable o)->{ addToChart(entry.info); }); entry.info.inventory.bind(fetchInventory(entry)); } private ReadOnlyObjectProperty<ObservableList<Inventory>> fetchInventory(AgentEntry entry) { final String field = entry.info.field; final String value = entry.info.value; Task<ObservableList<Inventory>> task = new Task<ObservableList<Inventory>>() { @Override protected ObservableList<Inventory> call() throws Exception { ObservableList<Inventory> list = _simProxy.getInventory(field, value); return list; } }; entry.setTask(task); Thread thread = new Thread(task); thread.setDaemon(true); thread.start(); return task.valueProperty(); } private void addToChart(AgentInfo info) { List<Pair<Integer, Double>> series = new ArrayList<>(); Pair<Integer, Double> current = null; // collect data. 
TODO: apply filters for (Inventory i : info.inventory) { if (current == null || current.v1 != i.time) { if (current != null) { series.add(current); } current = new Pair<>(); current.v1 = i.time; current.v2 = i.amount; } else { current.v2 += i.amount; } } if (current != null) { series.add(current); } _chart.add(info, info.getName(), series); } private Node buildChart() { _chart = new InventoryChart(); return _chart; } class AgentEntry extends HBox { public AgentInfo info; private Status _status; private Consumer<AgentEntry> _onClose = null; public AgentEntry(final AgentInfo info) { super(); this.info = info; getStyleClass().add("agent"); Text text = new Text(info.value); Node button = GlyphRegistry.get(AwesomeIcon.TIMES, "10px"); button.setVisible(false); _status = new Status(); getChildren().addAll(text, _status, button); setOnMouseEntered(e->{ button.setVisible(true); getStyleClass().add("hover"); }); setOnMouseExited(e->{ button.setVisible(false); getStyleClass().remove("hover"); }); button.setOnMouseClicked(e->{ if (_onClose != null) { _onClose.accept(this); } }); HBox.setHgrow(this, Priority.ALWAYS); } public void setTask(Task<?> task) { _status.setTask(task); } public void setOnClose(Consumer<AgentEntry> cb) { setTask(null); _onClose = cb; } } class Status extends Pane { private Task<?> _task = null; private Node _icon; private RotateTransition _animation; public Status() { super(); _icon = GlyphRegistry.get(AwesomeIcon.REFRESH, "10px"); getChildren().add(_icon); _animation = new RotateTransition(Duration.millis(10000), _icon); _animation.setFromAngle(0); _animation.setByAngle(3600); _animation.setCycleCount(Animation.INDEFINITE); setVisible(false); setOnMouseClicked(e->_task.cancel()); } public void setTask(Task<?> task) { if (_task != null) { _task.cancel(); _animation.stop(); visibleProperty().unbind(); } _task = task; if (_task != null) { visibleProperty().bind(task.runningProperty()); _task.runningProperty().addListener(e->{ if (_task.isRunning()) { _animation.play(); } else { _animation.stop(); } }); task.setOnFailed(e->{ System.out.println("Task failed:"+_task.getMessage()); setTask(null); // TODO: save the error msg; }); } } } }
package org.mousephenotype.cda.owl; import org.junit.Assert; import org.junit.Before; import org.junit.Ignore; import org.junit.Test; import org.junit.runner.RunWith; import org.mousephenotype.cda.utilities.UrlUtils; import org.semanticweb.owlapi.model.IRI; import org.semanticweb.owlapi.model.OWLOntologyCreationException; import org.semanticweb.owlapi.model.OWLOntologyStorageException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Value; import org.springframework.test.context.TestPropertySource; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; import uk.ac.manchester.cs.owl.owlapi.OWLObjectPropertyImpl; import javax.validation.constraints.NotNull; import java.io.File; import java.io.FileOutputStream; import java.io.FilenameFilter; import java.io.IOException; import java.net.URL; import java.nio.channels.Channels; import java.nio.channels.ReadableByteChannel; import java.nio.file.Files; import java.nio.file.Paths; import java.nio.file.StandardCopyOption; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.*; import java.util.stream.Collectors; @RunWith(SpringJUnit4ClassRunner.class) @TestPropertySource(locations = {"file:${user.home}/configfiles/${profile:dev}/test.properties"}) public class OntologyParserTest { public static boolean downloadFiles = false; private Map<String, Download> downloads = new HashMap<>(); // key = map name. value = download info. public boolean doDownload = true; private final Logger logger = LoggerFactory.getLogger(this.getClass()); private OntologyParser ontologyParser; @NotNull @Value("${owlpath}") protected String owlpath; @Before public void setUp() throws Exception { downloads.put("efo", new Download("EFO", "http: downloads.put("mphp", new Download("MP", "http://build-artifacts.berkeleybop.org/build-mp-hp-view/latest/mp-hp-view.owl", owlpath + "/mp-hp.owl")); downloads.put("mp", new Download("MP", "http://purl.obolibrary.org/obo/mp.owl", owlpath + "/mp.owl")); if ( ! downloadFiles) { downloadFiles(); downloadFiles = true; } } private class Download { public final String name; public final String url; public final String target; public Download(String name, String url, String target) { this.name = name; this.url = url; this.target = target; } } private void downloadFiles() { try { Files.createDirectories(Paths.get(owlpath)); } catch (IOException e) { System.err.println("Create owlpath directory '" + owlpath + "' failed. Reason: " + e.getLocalizedMessage()); } if (doDownload) { for (Download download : downloads.values()) { // Download the owl files. FileOutputStream fos; ReadableByteChannel rbc; final DateFormat DATE_FORMAT = new SimpleDateFormat("yyyyMMddHHmmss"); String outputAppender = DATE_FORMAT.format(new Date()); String target; String targetTemp; URL url; target = download.target; targetTemp = target + "." 
+ outputAppender; try { url = new URL(UrlUtils.getRedirectedUrl(download.url)); if (download.url.equals(url.toString())) { System.out.println("DOWNLOADING " + url.toString() + " to " + download.target); } else { System.out.println("DOWNLOADING " + download.url + " (remapped to " + url.toString() + ") to " + download.target); } rbc = Channels.newChannel(url.openStream()); fos = new FileOutputStream(targetTemp); fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE); Files.move(Paths.get(targetTemp), Paths.get(target), StandardCopyOption.REPLACE_EXISTING); } catch (IOException e) { logger.error(download.url + " -> " + target + " download failed. Reason: " + e.getLocalizedMessage()); } } } } @Ignore @Test public void testOwlOntologyDownloads() throws Exception { String message; List<Exception> exception = new ArrayList(); File owlpathFile = new File(owlpath); File[] owlFiles = owlpathFile.listFiles(new FilenameFilter() { @Override public boolean accept(File dir, String name) { return name.toLowerCase().endsWith(".owl"); } }); String prefix; for (File file : owlFiles) { prefix = file.getName().replace(".owl", "").toUpperCase(); try { ontologyParser = new OntologyParser(file.getPath(), prefix, null, null); } catch (Exception e) { message = "[FAIL - " + prefix + "] Exception in " + file.getPath() + "(" + prefix + "): " + e.getLocalizedMessage(); exception.add(e); System.out.println(message + "\n"); continue; } List<OntologyTermDTO> terms = ontologyParser.getTerms(); if (terms.size() > 700) { System.out.println("[PASS - " + prefix + "] - " + file.getPath() + ". Size: " + terms.size()); } else { System.out.println("[FAIL - " + prefix + "] - " + file.getPath() + ". Size: " + terms.size()); } System.out.println(); } if ( ! exception.isEmpty()) { throw exception.get(0); // Just throw the first one. } } // Because it had that IRI used twice, once with ObjectProperty and once with AnnotationProperty RO_0002200 @Test public void testEFO() throws Exception { ontologyParser = new OntologyParser(downloads.get("efo").target, downloads.get("efo").name, null, null); List<OntologyTermDTO> terms = ontologyParser.getTerms(); Assert.assertFalse("Expected at least one term.", terms.isEmpty()); } @Test public void testNarrowSynonyms() throws Exception { System.out.println("target: " + downloads.get("mphp").target); System.out.println("name: " + downloads.get("mphp").name); ontologyParser = new OntologyParser(downloads.get("mphp").target, downloads.get("mphp").name, null, null); OntologyTermDTO term = ontologyParser.getOntologyTerm("MP:0006325"); Set<String> narrowSynonyms = ontologyParser.getNarrowSynonyms(term, 1); Assert.assertFalse("Narrow synonyms list is empty!", narrowSynonyms.isEmpty()); Assert.assertTrue("Narrow synonyms list does not contain a label!", narrowSynonyms.contains("conductive hearing impairment")); Assert.assertTrue("Narrow synonyms list does not contain an exact synonym!", narrowSynonyms.contains("complete hearing loss")); // Test both HP and MP terms are considered. // Abnormal glucose homeostasis MP:0002078 is equivalent to HP:0011014 term = ontologyParser.getOntologyTerm("MP:0002078"); Assert.assertTrue("HP synonym not found, was looking for Abnormal C-peptide level ." 
, ontologyParser.getNarrowSynonyms(term,2).contains("Abnormal C-peptide level")); } @Test public void testEquivalent() throws Exception { ontologyParser = new OntologyParser(downloads.get("mphp").target, downloads.get("mphp").name, null, null); List<OntologyTermDTO> terms = ontologyParser.getTerms(); Assert.assertFalse("Term list is empty!", terms.isEmpty()); OntologyTermDTO mp0000572 = ontologyParser.getOntologyTerm("MP:0000572"); Assert.assertNotNull("Could not find MP:0000572 in mp-hp.owl", mp0000572); Assert.assertFalse("Could not find equivalent class for MP:0000572 in mp-hp.owl. Equivalent class should be HP:0005922.", mp0000572.getEquivalentClasses().isEmpty()); Set<OntologyTermDTO> termSet = mp0000572.getEquivalentClasses(); List<OntologyTermDTO> eqTerms = termSet.stream() .filter(term -> term.getAccessionId().equals("HP:0005922")) .collect(Collectors.toList()); Assert.assertFalse("Expected equivalent class HP:0005922 but list is empty.", eqTerms.isEmpty()); Assert.assertTrue("Expected equivalent class HP:0005922. Not found.", eqTerms.get(0).getAccessionId().equals("HP:0005922")); } @Test public void testReplacementOptions() throws Exception { ontologyParser = new OntologyParser(downloads.get("mp").target, downloads.get("mp").name, null, null); List<OntologyTermDTO> termList = ontologyParser.getTerms(); Map<String, OntologyTermDTO> terms = termList.stream() .filter(term -> term.getAccessionId().equals("MP:0006374") || term.getAccessionId().equals("MP:0002977") || term.getAccessionId().equals("MP:0000003")) .collect(Collectors.toMap(OntologyTermDTO::getAccessionId, ontologyTermDTO -> ontologyTermDTO)); /* Test alternative ids are found for MP_0000003 (should be MP:0000011). */ OntologyTermDTO withAltIds = terms.get("MP:0000003"); Assert.assertTrue("Expected MP:0000003 has MP:0000011 as alt id. ", (withAltIds.getAlternateIds() != null && withAltIds.getAlternateIds().contains("MP:0000011"))); /* * Test for term MP:0006374 with replacement ID MP:0008996 */ OntologyTermDTO withReplacementIds = terms.get("MP:0006374"); Assert.assertNotNull("Expected term MP:0006374, a term with replacement ids. Not found.", withReplacementIds); Assert.assertTrue("Expected MP:0006374 to be marked obsolete but it was not.", withReplacementIds.isObsolete()); Assert.assertNotNull("Expected MP:0006374 to have a replacement term, but the replacement term was null", withReplacementIds.getReplacementAccessionId()); Assert.assertFalse("Expected MP:0006374 to have a replacement term, but the replacement term list was empty.", withReplacementIds.getReplacementAccessionId().isEmpty()); Assert.assertTrue("Expected replacement accession id MP:0008996. Not found.", withReplacementIds.getReplacementAccessionId().contains("MP:0008996")); /* * Test for term MP:0002977 with consider IDs MP:0010241 and MP:0010464 */ OntologyTermDTO withConsiderIds = terms.get("MP:0002977"); Assert.assertNotNull("Expected term MP:0002977, a term with consider ids. Not found.", withConsiderIds); Assert.assertTrue("Expected at least two consider id terms: MP:0010241 and MP:0010464, but found " + withConsiderIds.getConsiderIds().size() + ".'", withConsiderIds.getConsiderIds().size() >= 2); Assert.assertTrue("Expected consider id MP:0010241. Not found.", withConsiderIds.getConsiderIds().contains("MP:0010241")); Assert.assertTrue("Expected consider id MP:0010464. 
Not found.", withConsiderIds.getConsiderIds().contains("MP:0010464")); } @Test public void testTermsInSlim() throws Exception{ ontologyParser = new OntologyParser(downloads.get("mp").target, downloads.get("mp").name, null, null); Set<String> wantedIds = new HashSet<>(); wantedIds.add("MP:0008901"); wantedIds.add("MP:0005395"); // "other phenotype" - obsolete and should not be in the sim Set<String> termsInSlim = ontologyParser.getTermsInSlim(wantedIds, null); Assert.assertTrue(termsInSlim.size() == 7); Assert.assertTrue(!termsInSlim.contains("MP:0005395")); } @Test public void testParentInfo() throws Exception{ ontologyParser = new OntologyParser(downloads.get("mp").target, downloads.get("mp").name, null, null); OntologyTermDTO term = ontologyParser.getOntologyTerm("MP:0005452"); // abnormal adipose tissue amount Assert.assertTrue(term.getParentIds().contains("MP:0000003")); Assert.assertTrue(term.getParentIds().size() == 1); Assert.assertTrue(term.getParentNames().size() == 1); } @Test public void testChildInfo() throws Exception{ ontologyParser = new OntologyParser(downloads.get("mp").target, downloads.get("mp").name, null, null); OntologyTermDTO term = ontologyParser.getOntologyTerm("MP:0005452"); // abnormal adipose tissue amount Assert.assertTrue(term.getChildIds().contains("MP:0010024")); System.out.println("term.getChildIds().size() " + term.getChildIds().size() + term.getChildIds()); Assert.assertTrue(term.getChildIds().size() == 4); // 4 child terms in the ontology without reasoning Assert.assertTrue(term.getChildNames().size() == 4); term =ontologyParser.getOntologyTerm("MP:0000003"); System.out.println("term.getChildIds().size() " + term.getChildIds().size() + term.getChildIds()); Assert.assertTrue(term.getChildIds().size() == 11); // 11 child terms in the ontology without reasoning Assert.assertTrue(term.getChildNames().size() == 11); } @Test public void testTopLevels() throws Exception{ Set<String> topLevels = new HashSet<>(Arrays.asList("MP:0010768", "MP:0002873", "MP:0001186", "MP:0003631", "MP:0003012", "MP:0005367", "MP:0005369", "MP:0005370", "MP:0005371", "MP:0005377", "MP:0005378", "MP:0005375", "MP:0005376", "MP:0005379", "MP:0005380", "MP:0005381", "MP:0005384", "MP:0005385", "MP:0005382", "MP:0005388", "MP:0005389", "MP:0005386", "MP:0005387", "MP:0005391", "MP:0005390", "MP:0005394", "MP:0005397")); ontologyParser = new OntologyParser(downloads.get("mp").target, downloads.get("mp").name, topLevels, null); // 1 term top level OntologyTermDTO term = ontologyParser.getOntologyTerm("MP:0005452"); // abnormal adipose tissue amount Assert.assertTrue(term.getTopLevelIds().contains("MP:0005375")); Assert.assertTrue(term.getTopLevelIds().size() == 1); Assert.assertTrue(term.getTopLevelNames().size() == 1); // multiple top levels term = ontologyParser.getOntologyTerm("MP:0000017"); // big ears Assert.assertTrue(term.getTopLevelIds().contains("MP:0005382")); Assert.assertTrue(term.getTopLevelIds().contains("MP:0005378")); Assert.assertTrue(term.getTopLevelIds().contains("MP:0005377")); Assert.assertTrue(term.getTopLevelIds().size() == 3); Assert.assertTrue(term.getTopLevelNames().size() == 3); // term is top level itself term = ontologyParser.getOntologyTerm("MP:0005378"); Assert.assertTrue(term.getTopLevelIds() == null || term.getTopLevelIds().size() == 0); } @Test public void testMpMaMapping() throws OWLOntologyCreationException, OWLOntologyStorageException, IOException { Set<OWLObjectPropertyImpl> viaProperties = new HashSet<>(); viaProperties.add(new 
OWLObjectPropertyImpl(IRI.create("http://purl.obolibrary.org/obo/BFO_0000052"))); viaProperties.add(new OWLObjectPropertyImpl(IRI.create("http://purl.obolibrary.org/obo/BFO_0000070"))); viaProperties.add(new OWLObjectPropertyImpl(IRI.create("http://purl.obolibrary.org/obo/mp/mp-logical-definitions#inheres_in_part_of"))); OntologyParser mpMaParser = new OntologyParser(Paths.get(owlpath)+ "/mp-ext-merged.owl", null, null, null); // Should have only MA_0000009 = adipose tissue; MP:0000003 = abnormal adipose tissue morphology Set<String> ma = mpMaParser.getReferencedClasses("MP:0000003", viaProperties, "MA"); Assert.assertTrue(ma.size() == 1); Assert.assertTrue(ma.contains("MA:0000009")); } }
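Read as a quick reference, the tests above double as the only usage documentation for OntologyParser; the following is a minimal sketch distilled from them, assuming only the constructor and accessors exercised in this class. The owl path is a hypothetical placeholder and the last two constructor arguments are passed as null, as in most of the tests.

package org.mousephenotype.cda.owl;

import java.util.List;
import java.util.Set;

public class OntologyParserUsageSketch {
    public static void main(String[] args) throws Exception {
        // (path, prefix, topLevelIds, wantedIds) as used by the tests above; the last two may be null
        OntologyParser parser = new OntologyParser("/tmp/owl/mp.owl", "MP", null, null);

        List<OntologyTermDTO> terms = parser.getTerms();
        System.out.println("Parsed " + terms.size() + " terms");

        OntologyTermDTO term = parser.getOntologyTerm("MP:0005452");
        System.out.println(term.getAccessionId() + " parents: " + term.getParentIds());

        // Narrow synonyms up to a given depth, as in testNarrowSynonyms()
        Set<String> synonyms = parser.getNarrowSynonyms(term, 1);
        System.out.println("Narrow synonyms: " + synonyms.size());
    }
}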
package com.firebase.ui.database; import android.util.Pair; import com.google.firebase.database.DataSnapshot; import com.google.firebase.database.DatabaseError; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.ListIterator; /** * Acts as a bridge between a list of {@link DataSnapshot}s and a list of objects of type E. * * @param <E> the model representation of a {@link DataSnapshot} */ public class FirebaseArrayOfObjects<E> extends ImmutableList<E> { protected List<DataSnapshot> mSnapshots; protected Class<E> mEClass; protected SnapshotParser<E> mParser; /** * @param snapshots a list of {@link DataSnapshot}s to be converted to a model type * @param modelClass the model representation of a {@link DataSnapshot} */ protected FirebaseArrayOfObjects(List<DataSnapshot> snapshots, Class<E> modelClass) { mSnapshots = snapshots; mEClass = modelClass; mParser = new SnapshotParser<E>() { @Override public E parseSnapshot(DataSnapshot snapshot) { return snapshot.getValue(mEClass); } }; } /** * @param snapshots a list of {@link DataSnapshot}s to be converted to a model type * @param modelClass the model representation of a {@link DataSnapshot} */ public static <T> FirebaseArrayOfObjects<T> newInstance(List<DataSnapshot> snapshots, Class<T> modelClass) { if (snapshots instanceof FirebaseArray) { return new FirebaseArrayOfObjectsOptimized<>((FirebaseArray) snapshots, modelClass); } else { return new FirebaseArrayOfObjects<>(snapshots, modelClass); } } /** * @param parser a custom {@link SnapshotParser} to manually convert each {@link DataSnapshot} * to its model type * @see #newInstance(List, Class) */ public static <T> FirebaseArrayOfObjects<T> newInstance(List<DataSnapshot> snapshots, Class<T> modelClass, SnapshotParser<T> parser) { FirebaseArrayOfObjects<T> array = newInstance(snapshots, modelClass); array.mParser = parser; return array; } public List<DataSnapshot> getSnapshots() { return mSnapshots; } protected List<E> getObjects() { List<E> objects = new ArrayList<>(mSnapshots.size()); for (int i = 0; i < mSnapshots.size(); i++) { objects.add(get(i)); } return objects; } @Override public int size() { return mSnapshots.size(); } @Override public boolean isEmpty() { return mSnapshots.isEmpty(); } @Override public boolean contains(Object o) { return indexOf(o) >= 0; } /** * {@inheritDoc} * * @return an immutable iterator */ @Override public Iterator<E> iterator() { return new ImmutableIterator(getObjects().iterator()); } @Override public Object[] toArray() { return getObjects().toArray(); } @Override public <T> T[] toArray(T[] a) { return getObjects().toArray(a); } @Override public boolean containsAll(Collection<?> c) { return getObjects().containsAll(c); } @Override public E get(int index) { return mParser.parseSnapshot(mSnapshots.get(index)); } @Override public int indexOf(Object o) { return getObjects().indexOf(o); } @Override public int lastIndexOf(Object o) { return getObjects().lastIndexOf(o); } /** * {@inheritDoc} * * @return an immutable list iterator */ @Override public ListIterator<E> listIterator() { return new ImmutableListIterator(getObjects().listIterator()); } /** * {@inheritDoc} * * @return an immutable list iterator */ @Override public ListIterator<E> listIterator(int index) { return new ImmutableListIterator(getObjects().listIterator(index)); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null || getClass() != obj.getClass()) return false; FirebaseArrayOfObjects<?> array 
= (FirebaseArrayOfObjects<?>) obj; return mSnapshots.equals(array.mSnapshots) && mEClass.equals(array.mEClass); } @Override public int hashCode() { int result = mSnapshots.hashCode(); result = 31 * result + mEClass.hashCode(); return result; } @Override public String toString() { return "FirebaseArrayOfObjects{" + "mSnapshots=" + mSnapshots + '}'; } protected static class FirebaseArrayOfObjectsOptimized<E> extends FirebaseArrayOfObjects<E> implements ChangeEventListener, SubscriptionEventListener { protected List<E> mObjects = new ArrayList<>(); protected Pair<Boolean, Boolean> mIsListening$AddedListener = new Pair<>(true, false); public FirebaseArrayOfObjectsOptimized(FirebaseArray snapshots, Class<E> modelClass) { super(snapshots, modelClass); snapshots.addChangeEventListener(this); snapshots.addSubscriptionEventListener(this); } @Override protected List<E> getObjects() { return mObjects; } @Override public void onChildChanged(ChangeEventListener.EventType type, int index, int oldIndex) { switch (type) { case ADDED: mObjects.add(get(index)); break; case CHANGED: mObjects.set(index, get(index)); break; case REMOVED: mObjects.remove(index); break; case MOVED: mObjects.add(index, mObjects.remove(oldIndex)); break; } } @Override public void onSubscriptionRemoved() { FirebaseArray snapshots = (FirebaseArray) mSnapshots; if (!snapshots.isListening()) { snapshots.removeChangeEventListener(this); mIsListening$AddedListener = new Pair<>(false, false); } } @Override public void onSubscriptionAdded() { if (mIsListening$AddedListener.second) { mIsListening$AddedListener = new Pair<>(true, false); } else if (!mIsListening$AddedListener.first) { ((FirebaseArray) mSnapshots).addChangeEventListener(this); mIsListening$AddedListener = new Pair<>(true, true); } } @Override public void onDataChanged() { } @Override public void onCancelled(DatabaseError error) { } } }
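The class above is most useful through its newInstance factories; the sketch below, based only on the overloads and the SnapshotParser interface shown above, illustrates default parsing versus a custom parser. The Message model type is a hypothetical placeholder.

package com.firebase.ui.database;

import com.google.firebase.database.DataSnapshot;

import java.util.List;

public class FirebaseArrayOfObjectsUsageSketch {
    // Hypothetical model class; needs a public no-arg constructor for snapshot.getValue(Message.class)
    public static class Message {
        public String text;
    }

    public static FirebaseArrayOfObjects<Message> wrap(List<DataSnapshot> snapshots) {
        // Default parsing: each DataSnapshot is converted with snapshot.getValue(Message.class)
        return FirebaseArrayOfObjects.newInstance(snapshots, Message.class);
    }

    public static FirebaseArrayOfObjects<Message> wrapWithCustomParser(List<DataSnapshot> snapshots) {
        // Custom parsing: supply a SnapshotParser to control how each snapshot becomes a Message
        return FirebaseArrayOfObjects.newInstance(snapshots, Message.class, new SnapshotParser<Message>() {
            @Override
            public Message parseSnapshot(DataSnapshot snapshot) {
                Message m = new Message();
                m.text = String.valueOf(snapshot.getValue());
                return m;
            }
        });
    }
}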
package org.perfmon4j.dbupgrader; import java.io.File; import java.sql.Connection; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Random; import junit.framework.TestCase; import liquibase.Liquibase; import liquibase.database.Database; import liquibase.database.DatabaseFactory; import liquibase.database.jvm.JdbcConnection; import liquibase.resource.ClassLoaderResourceAccessor; import org.perfmon4j.dbupgrader.UpdateOrCreateDb.Parameters; import org.slf4j.LoggerFactory; public class UpdateOrCreateDbTest extends TestCase { private static String SCHEMA = "TEST"; public static final String JDBC_URL = "jdbc:derby:memory:mydb"; public static final String JDBC_DRIVER = "org.apache.derby.jdbc.EmbeddedDriver"; private Connection conn = null; public UpdateOrCreateDbTest(String name) { super(name); ch.qos.logback.classic.Logger logger = (ch.qos.logback.classic.Logger)LoggerFactory.getLogger("liquibase"); logger.setLevel(ch.qos.logback.classic.Level.WARN); logger = (ch.qos.logback.classic.Logger)LoggerFactory.getLogger("org.perfmon4j"); logger.setLevel(ch.qos.logback.classic.Level.WARN); } protected void setUp() throws Exception { DatabaseFactory factory = DatabaseFactory.getInstance(); factory.clearRegistry(); factory.register(new NoCloseDerbyDatabase()); conn = UpdaterUtil.createConnection(JDBC_DRIVER, null, JDBC_URL + ";create=true", null, null); conn.setAutoCommit(true); executeUpdate("CREATE SCHEMA " + SCHEMA); super.setUp(); } protected void tearDown() throws Exception { UpdaterUtil.closeNoThrow(conn); try { UpdaterUtil.createConnection(JDBC_DRIVER, null, JDBC_URL + ";drop=true", null, null); } catch (SQLException sn) { } DatabaseFactory.reset(); super.tearDown(); } private static String rsRowToString(ResultSet rs) throws SQLException { String result = ""; ResultSetMetaData d = rs.getMetaData(); int count = d.getColumnCount(); for (int i = 1; i <= count; i++) { result += d.getColumnLabel(i) + "=" + rs.getString(i) + "\r\n"; } return result; } private static String dumpQuery(Connection conn, String SQL) throws SQLException { String result = ""; Statement stmt = null; ResultSet rs = null; try { stmt = conn.createStatement(); rs = stmt.executeQuery(SQL); while (rs.next()) { result += rsRowToString(rs); result += "*********************************************\r\n"; } } finally { UpdaterUtil.closeNoThrow(rs); UpdaterUtil.closeNoThrow(stmt); } return result; } public void testPopulateDatabase() throws Exception { // Start with an empty database... runUpdater(); assertTrue("Should have a P4JSystem table", UpdaterUtil.doesTableExist(conn, SCHEMA, "P4JSystem")); System.out.println(dumpQuery(conn, "SELECT * FROM " + SCHEMA + ".DATABASECHANGELOG")); assertTrue("Database change log should reflect databaseLabel 0002.0 applied", databaseLabelExistsInChangeLog("0002.0")); int systemRows = getQueryCount("SELECT count(*) FROM " + SCHEMA + ".P4JSystem WHERE SystemID=1 AND SystemName='Default'"); assertEquals("Should have populated default system row", 1, systemRows); } public void testVersion4Update() throws Exception { // Start with an empty database... runUpdater(); int count = getQueryCount("SELECT count(*) FROM " + SCHEMA + ".DATABASECHANGELOG WHERE author = 'databaseLabel' AND ID = '0004.0'"); assertEquals("should have installed 4.0 label", 1, count); try { // Select new columns... 
getQueryCount("SELECT count(*) FROM " + SCHEMA + ".P4JVMSnapshot WHERE systemCpuLoad > 1.0 AND processCpuLoad > 1.0"); } catch (Exception ex) { fail("Should have added systemCpuLoad and processCpuLoad columns to the P4JVMSnapshot table"); } } public void testVersion5Update() throws Exception { // Start with an empty database... runUpdater(); int count = getQueryCount("SELECT count(*) FROM " + SCHEMA + ".DATABASECHANGELOG WHERE author = 'databaseLabel' AND ID = '0005.0'"); assertEquals("should have installed 5.0 label", 1, count); try { // Select new columns... count = getQueryCount("SELECT count(*) FROM " + SCHEMA + ".P4JDatabaseIdentity WHERE DatabaseID IS NOT NULL"); assertEquals("Should have populated database identity", 1, count); } catch (Exception ex) { fail("Should have added database identity table"); } } /** * 4/23/2018 * * After 2.5 years of monitoring many systems we overflowed the INTEGER column * primary key of the P4JIntervalData table and the referenced column in the * P4JIntervalThreshold table. * * Based on the SQL hoops (dropping indexes and constraints) that would be required for an * automated upgrade we decided not to upgrade existing tables to a BIGINT. * However, new databases will be created with a BIGINT column instead of a INTEGER Column. * For those with existing tables they will require a manual update of the columns. * @throws Exception */ public void testIntervalIDIsCreatedAsABigInt() throws Exception { // Start with an empty database... runUpdater(); String dataType = UpdaterUtil.getColumnDataType(conn, SCHEMA, "P4JIntervalData", "IntervalID"); assertEquals("P4JIntervalData.IntervalID column should be a BIGINT", "BIGINT", dataType.toUpperCase()); dataType = UpdaterUtil.getColumnDataType(conn, SCHEMA, "P4JIntervalThreshold", "IntervalID"); assertEquals("P4JIntervalThreshold.IntervalID column should be a BIGINT", "BIGINT", dataType.toUpperCase()); } public void testVersion6Update() throws Exception { // Start with an empty database... runUpdater(); int count = getQueryCount("SELECT count(*) FROM " + SCHEMA + ".DATABASECHANGELOG WHERE author = 'databaseLabel' AND ID = '0006.0'"); assertEquals("should have installed 6.0 label", 1, count); boolean groupExists = UpdaterUtil.doesTableExist(conn, SCHEMA, "P4JGroup"); boolean joinExists = UpdaterUtil.doesTableExist(conn, SCHEMA, "P4JGroupSystemJoin"); assertTrue("New P4JGroup table should exist", groupExists); assertTrue("New P4JGroupSystemJoin table should exist", joinExists); } public void testVersion7Update() throws Exception { // Start with an empty database... runUpdater(); assertTrue("should have installed 7.0 label", databaseLabelExistsInChangeLog("0007.0")); assertTrue("Changelog entry created", changeLogEntryExistsWithID("P4J-AddIndexesForP4JReports")); boolean indexExists = UpdaterUtil.doesIndexExist(conn, SCHEMA, "P4JIntervalData", "P4JIntervalData_SystemEndTime"); assertTrue("New P4JIntervalData_SystemEndTime index should exist", indexExists); // Demonstrate index will be skipped if it already exists.. deleteChangeLogEntyWithID("P4J-AddIndexesForP4JReports"); assertFalse("Make sure change log entry was deleted, this will cause Liquibase to run it again", changeLogEntryExistsWithID("P4J-AddIndexesForP4JReports")); // Rerun update. Change log should get re-applied, but skip because index already exists. 
runUpdater(); assertTrue("P4JIntervalData_SystemEndTime entry should have been restored", changeLogEntryExistsWithID("P4J-AddIndexesForP4JReports")); } public void testParseParameters() throws Exception { String args[] = { "userName=dave", "password=pw", "jdbcURL=my.jdbc.url", "driverClass=myDriver", "driverJarFile=c:/mydriver.jar", "schema=dbo" }; Parameters params = UpdateOrCreateDb.getParameters(args); assertNotNull(params); assertEquals("dave", params.getUserName()); assertEquals("pw", params.getPassword()); assertEquals("my.jdbc.url", params.getJdbcURL()); assertEquals("myDriver", params.getDriverClass()); assertEquals("c:/mydriver.jar", params.getDriverJarFile()); assertEquals("dbo", params.getSchema()); assertEquals(0, params.getThirdPartyExtensions().length); assertTrue(params.isValid()); } public void testParse3rdPartyIncludesMultiple() throws Exception { String args[] = { "userName=dave", "password=pw", "jdbcURL=my.jdbc.url", "driverClass=myDriver", "driverJarFile=c:/mydriver.jar", "schema=dbo", "thirdPartyExtensions=Follett,Other,YetAnother" }; Parameters params = UpdateOrCreateDb.getParameters(args); String thirdPartyExtensions[] = params.getThirdPartyExtensions(); assertEquals("Should have 3 thirdPartyExtensions", 3, thirdPartyExtensions.length); List<String> extensions = Arrays.asList(thirdPartyExtensions); assertTrue("Should have Follett Extension", extensions.contains("Follett")); assertTrue("Should have Other Extension", extensions.contains("Other")); assertTrue("Should have YetAnother Extension", extensions.contains("YetAnother")); } public void testParse3rdPartyIncludesSingle() throws Exception { String args[] = { "userName=dave", "password=pw", "jdbcURL=my.jdbc.url", "driverClass=myDriver", "driverJarFile=c:/mydriver.jar", "schema=dbo", "thirdPartyExtensions=Follett" }; Parameters params = UpdateOrCreateDb.getParameters(args); String thirdPartyExtensions[] = params.getThirdPartyExtensions(); assertEquals("Should have 3 thirdPartyExtensions", 1, thirdPartyExtensions.length); List<String> extensions = Arrays.asList(thirdPartyExtensions); assertTrue("Should have Follett Extension", extensions.contains("Follett")); } public void testInsufficientParameters() throws Exception { String args[] = {}; Parameters params = UpdateOrCreateDb.getParameters(args); assertTrue("InsufficentParameters", params.isInsufficentParameters()); assertFalse("isValid", params.isValid()); } public void testBadParameters() throws Exception { String args[] = { "userName=dave", "password=pw", "jdbcURL=my.jdbc.url", "driverClass=myDriver", "driverJarFile=c:/mydriver.jar", "somethingElse=5" }; Parameters params = UpdateOrCreateDb.getParameters(args); assertEquals(1, params.getBadParameters().size()); assertEquals("somethingElse=5", params.getBadParameters().get(0)); assertFalse(params.isValid()); } /** * This test verifies we can upgrade a database that was created with * SQL Scripts. These types of test will NOT be required post * database version 3.0 since the database will be populated with Liquibase * and contain the appropriate change log. * @throws Exception */ public void testInstallBaseChangeLogsVersion1Db() throws Exception { Statement stmt = null; try { // Simulate a database that was created with the Perfmon4j Version 1.0.2 // Database scripts. 
applyChangeLog("org/perfmon4j/initial-change-log.xml"); dropLiquibaseTables(); assertFalse("Make sure version 2.0 changes have not been applied", UpdaterUtil.doesColumnExist(conn, SCHEMA, "P4JIntervalData", "SQLMaxDuration")); // Start with a database that contain the base tables, // but does not contain the liquibase change logs.. // Should install change.logs and any additional upgrades. runUpdater(); } finally { UpdaterUtil.closeNoThrow(stmt); } assertTrue("Should have a changelog", UpdaterUtil.doesTableExist(conn, SCHEMA, "DATABASECHANGELOG")); assertTrue("Should have applied version 2.0 changes", UpdaterUtil.doesColumnExist(conn, SCHEMA, "P4JIntervalData", "SQLMaxDuration")); assertTrue("Database change log should reflect databaseLabel 0002.0 applied", databaseLabelExistsInChangeLog("0002.0")); } /** * This test verifies we can upgrade a database that was created with * SQL Scripts. These types of test will NOT be required post * database version 3.0 since the database will be populated with Liquibase * and contain the appropriate change log. * @throws Exception */ public void testInstallBaseChangeLogsVersion2Db() throws Exception { Statement stmt = null; try { // Simulate a database that was created with the Perfmon4j Version 1.1.0 // Database scripts. applyChangeLog("org/perfmon4j/initial-change-log.xml"); applyChangeLog("org/perfmon4j/version-2-change-log.xml"); dropLiquibaseTables(); // Start with a database that contain the base tables, // but does not contain the liquibase change logs.. // Should install change.logs and any additional upgrades. runUpdater(); } finally { UpdaterUtil.closeNoThrow(stmt); } assertTrue("Should have a changelog", UpdaterUtil.doesTableExist(conn, SCHEMA, "DATABASECHANGELOG")); System.out.println(dumpQuery(conn, "SELECT * FROM " + SCHEMA + ".DATABASECHANGELOG")); assertTrue("Database change log should reflect databaseLabel 0002.0 applied", databaseLabelExistsInChangeLog("0002.0")); } /** * This test verifies we can upgrade a database that was created with * SQL Scripts. These types of test will NOT be required post * database version 3.0 since the database will be populated with Liquibase * and contain the appropriate change log. * @throws Exception */ public void testInstallBaseChangeLogsVersion3Db() throws Exception { Statement stmt = null; try { // Simulate a database that was created with the Perfmon4j Version 1.2.0 // Database scripts. applyChangeLog("org/perfmon4j/initial-change-log.xml"); applyChangeLog("org/perfmon4j/version-2-change-log.xml"); applyChangeLog("org/perfmon4j/version-3-change-log.xml"); dropLiquibaseTables(); // Start with a database that contain the base tables, // but does not contain the liquibase change logs.. // Should install change.logs and any additional upgrades. runUpdater(); } finally { UpdaterUtil.closeNoThrow(stmt); } assertTrue("Should have a changelog", UpdaterUtil.doesTableExist(conn, SCHEMA, "DATABASECHANGELOG")); System.out.println(dumpQuery(conn, "SELECT * FROM " + SCHEMA + ".DATABASECHANGELOG")); assertTrue("Database change log should reflect databaseLabel 0003.0 applied", databaseLabelExistsInChangeLog("0003.0")); } public void testApplyThirdPartyChanges() throws Exception { // Start with an empty database... 
UpdateOrCreateDb.main(new String[]{"driverClass=org.apache.derby.jdbc.EmbeddedDriver", "jdbcURL=" + JDBC_URL, "driverJarFile=EMBEDDED", "schema=" + SCHEMA }); runUpdater(new String[]{"thirdPartyExtensions=FSS"}); assertTrue("Should have a FSSFetchThreadPoolSnapshot table", UpdaterUtil.doesTableExist(conn, SCHEMA, "FSSFetchThreadPoolSnapshot")); assertTrue("Should have a FSSFetchPolicySnapshot table", UpdaterUtil.doesTableExist(conn, SCHEMA, "FSSFetchPolicySnapshot")); System.out.println(dumpQuery(conn, "SELECT * FROM " + SCHEMA + ".DATABASECHANGELOG")); assertTrue("Database change log should reflect databaseLabel 0002.0 applied", databaseLabelExistsInChangeLog("0002.0")); } public void testWriteSQLScript() throws Exception { File sqlFile = new File(System.getProperty("java.io.tmpdir"), new Random().nextInt(10000) + ".sql"); try { System.out.println(sqlFile.getCanonicalPath()); // Start with an empty database... runUpdater("thirdPartyExtensions=FSS" ,"sqlOutputScript=" + sqlFile.getCanonicalPath()); assertFalse("Should NOT have created database. We just asked for a srcipt", UpdaterUtil.doesTableExist(conn, SCHEMA, "FSSFetchPolicySnapshot")); assertTrue("Should have created SQL Script", sqlFile.exists()); } finally { if (sqlFile.exists()) { sqlFile.delete(); } } } public void X_testLivePostgres() { String [] args = new String[] { "driverJarFile=/home/perfmon/host/tools/common/JDBCDrivers/postgresql-9.3-1102.jdbc4.jar", "driverClass=org.postgresql.Driver", "jdbcURL=jdbc:postgresql://10.0.2.2:15432/TestUpgrade", "userName=perfmonwriter", "password=perfmon" }; UpdateOrCreateDb.main(args); } public void X_testLiveSQLServer() { DatabaseFactory.reset(); String [] args = new String[] { "driverJarFile=/media/sf_shared/tools/common/JDBCDrivers/sqljdbc4.jar", "driverClass=com.microsoft.sqlserver.jdbc.SQLServerDriver", "jdbcURL=jdbc:sqlserver://10.0.2.2:1433;databaseName=TestUpgrade", "userName=perfmonwriter", "password=perfmon", "thirdPartyExtensions=FSS", "sqlOutputScript=/media/sf_shared/tools/common/create.sql" }; UpdateOrCreateDb.main(args); } public void X_testLiveMySQL() { String [] args = new String[] { "driverJarFile=/home/perfmon/jdbc-drivers/mysql-connector-java-5.1.32-bin.jar", "driverClass=com.mysql.jdbc.Driver", "jdbcURL=jdbc:mysql://localhost:3306/TestUpgrade", "userName=perfmonwriter", "password=perfmon" }; UpdateOrCreateDb.main(args); } public void X_testLiveOracle() { String [] args = new String[] { "driverJarFile=/home/perfmon/host/tools/common/JDBCDrivers/ojdbc6.jar", "driverClass=oracle.jdbc.driver.OracleDriver", "jdbcURL=jdbc:oracle:thin:@10.0.2.2:1521/xe", "userName=perfmonwriter", "password=perfmon", "clearChecksums=true" }; UpdateOrCreateDb.main(args); } private void runUpdater(String... extraParameters) throws Exception { List<String> parameters = new ArrayList<String>(Arrays. 
asList(new String[]{"driverClass=org.apache.derby.jdbc.EmbeddedDriver", "jdbcURL=" + JDBC_URL, "driverJarFile=EMBEDDED", "schema=" + SCHEMA})); parameters.addAll(Arrays.asList(extraParameters)); UpdateOrCreateDb.main(parameters.toArray(new String[]{})); } private Database buildLiquibaseDatabaseConnection() throws Exception { return buildLiquibaseDatabaseConnection(JDBC_URL); } private Database buildLiquibaseDatabaseConnection(String jdbcURL) throws Exception { Connection tmpConn = UpdaterUtil.createConnection(JDBC_DRIVER, null, jdbcURL, null, null); Database db = DatabaseFactory.getInstance().findCorrectDatabaseImplementation(new JdbcConnection(tmpConn)); db.setDefaultSchemaName(SCHEMA); return db; } private void applyChangeLog(String changeLog) throws Exception { Database db = buildLiquibaseDatabaseConnection(); try { Liquibase updater = new Liquibase(changeLog, new ClassLoaderResourceAccessor(), db); updater.update((String)null); } finally { db.close(); } } private boolean databaseLabelExistsInChangeLog(String label) throws Exception { boolean result = false; Statement stmt = null; ResultSet rs = null; try { stmt = conn.createStatement(); rs = stmt.executeQuery("SELECT * FROM " + SCHEMA + ".DATABASECHANGELOG " + "WHERE ID='" + label + "' AND AUTHOR='databaseLabel'"); result = rs.next(); } finally { UpdaterUtil.closeNoThrow(rs); UpdaterUtil.closeNoThrow(stmt); } return result; } private boolean changeLogEntryExistsWithID(String changeLogID) throws Exception { boolean result = false; Statement stmt = null; ResultSet rs = null; try { stmt = conn.createStatement(); rs = stmt.executeQuery("SELECT * FROM " + SCHEMA + ".DATABASECHANGELOG " + "WHERE ID='" + changeLogID + "'"); result = rs.next(); } finally { UpdaterUtil.closeNoThrow(rs); UpdaterUtil.closeNoThrow(stmt); } return result; } private void deleteChangeLogEntyWithID(String changeLogID) throws Exception { Statement stmt = null; try { stmt = conn.createStatement(); stmt.executeUpdate("DELETE FROM " + SCHEMA + ".DATABASECHANGELOG " + "WHERE ID='" + changeLogID + "'"); } finally { UpdaterUtil.closeNoThrow(stmt); } } private void executeUpdate(String sql) throws Exception { Statement stmt = null; try { stmt = conn.createStatement(); stmt.executeUpdate(sql); } finally { UpdaterUtil.closeNoThrow(stmt); } } private int getQueryCount(String query) throws Exception { int result = 0; Statement stmt = null; ResultSet rs = null; try { stmt = conn.createStatement(); rs = stmt.executeQuery(query); rs.next(); result = rs.getInt(1); } finally { UpdaterUtil.closeNoThrow(rs); UpdaterUtil.closeNoThrow(stmt); } return result; } private void dumpChangeLog() throws Exception { System.out.println(dumpQuery(conn, "SELECT * FROM " + SCHEMA + ".DATABASECHANGELOG")); } private void dropLiquibaseTables() throws Exception { Statement stmt = null; try { stmt = conn.createStatement(); stmt.execute("DROP TABLE " + SCHEMA + ".DATABASECHANGELOG"); } finally { UpdaterUtil.closeNoThrow(stmt); } } }
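For reference, the updater is driven by the same key=value arguments the tests above build and pass to UpdateOrCreateDb.main; the sketch below shows a plausible invocation, with all connection details as hypothetical placeholders.

package org.perfmon4j.dbupgrader;

public class UpdateOrCreateDbUsageSketch {
    public static void main(String[] args) {
        UpdateOrCreateDb.main(new String[] {
                "driverClass=org.postgresql.Driver",                 // hypothetical driver
                "driverJarFile=/opt/jdbc/postgresql.jar",            // hypothetical jar location
                "jdbcURL=jdbc:postgresql://localhost:5432/perfmon",  // hypothetical URL
                "userName=perfmon",
                "password=secret",
                "schema=public",
                // Optional: comma-separated third-party change logs, see testParse3rdPartyIncludesMultiple()
                "thirdPartyExtensions=FSS",
                // Optional: write the SQL to a script instead of applying it, see testWriteSQLScript()
                "sqlOutputScript=/tmp/perfmon4j-upgrade.sql"
        });
    }
}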
package com.strohwitwer.awesomevalidation.demo; import android.app.Activity; import android.content.res.Configuration; import android.graphics.Color; import android.os.Bundle; import android.support.v4.widget.DrawerLayout; import android.support.v7.app.ActionBarActivity; import android.support.v7.app.ActionBarDrawerToggle; import android.support.v7.widget.Toolbar; import android.view.Gravity; import android.view.MenuItem; import android.view.View; import android.widget.AdapterView; import android.widget.ArrayAdapter; import android.widget.Button; import android.widget.ListView; import com.google.common.collect.Range; import com.strohwitwer.awesomevalidation.AwesomeValidation; import com.strohwitwer.awesomevalidation.ValidationStyle; import com.strohwitwer.awesomevalidation.utility.RegexTemplate; import java.util.Calendar; import static com.strohwitwer.awesomevalidation.ValidationStyle.BASIC; import static com.strohwitwer.awesomevalidation.ValidationStyle.COLORATION; import static com.strohwitwer.awesomevalidation.ValidationStyle.UNDERLABEL; public class DemoActivity extends ActionBarActivity { private String[] mStyles; private String mStyle; private ActionBarDrawerToggle mDrawerToggle; private DrawerLayout mDrawerLayout; private ListView mDrawerList; private DrawerItemClickListener mDrawerItemClickListener = new DrawerItemClickListener(); private int mPosition = 0; private AwesomeValidation mAwesomeValidation; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_demo); mStyles = getResources().getStringArray(R.array.styles); mDrawerLayout = (DrawerLayout) findViewById(R.id.drawer_layout); Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar); if (toolbar != null) { setSupportActionBar(toolbar); getSupportActionBar().setDisplayHomeAsUpEnabled(true); } mDrawerToggle = new ActionBarDrawerToggle(this, mDrawerLayout, toolbar, R.string.app_name, R.string.app_name); mDrawerLayout.setDrawerListener(mDrawerToggle); mDrawerList = (ListView) findViewById(R.id.left_drawer); mDrawerList.setAdapter(new ArrayAdapter<String>(this, R.layout.drawer_list_item, mStyles)); mDrawerList.setOnItemClickListener(mDrawerItemClickListener); mDrawerItemClickListener.selectItem(mPosition); } @Override public boolean onOptionsItemSelected(MenuItem item) { if (mDrawerToggle.onOptionsItemSelected(item)) { return true; } else { return super.onOptionsItemSelected(item); } } @Override protected void onPostCreate(Bundle savedInstanceState) { super.onPostCreate(savedInstanceState); mDrawerToggle.syncState(); } @Override public void onConfigurationChanged(Configuration newConfig) { super.onConfigurationChanged(newConfig); mDrawerToggle.onConfigurationChanged(newConfig); } @Override public void onBackPressed() { if(mDrawerLayout.isDrawerOpen(Gravity.START|Gravity.LEFT)){ mDrawerLayout.closeDrawers(); return; } if (mPosition > 0) { mDrawerItemClickListener.selectItem(0); return; } else { super.onBackPressed(); } } private class DrawerItemClickListener implements ListView.OnItemClickListener { @Override public void onItemClick(AdapterView<?> parent, View view, int position, long id) { selectItem(position); } private void selectItem(int position) { mDrawerList.setItemChecked(position, true); mPosition = position; mStyle = mStyles[mPosition]; setTitle(mStyle); mDrawerLayout.closeDrawer(mDrawerList); clearValidation(); initValidation(mStyle); addValidation(DemoActivity.this); } } private void clearValidation() { if (mAwesomeValidation != null) { 
mAwesomeValidation.clear(); } } private void initValidation(String style) { switch (ValidationStyle.valueOf(style)) { case BASIC: mAwesomeValidation = new AwesomeValidation(BASIC); break; case COLORATION: mAwesomeValidation = new AwesomeValidation(COLORATION); mAwesomeValidation.setColor(Color.YELLOW); break; case UNDERLABEL: mAwesomeValidation = new AwesomeValidation(UNDERLABEL); mAwesomeValidation.setContext(this); break; } } private void addValidation(final Activity activity) { mAwesomeValidation.addValidation(activity, R.id.edt_userid, "[a-zA-Z0-9_-]+", R.string.err_userid); mAwesomeValidation.addValidation(activity, R.id.edt_name, "[a-zA-Z\\s]+", R.string.err_name); mAwesomeValidation.addValidation(activity, R.id.edt_firstname, "[a-zA-Z\\s]+", R.string.err_name); mAwesomeValidation.addValidation(activity, R.id.edt_lastname, "[a-zA-Z\\s]+", R.string.err_name); mAwesomeValidation.addValidation(activity, R.id.edt_tel, RegexTemplate.TELEPHONE, R.string.err_tel); mAwesomeValidation.addValidation(activity, R.id.edt_zipcode, "\\d+", R.string.err_zipcode); mAwesomeValidation.addValidation(activity, R.id.edt_year, Range.closed(1900, Calendar.getInstance().get(Calendar.YEAR)), R.string.err_year); mAwesomeValidation.addValidation(activity, R.id.edt_height, Range.closed(0.0f, 2.72f), R.string.err_height); Button btnDone = (Button) findViewById(R.id.btn_done); btnDone.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { mAwesomeValidation.validate(); } }); Button btnClr = (Button) findViewById(R.id.btn_clr); btnClr.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { mAwesomeValidation.clear(); } }); } }
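Stripped of the drawer plumbing, the validation flow in DemoActivity reduces to three calls: construct a validator for a style, register rules, and validate on demand. The sketch below condenses that flow using the same view ids and string resources the activity references; the fixed year bound is a placeholder.

package com.strohwitwer.awesomevalidation.demo;

import android.app.Activity;

import com.google.common.collect.Range;
import com.strohwitwer.awesomevalidation.AwesomeValidation;
import com.strohwitwer.awesomevalidation.ValidationStyle;

class ValidationFlowSketch {
    static AwesomeValidation buildValidator(Activity activity) {
        AwesomeValidation validation = new AwesomeValidation(ValidationStyle.BASIC);
        // Regex rule
        validation.addValidation(activity, R.id.edt_userid, "[a-zA-Z0-9_-]+", R.string.err_userid);
        // Numeric range rule (upper bound fixed here only for illustration)
        validation.addValidation(activity, R.id.edt_year, Range.closed(1900, 2100), R.string.err_year);
        return validation;
    }

    static void onDoneClicked(AwesomeValidation validation) {
        // Highlights invalid fields according to the chosen style
        validation.validate();
    }
}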
package com.xpn.xwiki.it; import junit.framework.Test; import com.xpn.xwiki.it.framework.XWikiLDAPTestSetup; import com.xpn.xwiki.it.selenium.framework.AbstractXWikiTestCase; import com.xpn.xwiki.it.selenium.framework.AlbatrossSkinExecutor; import com.xpn.xwiki.it.selenium.framework.XWikiTestSuite; /** * Verify the LDAP login and logout features. * * @version $Id$ */ public class LDAPAuthTest extends AbstractXWikiTestCase { public static Test suite() { XWikiTestSuite suite = new XWikiTestSuite("Verify the LDAP login and logout features"); suite.addTestSuite(LDAPAuthTest.class, AlbatrossSkinExecutor.class); return suite; } /** * {@inheritDoc} * * @see com.xpn.xwiki.it.selenium.framework.AbstractXWikiTestCase#setUp() */ @Override public void setUp() throws Exception { super.setUp(); open("/xwiki/bin/view/Main/"); // Verify that the user isn't logged in if (isAuthenticated()) { logout(); } clickLogin(); } /** * Validate that it tries to log as "common" XWiki login if user is not found in LDAP. */ public void testLogAsXWikiUser() { login("Admin", "admin", true); } /** * Validate that it success to authenticate with LDAP user. Also the user id contains space character. */ public void testLogAsLDAPUser() { login(XWikiLDAPTestSetup.HORATIOHORNBLOWER_CN, XWikiLDAPTestSetup.HORATIOHORNBLOWER_PWD, true); // Validate exclusion group logout(); clickLogin(); setFieldValue("j_username", XWikiLDAPTestSetup.THOMASQUIST_CN); setFieldValue("j_password", XWikiLDAPTestSetup.THOMASQUIST_PWD); checkField("rememberme"); submit(); assertFalse(XWikiLDAPTestSetup.THOMASQUIST_CN + " user has been authenticated", isAuthenticated()); // Validate XE-136: log with LDAP user then search for provided user uid/pass loginAsAdmin(); open("/xwiki/bin/edit/XWiki/XWikiPreferences?editor=object"); setFieldValue("XWiki.XWikiPreferences_0_ldap_bind_DN", XWikiLDAPTestSetup.HORATIOHORNBLOWER_DN); setFieldValue("XWiki.XWikiPreferences_0_ldap_bind_pass", XWikiLDAPTestSetup.HORATIOHORNBLOWER_PWD); setFieldValue("XWiki.XWikiPreferences_0_ldap_UID_attr", XWikiLDAPTestSetup.LDAP_USERUID_FIELD_UID); setFieldValue("XWiki.XWikiPreferences_0_ldap_fields_mapping", "name=" + XWikiLDAPTestSetup.LDAP_USERUID_FIELD_UID + ",last_name=sn,first_name=givenname,fullname=description,email=mail"); setFieldValue("XWiki.XWikiPreferences_0_ldap_group_mapping", "XWiki.XWikiAdminGroup=cn=HMS Lydia,ou=crews,ou=groups,o=sevenSeas"); clickEditSaveAndView(); login(XWikiLDAPTestSetup.WILLIAMBUSH_UID, XWikiLDAPTestSetup.WILLIAMBUSH_PWD, true); // Validate // - XWIKI-2205: case insensitive user uid // - XWIKI-2202: LDAP user update corrupt XWiki user page login(XWikiLDAPTestSetup.WILLIAMBUSH_UID_MIXED, XWikiLDAPTestSetup.WILLIAMBUSH_PWD, true); // Validate XWIKI-2201: LDAP group mapping defined in XWikiPreferences is not working open("/xwiki/bin/view/XWiki/XWikiAdminGroup"); String userFullName = "XWiki." + XWikiLDAPTestSetup.WILLIAMBUSH_UID; getSelenium().waitForCondition("selenium.page().bodyText().indexOf('" + userFullName + "') != -1;", "2000"); assertTextPresent(userFullName); // Validate XWIKI-2201: LDAP group mapping defined in XWikiPreferences is not working open("/xwiki/bin/view/XWiki/XWikiAdminGroup"); assertTextPresent("XWiki." + XWikiLDAPTestSetup.WILLIAMBUSH_UID); // Validate // - XWIKI-2264: LDAP authentication does not support "." in login names login(XWikiLDAPTestSetup.USERWITHPOINTS_UID, XWikiLDAPTestSetup.USERWITHPOINTS_PWD, true); } }
package arez.doc.examples.observed;

import arez.Arez;
import arez.Flags;

public class ObserverExample2
{
  public static void main( String[] args )
    throws Throwable
  {
    Arez.context().observer( "MyObserver", () -> {
      // Interact with arez observable state (or computed values) here
      // and any time these change, this function will be re-run.
      //DOC ELIDE START
      //DOC ELIDE END
    }, Flags.READ_WRITE );
  }
}
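As a companion to the example above, the sketch below illustrates the observer re-running when observable state changes. It assumes the ArezContext.observable(), ObservableValue.get()/set() and safeAction() entry points, which are not shown in this snippet and should be checked against the Arez version in use.

package arez.doc.examples.observed;

import arez.Arez;
import arez.ArezContext;
import arez.ObservableValue;

public class ObserverExample2Companion
{
  public static void main( String[] args )
  {
    final ArezContext context = Arez.context();
    final ObservableValue<String> status = context.observable( "status" );

    context.observer( "StatusPrinter", () -> {
      // Reading the observable inside the observer tracks it as a dependency
      System.out.println( "status = " + status.get() );
    } );

    // Mutations must happen inside an action; this change triggers the observer to re-run
    context.safeAction( () -> status.set( "ready" ) );
  }
}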
package ameba.mvc.assets; import ameba.util.IOUtils; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.StringUtils; import org.glassfish.jersey.server.model.ModelProcessor; import org.glassfish.jersey.server.model.Resource; import org.glassfish.jersey.server.model.ResourceModel; import javax.ws.rs.ConstrainedTo; import javax.ws.rs.RuntimeType; import javax.ws.rs.core.Configuration; import javax.ws.rs.core.Feature; import javax.ws.rs.core.FeatureContext; import java.io.File; import java.net.MalformedURLException; import java.net.URL; import java.util.List; import java.util.Map; /** * <p>AssetsFeature class.</p> * * @author ICode * @since 13-8-17 2:55 */ @ConstrainedTo(RuntimeType.SERVER) public class AssetsFeature implements Feature { /** * <p>getAssetMap.</p> * * @param configuration a {@link javax.ws.rs.core.Configuration} object. * @return a {@link java.util.Map} object. */ public static Map<String, String[]> getAssetMap(Configuration configuration) { Map<String, String[]> assetsMap = Maps.newLinkedHashMap(); for (String key : configuration.getPropertyNames()) { if (key.startsWith(ASSETS_CONF_PREFIX) || key.equals("resource.assets")) { String routePath = key.replaceFirst("^resource\\.assets", ""); if (routePath.startsWith(".")) { routePath = routePath.substring(1); } else if (StringUtils.isBlank(routePath)) { routePath = "assets"; } if (routePath.endsWith("/")) { routePath = routePath.substring(0, routePath.lastIndexOf("/")); } String value = (String) configuration.getProperty(key); String[] uris = value.split(","); List<String> uriList = Lists.newArrayList(); for (String uri : uris) { uriList.add(uri.endsWith("/") ? uri : uri + "/"); } if (StringUtils.isNotBlank(value)) { String[] _uris = assetsMap.get(routePath); if (_uris == null) { assetsMap.put(routePath, uriList.toArray(uris)); } else { assetsMap.put(routePath, ArrayUtils.addAll(_uris, uriList.toArray(uris))); } } } } return assetsMap; } /** * <p>Getter for the field <code>assetsMap</code>.</p> * * @return a {@link java.util.Map} object. */ public static Map<String, String[]> getAssetsMap() { return Maps.newHashMap(assetsMap); } /** * <p>lookupAsset.</p> * * @param name a {@link java.lang.String} object. * @param file a {@link java.lang.String} object. * @return a {@link URL} object. 
*/ public static URL lookupAsset(String name, String file) { URL url = null; if (name.startsWith("/")) { name = name.substring(1); } if (name.endsWith("/")) { name = name.substring(0, name.lastIndexOf("/")); } if (name.equals("")) { name = ROOT_MAPPING_PATH; } String[] dirs = assetsMap.get(name); if (dirs != null) { for (String dir : dirs) { File f = FileUtils.getFile(dir, file); if (f.exists() && f.isFile()) { try { url = f.getAbsoluteFile().toURI().toURL(); } catch (MalformedURLException e) { } } if (url == null) { url = IOUtils.getResource(f.getPath()); } if (url != null) { break; } } } return url; } /** * {@inheritDoc} */ @Override public boolean configure(FeatureContext context) { Configuration configuration = context.getConfiguration(); assetsMap.putAll(getAssetMap(configuration)); context.register(new ModelProcessor() { @Override public ResourceModel processResourceModel(ResourceModel resourceModel, Configuration configuration) { ResourceModel.Builder resourceModelBuilder = new ResourceModel.Builder(resourceModel, false); for (String routePath : assetsMap.keySet()) { Resource.Builder resourceBuilder = Resource.builder(AssetsResource.class); if (routePath.equals(ROOT_MAPPING_PATH)) { routePath = "/"; } resourceBuilder.path(routePath); Resource resource = resourceBuilder.build(); resourceModelBuilder.addResource(resource); } return resourceModelBuilder.build(); } @Override public ResourceModel processSubResource(ResourceModel subResourceModel, Configuration configuration) { return subResourceModel; } }); return true; } }
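The property convention parsed by getAssetMap above ("resource.assets" for the default route, "resource.assets.<route>" for named routes, comma-separated directories as values) can be exercised without a running container. The sketch below assumes that a Jersey ResourceConfig can stand in for the JAX-RS Configuration and uses hypothetical directories.

package ameba.mvc.assets;

import java.util.Arrays;
import java.util.Map;

import org.glassfish.jersey.server.ResourceConfig;

public class AssetsFeatureConfigSketch {
    public static void main(String[] args) {
        ResourceConfig config = new ResourceConfig()
                .property("resource.assets", "assets/")                   // default route "assets"
                .property("resource.assets.static", "public/,webroot/");  // route "static", two directories

        // Route path -> directories searched by lookupAsset(name, file)
        Map<String, String[]> assetMap = AssetsFeature.getAssetMap(config);
        for (Map.Entry<String, String[]> e : assetMap.entrySet()) {
            System.out.println(e.getKey() + " -> " + Arrays.toString(e.getValue()));
        }
    }
}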
package at.ac.tuwien.inso.entity;

import javax.persistence.*;

import java.math.BigDecimal;

@Entity
public class Grade {

    @Id
    @GeneratedValue
    private Long id;

    @ManyToOne
    private Course course;

    @ManyToOne
    private Lecturer lecturer;

    @ManyToOne
    private Student student;

    @Column
    private BigDecimal mark;

    /** Default constructor required by JPA. */
    protected Grade() {
    }

    public Grade(Course course, Lecturer lecturer, Student student, BigDecimal mark) {
        this.course = course;
        this.lecturer = lecturer;
        this.student = student;
        this.mark = mark;
    }

    public Long getId() {
        return id;
    }

    public Course getCourse() {
        return course;
    }

    public Lecturer getLecturer() {
        return lecturer;
    }

    public Student getStudent() {
        return student;
    }

    public BigDecimal getMark() {
        return mark;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        Grade grade = (Grade) o;

        if (!id.equals(grade.id)) return false;
        if (!course.equals(grade.course)) return false;
        if (!lecturer.equals(grade.lecturer)) return false;
        if (!student.equals(grade.student)) return false;
        return mark.equals(grade.mark);
    }

    @Override
    public int hashCode() {
        int result = id.hashCode();
        result = 31 * result + course.hashCode();
        result = 31 * result + lecturer.hashCode();
        result = 31 * result + student.hashCode();
        result = 31 * result + mark.hashCode();
        return result;
    }

    @Override
    public String toString() {
        return "Grade{" +
                "id=" + id +
                ", course=" + course +
                ", lecturer=" + lecturer +
                ", student=" + student +
                ", mark=" + mark +
                '}';
    }
}
package br.uff.ic.utility.graph; import br.uff.ic.utility.GraphAttribute; import br.uff.ic.utility.Utils; import java.awt.BasicStroke; import java.awt.Paint; import java.awt.Stroke; import java.util.HashMap; import java.util.Map; /** * Abstract (Generic) vertex type for the provenance graph * * Time format must be either a Number or DayNumber:DayName (for the weekend display mode) * @author Kohwalter */ public abstract class Vertex extends GraphObject { private String id; // prov:id private double normalizedTime; private String time; // prov:startTime // Refactor for datetime type /** * Constructor without attributes * Using this constructor, attributes must be added later * * @param id vertex unique ID * @param label HUman readable name * @param time Time-related value. Used for temporal layouts */ public Vertex(String id, String label, String time) { this.id = id; setLabel(label); this.time = time; this.attributes = new HashMap<>(); } /** * Constructor with attributes * @param id * @param label * @param time * @param attributes */ public Vertex(String id, String label, String time, Map<String, GraphAttribute> attributes) { this.id = id; setLabel(label); this.time = time; this.attributes.putAll(attributes); } /** * Return the vertex ID * * @return (String) id */ public String getID() { return id; } /** * Set vertex ID * @param t is the new ID */ public void setID(String t) { id = t; } public void setNormalizedTime(double t) { this.normalizedTime = t; } public double getNormalizedTime() { return this.normalizedTime; } /** * Method for returning the vertex name (not type) from the sub-classes. * i.e. Agent Vertex name = Kohwalter * * @return (String) name */ /** * Method for returning the vertex day (if any) * * @return (int) date */ public float getTime() { // String[] day = this.time.split(":"); if(Utils.tryParseFloat(this.time)) return (Float.parseFloat(this.time)); else if(Utils.tryParseDate(this.time)) { // System.out.println("Time Milliseconds: " + (float) Utils.convertStringDateToDouble(this.time)); double milliseconds = Utils.convertStringDateToFloat(this.time); // int weeks = (int) (milliseconds / (1000*60*60*24*7)); // long days = TimeUnit.MILLISECONDS.toDays((long) milliseconds); // long hours = TimeUnit.MILLISECONDS.toHours((long) milliseconds); return (float) milliseconds; } else return -1; } /** * Method to get the value of the variable time * @return time */ public String getTimeString() { return this.time; } /** * Method to set the value of the variable time * @param t is the new value */ public void setTime(String t){ this.time = t; } /** * (Optional) Method for returning the day of the week instead of the day's * number. * * @return (String) the day of the week (mon, tue, wed, ...) 
*/ public String getDayName() { String[] day = this.time.split(":"); return day[1]; } /** * This overrides the default JUNG method for displaying information * * @return (String) id */ @Override public String toString() { return this.getNodeType() + "<br> " + "<br>ID: " + this.id + "<br>" + "<b>Label: " + getLabel() + "</b>" + " <br>" + printTime() + " <br>" + printAttributes(); } public String printTime() { if(this.time.isEmpty()) { return ""; } return "Timestamp: " + this.time; } /** * Method to return the attribute value (not necessarily a number) * If the attribute does not exist, returns "Unknown" * @param attribute * @return */ public String getAttributeValue(String attribute) { if(attribute.equalsIgnoreCase("Label")) { return getLabel(); } if(attribute.equalsIgnoreCase("Time")) { return String.valueOf(getTime()); } if(attribute.equalsIgnoreCase("Date")) { return String.valueOf(getTime()); } GraphAttribute aux = attributes.get(attribute); if(aux != null) { return aux.getAverageValue(); } else { return "Unknown"; } } /** * Method to return the attribute value as float * @param attribute * @return */ public float getAttributeValueFloat(String attribute) { if(attribute.equalsIgnoreCase("Time")) { return getTime(); } if(attributes.get(attribute) == null) { // System.out.println("If getAttributeValue equals NULL: " + attributes.get(attribute).getValue()); return Float.NaN; } if(Utils.tryParseFloat(attributes.get(attribute).getAverageValue())) { return Utils.convertFloat(attributes.get(attribute).getAverageValue()); } else { // System.out.println("Else getAttributeValue: " + attributes.get(attribute).getValue()); return Float.NaN; } } /** * Method for getting the vertex border size * * @deprecated use VertexStroke class instead * @param width Define the border width * @return (Stroke) returns the new vertex border width */ public Stroke getStroke(float width) { float dash[] = null; final Stroke nodeStroke = new BasicStroke(width, BasicStroke.CAP_BUTT, BasicStroke.JOIN_MITER, 10.0f, dash, 0.0f); return nodeStroke; } /** * Method for defining the vertex color * * @return (Paint) vertex color */ public abstract Paint getColor(); /** * Method used to identify the vertex type * * * @return (String) vertex type */ public abstract String getNodeType(); }
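Since Vertex is abstract, a subclass only has to supply a color and a node type; the sketch below shows a minimal hypothetical subclass, assuming GraphObject introduces no further abstract methods.

package br.uff.ic.utility.graph;

import java.awt.Color;
import java.awt.Paint;

public class ActivityVertex extends Vertex {

    public ActivityVertex(String id, String label, String time) {
        super(id, label, time);
    }

    @Override
    public Paint getColor() {
        // Color used when the provenance graph is rendered
        return Color.ORANGE;
    }

    @Override
    public String getNodeType() {
        // Shown in the tooltip assembled by Vertex.toString()
        return "Activity";
    }
}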
package com.mindoo.domino.jna.internal; import com.mindoo.domino.jna.constants.OpenNote; import com.mindoo.domino.jna.internal.structs.LinuxNotesNamesListHeader64Struct; import com.mindoo.domino.jna.internal.structs.MacNotesNamesListHeader64Struct; import com.mindoo.domino.jna.internal.structs.NoteIdStruct; import com.mindoo.domino.jna.internal.structs.NotesCDFieldStruct; import com.mindoo.domino.jna.internal.structs.NotesCollectionPositionStruct; import com.mindoo.domino.jna.internal.structs.NotesFileObjectStruct; import com.mindoo.domino.jna.internal.structs.NotesItemValueTableStruct; import com.mindoo.domino.jna.internal.structs.NotesMIMEPartStruct; import com.mindoo.domino.jna.internal.structs.NotesNamesListHeader32Struct; import com.mindoo.domino.jna.internal.structs.NotesNumberPairStruct; import com.mindoo.domino.jna.internal.structs.NotesObjectDescriptorStruct; import com.mindoo.domino.jna.internal.structs.NotesOriginatorIdStruct; import com.mindoo.domino.jna.internal.structs.NotesRangeStruct; import com.mindoo.domino.jna.internal.structs.NotesSchedEntryExtStruct; import com.mindoo.domino.jna.internal.structs.NotesSchedEntryStruct; import com.mindoo.domino.jna.internal.structs.NotesScheduleListStruct; import com.mindoo.domino.jna.internal.structs.NotesScheduleStruct; import com.mindoo.domino.jna.internal.structs.NotesSearchMatch32Struct; import com.mindoo.domino.jna.internal.structs.NotesSearchMatch64Struct; import com.mindoo.domino.jna.internal.structs.NotesTableItemStruct; import com.mindoo.domino.jna.internal.structs.NotesTimeDatePairStruct; import com.mindoo.domino.jna.internal.structs.NotesTimeDateStruct; import com.mindoo.domino.jna.internal.structs.NotesTimeStruct; import com.mindoo.domino.jna.internal.structs.NotesUniversalNoteIdStruct; import com.mindoo.domino.jna.internal.structs.WinNotesNamesListHeader32Struct; import com.mindoo.domino.jna.internal.structs.WinNotesNamesListHeader64Struct; import com.mindoo.domino.jna.internal.structs.collation.NotesCollateDescriptorStruct; import com.mindoo.domino.jna.internal.structs.collation.NotesCollationStruct; import com.mindoo.domino.jna.internal.structs.html.HtmlApi_UrlArgStruct; import com.mindoo.domino.jna.internal.structs.html.HtmlApi_UrlTargetComponentStruct; import com.mindoo.domino.jna.internal.structs.html.StringListStruct; import com.mindoo.domino.jna.internal.structs.html.ValueUnion; import com.mindoo.domino.jna.internal.structs.viewformat.NotesViewColumnFormat2Struct; import com.mindoo.domino.jna.internal.structs.viewformat.NotesViewColumnFormat3Struct; import com.mindoo.domino.jna.internal.structs.viewformat.NotesViewColumnFormat4Struct; import com.mindoo.domino.jna.internal.structs.viewformat.NotesViewColumnFormat5Struct; import com.mindoo.domino.jna.internal.structs.viewformat.NotesViewColumnFormatStruct; import com.mindoo.domino.jna.internal.structs.viewformat.NotesViewTableFormat2Struct; import com.mindoo.domino.jna.internal.structs.viewformat.NotesViewTableFormat4Struct; import com.mindoo.domino.jna.internal.structs.viewformat.NotesViewTableFormat5Struct; import com.mindoo.domino.jna.internal.structs.viewformat.NotesViewTableFormatStruct; import com.sun.jna.Pointer; /** * Extract of Notes C API constants, should only be used internally by the API. * The plan is to wrap/provide any relevant constant as enum, like {@link OpenNote}. 
* * @author Karsten Lehmann */ public interface NotesConstants { //computation of data type sizes for the current platform public final int timeDateSize = NotesTimeDateStruct.newInstance().size(); public final int rangeSize = NotesRangeStruct.newInstance().size(); public final int timeSize = NotesTimeStruct.newInstance().size(); public final int numberPairSize = NotesNumberPairStruct.newInstance().size(); public final int timeDatePairSize = NotesTimeDatePairStruct.newInstance().size(); public final int collectionPositionSize = NotesCollectionPositionStruct.newInstance().size(); public final int itemValueTableSize = NotesItemValueTableStruct.newInstance().size(); public final int tableItemSize = NotesTableItemStruct.newInstance().size(); public final int oidSize = NotesOriginatorIdStruct.newInstance().size(); public final int winNamesListHeaderSize64 = WinNotesNamesListHeader64Struct.newInstance().size(); public final int winNamesListHeaderSize32 = WinNotesNamesListHeader32Struct.newInstance().size(); public final int namesListHeaderSize32 = NotesNamesListHeader32Struct.newInstance().size(); public final int linuxNamesListHeaderSize64 = LinuxNotesNamesListHeader64Struct.newInstance().size(); public final int macNamesListHeaderSize64 = MacNotesNamesListHeader64Struct.newInstance().size(); public final int objectDescriptorSize = NotesObjectDescriptorStruct.newInstance().size(); public final int fileObjectSize = NotesFileObjectStruct.newInstance().size(); public final int cdFieldSize = NotesCDFieldStruct.newInstance().size(); public final int schedListSize = NotesScheduleListStruct.newInstance().size(); public final int schedEntrySize = NotesSchedEntryStruct.newInstance().size(); public final int schedEntryExtSize = NotesSchedEntryExtStruct.newInstance().size(); public final int scheduleSize = NotesScheduleStruct.newInstance().size(); public final int notesUniversalNoteIdSize = NotesUniversalNoteIdStruct.newInstance().size(); public final int noteIdSize = NoteIdStruct.newInstance().size(); public final int slistStructSize = StringListStruct.newInstance().size(); public final int valueUnionSize = Math.max( ValueUnion.newInstance(0).size(), Math.max(ValueUnion.newInstance(NotesUniversalNoteIdStruct.newInstance()).size(), Math.max(ValueUnion.newInstance(NoteIdStruct.newInstance()).size(), ValueUnion.newInstance(StringListStruct.newInstance()).size()))); public final int htmlApiUrlTargetComponentSize = Math.max( HtmlApi_UrlTargetComponentStruct.newInstance(0, 0, ValueUnion.newInstance(new Pointer(0))).size(), Math.max( HtmlApi_UrlTargetComponentStruct.newInstance(0, 0, ValueUnion.newInstance(NotesUniversalNoteIdStruct.newInstance())).size(), HtmlApi_UrlTargetComponentStruct.newInstance(0, 0, ValueUnion.newInstance(StringListStruct.newInstance())).size() ) ); public final int htmlApiUrlArgSize = Math.max( HtmlApi_UrlArgStruct.newInstance(0, 0, ValueUnion.newInstance(new Pointer(0))).size(), Math.max( HtmlApi_UrlArgStruct.newInstance(0, 0, ValueUnion.newInstance(NotesUniversalNoteIdStruct.newInstance())).size(), HtmlApi_UrlArgStruct.newInstance(0, 0, ValueUnion.newInstance(StringListStruct.newInstance())).size() ) ); public final int htmlApiUrlComponentSize = Math.max(htmlApiUrlTargetComponentSize, htmlApiUrlArgSize); public final int notesCollationSize = NotesCollationStruct.newInstance().size(); public final int notesCollateDescriptorSize = NotesCollateDescriptorStruct.newInstance().size(); public final int notesViewTableFormatSize = NotesViewTableFormatStruct.newInstance().size(); public final 
int notesViewTableFormat2Size = NotesViewTableFormat2Struct.newInstance().size(); public final int notesViewTableFormat4Size = NotesViewTableFormat4Struct.newInstance().size(); public final int notesViewTableFormat5Size = NotesViewTableFormat5Struct.newInstance().size(); public final int notesViewColumnFormatSize = NotesViewColumnFormatStruct.newInstance().size(); public final int notesViewColumnFormat2Size = NotesViewColumnFormat2Struct.newInstance().size(); public final int notesViewColumnFormat3Size = NotesViewColumnFormat3Struct.newInstance().size(); public final int notesViewColumnFormat4Size = NotesViewColumnFormat4Struct.newInstance().size(); public final int notesViewColumnFormat5Size = NotesViewColumnFormat5Struct.newInstance().size(); public final int notesSearchMatch32Size = NotesSearchMatch32Struct.newInstance().size(); public final int notesSearchMatch64Size = NotesSearchMatch64Struct.newInstance().size(); public final int mimePartSize = NotesMIMEPartStruct.newInstance().size(); public static final short MAXALPHATIMEDATE = 80; public static final short ERR_MASK = 0x3fff; /** error came from remote machine */ public static final short STS_REMOTE = 0x4000; /* Defines for Authentication flags */ /** Set if names list has been authenticated via Notes */ public static final short NAMES_LIST_AUTHENTICATED = 0x0001; /** Set if names list has been authenticated using external password -- Triggers "maximum password access allowed" feature */ public static final short NAMES_LIST_PASSWORD_AUTHENTICATED = 0x0002; /** Set if user requested full admin access and it was granted */ public static final short NAMES_LIST_FULL_ADMIN_ACCESS = 0x0004; /* Replication flags NOTE: Please note the distinction between REPLFLG_DISABLE and REPLFLG_NEVER_REPLICATE. The former is used to temporarily disable replication. The latter is used to indicate that this database should NEVER be replicated. The former may be set and cleared by the Notes user interface. The latter is intended to be set programmatically and SHOULD NEVER be able to be cleared by the user interface. The latter was invented to avoid having to set the replica ID to the known value of REPLICA_ID_NEVERREPLICATE. This latter method has the failing that DBs that use it cannot have DocLinks to them. 
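   (Illustrative note, not part of the original header comment: the replication
   priority occupies the top two bits of the flags WORD, so with the constants
   defined below it can be read back as
   (replFlags >> REPLFLG_PRIORITY_SHIFT) & REPLFLG_PRIORITY_MASK,
   where "replFlags" is a hypothetical variable holding the replication flags.)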
*/ /* 0x0001 spare was COPY_ACL */ /* 0x0002 spare */ /** Disable replication */ public short REPLFLG_DISABLE = 0x0004; /** Mark unread only if newer note */ public short REPLFLG_UNREADIFFNEW = 0x0008; /** Don't propagate deleted notes when replicating from this database */ public short REPLFLG_IGNORE_DELETES = 0x0010; /** UI does not allow perusal of Design */ public short REPLFLG_HIDDEN_DESIGN = 0x0020; /** Do not list in catalog */ public short REPLFLG_DO_NOT_CATALOG = 0x0040; /** Auto-Delete documents prior to cutoff date */ public short REPLFLG_CUTOFF_DELETE = 0x0080; /** DB is not to be replicated at all */ public short REPLFLG_NEVER_REPLICATE = 0x0100; /** Abstract during replication */ public short REPLFLG_ABSTRACT = 0x0200; /** Do not list in database add */ public short REPLFLG_DO_NOT_BROWSE = 0x0400; /** Do not run chronos on database */ public short REPLFLG_NO_CHRONOS = 0x0800; /** Don't replicate deleted notes into destination database */ public short REPLFLG_IGNORE_DEST_DELETES = 0x1000; /** Include in Multi Database indexing */ public short REPLFLG_MULTIDB_INDEX = 0x2000; /** Low priority */ public short REPLFLG_PRIORITY_LOW = (short) (0xC000 & 0xffff); /** Medium priority */ public short REPLFLG_PRIORITY_MED = 0x0000; /** High priority */ public short REPLFLG_PRIORITY_HI = 0x4000; /** Shift count for priority field */ public short REPLFLG_PRIORITY_SHIFT = 14; /** Mask for priority field after shifting*/ public short REPLFLG_PRIORITY_MASK = 0x0003; /** Mask for clearing the field */ public short REPLFLG_PRIORITY_INVMASK = 0x3fff; public short REPLFLG_USED_MASK = (short) ((0x4|0x8|0x10|0x40|0x80|0x100|0x200|0xC000|0x1000|0x2000|0x4000) & 0xffff); /** Reserved ReplicaID.Date. Used in ID.Date field in ReplicaID to escape to reserved REPLICA_ID_xxx */ public short REPLICA_DATE_RESERVED = 0; /** Number of times within cutoff interval that we purge deleted stubs. For example, if the cutoff interval is 90 days, we purge every 30 days. */ public short CUTOFF_CHANGES_DURING_INTERVAL = 3; /** UnreadList has been modified */ public static short FILTER_UNREAD = 0x0001; /** CollpasedList has been modified */ public static short FILTER_COLLAPSED = 0x0002; /** SelectedList has been modified */ public static short FILTER_SELECTED = 0x0004; /** UNID table has been modified. */ public static short FILTER_UNID_TABLE = 0x0008; /** Conditionaly do FILTER_UNREAD if current unread list indicates it - see NSFDbUpdateUnread */ public static short FILTER_UPDATE_UNREAD = 0x0010; /** Mark specified ID table Read */ public static short FILTER_MARK_READ = 0x0020; /** Mark specified ID table Unread */ public static short FILTER_MARK_UNREAD = 0x0040; /** Mark all read */ public static short FILTER_MARK_READ_ALL = 0x0080; /** Mark all unread */ public static short FILTER_MARK_UNREAD_ALL = 0x0100; /** * If the following is ORed in with a note class, the resultant note ID * may be passed into NSFNoteOpen and may be treated as though you first * did an NSFGetSpecialNoteID followed by an NSFNoteOpen, all in a single * transaction. */ public static final int NOTE_ID_SPECIAL = 0xFFFF0000; /* Note Classifications */ /* If NOTE_CLASS_DEFAULT is ORed with another note class, it is in essence specifying that this is the default item in this class. There should only be one DEFAULT note of each class that is ever updated, although nothing in the NSF machinery prevents the caller from adding more than one. The file header contains a table of the note IDs of the default notes (for efficient access to them). 
Whenever a note is updated that has the default bit set, the reference in the file header is updated to reflect that fact. WARNING: NOTE_CLASS_DOCUMENT CANNOT have a "default". This is precluded by code in NSFNoteOpen to make it fast for data notes. */ /** document note */ public static final short NOTE_CLASS_DOCUMENT = 0x0001; /** old name for document note */ public static final short NOTE_CLASS_DATA = NOTE_CLASS_DOCUMENT; /** notefile info (help-about) note */ public static final short NOTE_CLASS_INFO = 0x0002; /** form note */ public static final short NOTE_CLASS_FORM = 0x0004; /** view note */ public static final short NOTE_CLASS_VIEW = 0x0008; /** icon note */ public static final short NOTE_CLASS_ICON = 0x0010; /** design note collection */ public static final short NOTE_CLASS_DESIGN = 0x0020; /** acl note */ public static final short NOTE_CLASS_ACL = 0x0040; /** Notes product help index note */ public static final short NOTE_CLASS_HELP_INDEX = 0x0080; /** designer's help note */ public static final short NOTE_CLASS_HELP = 0x0100; /** filter note */ public static final short NOTE_CLASS_FILTER = 0x0200; /** field note */ public static final short NOTE_CLASS_FIELD = 0x0400; /** replication formula */ public static final short NOTE_CLASS_REPLFORMULA = 0x0800; /** Private design note, use $PrivateDesign view to locate/classify */ public static final short NOTE_CLASS_PRIVATE = 0x1000; /** MODIFIER - default version of each */ public static final short NOTE_CLASS_DEFAULT = (short) (0x8000 & 0xffff); /** see {@link #SEARCH_NOTIFYDELETIONS} */ public static final short NOTE_CLASS_NOTIFYDELETION = NOTE_CLASS_DEFAULT; /** all note types */ public static final short NOTE_CLASS_ALL = 0x7fff; /** all non-data notes */ public static final short NOTE_CLASS_ALLNONDATA = 0x7ffe; /** no notes */ public static final short NOTE_CLASS_NONE = 0x0000; /** Define symbol for those note classes that allow only one such in a file */ public static final short NOTE_CLASS_SINGLE_INSTANCE =( NOTE_CLASS_DESIGN | NOTE_CLASS_ACL | NOTE_CLASS_INFO | NOTE_CLASS_ICON | NOTE_CLASS_HELP_INDEX | 0); /* Note flag definitions */ /** signed */ public static final short NOTE_SIGNED = 0x0001; /** encrypted */ public static final short NOTE_ENCRYPTED = 0x0002; /* Open Flag Definitions. These flags are passed to NSFNoteOpen. */ /** open only summary info */ public static final short OPEN_SUMMARY = 0x0001; /** don't bother verifying default bit */ public static final short OPEN_NOVERIFYDEFAULT = 0x0002; /** expand data while opening */ public static final short OPEN_EXPAND = 0x0004; /** don't include any objects */ public static final short OPEN_NOOBJECTS = 0x0008; /** open in a "shared" memory mode */ public static final short OPEN_SHARE = 0x0020; /** Return ALL item values in canonical form */ public static final short OPEN_CANONICAL = 0x0040; /** Mark unread if unread list is currently associated */ public static final short OPEN_MARK_READ = 0x0100; /** Only open an abstract of large documents */ public static final short OPEN_ABSTRACT =0x0200; /** Return response ID table */ public static final short OPEN_RESPONSE_ID_TABLE = 0x1000; /** Include folder objects - default is not to */ public static final int OPEN_WITH_FOLDERS = 0x00020000; /** If set, leave TYPE_RFC822_TEXT items in native format. Otherwise, convert to TYPE_TEXT/TYPE_TIME. */ public static final int OPEN_RAW_RFC822_TEXT = 0x01000000; /** If set, leave TYPE_MIME_PART items in native format. Otherwise, convert to TYPE_COMPOSITE. 
*/ public static final int OPEN_RAW_MIME_PART = 0x02000000; public static final int OPEN_RAW_MIME = (OPEN_RAW_RFC822_TEXT | OPEN_RAW_MIME_PART); /* Update Flag Definitions. These flags are passed to NSFNoteUpdate and NSFNoteDelete. See also NOTEID_xxx special definitions in nsfdata.h. */ /** update even if ERR_CONFLICT */ public static final short UPDATE_FORCE = 0x0001; /** give error if new field name defined */ public static final short UPDATE_NAME_KEY_WARNING = 0x0002; /** do NOT do a database commit after update */ public static final short UPDATE_NOCOMMIT = 0x0004; /** do NOT maintain revision history */ public static final short UPDATE_NOREVISION = 0x0100; /** update body but leave no trace of note in file if deleted */ public static final short UPDATE_NOSTUB = 0x0200; /** Compute incremental note info */ public static final short UPDATE_INCREMENTAL = 0x4000; /* update body DELETED */ public static final short UPDATE_DELETED = (short) (0x8000 & 0xffff); /* Obsolete; but in SDK */ public static final short UPDATE_DUPLICATES = 0; /* Conflict Handler defines */ public static final short CONFLICT_ACTION_MERGE = 1; public static final short CONFLICT_ACTION_HANDLED = 2; /** Split the second update of this note with the object store */ public static final int UPDATE_SHARE_SECOND = 0x00200000; /** Share objects only, not non-summary items, with the object store */ public static final int UPDATE_SHARE_OBJECTS = 0x00400000; /** Return status of lock */ public static final int NOTE_LOCK_STATUS = 0x00000008; /** Take out a hard note lock */ public static final int NOTE_LOCK_HARD = 0x00000010; /** Take out a provisional hard note lock */ public static final int NOTE_LOCK_PROVISIONAL = 0x00000020; /* Flags returned (beginning in V3) in the _NOTE_FLAGS */ /** TRUE if document cannot be updated */ public static final short NOTE_FLAG_READONLY = 0x0001; /** missing some data */ public static final short NOTE_FLAG_ABSTRACTED = 0x0002; /** Incremental note (place holders) */ public static final short NOTE_FLAG_INCREMENTAL = 0x0004; /** Note contains linked items or linked objects */ public static final short NOTE_FLAG_LINKED = 0x0020; /** Incremental type note Fully opened (NO place holders) This type of note is meant to retain the Item sequence numbers */ public static final short NOTE_FLAG_INCREMENTAL_FULL = 0x0040; /** Note is (opened) in canonical form */ public static final short NOTE_FLAG_CANONICAL = 0x4000; /* Note structure member IDs for NSFNoteGet and SetInfo. */ /** IDs for NSFNoteGet and SetInfo */ public static short _NOTE_DB = 0; /** (When adding new values, see the table in NTINFO.C */ public static short _NOTE_ID = 1; /** Get/set the Originator ID (OID). */ public static short _NOTE_OID = 2; /** Get/set the NOTE_CLASS (WORD). */ public static short _NOTE_CLASS = 3; /** Get/set the Modified in this file time/date (TIMEDATE : GMT normalized). */ public static short _NOTE_MODIFIED = 4; /** For pre-V3 compatibility. Should use $Readers item */ public static short _NOTE_PRIVILEGES = 5; /** Get/set the note flags (WORD). See NOTE_FLAG_xxx. */ public static short _NOTE_FLAGS = 7; /** Get/set the Accessed in this file date (TIMEDATE). 
*/ public static short _NOTE_ACCESSED = 8; /** For response hierarchy */ public static short _NOTE_PARENT_NOTEID = 10; /** For response hierarchy */ public static short _NOTE_RESPONSE_COUNT = 11; /** For response hierarchy */ public static short _NOTE_RESPONSES = 12; /** For AddedToFile time */ public static short _NOTE_ADDED_TO_FILE = 13; /** DBHANDLE of object store used by linked items */ public static short _NOTE_OBJSTORE_DB = 14; /** display only views and folder; version filtering */ public static final String DFLAGPAT_VIEWS_AND_FOLDERS = "-G40n^"; /** display only views and folder; all notes &amp; web */ public static final String DFLAGPAT_VIEWS_AND_FOLDERS_DESIGN = "-G40^"; /** display things that are runnable; version filtering */ public static final String DFLAGPAT_TOOLSRUNMACRO = "-QXMBESIst5nmz{"; /** At least one of the "definition" * view items ($FORMULA, $COLLATION, * or $FORMULACLASS) has been modified * by another user since last ReadEntries. * Upon receipt, you may wish to * re-read the view note if up-to-date * copies of these items are needed. * Upon receipt, you may also wish to * re-synchronize your index position * and re-read the rebuilt index.<br> * <br> * Signal returned only ONCE per detection */ public static final int SIGNAL_DEFN_ITEM_MODIFIED = 0x0001; /** At least one of the non-"definition" * view items ($TITLE,etc) has been * modified since last ReadEntries. * Upon receipt, you may wish to * re-read the view note if up-to-date * copies of these items are needed.<br> * <br> * Signal returned only ONCE per detection */ public static final int SIGNAL_VIEW_ITEM_MODIFIED = 0x0002; /** Collection index has been modified * by another user since last ReadEntries. * Upon receipt, you may wish to * re-synchronize your index position * and re-read the modified index.<br> * <br> * Signal returned only ONCE per detection */ public static final int SIGNAL_INDEX_MODIFIED = 0x0004; /** Unread list has been modified * by another window using the same * hCollection context * Upon receipt, you may wish to * repaint the window if the window * contains the state of unread flags * (This signal is never generated * by NIF - only unread list users) */ public static final int SIGNAL_UNREADLIST_MODIFIED = 0x0008; /** Collection is not up to date */ public static final int SIGNAL_DATABASE_MODIFIED = 0x0010; /** End of collection has not been reached * due to buffer being too full. * The ReadEntries should be repeated * to continue reading the desired entries. */ public static final int SIGNAL_MORE_TO_DO = 0x0020; /** The view contains a time-relative formula * (e.g., @Now). Use this flag to tell if the * collection will EVER be up-to-date since * time-relative views, by definition, are NEVER * up-to-date. */ public static final int SIGNAL_VIEW_TIME_RELATIVE = 0x0040; /** Returned if signal flags are not supported * This is used by NIFFindByKeyExtended when it * is talking to a pre-V4 server that does not * support signal flags for FindByKey */ public static final int SIGNAL_NOT_SUPPORTED = 0x0080; /** The view contains documents with readers fields */ public static final int SIGNAL_VIEW_HASPRIVS = 0x0100; /** Mask that defines all "sharing conflicts", which are cases when the database or collection has changed out from under the user. 
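 * <p>
 * A minimal usage sketch (added for illustration, not from the original C header;
 * the variable {@code signalFlags} is hypothetical and stands for the signal word
 * returned alongside a ReadEntries call):
 * <pre>
 * if ((signalFlags &amp; SIGNAL_ANY_CONFLICT) != 0) {
 *     // view definition or index changed underneath us: re-synchronize and re-read
 * }
 * </pre>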
*/ public static final int SIGNAL_ANY_CONFLICT = (SIGNAL_DEFN_ITEM_MODIFIED | SIGNAL_VIEW_ITEM_MODIFIED | SIGNAL_INDEX_MODIFIED | SIGNAL_UNREADLIST_MODIFIED | SIGNAL_DATABASE_MODIFIED); /** Mask that defines all "sharing conflicts" except for SIGNAL_DATABASE_MODIFIED. This can be used in combination with SIGNAL_VIEW_TIME_RELATIVE to tell if the database or collection has truly changed out from under the user or if the view is a time-relative view which will NEVER be up-to-date. SIGNAL_DATABASE_MODIFIED is always returned for a time-relative view to indicate that it is never up-to-date. */ public static final int SIGNAL_ANY_NONDATA_CONFLICT = (SIGNAL_DEFN_ITEM_MODIFIED | SIGNAL_VIEW_ITEM_MODIFIED | SIGNAL_INDEX_MODIFIED | SIGNAL_UNREADLIST_MODIFIED); public static final short OS_TRANSLATE_NATIVE_TO_LMBCS = 0; /* Translate platform-specific to LMBCS */ public static final short OS_TRANSLATE_LMBCS_TO_NATIVE = 1; /* Translate LMBCS to platform-specific */ public static final short OS_TRANSLATE_LOWER_TO_UPPER = 3; /* current int'l case table */ public static final short OS_TRANSLATE_UPPER_TO_LOWER = 4; /* current int'l case table */ public static final short OS_TRANSLATE_UNACCENT = 5; /* int'l unaccenting table */ public static final short OS_TRANSLATE_LMBCS_TO_UNICODE = 20; public static final short OS_TRANSLATE_LMBCS_TO_UTF8 = 22; public static final short OS_TRANSLATE_UNICODE_TO_LMBCS = 23; public static final short OS_TRANSLATE_UTF8_TO_LMBCS = 24; public static final int MAXPATH = 256; public static final short MAXUSERNAME = 256; /* Maximum user name */ public static short IDTABLE_MODIFIED = 0x0001; /* modified - set by Insert/Delete */ /* and can be cleared by caller if desired */ public static short IDTABLE_INVERTED = 0x0002; /* sense of list inverted */ /* (reserved for use by caller only) */ public static long NOTEID_RESERVED = 0x80000000L; /* Reserved Note ID, used for categories in NIFReadEntries and for deleted notes in a lot of interfaces. */ public static long NOTEID_GHOST_ENTRY = 0x40000000L; /* Bit 30 -> partial thread ghost collection entry */ public static long NOTEID_CATEGORY = 0x80000000L; /* Bit 31 -> (ghost) "category entry" */ public static long NOTEID_CATEGORY_TOTAL = 0xC0000000L; /* Bit 31+30 -> (ghost) "grand total entry" */ public static long NOTEID_CATEGORY_INDENT = 0x3F000000L; /* Bits 24-29 -> category indent level within this column */ public static long NOTEID_CATEGORY_ID = 0x00FFFFFFL; /* Low 24 bits are unique category # */ public static long RRV_DELETED = NOTEID_RESERVED; /* indicates a deleted note (DBTABLE.C) */ /** Cascade can go only one level deep parent\sub */ public static int DESIGN_LEVELS = 2; /** Maximum size of a level */ public static int DESIGN_LEVEL_MAX = 64; /** Guaranteed to be the greatest of Form, View or Macro * length. NOTE: We need * space for LEVELS-1 cascade * characters and a NULL term. * The +1 takes care of that. */ public static int DESIGN_NAME_MAX = ((DESIGN_LEVEL_MAX+1)*DESIGN_LEVELS); /** Forms can cascade a level */ public static int DESIGN_FORM_MAX = DESIGN_NAME_MAX; /** Views can cascade a level */ public static int DESIGN_VIEW_MAX = DESIGN_NAME_MAX; /** Macros can cascade a level */ public static int DESIGN_MACRO_MAX = DESIGN_NAME_MAX; /** Fields cannot cascade */ public static int DESIGN_FIELD_MAX = DESIGN_LEVEL_MAX+1; /** Design element comment max size. 
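 * <p>
 * (Worked example for the cascade-name limits defined just above, added for
 * illustration: with {@code DESIGN_LEVEL_MAX} = 64 and {@code DESIGN_LEVELS} = 2,
 * {@code DESIGN_NAME_MAX} evaluates to (64 + 1) * 2 = 130 characters.)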
*/ public static int DESIGN_COMMENT_MAX = 256; /** All names, including sysnonyms */ public static int DESIGN_ALL_NAMES_MAX = 256; /** Same as for views */ public static int DESIGN_FOLDER_MAX = DESIGN_VIEW_MAX; /** Same as for views */ public static int DESIGN_FOLDER_MAX_NAME = DESIGN_LEVEL_MAX; public static int DESIGN_FLAGS_MAX = 32; public static String DESIGN_FLAGS = "$Flags"; public static String DESIGN_FLAG_FOLDER_VIEW = "F"; /* VIEW: This is a V4 folder view. */ //saves the info in the idtable header in the dest public static final byte IDREPLACE_SAVEDEST = 0x01; public short ECL_TYPE_LOTUS_SCRIPT = 0; public short ECL_TYPE_JAVA_APPLET = 1; public short ECL_TYPE_JAVASCRIPT = 2; /** Access files (read/write/export/import)*/ public short ECL_FLAG_FILES = 0x0008; /** Access current db's docs/db */ public short ECL_FLAG_DOCS_DBS = 0x0010; /** Access environ vars (get/set) */ public short ECL_FLAG_ENVIRON = 0x0080; /** Access non-notes dbs (@DB with non "","Notes" first arg) */ public short ECL_FLAG_EXTERN_DBS = 0x0100; /** Access "code" in external systems (LS, DLLS, DDE) */ public short ECL_FLAG_EXTERN_CODE = 0x0200; /** Access external programs (OLE/SendMsg/Launch) */ public short ECL_FLAG_EXTERN_PROGRAMS = 0x0400; /** Send mail (@MailSend) */ public short ECL_FLAG_SEND_MAIL = 0x0800; /** Access ECL */ public short ECL_FLAG_ECL = 0x1000; /** Read access to other databases */ public short ECL_FLAG_READ_OTHER_DBS = 0x2000; /** Write access to other databases */ public short ECL_FLAG_WRITE_OTHER_DBS = 0x4000; /** Ability to export data (copy/print, etc) */ public short ECL_FLAG_EXPORT_DATA = (short) (0x8000 & 0xffff); /* extended acl flags */ /** Access network programatically */ public short ECL_FLAG_NETWORK = 0x0001; /** Property Broker Get */ public short ECL_FLAG_PROPERTY_GET = 0x0002; /** Property Broker Put */ public short ECL_FLAG_PROPERTY_PUT = 0x0004; /** Widget configuration */ public short ECL_FLAG_WIDGETS = 0x0008; /** access to load Java */ public short ECL_FLAG_LOADJAVA = 0x0010; public short TIMEDATE_MINIMUM = 0; public short TIMEDATE_MAXIMUM = 1; public short TIMEDATE_WILDCARD = 2; /* Define flags for NSFFolderGetIDTable */ public int DB_GETIDTABLE_VALIDATE = 0x00000001; /* If set, return only "validated" noteIDs */ public int SIGN_NOTES_IF_MIME_PRESENT = 0x00000001; /* Possible validation phases for NSFNoteComputeWithForm() */ public short CWF_DV_FORMULA = 1; public short CWF_IT_FORMULA = 2; public short CWF_IV_FORMULA = 3; public short CWF_COMPUTED_FORMULA = 4; public short CWF_DATATYPE_CONVERSION = 5; public short CWF_COMPUTED_FORMULA_LOAD = CWF_COMPUTED_FORMULA; public short CWF_COMPUTED_FORMULA_SAVE = 6; /* Definitions for NSFDbGetMultNoteInfo and NSFDbGetMultNoteInfoByUNID */ /** Return NoteID */ public static short fINFO_NOTEID = 0x0001; /** Return SequenceTime from OID */ public static short fINFO_SEQTIME = 0x0002; /** Return Sequence number from OID */ public static short fINFO_SEQNUM = 0x0004; /** Return OID (disables SeqTime &amp; number &amp; UNID) */ public static short fINFO_OID = 0x0008; /** Compress non-existent UNIDs */ public static short fINFO_COMPRESS = 0x0040; /** Return UNID */ public static short fINFO_UNID = 0x0080; /** Allow the returned buffer to exceed 64k. */ public static short fINFO_ALLOW_HUGE = 0x0400; /* Define NSF DB Classes - These all begin with 0xf000 for no good reason other than to ENSURE that callers of NSFDbCreate call the routine with valid parameters, since in earlier versions of NSF the argument to the call was typically 0. 
*/ /* The type of the database is determined by the filename extension. * The extensions and their database classes are .NSX (NSFTESTFILE), * .NSF (NOTEFILE), .DSK (DESKTOP), .NCF (NOTECLIPBOARD), .NTF (TEMPLATEFILE), * .NSG (GIANTNOTEFILE), .NSH (HUGENOTEFILE), NTD (ONEDOCFILE), * NS2 (V2NOTEFILE), NTM (ENCAPSMAILFILE). */ public short DBCLASS_BY_EXTENSION = 0; /** A test database. */ public short DBCLASS_NSFTESTFILE = (short) (0xff00 & 0xffff); /** A standard Domino database. */ public short DBCLASS_NOTEFILE = (short) (0xff01 & 0xffff); /** A Notes desktop (folders, icons, etc.). */ public short DBCLASS_DESKTOP = (short) (0xff02 & 0xffff); /** A Notes clipboard (used for cutting and pasting). */ public short DBCLASS_NOTECLIPBOARD = (short) (0xff03 & 0xffff); /** A database that contains every type of note (forms, views, ACL, icon, etc.) except data notes. */ public short DBCLASS_TEMPLATEFILE = (short) (0xff04 & 0xffff); /** A standard Domino database, with size up to 1 GB. This was used * in Notes Release 3 when the size of a previous version of a database had been limited to 200 MB. */ public short DBCLASS_GIANTNOTEFILE = (short) (0xff05 & 0xffff); /** A standard Domino database, with size up to 1 GB. This was used in Notes Release * 3 when the size of a previous version of a database had been limited to 300 MB. */ public short DBCLASS_HUGENOTEFILE = (short) (0xff06 & 0xffff); /** One document database with size up to 10MB. Specifically used by alternate * mail to create an encapsulated database. Components of the document are * further limited in size. It is not recommended that you use this database * class with NSFDbCreate. If you do, and you get an error when saving the document, * you will need to re-create the database using DBCLASS_NOTEFILE. */ public short DBCLASS_ONEDOCFILE = (short) (0xff07 & 0xffff); /** Database was created as a Notes Release 2 database. */ public short DBCLASS_V2NOTEFILE = (short) (0xff08 & 0xffff); /** One document database with size up to 5MB. Specifically used by alternate mail * to create an encapsulated database. Components of the document are further * limited in size. It is not recommended that you use this database class with * NSFDbCreate. If you do, and you get an error when saving the document, you will * need to re-create the database using DBCLASS_NOTEFILE. */ public short DBCLASS_ENCAPSMAILFILE = (short) (0xff09 & 0xffff); /** Specifically used by alternate mail. Not recomended for use with NSFDbCreate. */ public short DBCLASS_LRGENCAPSMAILFILE = (short) (0xff0a & 0xffff); /** Database was created as a Notes Release 3 database. */ public short DBCLASS_V3NOTEFILE = (short) (0xff0b & 0xffff); /** Object store. */ public short DBCLASS_OBJSTORE = (short) (0xff0c & 0xffff); /** One document database with size up to 10MB. Specifically used by Notes Release 3 * alternate mail to create an encapsulated database. Not recomended for use * with NSFDbCreate. */ public short DBCLASS_V3ONEDOCFILE = (short) (0xff0d & 0xffff); /** Database was created specifically for Domino and Notes Release 4. */ public short DBCLASS_V4NOTEFILE = (short) (0xff0e & 0xffff); /** Database was created specifically for Domino and Notes Release 5. */ public short DBCLASS_V5NOTEFILE = (short) (0xff0f & 0xffff); /** Database was created specifically for Domino and Notes Release Notes/Domino 6. */ public short DBCLASS_V6NOTEFILE = (short) (0xff10 & 0xffff); /** Database was created specifically for Domino and Notes Release Notes/Domino 8. 
*/ public short DBCLASS_V8NOTEFILE = (short) (0xff11 & 0xffff); /** Database was created specifically for Domino and Notes Release Notes/Domino 8.5. */ public short DBCLASS_V85NOTEFILE = (short) (0xff12 & 0xffff); /** Database was created specifically for Domino and Notes Release Notes/Domino 9. */ public short DBCLASS_V9NOTEFILE = (short) (0xff13 & 0xffff); public short DBCLASS_MASK = (0x00ff & 0xffff); public short DBCLASS_VALID_MASK = (short) (0xff00 & 0xffff); /* Option flags for NSFDbCreateExtended */ /** Create a locally encrypted database. */ public short DBCREATE_LOCALSECURITY = 0x0001; /** NSFNoteUpdate will not use an object store for notes in the database. */ public short DBCREATE_OBJSTORE_NEVER = 0x0002; /** The maximum database length is specified in bytes in NSFDbCreateExtended. */ public short DBCREATE_MAX_SPECIFIED = 0x0004; /** Don't support note hierarchy - ODS21 and up only */ public short DBCREATE_NORESPONSE_INFO = 0x0010; /** Don't maintain unread lists for this DB */ public short DBCREATE_NOUNREAD = 0x0020; /** Skip overwriting freed disk buffer space */ public short DBCREATE_NO_FREE_OVERWRITE = 0x0200; /** Maintain form/bucket bitmap */ public short DBCREATE_FORM_BUCKET_OPT = 0x0400; /** Disable transaction logging for this database if specified */ public short DBCREATE_DISABLE_TXN_LOGGING = 0x0800; /** Enable maintaining last accessed time */ public short DBCREATE_MAINTAIN_LAST_ACCESSED = 0x1000; /** TRUE if database is a mail[n].box database */ public short DBCREATE_IS_MAILBOX = 0x4000; /** TRUE if database should allow "large" (&gt;64K bytes) UNK table */ public short DBCREATE_LARGE_UNKTABLE = (short) (0x8000 & 0xffff); /* Values for EncryptStrength of NSFDbCreateExtended */ public byte DBCREATE_ENCRYPT_NONE = 0x00; public byte DBCREATE_ENCRYPT_SIMPLE = 0x01; public byte DBCREATE_ENCRYPT_MEDIUM = 0x02; public byte DBCREATE_ENCRYPT_STRONG = 0x03; /* Data Type Definitions. */ /* Class definitions. Classes are defined to be the "generic" classes of data type that the internal formula computation mechanism recognizes when doing recalcs. */ public static final int CLASS_NOCOMPUTE = (int)(0 << 8); public static final int CLASS_ERROR = (int)(1 << 8); public static final int CLASS_UNAVAILABLE = (int)(2 << 8); public static final int CLASS_NUMBER = (int)(3 << 8); public static final int CLASS_TIME = (int)(4 << 8); public static final int CLASS_TEXT = (int)(5 << 8); public static final int CLASS_FORMULA = (int)(6 << 8); public static final int CLASS_USERID = (int)(7 << 8); public static final int CLASS_MASK = (int)0xff00; /* Item Flags */ // These flags define the characteristics of an item (field) in a note. The flags may be bitwise or'ed together for combined functionality. /** This item is signed. */ public static final short ITEM_SIGN = 0x0001; /** This item is sealed. When used in NSFItemAppend, the item is encryption * enabled; it can later be encrypted if edited from the Notes UI and saved * in a form that specifies Encryption. */ public static final short ITEM_SEAL = 0x0002; /** This item is stored in the note's summary buffer. Summary items may be used * in view columns, selection formulas, and @-functions. Summary items may be * accessed via the SEARCH_MATCH structure provided by NSFSearch or in the * buffer returned by NIFReadEntries. API programs may read, modify, and write * items in the summary buffer without opening the note first. The maximum size * of the summary buffer is 32K. Items of TYPE_COMPOSITE may not have the * ITEM_SUMMARY flag set.
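 * <p>
 * (A combination sketch added for illustration, not from the original header: item
 * flags are OR-ed together, so a signed summary item would carry
 * {@code (short) (ITEM_SUMMARY | ITEM_SIGN)}.)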
*/ public static final short ITEM_SUMMARY = 0x0004; /** This item is an Author Names field as indicated by the READ/WRITE-ACCESS * flag. Item is TYPE_TEXT or TYPE_TEXT_LIST. Author Names fields have the * ITEM_READWRITERS flag or'd with the ITEM_NAMES flag. */ public static final short ITEM_READWRITERS = 0x0020; /** This item is a Names field. Indicated by the NAMES (distinguished names) * flag. Item is TYPE_TEXT or TYPE_TEXT_LIST. */ public static final short ITEM_NAMES = 0x0040; /** * Item will not be written to disk */ public static final short ITEM_NOUPDATE = 0x0080; /** This item is a placeholder field in a form note. Item is TYPE_INVALID_OR_UNKNOWN. */ public static final short ITEM_PLACEHOLDER = 0x0100; /** A user requires editor access to change this field. */ public static final short ITEM_PROTECTED = 0x0200; /** This is a Reader Names field. Indicated by the READER-ACCESS flag. Item is * TYPE_TEXT or TYPE_TEXT_LIST. */ public static final short ITEM_READERS = 0x0400; /** Item is same as on-disk. */ public static final short ITEM_UNCHANGED = 0x1000; public static final int ALLDAY = 0xffffffff; public static final int ANYDAY = 0xffffffff; public static final int DT_SHOWDATE = (int)0x0008; public static final int DT_SHOWABBREV = (int)0x0800; public static final int DT_STYLE_MDY = (int)2; public static final int DT_USE_TFMT = (int)0x0001; public static final int SECS_IN_WEEK = (int)604800; public static final int DT_STYLE_MSK = (int)0x000f0000; public static final int DT_STYLE_YMD = (int)1; public static final int DT_STYLE_DMY = (int)3; public static final int DT_SHOWTIME = (int)0x0004; public static final int TICKS_IN_MINUTE = (int)6000; public static final int DT_4DIGITYEAR = (int)0x0001; public static final int DT_24HOUR = (int)0x0040; public static final int SECS_IN_MONTH = (int)2592000; public static final int DT_ALPHAMONTH = (int)0x0002; public static final int TICKS_IN_DAY = (int)8640000; public static final int SECS_IN_DAY = (int)86400; public static final int TICKS_IN_SECOND = (int)100; public static final int TICKS_IN_HOUR = (int)360000; public static final int DT_VALID = (int)0x8000; /* Define NSF Special Note ID Indices. The first 16 of these are reserved for "default notes" in each of the 16 note classes. In order to access these, use SPECIAL_ID_NOTE+NOTE_CLASS_XXX. This is generally used when calling NSFDbGetSpecialNoteID. NOTE: NSFNoteOpen, NSFDbReadObject and NSFDbWriteObject support reading special notes or objects directly (without calling NSFDbGetSpecialNoteID). They use a DIFFERENT flag with a similar name: NOTE_ID_SPECIAL (see nsfnote.h). Remember this rule: SPECIAL_ID_NOTE is a 16 bit mask and is used as a NoteClass argument. NOTE_ID_SPECIAL is a 32 bit mask and is used as a NoteID or RRV argument. */ public short SPECIAL_ID_NOTE = (short) (0x8000 & 0xffff); /* use in combination w/NOTE_CLASS when calling NSFDbGetSpecialNoteID */ /** No filter specified (hFilter ignored). */ public int SEARCH_FILTER_NONE = 0x00000000; /** hFilter is a Note ID table. */ public int SEARCH_FILTER_NOTEID_TABLE = 0x00000001; /** hFilter is a View note handle */ public int SEARCH_FILTER_FOLDER = 0x00000002; /** Filter on particular Properties. */ public int SEARCH_FILTER_DBDIR_PROPERTY = 0x00000004; /** Filter on Database Options (bits set). */ public int SEARCH_FILTER_DBOPTIONS = 0x00000010; /** Filter on Database Options (bits clear). 
*/ public int SEARCH_FILTER_DBOPTIONS_CLEAR = 0x00000020; /** Filter based on a set of form names */ public int SEARCH_FILTER_FORMSKIMMED = 0x00000040; /** Don't try to filter on form names, we know it won't work */ public int SEARCH_FILTER_NOFORMSKIMMED = 0x00000080; /** Filter on Query View SQL */ public int SEARCH_FILTER_QUERY_VIEW = 0x00000100; /** Filter on item revision times */ public int SEARCH_FILTER_ITEM_TIME = 0x00000200; /** Filter on time range input */ public int SEARCH_FILTER_RANGE = 0x00000400; /** Filter out .ndx files */ public int SEARCH_FILTER_NO_NDX = 0x00000800; /** Search for databases with inline indexing */ public int SEARCH_FILTER_INLINE_INDEX = 0x00001000; /** * Include deleted and non-matching notes in search (ALWAYS "ON" in partial * searches!) */ public int SEARCH_ALL_VERSIONS = 0x0001; /** obsolete synonym */ public int SEARCH_INCLUDE_DELETED = SEARCH_ALL_VERSIONS; /** TRUE to return summary buffer with each match */ public int SEARCH_SUMMARY = 0x0002; /** * For directory mode file type filtering. If set, "NoteClassMask" is * treated as a FILE_xxx mask for directory filtering */ public int SEARCH_FILETYPE = 0x0004; /** special caching for dir scan */ public int SEARCH_SERVERCACHE = 0x0008; /** Set NOTE_CLASS_NOTIFYDELETION bit of NoteClass for deleted notes */ public int SEARCH_NOTIFYDELETIONS = 0x0010; /** do not put item names into summary info */ public int SEARCH_NOITEMNAMES = 0x0020; /** return error if we don't have full privileges */ public int SEARCH_ALLPRIVS = 0x0040; /** for dir scans, only return files needing fixup */ public int SEARCH_FILEFIXUP = 0x0080; /** Formula buffer is hashed UNID table */ public int SEARCH_UNID_TABLE = 0x0100; /** Return buffer in canonical form */ public int SEARCH_CANONICAL = 0x0200; /** Use current session's user name, not server's */ public int SEARCH_SESSION_USERNAME = 0x0400; /** Allow search to return id's only, i.e. no summary buffer */ public int SEARCH_NOPRIVCHECK = 0x0800; /** Filter out "Truncated" documents */ public int SEARCH_NOABSTRACTS = 0x1000; /** Perform unread flag sync */ public int SEARCH_SYNC = 0x2000; /** Search formula applies only to data notes, i.e., others match */ public int SEARCH_DATAONLY_FORMULA = 0x4000; /** INCLUDE notes with non-replicatable OID flag */ public int SEARCH_NONREPLICATABLE = 0x8000; /** * SEARCH_MATCH is V4 style. That is MatchesFormula is now a bit field where * the lower bit indicates whether the document matches. If it does, the * other bits provide additional information regarding the note. */ public int SEARCH_V4INFO = 0x00010000; /** Search includes all children of matching documents. */ public int SEARCH_ALLCHILDREN = 0x00020000; /** Search includes all descendants of matching documents. */ public int SEARCH_ALLDESCENDANTS = 0x00040000; /** First pass in a multipass hierarchical search. */ public int SEARCH_FIRSTPASS = 0x00080000; /** Descendants were added on this pass. */ public int SEARCH_DESCENDANTSADDED = 0x00100000; /** Formula is an Array of Formulas. */ public int SEARCH_MULTI_FORMULA = 0x00200000; /** Return purged note ids as deleted notes. 
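 * <p>
 * (General usage note for the SEARCH_* flags in this block, added as an illustration
 * and not taken from the original header: the flags are OR-ed into a single DWORD,
 * e.g. {@code int searchFlags = SEARCH_SUMMARY | SEARCH_ALL_VERSIONS;} for a search
 * that returns summary buffers and also reports deleted note stubs; with
 * {@code SEARCH_FILETYPE} set, the note-class mask is instead interpreted against the
 * {@code FILE_*} values defined further below.)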
*/ public int SEARCH_INCLUDE_PURGED = 0x00400000; /** Only return templates without the "advanced" bit set */ public int SEARCH_NO_ADV_TEMPLATES = 0x00800000; /** Only Private Views or Agents */ public int SEARCH_PRIVATE_ONLY = 0x01000000; /** * Full search (as if Since was "1") but exclude DATA notes prior to * passed-in Since time */ public int SEARCH_FULL_DATACUTOFF = 0x02000000; /** * If specified, the progress field in the SEARCH_ENTRY structure will be * filled in. This avoids performing the calculation if it was not wanted. */ public int SEARCH_CALC_PROGRESS = 0x04000000; public int SEARCH_NAMED_GHOSTS = 0x08000000; /** Perform optimized unread sync */ public int SEARCH_SYNC_OPTIMIZED = 0x10000000; /** * Return only docs with protection fields (BS_PROTECTED set in note header) */ public int SEARCH_ONLYPROTECTED = 0x20000000; /** Return soft deleted documents */ public int SEARCH_SOFTDELETIONS = 0x40000000; /** for setting/verifying that bits 28-31 of search 1 flags are 1000 */ public int SEARCH1_SIGNATURE = 0x80000000; public int SEARCH1_SELECT_NAMED_GHOSTS = (0x00000001 | SEARCH1_SIGNATURE); /** * Include profile documents (a specific type of named ghost note) in the * search. Note: set SEARCH1_SELECT_NAMED_GHOSTS, too, if you want the * selection formula to be applied to the profile docs (so as not to get * them all back as matches). */ public int SEARCH1_PROFILE_DOCS = (0X00000002 | SEARCH1_SIGNATURE); /** * Skim off notes whose summary buffer can't be generated because its size * is too big. */ public int SEARCH1_SKIM_SUMMARY_BUFFER_TOO_BIG = (0x00000004 | SEARCH1_SIGNATURE); public int SEARCH1_RETURN_THREAD_UNID_ARRAY = (0x00000008 | SEARCH1_SIGNATURE); public int SEARCH1_RETURN_TUA = SEARCH1_RETURN_THREAD_UNID_ARRAY; /** * flag for reporting noaccess in case of reader's field at the doc level */ public int SEARCH1_REPORT_NOACCESS = (0x000000010 | SEARCH1_SIGNATURE); /** Search "Truncated" documents */ public int SEARCH1_ONLY_ABSTRACTS = (0x000000020 | SEARCH1_SIGNATURE); /** * Search documents fixup purged. This is distinct and mutually exclusive from * SEARCH_INCLUDE_PURGED which is used for view processing by NIF etc to * remove purged notes from views. This is used for replication restoring * corrupt documents. */ public int SEARCH1_FIXUP_PURGED = (0x000000040 | SEARCH1_SIGNATURE); public int CWF_CONTINUE_ON_ERROR = 0x0001; /* Ignore compute errors */ /* EncryptFlags used in NSFNoteCopyAndEncrypt */ public short ENCRYPT_WITH_USER_PUBLIC_KEY = 0x0001; public short ENCRYPT_SMIME_IF_MIME_PRESENT = 0x0002; public short ENCRYPT_SMIME_NO_SENDER = 0x0004; public short ENCRYPT_SMIME_TRUST_ALL_CERTS = 0x0008; /* DecryptFlags used in NSFNoteDecrypt */ public short DECRYPT_ATTACHMENTS_IN_PLACE = 0x0001; /* Use this flag to tell the run context that when it runs an agent, you want it to check the privileges of the signer of that agent and apply them. For example, if the signer of the agent has "restricted" agent privileges, then the agent will be restricted. If you don't set this flag, all agents run as unrestricted. List of security checks enabled by this flag: Restricted/unrestricted agent Can create databases Is agent targeted to this machine Is user allowed to access this machine Can user run personal agents */ public static final int AGENT_SECURITY_OFF = 0x00; /* CreateRunContext */ public static final int AGENT_SECURITY_ON = 0x01; /* CreateRunContext */ public static final int AGENT_REOPEN_DB = 0x10; /* AgentRun */ /* Definitions for stdout redirection types.
This specifies where output from the LotusScript "print" statement will go */ public static short AGENT_REDIR_NONE = 0; /* goes to the bit bucket */ public static short AGENT_REDIR_LOG = 1; /* goes to the Notes log (default) */ public static short AGENT_REDIR_MEMORY = 2; /* goes to a memory buffer, cleared each AgentRun */ public static short AGENT_REDIR_MEMAPPEND = 3; /* goes to buffer, append mode for each agent */ /** does not match formula (deleted or updated) */ public static byte SE_FNOMATCH = 0x00; /** matches formula */ public static byte SE_FMATCH = 0x01; /** document truncated */ public static byte SE_FTRUNCATED = 0x02; /** note has been purged. Returned only when SEARCH_INCLUDE_PURGED is used */ public static byte SE_FPURGED = 0x04; /** note has no purge status. Returned only when SEARCH_FULL_DATACUTOFF is used */ public static byte SE_FNOPURGE = 0x08; /** if SEARCH_NOTIFYDELETIONS: note is soft deleted; NoteClass &amp; NOTE_CLASS_NOTIFYDELETION also on (off for hard delete) */ public static byte SE_FSOFTDELETED = 0x10; /** if there is reader's field at doc level this is the return value so that we could mark the replication as incomplete*/ public static byte SE_FNOACCESS = 0x20; /** note has truncated attachments. Returned only when SEARCH1_ONLY_ABSTRACTS is used */ public static byte SE_FTRUNCATT = 0x40; /* File type flags (used with NSFSearch directory searching). */ /** Any file type */ public static int FILE_ANY = 0; /** Starting in V3, any DB that is a candidate for replication */ public static int FILE_DBREPL = 1; /** Databases that can be templates */ public static int FILE_DBDESIGN = 2; /** BOX - Any .BOX (Mail.BOX, SMTP.Box...) */ public static int FILE_MAILBOX = 3; /** NS?, any NSF version */ public static int FILE_DBANY = 4; /** NT?, any NTF version */ public static int FILE_FTANY = 5; /** MDM - modem command file */ public static int FILE_MDMTYPE = 6; /** directories only */ public static int FILE_DIRSONLY = 7; /** VPC - virtual port command file */ public static int FILE_VPCTYPE = 8; /** SCR - comm port script files */ public static int FILE_SCRTYPE = 9; /** ANY Notes database (.NS?, .NT?, .BOX) */ public static int FILE_ANYNOTEFILE = 10; /** DTF - Any .DTF. Used for container and sort temp files to give them a more unique name than .TMP so we can delete *.DTF from the temp directory and hopefully not blow away other application's temp files. 
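 * <p>
 * (Illustrative note for the FILE_* codes in this block, not from the original header:
 * a type code from the list occupies the low byte (see {@code FILE_TYPEMASK}) and can
 * be OR-ed with the modifier bits defined below, e.g.
 * {@code int fileTypes = FILE_DBANY | FILE_DIRS;} to list databases and subdirectories
 * in a directory search.)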
*/ public static int FILE_UNIQUETEMP = 11; /** CLN - Any .cln file...multi user cleanup files*/ public static int FILE_MULTICLN = 12; /** any smarticon file *.smi */ public static int FILE_SMARTI = 13; /** File type mask (for FILE_xxx codes above) */ public static int FILE_TYPEMASK = 0x00ff; /** List subdirectories as well as normal files */ public static int FILE_DIRS = 0x8000; /** Do NOT return ..'s */ public static int FILE_NOUPDIRS = 0x4000; /** Recurse into subdirectories */ public static int FILE_RECURSE = 0x2000; /** All directories, linked files &amp; directories */ public static int FILE_LINKSONLY = 0x1000; /* Define memory allocator hints, which re-use the top 2 bits of the BLK_ codes so that we didn't have to add a new argument to OSMemAlloc() */ /** Object may be used by multiple processes */ public short MEM_SHARE = (short) (0x8000 & 0xffff); /** Object may be OSMemRealloc'ed LARGER */ public short MEM_GROWABLE = 0x4000; public static final int _TIMEDATE = 10; public static final int _TIMEDATE_PAIR = 11; public static final int _ALIGNED_NUMBER_PAIR = 12; public static final int _LIST = 13; public static final int _RANGE = 14; public static final int _DBID = 15; public static final int _ITEM = 17; public static final int _ITEM_TABLE = 18; public static final int _SEARCH_MATCH = 24; public static final int _ORIGINATORID = 26; public static final int _OID = _ORIGINATORID; public static final int _OBJECT_DESCRIPTOR = 27; public static final int _UNIVERSALNOTEID = 28; public static final int _UNID = _UNIVERSALNOTEID; public static final int _VIEW_TABLE_FORMAT = 29; public static final int _VIEW_COLUMN_FORMAT = 30; public static final int _NOTELINK = 33; public static final int _LICENSEID = 34; public static final int _VIEW_FORMAT_HEADER = 42; public static final int _VIEW_TABLE_FORMAT2 = 43; public static final int _DBREPLICAINFO = 56; public static final int _FILEOBJECT = 58; public static final int _COLLATION = 59; public static final int _COLLATE_DESCRIPTOR = 60; public static final int _CDKEYWORD = 68; public static final int _CDLINK2 = 72; public static final int _CDLINKEXPORT2 = 97; public static final int _CDPARAGRAPH = 109; public static final int _CDPABDEFINITION = 110; public static final int _CDPABREFERENCE = 111; public static final int _CDFIELD_PRE_36 = 112; public static final int _CDTEXT = 113; public static final int _CDDOCUMENT = 114; public static final int _CDMETAFILE = 115; public static final int _CDBITMAP = 116; public static final int _CDHEADER = 117; public static final int _CDFIELD = 118; public static final int _CDFONTTABLE = 119; public static final int _CDFACE = 120; public static final int _CDCGM = 156; public static final int _CDTIFF = 159; public static final int _CDBITMAPHEADER = 162; public static final int _CDBITMAPSEGMENT = 163; public static final int _CDCOLORTABLE = 164; public static final int _CDPATTERNTABLE = 165; public static final int _CDGRAPHIC = 166; public static final int _CDPMMETAHEADER = 167; public static final int _CDWINMETAHEADER = 168; public static final int _CDMACMETAHEADER = 169; public static final int _CDCGMMETA = 170; public static final int _CDPMMETASEG = 171; public static final int _CDWINMETASEG = 172; public static final int _CDMACMETASEG = 173; public static final int _CDDDEBEGIN = 174; public static final int _CDDDEEND = 175; public static final int _CDTABLEBEGIN = 176; public static final int _CDTABLECELL = 177; public static final int _CDTABLEEND = 178; public static final int _CDSTYLENAME = 188; public static final 
int _FILEOBJECT_MACEXT = 192; public static final int _FILEOBJECT_HPFSEXT = 193; public static final int _CDOLEBEGIN = 218; public static final int _CDOLEEND = 219; public static final int _CDHOTSPOTBEGIN = 230; public static final int _CDHOTSPOTEND = 231; public static final int _CDBUTTON = 237; public static final int _CDBAR = 308; public static final int _CDQUERYHEADER = 314; public static final int _CDQUERYTEXTTERM = 315; public static final int _CDACTIONHEADER = 316; public static final int _CDACTIONMODIFYFIELD = 317; public static final int _ODS_ASSISTSTRUCT = 318; public static final int _VIEWMAP_HEADER_RECORD = 319; public static final int _VIEWMAP_RECT_RECORD = 320; public static final int _VIEWMAP_BITMAP_RECORD = 321; public static final int _VIEWMAP_REGION_RECORD = 322; public static final int _VIEWMAP_POLYGON_RECORD_BYTE = 323; public static final int _VIEWMAP_POLYLINE_RECORD_BYTE = 324; public static final int _VIEWMAP_ACTION_RECORD = 325; public static final int _ODS_ASSISTRUNINFO = 326; public static final int _CDACTIONREPLY = 327; public static final int _CDACTIONFORMULA = 332; public static final int _CDACTIONLOTUSSCRIPT = 333; public static final int _CDQUERYBYFIELD = 334; public static final int _CDACTIONSENDMAIL = 335; public static final int _CDACTIONDBCOPY = 336; public static final int _CDACTIONDELETE = 337; public static final int _CDACTIONBYFORM = 338; public static final int _ODS_ASSISTFIELDSTRUCT = 339; public static final int _CDACTION = 340; public static final int _CDACTIONREADMARKS = 341; public static final int _CDEXTFIELD = 342; public static final int _CDLAYOUT = 343; public static final int _CDLAYOUTTEXT = 344; public static final int _CDLAYOUTEND = 345; public static final int _CDLAYOUTFIELD = 346; public static final int _VIEWMAP_DATASET_RECORD = 347; public static final int _CDDOCAUTOLAUNCH = 350; public static final int _CDPABHIDE = 358; public static final int _CDPABFORMULAREF = 359; public static final int _CDACTIONBAR = 360; public static final int _CDACTIONFOLDER = 361; public static final int _CDACTIONNEWSLETTER = 362; public static final int _CDACTIONRUNAGENT = 363; public static final int _CDACTIONSENDDOCUMENT = 364; public static final int _CDQUERYFORMULA = 365; public static final int _CDQUERYBYFORM = 373; public static final int _ODS_ASSISTRUNOBJECTHEADER = 374; public static final int _ODS_ASSISTRUNOBJECTENTRY=375; public static final int _CDOLEOBJ_INFO=379; public static final int _CDLAYOUTGRAPHIC=407; public static final int _CDQUERYBYFOLDER=413; public static final int _CDQUERYUSESFORM=423; public static final int _VIEW_COLUMN_FORMAT2=428; public static final int _VIEWMAP_TEXT_RECORD=464; public static final int _CDLAYOUTBUTTON=466; public static final int _CDQUERYTOPIC=471; public static final int _CDLSOBJECT=482; public static final int _CDHTMLHEADER=492; public static final int _CDHTMLSEGMENT=493; public static final int _SCHED_LIST=502; public static final int _SCHED_LIST_OBJ = _SCHED_LIST; public static final int _SCHED_ENTRY=503; public static final int _SCHEDULE=504; public static final int _CDTEXTEFFECT=508; public static final int _VIEW_CALENDAR_FORMAT=513; public static final int _CDSTORAGELINK=515; public static final int _ACTIVEOBJECT=516; public static final int _ACTIVEOBJECTPARAM=517; public static final int _ACTIVEOBJECTSTORAGELINK=518; public static final int _CDTRANSPARENTTABLE=541; /* modified viewmap records, changed CD record from byte to word */ public static final int _VIEWMAP_POLYGON_RECORD=551; public static final 
int _VIEWMAP_POLYLINE_RECORD=552; public static final int _SCHED_DETAIL_LIST=553; public static final int _CDALTERNATEBEGIN=554; public static final int _CDALTERNATEEND=555; public static final int _CDOLERTMARKER=556; public static final int _HSOLERICHTEXT=557; public static final int _CDANCHOR=559; public static final int _CDHRULE=560; public static final int _CDALTTEXT=561; public static final int _CDACTIONJAVAAGENT=562; public static final int _CDHTMLBEGIN=564; public static final int _CDHTMLEND=565; public static final int _CDHTMLFORMULA=566; public static final int _CDBEGINRECORD=577; public static final int _CDENDRECORD=578; public static final int _CDVERTICALALIGN=579; public static final int _CDFLOAT=580; public static final int _CDTIMERINFO=581; public static final int _CDTABLEROWHEIGHT=582; public static final int _CDTABLELABEL=583; public static final int _CDTRANSITION=610; public static final int _CDPLACEHOLDER=611; public static final int _CDEMBEDDEDVIEW=615; public static final int _CDEMBEDDEDOUTLINE=620; public static final int _CDREGIONBEGIN=621; public static final int _CDREGIONEND=622; public static final int _CDCELLBACKGROUNDDATA=623; public static final int _FRAMESETLENGTH=625; public static final int _CDFRAMESETHEADER=626; public static final int _CDFRAMESET=627; public static final int _CDFRAME=628; public static final int _CDTARGET=629; public static final int _CDRESOURCE=631; public static final int _CDMAPELEMENT=632; public static final int _CDAREAELEMENT=633; public static final int _CDRECT=634; public static final int _CDPOINT=635; public static final int _CDEMBEDDEDCTL=636; public static final int _CDEVENT=637; public static final int _MIME_PART=639; public static final int _CDPRETABLEBEGIN=640; public static final int _CDCOLOR=645; public static final int _CDBORDERINFO=646; public static final int _CDEXT2FIELD=672; public static final int _CDEMBEDDEDSCHEDCTL=674; public static final int _RFC822ITEMDESC=675; public static final int _COLOR_VALUE=690; public static final int _CDBLOBPART=695; public static final int _CDIMAGEHEADER=705; public static final int _CDIMAGESEGMENT=706; public static final int _VIEW_TABLE_FORMAT3=707; public static final int _CDIDNAME=708; public static final int _CDACTIONBAREXT=719; public static final int _CDLINKCOLORS=722; public static final int _CDCAPTION=728; public static final int _CDFIELDHINT=742; public static final int _CDLSOBJECT_R6=744; public static final int _CDINLINE=756; public static final int _CDTEXTPROPERTIESTABLE=765; public static final int _CDSPANRECORD=766; public static final int _CDDECSFIELD=767; public static final int _CDLAYER=808; public static final int _CDPOSITIONING=809; public static final int _CDBOXSIZE=810; public static final int _CDEMBEDDEDEDITCTL=816; public static final int _CDEMBEDDEDSCHEDCTLEXTRA=818; public static final int _LOG_SEARCHR6_REQ=821; public static final int _CDBACKGROUNDPROPERTIES=822; public static final int _CDTEXTPROPERTY=833; public static final int _CDDATAFLAGS=834; public static final int _CDFILEHEADER=835; public static final int _CDFILESEGMENT=836; public static final int _CDEVENTENTRY=847; public static final int _CDACTIONEXT=848; public static final int _CDEMBEDDEDCALCTL=849; public static final int _CDTABLEDATAEXTENSION=857; public static final int _CDLARGEPARAGRAPH=909; public static final int _CDIGNORE=912; public static final int _VIEW_COLUMN_FORMAT5=914; public static final int _CDEMBEDEXTRAINFO=934; public static final int _CDEMBEDDEDCONTACTLIST=935; public static 
final int _NOTE_SEAL2_HDR=1031; public static final int _NOTE_SEAL2=1032; public static final int _NOTE_RECORD_DESC=1033; /* These must be OR-ed into the ObjectType below in order to get the desired behavior. Note that OBJECT_COLLECTION implicitly has both of these bits implied, because that was the desired behavior before these bits were invented. */ /** do not copy object when updating to new note or database */ public static final int OBJECT_NO_COPY = 0x8000; /** keep object around even if hNote doesn't have it when NoteUpdating */ public static final int OBJECT_PRESERVE = 0x4000; /** Public access object being allocated. */ public static final int OBJECT_PUBLIC = 0x2000; /* Object Types, a sub-category of TYPE_OBJECT */ /** File Attachment */ public static final short OBJECT_FILE = 0; /** IDTable of "done" docs attached to filter */ public static final short OBJECT_FILTER_LEFTTODO = 3; /** Assistant run data object */ public static final short OBJECT_ASSIST_RUNDATA = 8; /** Used as input to NSFDbGetObjectSize */ public static final short OBJECT_UNKNOWN = (short) (0xffff & 0xffff); /** file object has object digest appended */ public static final short FILEFLAG_SIGN = 0x0001; /** file is represented by an editor run in the document */ public static final short FILEFLAG_INDOC = 0x0002; /** file object has mime data appended */ public static final short FILEFLAG_MIME = 0x0004; /** file is a folder automatically compressed by Notes */ public static final short FILEFLAG_AUTOCOMPRESSED = 0x0080; public static short MAXENVVALUE = 256; public static int MAXDWORD = 0xffffffff; public static short MAXWORD = (short) (0xffff & 0xffff); /** Transaction is Sub-Committed if a Sub Transaction */ public static int NSF_TRANSACTION_BEGIN_SUB_COMMIT = 0x00000001; /** When starting a txn (not a sub tran) get an intent shared lock on the db */ public static int NSF_TRANSACTION_BEGIN_LOCK_DB = 0x00000002; /** Don't automatically abort if Commit Processing Fails */ public static final int TRANCOMMIT_SKIP_AUTO_ABORT = 1; /** Enable full text indexing */ public static final int DBOPTBIT_FT_INDEX = 0; /** TRUE if database is being used as an object store - for garbage collection */ public static final int DBOPTBIT_IS_OBJSTORE = 1; /** TRUE if database has notes which refer to an object store - for garbage collection*/ public static final int DBOPTBIT_USES_OBJSTORE = 2; /** TRUE if NoteUpdate of notes in this db should never use an object store. */ public static final int DBOPTBIT_OBJSTORE_NEVER = 3; /** TRUE if database is a library */ public static final int DBOPTBIT_IS_LIBRARY = 4; /** TRUE if uniform access control across all replicas */ public static final int DBOPTBIT_UNIFORM_ACCESS = 5; /** TRUE if NoteUpdate of notes in this db should always try to use an object store. */ public static final int DBOPTBIT_OBJSTORE_ALWAYS = 6; /** TRUE if garbage collection is never to be done on this object store */ public static final int DBOPTBIT_COLLECT_NEVER = 7; /** TRUE if this is a template and is considered an advanced one (for experts only.) */ public static final int DBOPTBIT_ADV_TEMPLATE = 8; /** TRUE if db has no background agent */ public static final int DBOPTBIT_NO_BGAGENT = 9; /** TRUE if db is out-of-service, no new opens allowed, unless DBOPEN_IGNORE_OUTOFSERVICE is specified */ public static final int DBOPTBIT_OUT_OF_SERVICE = 10; /** TRUE if db is personal journal */ public static final int DBOPTBIT_IS_PERSONALJOURNAL = 11; /** TRUE if db is marked for delete.
no new opens allowed, cldbdir will delete the database when ref count = 0 */ public static final int DBOPTBIT_MARKED_FOR_DELETE = 12; /** TRUE if db stores calendar events */ public static final int DBOPTBIT_HAS_CALENDAR = 13; /** TRUE if db is a catalog index */ public static final int DBOPTBIT_IS_CATALOG_INDEX = 14; /** TRUE if db is an address book */ public static final int DBOPTBIT_IS_ADDRESS_BOOK = 15; /** TRUE if db is a "multi-db-search" repository */ public static final int DBOPTBIT_IS_SEARCH_SCOPE = 16; /** TRUE if db's user activity log is confidential, only viewable by designer and manager */ public static final int DBOPTBIT_IS_UA_CONFIDENTIAL = 17; /** TRUE if item names are to be treated as if the ITEM_RARELY_USED_NAME flag is set. */ public static final int DBOPTBIT_RARELY_USED_NAMES = 18; /** TRUE if db is a "multi-db-site" repository */ public static final int DBOPTBIT_IS_SITEDB = 19; /** TRUE if docs in folders in this db have folder references */ public static final int DBOPTBIT_FOLDER_REFERENCES = 20; /** TRUE if the database is a proxy for non-NSF data */ public static final int DBOPTBIT_IS_PROXY = 21; /** TRUE for NNTP server add-in dbs */ public static final int DBOPTBIT_IS_NNTP_SERVER_DB = 22; /** TRUE if this is a replica of an IMAP proxy, enables certain * special cases for interacting with db */ public static final int DBOPTBIT_IS_INET_REPL = 23; /** TRUE if db is a Lightweight NAB */ public static final int DBOPTBIT_IS_LIGHT_ADDRESS_BOOK = 24; /** TRUE if database has notes which refer to an object store - for garbage collection*/ public static final int DBOPTBIT_ACTIVE_OBJSTORE = 25; /** TRUE if database is globally routed */ public static final int DBOPTBIT_GLOBALLY_ROUTED = 26; /** TRUE if database has mail autoprocessing enabled */ public static final int DBOPTBIT_CS_AUTOPROCESSING_ENABLED = 27; /** TRUE if database has mail filters enabled */ public static final int DBOPTBIT_MAIL_FILTERS_ENABLED = 28; /** TRUE if database holds subscriptions */ public static final int DBOPTBIT_IS_SUBSCRIPTIONDB = 29; /** TRUE if database supports "check-in" "check-out" */ public static final int DBOPTBIT_IS_LOCK_DB = 30; /** TRUE if editor must lock notes to edit */ public static final int DBOPTBIT_IS_DESIGNLOCK_DB = 31; /* ODS26+ options */ /** if TRUE, store all modified index blocks in lz1 compressed form */ public static final int DBOPTBIT_COMPRESS_INDEXES = 33; /** if TRUE, store all modified buckets in lz1 compressed form */ public static final int DBOPTBIT_COMPRESS_BUCKETS = 34; /** FALSE by default, turned on forever if DBFLAG_COMPRESS_INDEXES or DBFLAG_COMPRESS_BUCKETS are ever turned on. */ public static final int DBOPTBIT_POSSIBLY_COMPRESSED = 35; /** TRUE if freed space in db is not overwritten */ public static final int DBOPTBIT_NO_FREE_OVERWRITE = 36; /** DB doesn't maintain unread marks */ public static final int DBOPTBIT_NOUNREAD = 37; /** TRUE if the database does not maintain note hierarchy info.
*/ public static final int DBOPTBIT_NO_RESPONSE_INFO = 38; /** Disabling of response info will happen on next compaction */ public static final int DBOPTBIT_DISABLE_RSP_INFO_PEND = 39; /** Enabling of response info will happen on next compaction */ public static final int DBOPTBIT_ENABLE_RSP_INFO_PEND = 40; /** Form/Bucket bitmap optimization is enabled */ public static final int DBOPTBIT_FORM_BUCKET_OPT = 41; /** Disabling of Form/Bucket bitmap opt will happen on next compaction */ public static final int DBOPTBIT_DISABLE_FORMBKT_PEND = 42; /** Enabling of Form/Bucket bitmap opt will happen on next compaction */ public static final int DBOPTBIT_ENABLE_FORMBKT_PEND = 43; /** If TRUE, maintain LastAccessed */ public static final int DBOPTBIT_MAINTAIN_LAST_ACCESSED = 44; /** If TRUE, transaction logging is disabled for this database */ public static final int DBOPTBIT_DISABLE_TXN_LOGGING = 45; /** If TRUE, monitors can't be used against this database (non-replicating) */ public static final int DBOPTBIT_MONITORS_NOT_ALLOWED = 46; /** If TRUE, all transactions on this database are nested top actions */ public static final int DBOPTBIT_NTA_ALWAYS = 47; /** If TRUE, objects are not to be logged */ public static final int DBOPTBIT_DONTLOGOBJECTS = 48; /** If set, the default delete is soft. Can be overwritten by UPDATE_DELETE_HARD */ public static final int DBOPTBIT_DELETES_ARE_SOFT = 49; /* The following bits are used by the webserver and are gotten from the icon note */ /** if TRUE, the Db needs to be opened using SSL over HTTP */ public static final int DBOPTBIT_HTTP_DBIS_SSL = 50; /** if TRUE, the Db needs to use JavaScript to render the HTML for formulas, buttons, etc */ public static final int DBOPTBIT_HTTP_DBIS_JS = 51; /** if TRUE, there is a $DefaultLanguage value on the $icon note */ public static final int DBOPTBIT_HTTP_DBIS_MULTILANG = 52; /* ODS37+ options */ /** if TRUE, database is a mail.box (ODS37 and up) */ public static final int DBOPTBIT_IS_MAILBOX = 53; /** if TRUE, database is allowed to have &gt;64KB UNK table */ public static final int DBOPTBIT_LARGE_UNKTABLE = 54; /** If TRUE, full-text index is accent sensitive */ public static final int DBOPTBIT_ACCENT_SENSITIVE_FT = 55; /** TRUE if database has NSF support for IMAP enabled */ public static final int DBOPTBIT_IMAP_ENABLED = 56; /** TRUE if database is a USERless N&amp;A Book */ public static final int DBOPTBIT_USERLESS_NAB = 57; /** TRUE if extended ACL's apply to this Db */ public static final int DBOPTBIT_EXTENDED_ACL = 58; /** TRUE if connections to 3rd party DBs are allowed */ public static final int DBOPTBIT_DECS_ENABLED = 59; /** TRUE if a 1+ referenced shared template. Sticky bit once referenced. */ public static final int DBOPTBIT_IS_SHARED_TEMPLATE = 60; /** TRUE if database is a mailfile */ public static final int DBOPTBIT_IS_MAILFILE = 61; /** TRUE if database is a web application */ public static final int DBOPTBIT_IS_WEBAPPLICATION = 62; /** TRUE if the database should not be accessible via the standard URL syntax */ public static final int DBOPTBIT_HIDE_FROM_WEB = 63; /** TRUE if database contains one or more enabled background agent */ public static final int DBOPTBIT_ENABLED_BGAGENT = 64; /** database supports LZ1 compression.
*/ public static final int DBOPTBIT_LZ1 = 65; /** TRUE if database has default language */ public static final int DBOPTBIT_HTTP_DBHAS_DEFLANG = 66; /** TRUE if database design refresh is only on admin server */ public static final int DBOPTBIT_REFRESH_DESIGN_ON_ADMIN = 67; /** TRUE if shared template should be actively used to merge in design. */ public static final int DBOPTBIT_ACTIVE_SHARED_TEMPLATE = 68; /** TRUE to allow the use of themes when displaying the application. */ public static final int DBOPTBIT_APPLY_THEMES = 69; /** TRUE if unread marks replicate */ public static final int DBOPTBIT_UNREAD_REPLICATION = 70; /** TRUE if unread marks replicate out of the cluster */ public static final int DBOPTBIT_UNREAD_REP_OUT_OF_CLUSTER = 71; /** TRUE, if the mail file is a migrated one from Exchange */ public static final int DBOPTBIT_IS_MIGRATED_EXCHANGE_MAILFILE = 72; /** TRUE, if the mail file is a migrated one from Exchange */ public static final int DBOPTBIT_NEED_EX_NAMEFIXUP = 73; /** TRUE, if out of office service is enabled in a mail file */ public static final int DBOPTBIT_OOS_ENABLED = 74; /** TRUE if Support Response Threads is enabled in database */ public static final int DBOPTBIT_SUPPORT_RESP_THREADS = 75; /**TRUE if the database search is disabled<br> * LI 4463.02. Give the admin a mechanism to prevent db search in scenarios * where the db is very large, they don't want to create new views, and they * don't want a full text index */ public static final int DBOPTBIT_NO_SIMPLE_SEARCH = 76; /** TRUE if the database FDO is repaired to proper collation function. */ public static final int DBOPTBIT_FDO_REPAIRED = 77; /** TRUE if the policy settings have been removed from a db with no policies */ public static final int DBOPTBIT_POLICIES_REMOVED = 78; /** TRUE if Superblock is compressed. */ public static final int DBOPTBIT_COMPRESSED_SUPERBLOCK = 79; /** TRUE if design note non-summary should be compressed */ public static final int DBOPTBIT_COMPRESSED_DESIGN_NS = 80; /** TRUE if the db has opted in to use DAOS */ public static final int DBOPTBIT_DAOS_ENABLED = 81; /** TRUE if all data documents in database should be compressed (compare with DBOPTBIT_COMPRESSED_DESIGN_NS) */ public static final int DBOPTBIT_COMPRESSED_DATA_DOCS = 82; /** TRUE if views in this database should be skipped by server-side update task */ public static final int DBOPTBIT_DISABLE_AUTO_VIEW_UPDS = 83; /** if TRUE, Domino can suspend T/L check for DAOS items because the dbtarget is expendable */ public static final int DBOPTBIT_DAOS_LOGGING_NOT_REQD = 84; /** TRUE if exporting of view data is to be disabled */ public static final int DBOPTBIT_DISABLE_VIEW_EXPORT = 85; /** TRUE if database is a NAB which contains config information, groups, and mailin databases but where users are stored externally. */ public static final int DBOPTBIT_USERLESS2_NAB = 86; /** LLN2 specific, added to this codestream to reserve this value */ public static final int DBOPTBIT_ADVANCED_PROP_OVERRIDE = 87; /** Turn off VerySoftDeletes for ODS51 */ public static final int DBOPTBIT_NO_VSD = 88; /** NSF is to be used as a cache */ public static final int DBOPTBIT_LOCAL_CACHE = 89; /** Set to force next compact to be out of place. Initially done for ODS upgrade of in use Dbs, but may have other uses down the road. The next compact will clear this bit, it is transitory.
*/ public static final int DBOPTBIT_COMPACT_NO_INPLACE = 90; /** from LLN2 */ public static final int DBOPTBIT_NEEDS_ZAP_LSN = 91; /** set to indicate this is a system db (eg NAB, mail.box, etc) so we don't rely on the db name */ public static final int DBOPTBIT_IS_SYSTEM_DB = 92; /** TRUE if the db has opted in to use PIRC */ public static final int DBOPTBIT_PIRC_ENABLED = 93; /** from lln2 */ public static final int DBOPTBIT_DBMT_FORCE_FIXUP = 94; /** TRUE if the db has likely a complete design replication - for PIRC control */ public static final int DBOPTBIT_DESIGN_REPLICATED = 95; /** on the 1-&gt;0 transition rename the file (for LLN2 keep in sync please) */ public static final int DBOPTBIT_MARKED_FOR_PENDING_DELETE = 96; public static final int DBOPTBIT_IS_NDX_DB = 97; /** move NIF containers &amp; collection objects out of nsf into .ndx db */ public static final int DBOPTBIT_SPLIT_NIF_DATA = 98; /** NIFNSF is off but not all containers have been moved out yet */ public static final int DBOPTBIT_NIFNSF_OFF = 99; /** Inlined indexing exists for this DB */ public static final int DBOPTBIT_INLINE_INDEX = 100; /** db solr search enabled */ public static final int DBOPTBIT_SOLR_SEARCH = 101; /** init solr index done */ public static final int DBOPTBIT_SOLR_SEARCH_INIT_DONE = 102; /** Folder sync enabled for database (sync Drafts, Sent and Trash views to IMAP folders) */ public static final int DBOPTBIT_IMAP_FOLDERSYNC = 103; /** Large Summary Support (LSS) */ public static final int DBOPTBIT_LARGE_BUCKETS_ENABLED = 104; /** Open with scan lock to prevent other opens with scan lock (used by replicator) */ public static final short DBOPEN_WITH_SCAN_LOCK = 0x0001; /** DbPurge while opening */ public static final short DBOPEN_PURGE = 0x0002; /** No user info may be available, so don't ask for it */ public static final short DBOPEN_NO_USERINFO = 0x0004; /** Force a database fixup */ public static final short DBOPEN_FORCE_FIXUP = 0x0008; /** Scan all notes and all items (not incremental) */ public static final short DBOPEN_FIXUP_FULL_NOTE_SCAN = 0x0010; /** Do not delete bad notes during note scan */ public static final short DBOPEN_FIXUP_NO_NOTE_DELETE = 0x0020; /** If open fails try cluster failover */ public static final short DBOPEN_CLUSTER_FAILOVER = 0x0080; /** Close session on error paths */ public static final short DBOPEN_CLOSE_SESS_ON_ERROR = 0x0100; /** don't log errors - used when opening log database! */ public static final short DBOPEN_NOLOG = 0x0200; /** Open and read all information out of the id file */ public static final int SECKFM_open_All = 0x00000001; /** Write information contained in the handle out to the specified ID file */ public static final int SECKFM_close_WriteIdFile = 0x00000001; /** Don't set environment variable used to identify the ID file during process initialization - * usually either ServerKeyFileName or KeyFileName. See SECKFMSwitchToIDFile.
*/ public static final int fKFM_switchid_DontSetEnvVar = 0x00000008; /* Function codes for routine SECKFMGetPublicKey */ public short KFM_pubkey_Primary = 0; public short KFM_pubkey_International = 1; public short fSECToken_EnableRenewal = 0x0001; public int MAXONESEGSIZE = 0xffff - 1-128; public int MQ_MAX_MSGSIZE = MAXONESEGSIZE - 0x50; public short NOPRIORITY = (short) (0xffff & 0xffff); public short LOWPRIORITY = (short) (0xffff & 0xffff); public short HIGHPRIORITY = 0; /* Options to MQGet */ public short MQ_WAIT_FOR_MSG = 0x0001; /* Options to MQOpen */ /** Create the queue if it doesn't exist*/ public int MQ_OPEN_CREATE = 0x00000001; /* Public Queue Names */ /** Prepended to "addin" task name to form task's queue name */ public String TASK_QUEUE_PREFIX = "MQ$"; /** DB Server */ public String SERVER_QUEUE_NAME = "_SERVER"; /** Replicator */ public String REPL_QUEUE_NAME = TASK_QUEUE_PREFIX + "REPLICATOR"; /** Mail Router */ public String ROUTER_QUEUE_NAME = TASK_QUEUE_PREFIX + "ROUTER"; /** Index views &amp; full text process */ public String UPDATE_QUEUE_NAME = TASK_QUEUE_PREFIX + "INDEXER"; /** Login Process */ public String LOGIN_QUEUE_NAME = TASK_QUEUE_PREFIX + "LOGIN"; /** Event process */ public String EVENT_QUEUE_NAME = TASK_QUEUE_PREFIX + "EVENT"; /** Report process */ public String REPORT_QUEUE_NAME = TASK_QUEUE_PREFIX + "REPORTER"; /** Cluster Replicator */ public String CLREPL_QUEUE_NAME = TASK_QUEUE_PREFIX + "CLREPL"; /** Fixup */ public String FIXUP_QUEUE_NAME = TASK_QUEUE_PREFIX + "FIXUP"; /** Collector*/ public String COLLECT_QUEUE_NAME = TASK_QUEUE_PREFIX + "COLLECTOR"; /** NOI Process */ public String NOI_QUEUE_NAME = TASK_QUEUE_PREFIX + "DIIOP"; /** Alarms Cache daemon */ public String ALARM_QUEUE_NAME = TASK_QUEUE_PREFIX + "ALARMS"; /** Monitor */ public String MONITOR_QUEUE_NAME = TASK_QUEUE_PREFIX + "MONITOR"; /** Monitor */ public String MONALARM_QUEUE_NAME = TASK_QUEUE_PREFIX + "MONITORALARM"; /** Admin Panel Daemon (Request Queue) */ public String APDAEMON_REQ_QUEUE = TASK_QUEUE_PREFIX + "APDAEMONREQ"; /** Admin Panel Daemon (File Response Queue) */ public String APDAEMON_FILERES_QUEUE = TASK_QUEUE_PREFIX + "APDAEMONFILERESPONSE"; /** Admin Panel Daemon (Server Response Queue) */ public String APDAEMON_FILEREQ_QUEUE = TASK_QUEUE_PREFIX + "APDAEMONFILEREQUEST"; /** bktasks */ public String BKTASKS_QUEUE_NAME = TASK_QUEUE_PREFIX + "BKTASKS"; /** Red Zone Interface to Collector */ public String RZINTER_QUEUE_NAME = TASK_QUEUE_PREFIX + "RZINTER"; /** Red Zone Extra MQ */ public String RZEXTRA_QUEUE_NAME = TASK_QUEUE_PREFIX + "RZEXTRA"; /** Red Zone Background MQ */ public String RZBG_QUEUE_NAME = TASK_QUEUE_PREFIX + "RZBG"; /** Red Zone Background Extra MQ */ public String RZBGEXTRA_QUEUE_NAME = TASK_QUEUE_PREFIX + "RZBGEXTRA"; /** Monitor */ public String REALTIME_STATS_QUEUE_NAME = TASK_QUEUE_PREFIX + "REALTIME"; /** Runjava (used by ISpy) */ public String RUNJAVA_QUEUE_NAME = TASK_QUEUE_PREFIX + "RUNJAVA"; /** Runjava (used by ISpy) */ public String STATS_QUEUE_NAME = TASK_QUEUE_PREFIX + "STATS"; /** Runjava (used by ISpy) */ public String LOG_SEARCH_QUEUE_NAME = TASK_QUEUE_PREFIX + "LOGSEARCH"; /** Event process */ public String DAEMON_EVENT_QUEUE_NAME = TASK_QUEUE_PREFIX + "DAEMONEVENT"; /** Collector*/ public String DAEMON_COLLECT_QUEUE_NAME = TASK_QUEUE_PREFIX + "DAEMONCOLLECTOR"; /** Dircat */ public String DIRCAT_QUEUE_NAME = TASK_QUEUE_PREFIX + "DIRCAT"; /** Instructs the NSGetServerClusterMates function to not use the cluster name cache * and forces a 
lookup on the target server instead */ public static int CLUSTER_LOOKUP_NOCACHE = 0x00000001; /** Instructs the NSGetServerClusterMates function to only use the cluster name cache * and restricts lookup to the workstation cache */ public static int CLUSTER_LOOKUP_CACHEONLY = 0x00000002; /** Authors can't create new notes (only edit existing ones) */ public short ACL_FLAG_AUTHOR_NOCREATE = 0x0001; /** Entry represents a Server (V4) */ public short ACL_FLAG_SERVER = 0x0002; /** User cannot delete notes */ public short ACL_FLAG_NODELETE = 0x0004; /** User can create personal agents (V4) */ public short ACL_FLAG_CREATE_PRAGENT = 0x0008; /** User can create personal folders (V4) */ public short ACL_FLAG_CREATE_PRFOLDER = 0x0010; /** Entry represents a Person (V4) */ public short ACL_FLAG_PERSON = 0x0020; /** Entry represents a group (V4) */ public short ACL_FLAG_GROUP = 0x0040; /** User can create and update shared views &amp; folders (V4)<br> This allows an Editor to assume some Designer-level access */ public short ACL_FLAG_CREATE_FOLDER = 0x0080; /** User can create LotusScript */ public short ACL_FLAG_CREATE_LOTUSSCRIPT = 0x0100; /** User can read public notes */ public short ACL_FLAG_PUBLICREADER = 0x0200; /** User can write public notes */ public short ACL_FLAG_PUBLICWRITER = 0x0400; /** User CANNOT register monitors for this database */ public short ACL_FLAG_MONITORS_DISALLOWED = 0x800; /** User cannot replicate or copy this database */ public short ACL_FLAG_NOREPLICATE = 0x1000; /** Admin server can modify reader and author fields in db */ public short ACL_FLAG_ADMIN_READERAUTHOR = 0X4000; /** Entry is administration server (V4) */ public short ACL_FLAG_ADMIN_SERVER = (short) (0x8000 & 0xffff); /** User or Server has no access to the database. */ public short ACL_LEVEL_NOACCESS = 0; /** User or Server can add new data documents to a database, but cannot examine the new document or the database. */ public short ACL_LEVEL_DEPOSITOR = 1; /** User or Server can only view data documents in the database. */ public short ACL_LEVEL_READER = 2; /** User or Server can create and/or edit their own data documents and examine existing ones in the database. */ public short ACL_LEVEL_AUTHOR = 3; /** User or Server can create and/or edit any data document. */ public short ACL_LEVEL_EDITOR = 4; /** User or Server can create and/or edit any data document and/or design document. */ public short ACL_LEVEL_DESIGNER = 5; /** User or Server can create and/or maintain any type of database or document, including the ACL. */ public short ACL_LEVEL_MANAGER = 6; /** Highest access level */ public short ACL_LEVEL_HIGHEST = 6; /** Number of access levels */ public short ACL_LEVEL_COUNT = 7; /** * Keys in a COLLECTIONDATA structure are divided into percentiles - divisions * corresponding to one-tenth of the total range of keys - and a table of the keys * marking the divisions is returned with that structure. These constants are provided for indexing into the table. 
*/ public static int PERCENTILE_COUNT = 11; public static int PERCENTILE_0 = 0; public static int PERCENTILE_10 = 1; public static int PERCENTILE_20 = 2; public static int PERCENTILE_30 = 3; public static int PERCENTILE_40 = 4; public static int PERCENTILE_50 = 5; public static int PERCENTILE_60 = 6; public static int PERCENTILE_70 = 7; public static int PERCENTILE_80 = 8; public static int PERCENTILE_90 = 9; public static int PERCENTILE_100 = 10; /* Options used when calling ReplicateWithServer */ /** Receive notes from server (pull) */ public int REPL_OPTION_RCV_NOTES = 0x00000001; /** Send notes to server (push) */ public int REPL_OPTION_SEND_NOTES = 0x00000002; /** Replicate all database files */ public int REPL_OPTION_ALL_DBS = 0x00000004; /** Close sessions when done */ public int REPL_OPTION_CLOSE_SESS = 0x00000040; /** Replicate NTFs as well */ public int REPL_OPTION_ALL_NTFS = 0x00000400; /** Low, Medium &amp; High priority databases */ public int REPL_OPTION_PRI_LOW = 0x00000000; /** Medium &amp; High priority databases only */ public int REPL_OPTION_PRI_MED = 0x00004000; /** High priority databases only */ public int REPL_OPTION_PRI_HI = 0x00008000; /* Use following bits with ReplicateWithServerExt only */ /* 0x00010000-0x8000000 WILL NOT BE HONORED BY V3 SERVERS, BECAUSE V3 ONLY LOOKS AT THE FIRST 16 BITS! */ /** Abstract/truncate docs to summary data and first RTF field. (~40K) */ public int REPL_OPTION_ABSTRACT_RTF = 0x00010000; /** Abstract/truncate docs to summary only data. */ public int REPL_OPTION_ABSTRACT_SMRY = 0x00020000; /** Replicate private documents even if not selected by default. */ public int REPL_OPTION_PRIVATE = 0x00400000; public int REPL_OPTION_ALL_FILES = (REPL_OPTION_ALL_DBS | REPL_OPTION_ALL_NTFS); /** Indirect way to call NEMMessageBox */; public short OS_SIGNAL_MESSAGE = 3; /** Paint busy indicator on screen */ public short OS_SIGNAL_BUSY = 4; /* Definitions specific to busy signal handler */ /** Remove the "File Activity" indicator */ public short BUSY_SIGNAL_FILE_INACTIVE = 0; /** Display the "File Activity" indicator (not supported on all platforms) */ public short BUSY_SIGNAL_FILE_ACTIVE = 1; /** Remove the "Network Activity" indicator. */ public short BUSY_SIGNAL_NET_INACTIVE = 2; /** Display the "Network Activity" indicator. */ public short BUSY_SIGNAL_NET_ACTIVE = 3; /** Display the "Poll" indicator. */ public short BUSY_SIGNAL_POLL = 4; /** Display the "Wan Sending" indicator. */ public short BUSY_SIGNAL_WAN_SENDING = 5; /** Display the "Wan Receiving" indicator. */ public short BUSY_SIGNAL_WAN_RECEIVING = 6; /** Called from NET to see if user cancelled I/O */ public short OS_SIGNAL_CHECK_BREAK = 5; /** Put up and manipulate the system wide progress indicator. */ public short OS_SIGNAL_PROGRESS = 13; /** N/A N/A */ public short PROGRESS_SIGNAL_BEGIN = 0; /** N/A N/A */ public short PROGRESS_SIGNAL_END = 1; /** Range N/A */ public short PROGRESS_SIGNAL_SETRANGE = 2; /** pText1 pText2 - usually NULL. */ public short PROGRESS_SIGNAL_SETTEXT = 3; /** New progress pos N/A */ public short PROGRESS_SIGNAL_SETPOS = 4; /** Delta of progress pos N/A */ public short PROGRESS_SIGNAL_DELTAPOS = 5; /** Total Bytes */ public short PROGRESS_SIGNAL_SETBYTERANGE = 6; /** Bytes Done */ public short PROGRESS_SIGNAL_SETBYTEPOS = 7; public short OS_SIGNAL_REPL = 15; /* Definitions for replication state signal handler */ /* pText1 pText2. 
*/ /** None */ public short REPL_SIGNAL_IDLE = 0; /** None */ public short REPL_SIGNAL_PICKSERVER = 1; /** pServer pPort */ public short REPL_SIGNAL_CONNECTING = 2; /** pServer pPort */ public short REPL_SIGNAL_SEARCHING = 3; /** pServerFile pLocalFile */ public short REPL_SIGNAL_SENDING = 4; /** pServerFile pLocalFile */ public short REPL_SIGNAL_RECEIVING = 5; /** pSrcFile */ public short REPL_SIGNAL_SEARCHINGDOCS = 6; /** pLocalFile pReplFileStats */ public short REPL_SIGNAL_DONEFILE = 7; /** pServerFile pLocalFile */ public short REPL_SIGNAL_REDIRECT = 8; /** None */ public short REPL_SIGNAL_BUILDVIEW = 9; /** None */ public short REPL_SIGNAL_ABORT = 10; public int HTMLAPI_PROP_TEXTLENGTH = 0; public int HTMLAPI_PROP_NUMREFS = 1; public int HTMLAPI_PROP_USERAGENT_LEN = 3; public int HTMLAPI_PROP_USERAGENT = 4; public int HTMLAPI_PROP_BINARYDATA = 6; public int HTMLAPI_PROP_MIMEMAXLINELENSEEN = 102; int CAI_Start = 0; int CAI_StartKey = 1; int CAI_Count = 2; int CAI_Expand = 3; int CAI_FullyExpand = 4; int CAI_ExpandView = 5; int CAI_Collapse = 6; int CAI_CollapseView = 7; int CAI_3PaneUI = 8; int CAI_TargetFrame = 9; int CAI_FieldElemType = 10; int CAI_FieldElemFormat = 11; int CAI_SearchQuery = 12; int CAI_OldSearchQuery = 13; int CAI_SearchMax = 14; int CAI_SearchWV = 15; int CAI_SearchOrder = 16; int CAI_SearchThesarus = 17; int CAI_ResortAscending = 18; int CAI_ResortDescending = 19; int CAI_ParentUNID = 20; int CAI_Click = 21; int CAI_UserName = 22; int CAI_Password = 23; int CAI_To = 24; int CAI_ISMAPx = 25; int CAI_ISMAPy = 26; int CAI_Grid = 27; int CAI_Date = 28; int CAI_TemplateType = 29; int CAI_TargetUNID = 30; int CAI_ExpandSection = 31; int CAI_Login = 32; int CAI_PickupCert = 33; int CAI_PickupCACert = 34; int CAI_SubmitCert = 35; int CAI_ServerRequest = 36; int CAI_ServerPickup = 37; int CAI_PickupID = 38; int CAI_TranslateForm = 39; int CAI_SpecialAction = 40; int CAI_AllowGetMethod = 41; int CAI_Seq = 42; int CAI_BaseTarget = 43; int CAI_ExpandOutline = 44; int CAI_StartOutline = 45; int CAI_Days = 46; int CAI_TableTab = 47; int CAI_MIME = 48; int CAI_RestrictToCategory = 49; int CAI_Highlight = 50; int CAI_Frame = 51; int CAI_FrameSrc = 52; int CAI_Navigate = 53; int CAI_SkipNavigate = 54; int CAI_SkipCount = 55; int CAI_EndView = 56; int CAI_TableRow = 57; int CAI_RedirectTo = 58; int CAI_SessionId = 59; int CAI_SourceFolder = 60; int CAI_SearchFuzzy = 61; int CAI_HardDelete = 62; int CAI_SimpleView = 63; int CAI_SearchEntry = 64; int CAI_Name = 65; int CAI_Id = 66; int CAI_RootAlias = 67; int CAI_Scope = 68; int CAI_DblClkTarget = 69; int CAI_Charset = 70; int CAI_EmptyTrash = 71; int CAI_EndKey = 72; int CAI_PreFormat = 73; int CAI_ImgIndex = 74; int CAI_AutoFramed = 75; int CAI_OutputFormat = 76; int CAI_InheritParent = 77; int CAI_Last = 78; int kUnknownCmdId = 0; int kOpenServerCmdId = 1; int kOpenDatabaseCmdId = 2; int kOpenViewCmdId = 3; int kOpenDocumentCmdId = 4; int kOpenElementCmdId = 5; int kOpenFormCmdId = 6; int kOpenAgentCmdId = 7; int kOpenNavigatorCmdId = 8; int kOpenIconCmdId = 9; int kOpenAboutCmdId = 10; int kOpenHelpCmdId = 11; int kCreateDocumentCmdId = 12; int kSaveDocumentCmdId = 13; int kEditDocumentCmdId = 14; int kDeleteDocumentCmdId = 15; int kSearchViewCmdId = 16; int kSearchSiteCmdId = 17; int kNavigateCmdId = 18; int kReadFormCmdId = 19; int kRequestCertCmdId = 20; int kReadDesignCmdId = 21; int kReadViewEntriesCmdId = 22; int kReadEntriesCmdId = 23; int kOpenPageCmdId = 24; int kOpenFrameSetCmdId = 25; /** OpenField command for Java 
applet(s) and HAPI */ int kOpenFieldCmdId = 26; int kSearchDomainCmdId = 27; int kDeleteDocumentsCmdId = 28; int kLoginUserCmdId = 29; int kLogoutUserCmdId = 30; int kOpenImageResourceCmdId = 31; int kOpenImageCmdId = 32; int kCopyToFolderCmdId = 33; int kMoveToFolderCmdId = 34; int kRemoveFromFolderCmdId = 35; int kUndeleteDocumentsCmdId = 36; int kRedirectCmdId = 37; int kGetOrbCookieCmdId = 38; int kOpenCssResourceCmdId = 39; int kOpenFileResourceCmdId = 40; int kOpenJavascriptLibCmdId = 41; int kUnImplemented_01 = 42; int kChangePasswordCmdId = 43; int kOpenPreferencesCmdId = 44; int kOpenWebServiceCmdId = 45; int kWsdlCmdId = 46; int kGetImageCmdId = 47; int kNumberOfCmds = 48; /** * arg value is a pointer to a nul-terminated string */ int CAVT_String = 0; /** * arg value is an int */ int CAVT_Int = 1; /** * arg value is a NOTEID */ int CAVT_NoteId = 2; /** * arg value is an UNID */ int CAVT_UNID = 3; /** * arg value is a list of null-terminated strings */ int CAVT_StringList = 4; int UAT_None = 0; int UAT_Server = 1; int UAT_Database = 2; int UAT_View = 3; int UAT_Form = 4; int UAT_Navigator = 5; int UAT_Agent = 6; int UAT_Document = 7; /** internal filename of attachment */ int UAT_Filename = 8; /** external filename of attachment if different */ int UAT_ActualFilename = 9; int UAT_Field = 10; int UAT_FieldOffset = 11; int UAT_FieldSuboffset = 12; int UAT_Page = 13; int UAT_FrameSet = 14; int UAT_ImageResource = 15; int UAT_CssResource = 16; int UAT_JavascriptLib = 17; int UAT_FileResource = 18; int UAT_About = 19; int UAT_Help = 20; int UAT_Icon = 21; int UAT_SearchForm = 22; int UAT_SearchSiteForm = 23; int UAT_Outline = 24; /** must be the last one */ int UAT_NumberOfTypes = 25; int URT_None = 0; int URT_Name = 1; int URT_Unid = 2; int URT_NoteId = 3; int URT_Special = 4; int URT_RepId = 5; int USV_About = 0; int USV_Help = 1; int USV_Icon = 2; int USV_DefaultView = 3; int USV_DefaultForm = 4; int USV_DefaultNav = 5; int USV_SearchForm = 6; int USV_DefaultOutline = 7; int USV_First = 8; int USV_FileField = 9; int USV_NumberOfValues = 10; /** * unknown purpose */ int HTMLAPI_REF_UNKNOWN = 0; /** * A tag HREF= value */ int HTMLAPI_REF_HREF = 1; /** * IMG tag SRC= value */ int HTMLAPI_REF_IMG = 2; /** * (I)FRAME tag SRC= value */ int HTMLAPI_REF_FRAME = 3; /** * Java applet reference */ int HTMLAPI_REF_APPLET = 4; /** * plugin SRC= reference */ int HTMLAPI_REF_EMBED = 5; /** * active object DATA= reference */ int HTMLAPI_REF_OBJECT = 6; /** * BASE tag value */ int HTMLAPI_REF_BASE = 7; /** * BODY BACKGROUND */ int HTMLAPI_REF_BACKGROUND = 8; /** * IMG SRC= value from MIME message */ int HTMLAPI_REF_CID = 9; /** Flag to indicate unique keys. */ public static byte COLLATION_FLAG_UNIQUE = 0x01; /** Flag to indicate only build demand.
*/ public static byte COLLATION_FLAG_BUILD_ON_DEMAND = 0x02; public static byte COLLATION_SIGNATURE = 0x44; /** Collate by key in summary buffer (requires key name string) */ public static byte COLLATE_TYPE_KEY = 0; /** Collate by note ID */ public static byte COLLATE_TYPE_NOTEID = 3; /** Collate by "tumbler" summary key (requires key name string) */ public static byte COLLATE_TYPE_TUMBLER = 6; /** Collate by "category" summary key (requires key name string) */ public static byte COLLATE_TYPE_CATEGORY = 7; public static byte COLLATE_TYPE_MAX = 7; /** True if descending */ public static byte CDF_S_descending = 0; /** False if ascending order (default) */ public static byte CDF_M_descending = 0x01; /** Obsolete - see new constant below */ public static byte CDF_M_caseinsensitive = 0x02; /** If prefix list, then ignore for sorting */ public static byte CDF_M_ignoreprefixes = 0x02; /** Obsolete - see new constant below */ public static byte CDF_M_accentinsensitive = 0x04; /** If set, lists are permuted */ public static byte CDF_M_permuted = 0x08; /** Qualifier if lists are permuted; if set, lists are pairwise permuted, otherwise lists are multiply permuted. */ public static byte CDF_M_permuted_pairwise = 0x10; /** If set, treat as permuted */ public static byte CDF_M_flat_in_v5 = 0x20; /** If set, text compares are case-sensitive */ public static byte CDF_M_casesensitive_in_v5 = 0x40; /** If set, text compares are accent-sensitive */ public static byte CDF_M_accentsensitive_in_v5 = (byte) (0x80 & 0xff); public static byte COLLATE_DESCRIPTOR_SIGNATURE = 0x66; // flags1 values of VIEW_TABLE_FORMAT /** Default to fully collapsed */ public static short VIEW_TABLE_FLAG_COLLAPSED = 0x0001; /** Do not index hierarchically. If FALSE, MUST have NSFFormulaSummaryItem($REF) as LAST item! */ public static short VIEW_TABLE_FLAG_FLATINDEX = 0x0002; /** Display unread flags in margin at ALL levels */ public static short VIEW_TABLE_FLAG_DISP_ALLUNREAD = 0x0004; /** Display replication conflicts. If TRUE, MUST have NSFFormulaSummaryItem($Conflict) as SECOND-TO-LAST item! */ public static short VIEW_TABLE_FLAG_CONFLICT = 0x0008; /** Display unread flags in margin for documents only */ public static short VIEW_TABLE_FLAG_DISP_UNREADDOCS = 0x0010; /** Position to top when view is opened. */ public static short VIEW_TABLE_GOTO_TOP_ON_OPEN = 0x0020; /** Position to bottom when view is opened. */ public static short VIEW_TABLE_GOTO_BOTTOM_ON_OPEN = 0x0040; /** Color alternate rows. */ public static short VIEW_TABLE_ALTERNATE_ROW_COLORING = 0x0080; /** Hide headings. */ public static short VIEW_TABLE_HIDE_HEADINGS = 0x0100; /** Hide left margin. */ public static short VIEW_TABLE_HIDE_LEFT_MARGIN = 0x0200; /** Show simple (background color) headings. */ public static short VIEW_TABLE_SIMPLE_HEADINGS = 0x0400; /** TRUE if LineCount is variable (can be reduced as needed). */ public static short VIEW_TABLE_VARIABLE_LINE_COUNT = 0x0800; /* Refresh flags. * * When both flags are clear, automatic refresh of display on update notification is disabled. * In this case, the refresh indicator will be displayed. * * When VIEW_TABLE_GOTO_TOP_ON_REFRESH is set, the view will be refreshed from the top row of * the collection (as if the user pressed F9 and Ctrl-Home). * * When VIEW_TABLE_GOTO_BOTTOM_ON_REFRESH is set, the view will be refreshed so the bottom row of * the collection is visible (as if the user pressed F9 and Ctrl-End).
* * When BOTH flags are set (done to avoid using another bit in the flags), the view will be * refreshed from the current top row (as if the user pressed F9). */ /** Position to top when view is refreshed. */ public static short VIEW_TABLE_GOTO_TOP_ON_REFRESH = 0x1000; /** Position to bottom when view is refreshed. */ public static short VIEW_TABLE_GOTO_BOTTOM_ON_REFRESH = 0x2000; /** TRUE if last column should be extended to fit the window width. */ public static short VIEW_TABLE_EXTEND_LAST_COLUMN = 0x4000; /** TRUE if the View indexing should work from the Right most column */ public static short VIEW_TABLE_RTLVIEW = (short) (0x8000 & 0xffff); // flags2 values of VIEW_TABLE_FORMAT /** TRUE if we should display no-borders at all on the header */ public static short VIEW_TABLE_FLAT_HEADINGS = 0x0001; /** TRUE if the icons displayed in the view should be colorized */ public static short VIEW_TABLE_COLORIZE_ICONS = 0x0002; /** TRUE if we should not display a search bar for this view */ public static short VIEW_TABLE_HIDE_SB = 0x0004; /** TRUE if we should hide the calendar header */ public static short VIEW_TABLE_HIDE_CAL_HEADER = 0x0008; /** TRUE if view has not been customized (i.e. not saved by Designer) */ public static short VIEW_TABLE_NOT_CUSTOMIZED = 0x0010; /** TRUE if view supports display of partial thread hierarchy (Hannover v8)*/ public static short VIEW_TABLE_SHOW_PARITAL_THREADS = 0x0020; /** show partial index hierarchically, if TRUE */ public static short VIEW_TABLE_FLAG_PARTIAL_FLATINDEX = 0x0020; /** Value for the wSig member of the VIEW_TABLE_FORMAT2 structure. */ public static short VALID_VIEW_FORMAT_SIG = 0x2BAD; /** The VIEW_COLUMN_FORMAT record begins with a WORD value for the Signature of the record.<br> * This symbol specifies the signature of the VIEW_COLUMN_FORMAT record. */ public static short VIEW_COLUMN_FORMAT_SIGNATURE = 0x4356; /** * The VIEW_COLUMN_FORMAT2 record begins with a WORD value for the Signature of the record.<br> * This symbol specifies the signature of the VIEW_COLUMN_FORMAT2 record. */ public static short VIEW_COLUMN_FORMAT_SIGNATURE2 = 0x4357; /** * The VIEW_COLUMN_FORMAT3 record begins with a WORD value for the Signature of the record.<br> * This symbol specifies the signature of the VIEW_COLUMN_FORMAT3 record. */ public static short VIEW_COLUMN_FORMAT_SIGNATURE3 = 0x4358; /** * The VIEW_COLUMN_FORMAT4 record begins with a WORD value for the Signature of the record.<br> * This symbol specifies the signature of the VIEW_COLUMN_FORMAT4 record. */ public static short VIEW_COLUMN_FORMAT_SIGNATURE4 = 0x4359; /** * The VIEW_COLUMN_FORMAT5 record begins with a WORD value for the Signature of the record.<br> * This symbol specifies the signature of the VIEW_COLUMN_FORMAT5 record. */ public static short VIEW_COLUMN_FORMAT_SIGNATURE5 = 0x4360; /* Flags for COLOR_VALUE */ /** Color space is RGB */ public static short COLOR_VALUE_FLAGS_ISRGB = 0x0001; /** This object has no color */ public static short COLOR_VALUE_FLAGS_NOCOLOR = 0x0004; /** Use system default color, ignore color here */ public static short COLOR_VALUE_FLAGS_SYSTEMCOLOR = 0x0008; /** This color has a gradient color that follows */ public static short COLOR_VALUE_FLAGS_HASGRADIENT = 0x0010; /** upper 4 bits are reserved for application specific use */ public static short COLOR_VALUE_FLAGS_APPLICATION_MASK = (short) (0xf000 & 0xffff); /** Defined for Yellow Highlighting, (not reserved).
*/ public static short COLOR_VALUE_FLAGS_RESERVED1 = (short) (0x8000 & 0xffff); /** Defined for Pink Highlighting, (not reserved). */ public static short COLOR_VALUE_FLAGS_RESERVED2 = 0x4000; /** Defined for Blue Highlighting, (not reserved). */ public static short COLOR_VALUE_FLAGS_RESERVED3 = 0x2000; /** Reserved. */ public static short COLOR_VALUE_FLAGS_RESERVED4 = 0x1000; public static short VCF1_M_Sort = 0x0001; public static short VCF1_M_SortCategorize = 0x0002; public static short VCF1_M_SortDescending = 0x0004; public static short VCF1_M_Hidden = 0x0008; public static short VCF1_M_Response = 0x0010; public static short VCF1_M_HideDetail = 0x0020; public static short VCF1_M_Icon = 0x0040; public static short VCF1_M_NoResize = 0x0080; public static short VCF1_M_ResortAscending = 0x0100; public static short VCF1_M_ResortDescending = 0x0200; public static short VCF1_M_Twistie = 0x0400; public static short VCF1_M_ResortToView = 0x0800; public static short VCF1_M_SecondResort = 0x1000; public static short VCF1_M_SecondResortDescending = 0x2000; /* The following 4 constants are obsolete - see new VCF3_ constants below. */ public static short VCF1_M_CaseInsensitiveSort = 0x4000; public static short VCF1_M_AccentInsensitiveSort = (short) (0x8000 & 0xffff); public static short VCF2_M_DisplayAlignment = 0x0003; public static short VCF2_M_SubtotalCode = 0x003c; public static short VCF2_M_HeaderAlignment = 0x00c0; public static short VCF2_M_SortPermute = 0x0100; public static short VCF2_M_SecondResortUniqueSort = 0x0200; public static short VCF2_M_SecondResortCategorized = 0x0400; public static short VCF2_M_SecondResortPermute = 0x0800; public static short VCF2_M_SecondResortPermutePair = 0x1000; public static short VCF2_M_ShowValuesAsLinks = 0x2000; public static short VCF2_M_DisplayReadingOrder = 0x4000; public static short VCF2_M_HeaderReadingOrder = (short) (0x8000 & 0xffff); /* The following InfoType codes are defined for REGGetIDInfo */ /* Note that the Certifier Flag can only exist on a hierarchical ID */ /* and that Certifier, NotesExpress, and Desktop flags are not */ /* present in safe copies of ID files */ public short REGIDGetUSAFlag = 1; /* Data structure returned is BOOL */ public short REGIDGetHierarchicalFlag = 2; /* Data structure returned is BOOL */ public short REGIDGetSafeFlag = 3; /* Data structure returned is BOOL */ public short REGIDGetCertifierFlag = 4; /* Data structure returned is BOOL */ public short REGIDGetNotesExpressFlag = 5; /* Data structure returned is BOOL */ public short REGIDGetDesktopFlag = 6; /* Data structure returned is BOOL */ public short REGIDGetName = 7; /* Data structure returned is char xx[MAXUSERNAME] */ public short REGIDGetPublicKey = 8; /* Data structure returned is char xx[xx] */ public short REGIDGetPrivateKey = 9; /* Data structure returned is char xx[xx] */ public short REGIDGetIntlPublicKey = 10; /* Data structure returned is char xx[xx] */ public short REGIDGetIntlPrivateKey = 11; /** CompoundText is derived from a file */ public int COMP_FROM_FILE = 0x00000001; /** Insert a line break (0) for each line delimiter found in the input text buffer. This preserves input line breaks. */ public int COMP_PRESERVE_LINES = 0x00000002; /** Create a new paragraph for each line delimiter found in the input text buffer. */ public int COMP_PARA_LINE = 0x00000004; /** Create a new paragraph for each blank line found in the input text buffer. 
* A blank line is defined as a line containing just a line delimiter (specified by the * pszLineDelim parameter to CompoundTextAddTextExt). */ public int COMP_PARA_BLANK_LINE = 0x00000008; /** A "hint" follows the comment for a document link. If this flag is set, * the pszComment argument points to the comment string, the terminating NUL ('\0'), * the hint string, and the terminating NUL. */ public int COMP_SERVER_HINT_FOLLOWS = 0x00000010; /** (e.g. Times Roman family) */ public byte FONT_FACE_ROMAN = 0; /** (e.g. Helv family) */ public byte FONT_FACE_SWISS = 1; /** (e.g. Monotype Sans WT) */ public byte FONT_FACE_UNICODE = 2; /** (e.g. Arial) */ public byte FONT_FACE_USERINTERFACE = 3; /** (e.g. Courier family) */ public byte FONT_FACE_TYPEWRITER = 4; /** Use this style ID in CompoundTextAddText to continue using the same paragraph style as the previous paragraph. */ public int STYLE_ID_SAMEASPREV = 0xFFFFFFFF; /* Standard colors -- so useful they're available by name. */ public byte MAX_NOTES_SOLIDCOLORS = 16; public byte NOTES_COLOR_BLACK = 0; public byte NOTES_COLOR_WHITE = 1; public byte NOTES_COLOR_RED = 2; public byte NOTES_COLOR_GREEN = 3; public byte NOTES_COLOR_BLUE = 4; public byte NOTES_COLOR_MAGENTA = 5; public byte NOTES_COLOR_YELLOW = 6; public byte NOTES_COLOR_CYAN = 7; public byte NOTES_COLOR_DKRED = 8; public byte NOTES_COLOR_DKGREEN = 9; public byte NOTES_COLOR_DKBLUE = 10; public byte NOTES_COLOR_DKMAGENTA = 11; public byte NOTES_COLOR_DKYELLOW = 12; public byte NOTES_COLOR_DKCYAN = 13; public byte NOTES_COLOR_GRAY = 14; public byte NOTES_COLOR_LTGRAY = 15; public byte ISBOLD = 0x01; public byte ISITALIC = 0x02; public byte ISUNDERLINE = 0x04; public byte ISSTRIKEOUT = 0x08; public byte ISSUPER = 0x10; public byte ISSUB = 0x20; public byte ISEFFECT = (byte) (0x80 & 0xff); /* Used for implementation of special effect styles */ public byte ISSHADOW = (byte) (0x80 & 0xff); /* Used for implementation of special effect styles */ public byte ISEMBOSS = (byte) (0x90 & 0xff); /* Used for implementation of special effect styles */ public byte ISEXTRUDE = (byte) (0xa0 & 0xff); /* Used for implementation of special effect styles */ /* Paragraph justification type codes */ /** flush left, ragged right */ public short JUSTIFY_LEFT = 0; /** flush right, ragged left */ public short JUSTIFY_RIGHT = 1; /** full block justification */ public short JUSTIFY_BLOCK = 2; /** centered */ public short JUSTIFY_CENTER = 3; /** no line wrapping AT ALL (except hard CRs) */ public short JUSTIFY_NONE = 4; /* One Inch */ public int ONEINCH = (20*72); /* One inch worth of TWIPS */ /* Paragraph Flags */ /** start new page with this par */ public short PABFLAG_PAGINATE_BEFORE = 0x0001; /** don't separate this and next par */ public short PABFLAG_KEEP_WITH_NEXT = 0x0002; /** don't split lines in paragraph */ public short PABFLAG_KEEP_TOGETHER = 0x0004; /** propagate even PAGINATE_BEFORE and KEEP_WITH_NEXT */ public short PABFLAG_PROPAGATE = 0x0008; /** hide paragraph in R/O mode */ public short PABFLAG_HIDE_RO = 0x0010; /** hide paragraph in R/W mode */ public short PABFLAG_HIDE_RW = 0x0020; /** hide paragraph when printing */ public short PABFLAG_HIDE_PR = 0x0040; /** in V4 and below, set if PAB.RightMargin (when nonzero) is to have meaning. Turns out, is set iff para is in a table. Anyway, V5+ no longer use this bit but it matters to V4 and below. V5+ runs with this bit zeroed throughout runtime but, for backward compatibility, outputs it to disk at Save() time per whether paragraph is in a table.
*/ public short PABFLAG_DISPLAY_RM = 0x0080; /* the pab was saved in V4. */ /** set this bit or the Notes client will assume the pab was saved pre-V4 and will thus "link" these bit definitions (assign the right one to the left one) since preview did not exist pre-V4: PABFLAG_HIDE_PV = PABFLAG_HIDE_RO PABFLAG_HIDE_PVE = PABFLAG_HIDE_RW */ public short PABFLAG_HIDE_UNLINK = 0x0100; /** hide paragraph when copying/forwarding */ public short PABFLAG_HIDE_CO = 0x0200; /** display paragraph with bullet */ public short PABFLAG_BULLET = 0x0400; /** use the hide when formula even if there is one. */ public short PABFLAG_HIDE_IF = 0x0800; /** display paragraph with number */ public short PABFLAG_NUMBEREDLIST = 0x1000; /** hide paragraph when previewing*/ public short PABFLAG_HIDE_PV = 0x2000; /** hide paragraph when editing in the preview pane. */ public short PABFLAG_HIDE_PVE = 0x4000; /** hide paragraph from Notes clients */ public short PABFLAG_HIDE_NOTES = (short) (0x8000 & 0xffff); public short PABFLAG_HIDEBITS = (short) ((PABFLAG_HIDE_RO | PABFLAG_HIDE_RW | PABFLAG_HIDE_CO | PABFLAG_HIDE_PR | PABFLAG_HIDE_PV | PABFLAG_HIDE_PVE | PABFLAG_HIDE_IF | PABFLAG_HIDE_NOTES) & 0xffff); public short TABLE_PABFLAGS = (short) (( PABFLAG_KEEP_TOGETHER | PABFLAG_KEEP_WITH_NEXT) & 0xffff); public short LONGRECORDLENGTH = 0x0000; public short WORDRECORDLENGTH = (short) (0xff00 & 0xffff); public short BYTERECORDLENGTH = 0; /* High byte contains record length */ /* Signatures for Composite Records in items of data type COMPOSITE */ public short SIG_CD_PDEF_MAIN = (83 | WORDRECORDLENGTH ) /* Signatures for items used in Property Broker definitions. LI 3925.04 */; public short SIG_CD_PDEF_TYPE = (84 | WORDRECORDLENGTH ); public short SIG_CD_PDEF_PROPERTY = (85 | WORDRECORDLENGTH ); public short SIG_CD_PDEF_ACTION = (86 | WORDRECORDLENGTH ); public short SIG_CD_TABLECELL_DATAFLAGS = (87 | BYTERECORDLENGTH); public short SIG_CD_EMBEDDEDCONTACTLIST = (88 | WORDRECORDLENGTH); public short SIG_CD_IGNORE = (89 | BYTERECORDLENGTH); public short SIG_CD_TABLECELL_HREF2 = (90 | WORDRECORDLENGTH); public short SIG_CD_HREFBORDER = (91 | WORDRECORDLENGTH); public short SIG_CD_TABLEDATAEXTENSION = (92 | WORDRECORDLENGTH); public short SIG_CD_EMBEDDEDCALCTL = (93 | WORDRECORDLENGTH); public short SIG_CD_ACTIONEXT = (94 | WORDRECORDLENGTH); public short SIG_CD_EVENT_LANGUAGE_ENTRY = (95 | WORDRECORDLENGTH); public short SIG_CD_FILESEGMENT = (96 | LONGRECORDLENGTH); public short SIG_CD_FILEHEADER = (97 | LONGRECORDLENGTH); public short SIG_CD_DATAFLAGS = (98 | BYTERECORDLENGTH); public short SIG_CD_BACKGROUNDPROPERTIES = (99 | BYTERECORDLENGTH); public short SIG_CD_EMBEDEXTRA_INFO = (100 | WORDRECORDLENGTH); public short SIG_CD_CLIENT_BLOBPART = (101 | WORDRECORDLENGTH); public short SIG_CD_CLIENT_EVENT = (102 | WORDRECORDLENGTH); public short SIG_CD_BORDERINFO_HS = (103 | WORDRECORDLENGTH); public short SIG_CD_LARGE_PARAGRAPH = (104 | WORDRECORDLENGTH); public short SIG_CD_EXT_EMBEDDEDSCHED = (105 | WORDRECORDLENGTH); public short SIG_CD_BOXSIZE = (106 | BYTERECORDLENGTH); public short SIG_CD_POSITIONING = (107 | BYTERECORDLENGTH); public short SIG_CD_LAYER = (108 | BYTERECORDLENGTH); public short SIG_CD_DECSFIELD = (109 | WORDRECORDLENGTH); public short SIG_CD_SPAN_END = (110 | BYTERECORDLENGTH) /* Span End */; public short SIG_CD_SPAN_BEGIN = (111 | BYTERECORDLENGTH) /* Span Begin */; public short SIG_CD_TEXTPROPERTIESTABLE = (112 | WORDRECORDLENGTH) /* Text Properties Table */; public short SIG_CD_HREF2 = (113 | 
WORDRECORDLENGTH); public short SIG_CD_BACKGROUNDCOLOR = (114 | BYTERECORDLENGTH); public short SIG_CD_INLINE = (115 | WORDRECORDLENGTH); public short SIG_CD_V6HOTSPOTBEGIN_CONTINUATION = (116 | WORDRECORDLENGTH); public short SIG_CD_TARGET_DBLCLK = (117 | WORDRECORDLENGTH); public short SIG_CD_CAPTION = (118 | WORDRECORDLENGTH); public short SIG_CD_LINKCOLORS = (119 | WORDRECORDLENGTH); public short SIG_CD_TABLECELL_HREF = (120 | WORDRECORDLENGTH); public short SIG_CD_ACTIONBAREXT = (121 | WORDRECORDLENGTH); public short SIG_CD_IDNAME = (122 | WORDRECORDLENGTH); public short SIG_CD_TABLECELL_IDNAME = (123 | WORDRECORDLENGTH); public short SIG_CD_IMAGESEGMENT = (124 | LONGRECORDLENGTH); public short SIG_CD_IMAGEHEADER = (125 | LONGRECORDLENGTH); public short SIG_CD_V5HOTSPOTBEGIN = (126 | WORDRECORDLENGTH); public short SIG_CD_V5HOTSPOTEND = (127 | BYTERECORDLENGTH); public short SIG_CD_TEXTPROPERTY = (128 | WORDRECORDLENGTH); public short SIG_CD_PARAGRAPH = (129 | BYTERECORDLENGTH); public short SIG_CD_PABDEFINITION = (130 | WORDRECORDLENGTH); public short SIG_CD_PABREFERENCE = (131 | BYTERECORDLENGTH); public short SIG_CD_TEXT = (133 | WORDRECORDLENGTH); public short SIG_CD_HEADER = (142 | WORDRECORDLENGTH); public short SIG_CD_LINKEXPORT2 = (146 | WORDRECORDLENGTH); public short SIG_CD_BITMAPHEADER = (149 | LONGRECORDLENGTH); public short SIG_CD_BITMAPSEGMENT = (150 | LONGRECORDLENGTH); public short SIG_CD_COLORTABLE = (151 | LONGRECORDLENGTH); public short SIG_CD_GRAPHIC = (153 | LONGRECORDLENGTH); public short SIG_CD_PMMETASEG = (154 | LONGRECORDLENGTH); public short SIG_CD_WINMETASEG = (155 | LONGRECORDLENGTH); public short SIG_CD_MACMETASEG = (156 | LONGRECORDLENGTH); public short SIG_CD_CGMMETA = (157 | LONGRECORDLENGTH); public short SIG_CD_PMMETAHEADER = (158 | LONGRECORDLENGTH); public short SIG_CD_WINMETAHEADER = (159 | LONGRECORDLENGTH); public short SIG_CD_MACMETAHEADER = (160 | LONGRECORDLENGTH); public short SIG_CD_TABLEBEGIN = (163 | BYTERECORDLENGTH); public short SIG_CD_TABLECELL = (164 | BYTERECORDLENGTH); public short SIG_CD_TABLEEND = (165 | BYTERECORDLENGTH); public short SIG_CD_STYLENAME = (166 | BYTERECORDLENGTH); public short SIG_CD_STORAGELINK = (196 | WORDRECORDLENGTH); public short SIG_CD_TRANSPARENTTABLE = (197 | LONGRECORDLENGTH); public short SIG_CD_HORIZONTALRULE = (201 | WORDRECORDLENGTH); public short SIG_CD_ALTTEXT = (202 | WORDRECORDLENGTH); public short SIG_CD_ANCHOR = (203 | WORDRECORDLENGTH); public short SIG_CD_HTMLBEGIN = (204 | WORDRECORDLENGTH); public short SIG_CD_HTMLEND = (205 | WORDRECORDLENGTH); public short SIG_CD_HTMLFORMULA = (206 | WORDRECORDLENGTH); public short SIG_CD_NESTEDTABLEBEGIN = (207 | BYTERECORDLENGTH); public short SIG_CD_NESTEDTABLECELL = (208 | BYTERECORDLENGTH); public short SIG_CD_NESTEDTABLEEND = (209 | BYTERECORDLENGTH); public short SIG_CD_COLOR = (210 | BYTERECORDLENGTH); public short SIG_CD_TABLECELL_COLOR = (211 | BYTERECORDLENGTH); /* 212 thru 219 reserved for BSIG'S - don't use until we hit 255 */ public short SIG_CD_BLOBPART = (220 | WORDRECORDLENGTH); public short SIG_CD_BEGIN = (221 | BYTERECORDLENGTH); public short SIG_CD_END = (222 | BYTERECORDLENGTH); public short SIG_CD_VERTICALALIGN = (223 | BYTERECORDLENGTH); public short SIG_CD_FLOATPOSITION = (224 | BYTERECORDLENGTH); public short SIG_CD_TIMERINFO = (225 | BYTERECORDLENGTH); public short SIG_CD_TABLEROWHEIGHT = (226 | BYTERECORDLENGTH); public short SIG_CD_TABLELABEL = (227 | WORDRECORDLENGTH); public short SIG_CD_BIDI_TEXT = (228 | WORDRECORDLENGTH); 
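/* Illustrative sketch only, not part of the original Notes C API constant set: the SIG_CD_* values
 * above pack the CD record type into the low byte of a signature WORD and a length-encoding marker
 * into the high byte (BYTERECORDLENGTH: the high byte itself carries the record length;
 * WORDRECORDLENGTH = 0xff00: the length is stored separately after the signature;
 * LONGRECORDLENGTH = 0x0000: a larger length value follows the signature). The helper names below
 * are made up for illustration. */
static int cdRecordTypeOf(short signatureWord) {
	// the low byte of the signature WORD identifies the CD record type (e.g. 113 for SIG_CD_HREF2)
	return signatureWord & 0xff;
}
static boolean hasWordLengthField(short signatureWord) {
	// a high byte of 0xff marks records whose length is stored as a separate WORD after the signature
	return (signatureWord & 0xff00) == (WORDRECORDLENGTH & 0xff00);
}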
public short SIG_CD_BIDI_TEXTEFFECT = (229 | WORDRECORDLENGTH); public short SIG_CD_REGIONBEGIN = (230 | WORDRECORDLENGTH); public short SIG_CD_REGIONEND = (231 | WORDRECORDLENGTH); public short SIG_CD_TRANSITION = (232 | WORDRECORDLENGTH); public short SIG_CD_FIELDHINT = (233 | WORDRECORDLENGTH); public short SIG_CD_PLACEHOLDER = (234 | WORDRECORDLENGTH); public short SIG_CD_EMBEDDEDOUTLINE = (236 | WORDRECORDLENGTH); public short SIG_CD_EMBEDDEDVIEW = (237 | WORDRECORDLENGTH); public short SIG_CD_CELLBACKGROUNDDATA = (238 | WORDRECORDLENGTH); /* Signatures for Frameset CD records */ public short SIG_CD_FRAMESETHEADER = (239 | WORDRECORDLENGTH); public short SIG_CD_FRAMESET = (240 | WORDRECORDLENGTH); public short SIG_CD_FRAME = (241 | WORDRECORDLENGTH); /* Signature for Target Frame info on a link */ public short SIG_CD_TARGET = (242 | WORDRECORDLENGTH); public short SIG_CD_MAPELEMENT = (244 | WORDRECORDLENGTH); public short SIG_CD_AREAELEMENT = (245 | WORDRECORDLENGTH); public short SIG_CD_HREF = (246 | WORDRECORDLENGTH); public short SIG_CD_EMBEDDEDCTL = (247 | WORDRECORDLENGTH); public short SIG_CD_HTML_ALTTEXT = (248 | WORDRECORDLENGTH); public short SIG_CD_EVENT = (249 | WORDRECORDLENGTH); public short SIG_CD_PRETABLEBEGIN = (251 | WORDRECORDLENGTH); public short SIG_CD_BORDERINFO = (252 | WORDRECORDLENGTH); public short SIG_CD_EMBEDDEDSCHEDCTL = (253 | WORDRECORDLENGTH); public short SIG_CD_EXT2_FIELD = (254 | WORDRECORDLENGTH) /* Currency, numeric, and data/time extensions */; public short SIG_CD_EMBEDDEDEDITCTL = (255 | WORDRECORDLENGTH); /* Can not go beyond 255. However, there may be room at the beginning of the list. Check there. */ /* Signatures for Composite Records that are reserved internal records, */ /* whose format may change between releases. 
*/ public short SIG_CD_DOCUMENT_PRE_26 = (128 | BYTERECORDLENGTH); public short SIG_CD_FIELD_PRE_36 = (132 | WORDRECORDLENGTH); public short SIG_CD_FIELD = (138 | WORDRECORDLENGTH); public short SIG_CD_DOCUMENT = (134 | BYTERECORDLENGTH); public short SIG_CD_METAFILE = (135 | WORDRECORDLENGTH); public short SIG_CD_BITMAP = (136 | WORDRECORDLENGTH); public short SIG_CD_FONTTABLE = (139 | WORDRECORDLENGTH); public short SIG_CD_LINK = (140 | BYTERECORDLENGTH); public short SIG_CD_LINKEXPORT = (141 | BYTERECORDLENGTH); public short SIG_CD_KEYWORD = (143 | WORDRECORDLENGTH); public short SIG_CD_LINK2 = (145 | WORDRECORDLENGTH); public short SIG_CD_CGM = (147 | WORDRECORDLENGTH); public short SIG_CD_TIFF = (148 | LONGRECORDLENGTH); public short SIG_CD_PATTERNTABLE = (152 | LONGRECORDLENGTH); public short SIG_CD_DDEBEGIN = (161 | WORDRECORDLENGTH); public short SIG_CD_DDEEND = (162 | WORDRECORDLENGTH); public short SIG_CD_OLEBEGIN = (167 | WORDRECORDLENGTH); public short SIG_CD_OLEEND = (168 | WORDRECORDLENGTH); public short SIG_CD_HOTSPOTBEGIN = (169 | WORDRECORDLENGTH); public short SIG_CD_HOTSPOTEND = (170 | BYTERECORDLENGTH); public short SIG_CD_BUTTON = (171 | WORDRECORDLENGTH); public short SIG_CD_BAR = (172 | WORDRECORDLENGTH); public short SIG_CD_V4HOTSPOTBEGIN = (173 | WORDRECORDLENGTH); public short SIG_CD_V4HOTSPOTEND = (174 | BYTERECORDLENGTH); public short SIG_CD_EXT_FIELD = (176 | WORDRECORDLENGTH); public short SIG_CD_LSOBJECT = (177 | WORDRECORDLENGTH)/* Compiled LS code*/; public short SIG_CD_HTMLHEADER = (178 | WORDRECORDLENGTH) /* Raw HTML */; public short SIG_CD_HTMLSEGMENT = (179 | WORDRECORDLENGTH); public short SIG_CD_LAYOUT = (183 | BYTERECORDLENGTH); public short SIG_CD_LAYOUTTEXT = (184 | BYTERECORDLENGTH); public short SIG_CD_LAYOUTEND = (185 | BYTERECORDLENGTH); public short SIG_CD_LAYOUTFIELD = (186 | BYTERECORDLENGTH); public short SIG_CD_PABHIDE = (187 | WORDRECORDLENGTH); public short SIG_CD_PABFORMREF = (188 | BYTERECORDLENGTH); public short SIG_CD_ACTIONBAR = (189 | BYTERECORDLENGTH); public short SIG_CD_ACTION = (190 | WORDRECORDLENGTH); public short SIG_CD_DOCAUTOLAUNCH = (191 | WORDRECORDLENGTH); public short SIG_CD_LAYOUTGRAPHIC = (192 | BYTERECORDLENGTH); public short SIG_CD_OLEOBJINFO = (193 | WORDRECORDLENGTH); public short SIG_CD_LAYOUTBUTTON = (194 | BYTERECORDLENGTH); public short SIG_CD_TEXTEFFECT = (195 | WORDRECORDLENGTH); public short SIG_ACTION_HEADER = (129 | BYTERECORDLENGTH); public short SIG_ACTION_MODIFYFIELD = (130 | WORDRECORDLENGTH); public short SIG_ACTION_REPLY = (131 | WORDRECORDLENGTH); public short SIG_ACTION_FORMULA = (132 | WORDRECORDLENGTH); public short SIG_ACTION_LOTUSSCRIPT = (133 | WORDRECORDLENGTH); public short SIG_ACTION_SENDMAIL = (134 | WORDRECORDLENGTH); public short SIG_ACTION_DBCOPY = (135 | WORDRECORDLENGTH); public short SIG_ACTION_DELETE = (136 | BYTERECORDLENGTH); public short SIG_ACTION_BYFORM = (137 | WORDRECORDLENGTH); public short SIG_ACTION_MARKREAD = (138 | BYTERECORDLENGTH); public short SIG_ACTION_MARKUNREAD = (139 | BYTERECORDLENGTH); public short SIG_ACTION_MOVETOFOLDER = (140 | WORDRECORDLENGTH); public short SIG_ACTION_COPYTOFOLDER = (141 | WORDRECORDLENGTH); public short SIG_ACTION_REMOVEFROMFOLDER = (142 | WORDRECORDLENGTH); public short SIG_ACTION_NEWSLETTER = (143 | WORDRECORDLENGTH); public short SIG_ACTION_RUNAGENT = (144 | WORDRECORDLENGTH); public short SIG_ACTION_SENDDOCUMENT = (145 | BYTERECORDLENGTH); public short SIG_ACTION_FORMULAONLY = (146 | WORDRECORDLENGTH); public short 
SIG_ACTION_JAVAAGENT = (147 | WORDRECORDLENGTH); public short SIG_ACTION_JAVA = (148 | WORDRECORDLENGTH); /* Signatures for items of type TYPE_VIEWMAP_DATASET */ public short SIG_VIEWMAP_DATASET = (87 | WORDRECORDLENGTH); /* Signatures for items of type TYPE_VIEWMAP */ public short SIG_CD_VMHEADER = (175 | BYTERECORDLENGTH); public short SIG_CD_VMBITMAP = (176 | BYTERECORDLENGTH); public short SIG_CD_VMRECT = (177 | BYTERECORDLENGTH); public short SIG_CD_VMPOLYGON_BYTE = (178 | BYTERECORDLENGTH); public short SIG_CD_VMPOLYLINE_BYTE = (179 | BYTERECORDLENGTH); public short SIG_CD_VMREGION = (180 | BYTERECORDLENGTH); public short SIG_CD_VMACTION = (181 | BYTERECORDLENGTH); public short SIG_CD_VMELLIPSE = (182 | BYTERECORDLENGTH); public short SIG_CD_VMRNDRECT = (184 | BYTERECORDLENGTH); public short SIG_CD_VMBUTTON = (185 | BYTERECORDLENGTH); public short SIG_CD_VMACTION_2 = (186 | WORDRECORDLENGTH); public short SIG_CD_VMTEXTBOX = (187 | WORDRECORDLENGTH); public short SIG_CD_VMPOLYGON = (188 | WORDRECORDLENGTH); public short SIG_CD_VMPOLYLINE = (189 | WORDRECORDLENGTH); public short SIG_CD_VMPOLYRGN = (190 | WORDRECORDLENGTH); public short SIG_CD_VMCIRCLE = (191 | BYTERECORDLENGTH); public short SIG_CD_VMPOLYRGN_BYTE = (192 | BYTERECORDLENGTH); /* Signatures for alternate CD sequences*/ public short SIG_CD_ALTERNATEBEGIN = (198 | WORDRECORDLENGTH); public short SIG_CD_ALTERNATEEND = (199 | BYTERECORDLENGTH); public short SIG_CD_OLERTMARKER = (200 | WORDRECORDLENGTH); public short CDIMAGETYPE_GIF = 1; public short CDIMAGETYPE_JPEG = 2; public short CDIMAGETYPE_BMP = 3; /* Version control of graphic header */ public byte CDGRAPHIC_VERSION1 = 0; public byte CDGRAPHIC_VERSION2 = 1; public byte CDGRAPHIC_VERSION3 = 2; /* The following flag indicates that the DestSize field contains pixel values instead of twips. 
*/ public byte CDGRAPHIC_FLAG_DESTSIZE_IS_PIXELS = 0x01; public byte CDGRAPHIC_FLAG_SPANSLINES = 0x02; /* HOTSPOT_RUN Types */ public short HOTSPOTREC_TYPE_POPUP = 1; public short HOTSPOTREC_TYPE_HOTREGION = 2; public short HOTSPOTREC_TYPE_BUTTON = 3; public short HOTSPOTREC_TYPE_FILE = 4; public short HOTSPOTREC_TYPE_SECTION = 7; public short HOTSPOTREC_TYPE_ANY = 8; public short HOTSPOTREC_TYPE_HOTLINK = 11; public short HOTSPOTREC_TYPE_BUNDLE = 12; public short HOTSPOTREC_TYPE_V4_SECTION = 13; public short HOTSPOTREC_TYPE_SUBFORM = 14; public short HOTSPOTREC_TYPE_ACTIVEOBJECT = 15; public short HOTSPOTREC_TYPE_OLERICHTEXT = 18; public short HOTSPOTREC_TYPE_EMBEDDEDVIEW = 19; /* embedded view */ public short HOTSPOTREC_TYPE_EMBEDDEDFPANE = 20; /* embedded folder pane */ public short HOTSPOTREC_TYPE_EMBEDDEDNAV = 21; /* embedded navigator */ public short HOTSPOTREC_TYPE_MOUSEOVER = 22; public short HOTSPOTREC_TYPE_FILEUPLOAD = 24; /* file upload placeholder */ public short HOTSPOTREC_TYPE_EMBEDDEDOUTLINE = 27; /* embedded outline */ public short HOTSPOTREC_TYPE_EMBEDDEDCTL = 28; /* embedded control window */ public short HOTSPOTREC_TYPE_EMBEDDEDCALENDARCTL = 30; /* embedded calendar control (date picker) */ public short HOTSPOTREC_TYPE_EMBEDDEDSCHEDCTL = 31; /* embedded scheduling control */ public short HOTSPOTREC_TYPE_RCLINK = 32; /* Not a new type, but renamed for V5 terms*/ public short HOTSPOTREC_TYPE_EMBEDDEDEDITCTL = 34; /* embedded editor control */ public short HOTSPOTREC_TYPE_CONTACTLISTCTL = 36; /* Embeddeble buddy list */ public int HOTSPOTREC_RUNFLAG_BEGIN = 0x00000001; public int HOTSPOTREC_RUNFLAG_END = 0x00000002; public int HOTSPOTREC_RUNFLAG_BOX = 0x00000004; public int HOTSPOTREC_RUNFLAG_NOBORDER = 0x00000008; public int HOTSPOTREC_RUNFLAG_FORMULA = 0x00000010; /* Popup is a formula, not text. */ public int HOTSPOTREC_RUNFLAG_MOVIE = 0x00000020; /* File is a QuickTime movie. */ public int HOTSPOTREC_RUNFLAG_IGNORE = 0x00000040; /* Run is for backward compatibility (i.e. ignore the run) */ public int HOTSPOTREC_RUNFLAG_ACTION = 0x00000080; /* Hot region executes a canned action */ public int HOTSPOTREC_RUNFLAG_SCRIPT = 0x00000100; /* Hot region executes a script. */ public int HOTSPOTREC_RUNFLAG_INOTES = 0x00001000; public int HOTSPOTREC_RUNFLAG_ISMAP = 0x00002000; public int HOTSPOTREC_RUNFLAG_INOTES_AUTO = 0x00004000; public int HOTSPOTREC_RUNFLAG_ISMAP_INPUT = 0x00008000; public int HOTSPOTREC_RUNFLAG_SIGNED = 0x00010000; public int HOTSPOTREC_RUNFLAG_ANCHOR = 0x00020000; public int HOTSPOTREC_RUNFLAG_COMPUTED = 0x00040000; /* Used in conjunction with computed hotspots. */ public int HOTSPOTREC_RUNFLAG_TEMPLATE = 0x00080000; /* used in conjunction with embedded navigator panes. */ public int HOTSPOTREC_RUNFLAG_HIGHLIGHT = 0x00100000; public int HOTSPOTREC_RUNFLAG_EXTACTION = 0x00200000; /* Hot region executes an extended action */ public int HOTSPOTREC_RUNFLAG_NAMEDELEM = 0x00400000; /* Hot link to a named element */ /* Allow R6 dual action type buttons, e.g. 
client LotusScript, web JS */ public int HOTSPOTREC_RUNFLAG_WEBJAVASCRIPT = 0x00800000; public int HOTSPOTREC_RUNFLAG_ODSMASK = 0x00FFFFFC; /* Mask for bits stored on disk*/ /* CDCAPTION - Text to display with an object (e.g., a graphic) */ public byte CAPTION_POSITION_BELOW_CENTER = 0; /* Centered below object */ public byte CAPTION_POSITION_MIDDLE_CENTER = 1; /* Centered on object */ /** Force operation, even if destination "up to date" */ public int DESIGN_FORCE = 0x00000001; /** Return an error if the template is not found */ public int DESIGN_ERR_TMPL_NOT_FOUND = 0x00000008 ; /* NSF File Information Buffer size. This buffer is defined to contain Text (host format) that is NULL-TERMINATED. This is the ONLY null-terminated field in all of NSF. */ public int NSF_INFO_SIZE = 128; /* Define argument to NSFDbInfoParse/Modify to manipulate components from DbInfo */ /** database title */ public short INFOPARSE_TITLE = 0; /** database categories */ public short INFOPARSE_CATEGORIES = 1; /** template name (for a design template database) */ public short INFOPARSE_CLASS = 2; /** inherited template name (for a database that inherited its design from a design template) */ public short INFOPARSE_DESIGN_CLASS = 3; /* Define NSF DB open modes */ /** hDB refers to a normal database file */ public short DB_LOADED = 1; /** hDB refers to a "directory" and not a file */ public short DB_DIRECTORY = 2; // Flags that control behavior of the calendar APIs - Used when APIS take iCalendar input to modify calendar data public int CAL_WRITE_COMPLETE_REPLACE = 0x00000001; // Used when APIs modify entry data via CalUpdateEntry. // This flag means that NO data is preserved from the original entry and the // resulting entry is 100% a product of the iCalendar passed in. // NOTE: When this flag is NOT used, some content may be preserved during an // update if that particular content was not included in the iCalendar input. // This includes: // Body // Attachments // Custom data properties as specified in $CSCopyItems public int CAL_WRITE_DISABLE_IMPLICIT_SCHEDULING = 0x00000002; // Used when APIs create or modify calendar entries where the organizer is the mailfile owner. // When a calendar entry is modified with CAL_WRITE_DISABLE_IMPLICIT_SCHEDULING set, no notices are // sent (invites, updates, reschedules, cancels, etc) // Note: This is not intended for cases where you are saving a meeting as a draft (since there is currently // not a capability to then send it later. It will also not allow some notices to go out but other notices // not to go out (such as, send invites to added invitees but dont send updates to existing invitees). // Rather, this is targeted at callers that prefer to be responsible for sending out notices themselves through // a separate mechanism public int CAL_WRITE_IGNORE_VERIFY_DB = 0x00000004; // Used when APIs create or modify entries on the calendar // This will allow creation/modification of calendar entries, even if the database is not a mailfile public int CAL_WRITE_USE_ALARM_DEFAULTS = 0x00000008; // this flag will disregard VALARM information in the iCalendar and use the user's default alarm settings for // created or updated entries. // Flags that control behavior of the calendar APIs - Used when opening a note handle for calendar data public int CAL_NOTEOPEN_HANDLE_NOSPLIT = 0x00000001; // Used when getting a handle via CalOpenNoteHandle (Handy for read-only cases) // When a specific instance of a recurring entry is requested, the underlying note may represent multiple // instances. 
Default behavior makes appropriate modifications so that the returned handle represents // a single instance (but this might cause notes to be created or modified as a side effect). // Using CAL_NOTEOPEN_HANDLE_NOSPLIT will bypass any note creations or modifications and return a note handle // that may represent more than a single instance on the calendar. // Flags that control behavior of the calendar APIs that return iCalendar data for an entry or notice public int CAL_READ_HIDE_X_LOTUS = 0x00000001; // Used when APIs generate iCalendar // By default, some X-LOTUS properties and parameters will be included in iCalendar data // returned by these APIs. CAL_READ_HIDE_X_LOTUS causes all X-LOTUS properties and // parameters to be removed from the generated iCalendar data. // Note: This overrides CAL_READ_INCLUDE_X_LOTUS public int CAL_READ_INCLUDE_X_LOTUS = 0x00000002; // Used when APIs generate iCalendar // Include all Lotus specific properties like X-LOTUS-UPDATE-SEQ, X-LOTUS-UPDATE_WISL, etc // in the generated iCalendar data. // These properties are NOT included by default in any iCalendar data returned by the APIs. // Caution: Unless the caller knows how to use these it can be dangerous since their presence will // be honored and can cause issues if not updated properly. // Ignored if CAL_READ_HIDE_X_LOTUS is also specified. public int CAL_READ_SKIP_RESPONSE_DATA = 0x00000004; // RESERVED: This functionality is not currently in plan // When generating ATTENDEE info in CalReadEntry, determine and populate response // Status (which might be a performance hit) public int READ_RANGE_MASK_DTSTART = 0x00000001; public int READ_RANGE_MASK_DTEND = 0x00000002; public int READ_RANGE_MASK_DTSTAMP = 0x00000004; public int READ_RANGE_MASK_SUMMARY = 0x00000008; public int READ_RANGE_MASK_CLASS = 0x00000010; public int READ_RANGE_MASK_PRIORITY = 0x00000020; public int READ_RANGE_MASK_RECURRENCE_ID = 0x00000040; public int READ_RANGE_MASK_SEQUENCE = 0x00000080; public int READ_RANGE_MASK_LOCATION = 0x00000100; public int READ_RANGE_MASK_TRANSP = 0x00000200; public int READ_RANGE_MASK_CATEGORY = 0x00000400; public int READ_RANGE_MASK_APPTTYPE = 0x00000800; public int READ_RANGE_MASK_NOTICETYPE = 0x00001000; public int READ_RANGE_MASK_STATUS = 0x00002000; public int READ_RANGE_MASK_ONLINE_URL = 0x00004000; // Includes online meeting URL as well as any online meeting password or conf ID public int READ_RANGE_MASK_NOTESORGANIZER = 0x00008000; // Note: For performance reasons, the organizer may not be stored in ORGANIZER but rather in // X-LOTUS-ORGANIZER to avoid lookups necessary to get the internet address. public int READ_RANGE_MASK_NOTESROOM = 0x00010000; // Note: For performance reasons, the organizer may not be stored in PARTICIPANT but rather in // X-LOTUS-ROOM to avoid lookups necessary to get the internet address. 
public int READ_RANGE_MASK_ALARM = 0x00020000; // Output alarm information for this entry /* Non-default values - only harvested if requested in dwReturnMaskExt by CalReadRange.*/ public int READ_RANGE_MASK2_HASATTACH = 0x00000001; // X-LOTUS-HASATTACH is set to 1 if there are any file attachments for this entry public int READ_RANGE_MASK2_UNID = 0x00000002; // X-LOTUS-UNID will always be set for notices (as it is used as the identifier for // a notice), but setting this flag will also set X-LOTUS-UNID for calendar entries, // where this will be set with the UNID of the note that currently contains this // instance (can be used to construct a URL to open the instance in Notes, for instance) public int CAL_PROCESS_ACCEPT = 0x00000002; /* Accept (regardless of conflicts) * For Information update notices or confirm notices, this will apply the changes to the relavent * calendar entry. * Used by the organizer to accept a counter proposal. */ public int CAL_PROCESS_TENTATIVE = 0x00000004; /* Tentatively accept (regardless of conflicts) */ public int CAL_PROCESS_DECLINE = 0x00000008; /* Decline * Can be used by the organizer to decline a counter if done from a counter notice */ public int CAL_PROCESS_DELEGATE = 0x00000010; /* Delegate to EXT_CALACTION_DATA::pszDelegateTo */ public int CAL_PROCESS_COUNTER = 0x00000020; /* Counter to a new time (requires populating EXT_CALACTION_DATA::ptdChangeTo values) */ public int CAL_PROCESS_REQUESTINFO = 0x00000040; /* Request updated information from the organizer for this meeting. * Also used by the organizer to respond to a request for updated info. */ public int CAL_PROCESS_REMOVECANCEL = 0x00000080; /* This will process a cancelation notice, removing the meeting from the calendar */ public int CAL_PROCESS_DELETE = 0x00000100; /* This will physically delete a meeting from the calendar. This will NOT send notices out */ public int CAL_PROCESS_SMARTREMOVE = 0x00000200; /* This will remove the meeting or appointment from the calendar and send notices if * necessary. * It is treated as a CAL_PROCESS_CANCEL if the entry is a meeting the mailfile * owner is the organizer of. * It is treated as a CAL_PROCESS_DECLINE if the entry is a meeting that the mailfile * owner is not the organizer of except when the entry is a broadcast. In that case it * is treated as a CAL_PROCESS_DELETE. * It is treated as a CAL_PROCESS_DELETE if the entry is a non-meeting */ public int CAL_PROCESS_CANCEL = 0x00000400; /* This will cancel a meeting that the mailfile owner is the organizer of */ public int CAL_PROCESS_UPDATEINVITEES = 0x00002000; /* This will update the invitee lists on the specified entry (or entries) to include or remove * those users specified in lists contained in the EXT_CALACTION_DATA::pAddNames and * EXT_CALACTION_DATA::pRemoveNames values */ public int CAL_ACTION_DO_OVERWRITE_CHECK = 0x00000001; /* Indicates that a check should be performed when processing the action to determine * if an overwrite of invitee changes to the entry will occur. */ public int CAL_ACTION_UPDATE_ALL_PARTICIPANTS = 0x00000002; /* New in 9.01 release. Used to indicate that current entry participants should be notified of changes * to the participant list in addition to those being added or removed. 
*/ /* Range values for actions on recurring entries */ public int RANGE_REPEAT_CURRENT = 0; /* Modifying just this instance */ public int RANGE_REPEAT_ALL = 1; /* Modifying all instances */ public int RANGE_REPEAT_PREV = 2; /* Modifying current + previous */ public int RANGE_REPEAT_FUT = 3; /* Modifying current + future */ public short MIME_PART_VERSION = 2; /** Mime part has boundary. */ public int MIME_PART_HAS_BOUNDARY = 0x00000001; /** Mime part has headers. */ public int MIME_PART_HAS_HEADERS = 0x00000002; /** Mime part has body in database object. */ public int MIME_PART_BODY_IN_DBOBJECT = 0x00000004; /** Mime part has shared database object. Used only with MIME_PART_BODY_IN_DBOBJECT. */ public int MIME_PART_SHARED_DBOBJECT = 0x00000008; /* Used only with MIME_PART_BODY_IN_DBOBJECT. */ /** Skip for conversion. */ public int MIME_PART_SKIP_FOR_CONVERSION = 0x00000010; /* only used during MIME->CD conversion */ //The mime part type cPartType within the MIME_PART structure. /** Mime part type is a prolog. */ public byte MIME_PART_PROLOG = 1; /** Mime part type is a body. */ public byte MIME_PART_BODY = 2; /** Mime part type is a epilog. */ public byte MIME_PART_EPILOG = 3; /** Mime part type is retrieve information. */ public byte MIME_PART_RETRIEVE_INFO = 4; /** Mime part type is a message. */ public byte MIME_PART_MESSAGE = 5; public int OOOPROF_MAX_BODY_SIZE = 32767; // Buffers passed into OOOGetGeneralSubject should be this size /* Item values to pass into OSGetExtIntlFormat(..) */ public byte EXT_AM_STRING = 1; /* Request for AM String */ public byte EXT_PM_STRING = 2; /* Request for PM String */ public byte EXT_CURRENCY_STRING = 3; /* Request for Currency String */ public byte MONTH_NAMES = 4; /* Request for Month Names */ public byte ABBR_MONTH_NAMES = 5; /* Request for abbreviated month names */ public byte WEEKDAY_NAMES = 6; /* Request for weekday names */ public byte ABBR_WEEKDAY_NAMES = 7; /* Request for abbreviated weekday names */ public byte CALENDARTYPE = 8; /* Request for Calendar Type, see CALENDAR_XXX types below */ public byte ERANAME = 9; /* Request for Asian Native Calendar Name */ public byte ABBRERANAME = 10; /* Request for abbreviated Asian Native Calendar Name*/ /* CalendarType */ public byte CALENDAR_NONE = 0; public byte CALENDAR_JAPAN = 1; public byte CALENDAR_TAIWAN = 2; public byte CALENDAR_THAI = 3; public byte CALENDAR_KOREA = 4; public static int MAX_ITEMDEF_SEGMENTS = 25; /** Open the Stream for Read */ public int MIME_STREAM_OPEN_READ = 0x00000001; /** Open the Stream for Write */ public int MIME_STREAM_OPEN_WRITE = 0x00000002; /** Include MIME Headers */ public int MIME_STREAM_MIME_INCLUDE_HEADERS = 0x00000010; /** Include RFC822 Headers */ public int MIME_STREAM_RFC2822_INCLUDE_HEADERS = 0x00000020; /** Include RFC822, MIME Headers */ public int MIME_STREAM_INCLUDE_HEADERS = (MIME_STREAM_MIME_INCLUDE_HEADERS|MIME_STREAM_RFC2822_INCLUDE_HEADERS); public int MIME_STREAM_SUCCESS = 0; public int MIME_STREAM_EOS = 1; public int MIME_STREAM_IO = 2; /* Define the MIME stream itemize options. */ public int MIME_STREAM_NO_DELETE_ATTACHMENTS = 0x00000001; public int MIME_STREAM_ITEMIZE_HEADERS = 0x00000002; public int MIME_STREAM_ITEMIZE_BODY = 0x00000004; public int MIME_STREAM_ITEMIZE_FULL = (MIME_STREAM_ITEMIZE_HEADERS|MIME_STREAM_ITEMIZE_BODY); }
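/*
 * A minimal, hedged sketch of how the HOTSPOTREC_RUNFLAG_* bit masks declared above are
 * meant to be combined and tested. "NotesCAPI" is only a stand-in for the constants
 * interface that ends here (its real name is outside this excerpt); the flag names, their
 * meanings and the OR/AND idiom are taken from the declarations and comments above.
 */
class HotspotRunFlagSketch {
    /** True when the run both begins a hotspot and executes a canned action. */
    static boolean isActionHotspotStart(int runFlags) {
        return (runFlags & NotesCAPI.HOTSPOTREC_RUNFLAG_BEGIN) != 0
                && (runFlags & NotesCAPI.HOTSPOTREC_RUNFLAG_ACTION) != 0;
    }

    /** Flags are OR-ed together; ODSMASK strips the bits that are not stored on disk. */
    static int signedAnchorFlags() {
        int flags = NotesCAPI.HOTSPOTREC_RUNFLAG_SIGNED | NotesCAPI.HOTSPOTREC_RUNFLAG_ANCHOR;
        return flags & NotesCAPI.HOTSPOTREC_RUNFLAG_ODSMASK;
    }
}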
package br.ufg.inf.saep.dao; import br.ufg.inf.es.saep.sandbox.dominio.*; import br.ufg.inf.saep.config.DBConfig; import br.ufg.inf.saep.db.DBConnection; import br.ufg.inf.saep.tools.MongoDocumentSerializer; import com.mongodb.client.MongoCollection; import com.mongodb.client.MongoDatabase; import org.bson.Document; import org.bson.types.BasicBSONList; import java.util.ArrayList; public class ParecerDAO implements ParecerRepository { private static ParecerDAO instance = new ParecerDAO(); private MongoDatabase db = DBConnection.getConnection().getDatabase(); private MongoCollection<Document> radocCollection = db.getCollection(DBConfig.RADOC_COLLECTION); private MongoCollection<Document> parecerCollection = db.getCollection(DBConfig.PARECER_COLLECTION); private MongoDocumentSerializer mds = new MongoDocumentSerializer(); public static ParecerDAO getInstance() { return instance; } private ParecerDAO() { } public void adicionaNota(String id, Nota nota) { Document query = new Document("id", id); Document notaDocument = mds.toDocument(nota, "Nota"); Document avaliavelJSON = mds.toDocument(nota.getItemOriginal(), "Avaliavel"); Document parecerDocument = parecerCollection.find(query).first(); if (parecerDocument == null){ throw new IdentificadorDesconhecido(id); } BasicBSONList newNotas = new BasicBSONList(); ArrayList<Document> notasDocument = (ArrayList<Document>) parecerDocument.get("notas"); boolean inserted = false; for (Document notaDoc : notasDocument){ Nota notaParecer = mds.fromDocument(notaDoc, Nota.class); Document originalJSON = mds.toDocument(notaParecer.getItemOriginal(), "Avaliavel"); if (!originalJSON.toJson().equals(avaliavelJSON.toJson())){ newNotas.add(mds.toDocument(notaParecer, "Nota")); } else{ newNotas.add(notaDocument); inserted = true; } } if (!inserted){ newNotas.add(notaDocument); } Document newNotasDocument = new Document("notas", newNotas); parecerCollection.updateOne(query, new Document("$set", newNotasDocument)); } public void removeNota(String id, Avaliavel original) { Document query = new Document("id", id); Document avaliavelJSON = mds.toDocument(original, "Avaliavel"); BasicBSONList newNotas = new BasicBSONList(); Document parecerDocument = parecerCollection.find(query).first(); ArrayList<Document> notasDocument = (ArrayList<Document>) parecerDocument.get("notas"); for (Document notaDoc : notasDocument){ Nota nota = mds.fromDocument(notaDoc, Nota.class); Document originalJSON = mds.toDocument(nota.getItemOriginal(), "Avaliavel"); if (!originalJSON.toJson().equals(avaliavelJSON.toJson())){ newNotas.add(mds.toDocument(nota, "Nota")); } } Document newNotasDocument = new Document("notas", newNotas); parecerCollection.updateOne(query, new Document("$set", newNotasDocument)); } public void persisteParecer(Parecer parecer) { long findings = parecerCollection.count(new Document("id", parecer.getId())); if (findings > 0){ throw new IdentificadorExistente("id"); } parecerCollection.insertOne(mds.toDocument(parecer, "Parecer")); } public void atualizaFundamentacao(String parecer, String fundamentacao) { Document query = new Document("id", parecer); Document update = new Document("$set", new Document("fundamentacao", fundamentacao)); Document originalDoc = parecerCollection.findOneAndUpdate(query, update); if (originalDoc == null){ throw new IdentificadorDesconhecido(parecer); } } public Parecer byId(String id) { Document query = new Document("id", id); Document parecerDocument = parecerCollection.find(query).first(); if (parecerDocument == null) return null; return 
mds.fromDocument(parecerDocument, Parecer.class); } public void removeParecer(String id) { parecerCollection.deleteOne(new Document("id", id)); } public Radoc radocById(String identificador) { Document query = new Document("id", identificador); Document resolucaoDocument = radocCollection.find(query).first(); if (resolucaoDocument == null) return null; return mds.fromDocument(resolucaoDocument, Radoc.class); } public String persisteRadoc(Radoc radoc) throws IdentificadorExistente{ Document query = new Document("id", radoc.getId()); Document resolucaoDocument = radocCollection.find(query).first(); if (resolucaoDocument != null){ throw new IdentificadorExistente("id"); } radocCollection.insertOne(mds.toDocument(radoc, "Radoc")); return radoc.getId(); } public void removeRadoc(String identificador) throws RuntimeException { Document query = new Document("radocs", identificador); long findings = parecerCollection.count(query); if (findings > 0){ throw new ExisteParecerReferenciandoRadoc(identificador); } radocCollection.deleteOne(new Document("id", identificador)); } }
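/*
 * A hedged usage sketch for the ParecerDAO above. getInstance(), byId() and
 * atualizaFundamentacao() all exist in the class; the identifier string and the
 * replacement text passed in here are purely illustrative values.
 */
class ParecerDAOUsageSketch {
    public static void main(String[] args) {
        ParecerDAO dao = ParecerDAO.getInstance();
        // byId returns null when no document matches the given id.
        Parecer parecer = dao.byId("parecer-001");
        if (parecer != null) {
            // Throws IdentificadorDesconhecido if the id disappears between the two calls.
            dao.atualizaFundamentacao("parecer-001", "Fundamentacao revisada.");
        }
    }
}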
package cc.redpen.intellij; import cc.redpen.RedPen; import org.junit.Test; import static org.junit.Assert.assertEquals; public class RedPenProviderTest { private RedPenProvider provider = RedPenProvider.getInstance(); @Test public void allConfigFilesAreLoaded() throws Exception { assertEquals("en", provider.getConfig("en").getKey()); assertEquals("ja", provider.getConfig("ja").getKey()); assertEquals("ja.hankaku", provider.getConfig("ja.hankaku").getKey()); assertEquals("ja.zenkaku2", provider.getConfig("ja.zenkaku2").getKey()); } @Test public void getRedPenFor_autodetectsLanguage() throws Exception { provider.setAutodetect(true); RedPen redPen = provider.getRedPenFor("Hello"); assertEquals("en", redPen.getConfiguration().getKey()); redPen = provider.getRedPenFor(""); assertEquals("ja", redPen.getConfiguration().getKey()); } @Test public void languageAutodetectionCanBeDisabled() throws Exception { provider.setAutodetect(false); RedPen redPen = provider.getRedPenFor(""); assertEquals("en", redPen.getConfiguration().getKey()); } }
package checkdep.parse; import java.util.Arrays; import java.util.Collection; import java.util.stream.Collectors; import checkdep.util.CollectionBase; public class ExcludePackages extends CollectionBase<ExcludePackage> { public static ExcludePackages of(String... directory) { return ExcludePackages.of(Arrays.asList(directory)); } public static ExcludePackages of(Collection<String> strings) { return new ExcludePackages(strings.stream() .map(item -> new ExcludePackage(item)) .collect(Collectors.toSet())); } private ExcludePackages(Collection<ExcludePackage> directories) { super(directories); } }
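/*
 * A small, hedged example of the two of() factory overloads declared above; the package
 * names are placeholders. Duplicate entries collapse because the collection overload
 * gathers the mapped ExcludePackage values into a Set.
 */
class ExcludePackagesUsageSketch {
    public static void main(String[] args) {
        // Varargs overload: each string becomes one ExcludePackage entry.
        ExcludePackages fromVarargs =
                ExcludePackages.of("com.example.generated", "com.example.vendor");
        // Collection overload: the repeated name yields a single entry.
        ExcludePackages fromCollection =
                ExcludePackages.of(java.util.Arrays.asList("com.example.generated", "com.example.generated"));
    }
}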
package co.ootf.garmin; import javastrava.api.v3.auth.AuthorisationService; import javastrava.api.v3.auth.impl.retrofit.AuthorisationServiceImpl; import javastrava.api.v3.auth.model.Token; import javastrava.api.v3.model.StravaUploadResponse; import javastrava.api.v3.service.Strava; import javastrava.api.v3.service.exception.BadRequestException; import org.apache.commons.lang3.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import java.io.File; public class GarminUploadApp { public static final boolean PRIVATE_DEBUG = true; public static final String DATA_TYPE = "fit"; private static final Log LOG = LogFactory.getLog(GarminUploadApp.class); private static PropertiesService propertiesService = new PropertiesService(); public static void main(String[] args) { File activitiesFolder = null; String deviceName = null; if (args.length > 0) { String path = args[0]; if (StringUtils.isEmpty(path)) { LOG.error("No path specified"); System.exit(-1); } File devicePathFile = new File(path); deviceName = devicePathFile.getName(); if (!path.endsWith("/")) { path = path + "/"; } activitiesFolder = new File(path + propertiesService.getProperties().getProperty(PropertiesService.DEVICE_ACTIVITIES + deviceName)); } LOG.info(deviceName); Token token = null; int clientId = Integer.parseInt(propertiesService.getProperties().getProperty(PropertiesService.API_CLIENTID)); String secret = propertiesService.getProperties().getProperty(PropertiesService.API_SECRET); String code = propertiesService.getProperties().getProperty(PropertiesService.API_CODE); try { AuthorisationService service = new AuthorisationServiceImpl(); token = service.tokenExchange(clientId, secret, code); } catch (BadRequestException e) { LOG.error("Please register a Strava Application at https://www.strava.com/settings/api", e); System.exit(-1); } Strava strava = new Strava(token); int fileCount = 0; if (activitiesFolder != null && activitiesFolder.listFiles().length > 0) { for (File file : activitiesFolder.listFiles()) { LOG.info("Attempting to upload file #" + fileCount++ + " for device " + deviceName); final StravaUploadResponse uploadResponse = strava.upload(null, null, null, PRIVATE_DEBUG, false, DATA_TYPE, null, file); LOG.debug("*** UPLOAD RESPONSE: " + uploadResponse.getStatus()); LOG.debug("*** UPLOAD ERROR: " + uploadResponse.getError()); if (uploadResponse.getError() == null) { LOG.info("Upload appears to have been successful, deleting file."); file.delete(); } } } else { LOG.info("No files found on this device."); } } }
package com.akiban.ais.model; import com.akiban.util.ArgumentValidation; import java.util.*; public class UserTable extends Table { public static UserTable create(AkibanInformationSchema ais, String schemaName, String tableName, Integer tableId) { UserTable userTable = new UserTable(ais, schemaName, tableName, tableId); ais.addUserTable(userTable); return userTable; } public UserTable(AkibanInformationSchema ais, String schemaName, String tableName, Integer tableId) { super(ais, schemaName, tableName, tableId); migrationUsage = MigrationUsage.AKIBAN_STANDARD; } @Override public boolean isUserTable() { return true; } @Override protected void addIndex(TableIndex index) { super.addIndex(index); if (index.isPrimaryKey()) { assert primaryKey == null; primaryKey = new PrimaryKey(index); } } /** * Returns the columns in this table that are constrained to match the given column, e.g. * customer.cid and order.cid. These will be ordered by the table they appear on, root to leaf. * The given column will itself be in the resulting list. The list is calculated anew each time * and may be modified as needed by the caller. * @param column the column for which to find matching columns. * @return a new list of columns equivalent to the given column, including that column itself. */ List<Column> matchingColumns(Column column) { // TODO: make this a AISValidation check ArgumentValidation.isTrue(column + " doesn't belong to " + getName(), column.getTable() == this); List<Column> matchingColumns = new ArrayList<Column>(); matchingColumns.add(column); findMatchingAncestorColumns(column, matchingColumns); findMatchingDescendantColumns(column, matchingColumns); Collections.sort(matchingColumns, COLUMNS_BY_TABLE_DEPTH); return matchingColumns; } private void findMatchingAncestorColumns(Column fromColumn, List<Column> matchingColumns) { Join join = ((UserTable)fromColumn.getTable()).getParentJoin(); if (join != null) { JoinColumn ancestorJoinColumn = null; for (JoinColumn joinColumn : join.getJoinColumns()) { if (joinColumn.getChild() == fromColumn) { ancestorJoinColumn = joinColumn; } } if (ancestorJoinColumn != null) { Column ancestorColumn = ancestorJoinColumn.getParent(); matchingColumns.add(ancestorColumn); findMatchingAncestorColumns(ancestorJoinColumn.getParent(), matchingColumns); } } } private void findMatchingDescendantColumns(Column fromColumn, List<Column> matchingColumns) { for (Join join : getChildJoins()) { JoinColumn descendantJoinColumn = null; for (JoinColumn joinColumn : join.getJoinColumns()) { if (joinColumn.getParent() == fromColumn) { descendantJoinColumn = joinColumn; } } if (descendantJoinColumn != null) { Column descendantColumn = descendantJoinColumn.getChild(); matchingColumns.add(descendantColumn); findMatchingDescendantColumns(descendantJoinColumn.getChild(), matchingColumns); } } } public void setSize(int size) { this.size = size; } public int getSize() { return size; } public void addCandidateParentJoin(Join parentJoin) { candidateParentJoins.add(parentJoin); } public void addCandidateChildJoin(Join childJoin) { candidateChildJoins.add(childJoin); } public List<Join> getCandidateParentJoins() { return Collections.unmodifiableList(candidateParentJoins); } public List<Join> getCandidateChildJoins() { return Collections.unmodifiableList(candidateChildJoins); } public Join getParentJoin() { Join parentJoin = null; Group group = getGroup(); if (group != null) { for (Join candidateParentJoin : candidateParentJoins) { if (candidateParentJoin.getGroup() == group) { parentJoin = 
candidateParentJoin; } } } return parentJoin; } public List<Join> getChildJoins() { List<Join> childJoins = new ArrayList<Join>(); Group group = getGroup(); if (group != null) { for (Join candidateChildJoin : candidateChildJoins) { if (candidateChildJoin.getGroup() == group) { childJoins.add(candidateChildJoin); } } } return childJoins; } public Column getAutoIncrementColumn() { Column autoIncrementColumn = null; for (Column column : getColumns()) { if (column.getInitialAutoIncrementValue() != null) { autoIncrementColumn = column; } } return autoIncrementColumn; } @Override public Collection<TableIndex> getIndexes() { Collection<TableIndex> indexes = super.getIndexes(); return removeInternalColumnIndexes(indexes); } public Collection<TableIndex> getIndexesIncludingInternal() { return super.getIndexes(); } @Override public TableIndex getIndex(String indexName) { TableIndex index = null; if (indexName.equals(Index.PRIMARY_KEY_CONSTRAINT)) { // getPrimaryKey has logic for handling Akiban PK PrimaryKey primaryKey = getPrimaryKey(); index = primaryKey == null ? null : primaryKey.getIndex(); } else { index = super.getIndex(indexName); } return index; } public boolean isDescendantOf(UserTable other) { if (getGroup() == null || !getGroup().equals(other.getGroup())) { return false; } UserTable possibleDescendant = this; while (possibleDescendant != null) { if (possibleDescendant.equals(other)) { return true; } possibleDescendant = possibleDescendant.parentTable(); } return false; } public Index getIndexIncludingInternal(String indexName) { return super.getIndex(indexName); } @Override public void traversePreOrder(Visitor visitor) { for (Column column : getColumns()) { visitor.visitColumn(column); } for (Index index : getIndexes()) { visitor.visitIndex(index); index.traversePreOrder(visitor); } } @Override public void traversePostOrder(Visitor visitor) { for (Column column : getColumns()) { visitor.visitColumn(column); } for (Index index : getIndexes()) { index.traversePostOrder(visitor); visitor.visitIndex(index); } } public void setInitialAutoIncrementValue(Long initialAutoIncrementValue) { for (Column column : getColumns()) { if (column.getInitialAutoIncrementValue() != null) { column.setInitialAutoIncrementValue(initialAutoIncrementValue); } } } public synchronized PrimaryKey getPrimaryKey() { PrimaryKey declaredPrimaryKey = primaryKey; if (declaredPrimaryKey != null) { List<IndexColumn> pkColumns = primaryKey.getIndex().getColumns(); if (pkColumns.size() == 1 && pkColumns.get(0).getColumn().isAkibanPKColumn()) { declaredPrimaryKey = null; } } return declaredPrimaryKey; } public synchronized PrimaryKey getPrimaryKeyIncludingInternal() { return primaryKey; } public synchronized void endTable() { // Creates a PK for a pk-less table. if (primaryKey == null) { // Find primary key index TableIndex primaryKeyIndex = null; for (TableIndex index : getIndexesIncludingInternal()) { if (index.isPrimaryKey()) { primaryKeyIndex = index; } } if (primaryKeyIndex == null) { primaryKeyIndex = createAkibanPrimaryKeyIndex(); } assert primaryKeyIndex != null : this; primaryKey = new PrimaryKey(primaryKeyIndex); } } public synchronized Integer getDepth() { if (depth == null && getGroup() != null) { depth = getParentJoin() == null ? 
0 : getParentJoin().getParent().getDepth() + 1; } return depth; } public Boolean isLookupTable() { return migrationUsage == MigrationUsage.AKIBAN_LOOKUP_TABLE; } public Boolean isRoot() { return getGroup() == null || getParentJoin() == null; } public void setLookupTable(Boolean isLookup) { setMigrationUsage(isLookup ? MigrationUsage.AKIBAN_LOOKUP_TABLE : MigrationUsage.AKIBAN_STANDARD); } public MigrationUsage getMigrationUsage() { return migrationUsage; } public void setMigrationUsage(MigrationUsage migrationUsage) { assert (this.migrationUsage != MigrationUsage.INCOMPATIBLE || migrationUsage == MigrationUsage.INCOMPATIBLE) : "cannot change migration usage from INCOMPATIBLE to " + migrationUsage; this.migrationUsage = migrationUsage; } public void setEngine(String engine) { this.engine = engine; } public HKey hKey() { assert getGroup() != null; if (hKey == null) { computeHKey(); } return hKey; } // An HKey in terms of group table columns, for a branch of a group, terminating with this user table. public HKey branchHKey() { if (branchHKey == null) { // Construct an hkey in which group columns replace user columns. branchHKey = new HKey(this); for (HKeySegment userHKeySegment : hKey().segments()) { HKeySegment branchHKeySegment = branchHKey.addSegment(userHKeySegment.table()); for (HKeyColumn userHKeyColumn : userHKeySegment.columns()) { branchHKeySegment.addColumn(userHKeyColumn.column().getGroupColumn()); } } } return branchHKey; } public List<Column> allHKeyColumns() { assert getGroup() != null; assert getPrimaryKeyIncludingInternal() != null; if (allHKeyColumns == null) { allHKeyColumns = new ArrayList<Column>(); for (HKeySegment segment : hKey().segments()) { for (HKeyColumn hKeyColumn : segment.columns()) { allHKeyColumns.add(hKeyColumn.column()); } } } return allHKeyColumns; } public boolean containsOwnHKey() { hKey(); // Force computation of hKey and containsOwnHKey return containsOwnHKey; } public UserTable parentTable() { Join join = getParentJoin(); return join == null ? null : join.getParent(); } @SuppressWarnings("unused") private UserTable() { // XXX: GWT requires empty constructor } private void computeHKey() { hKey = new HKey(this); List<Column> hKeyColumns = new ArrayList<Column>(); if (!isRoot()) { // Start with the parent's hkey Join join = getParentJoin(); HKey parentHKey = join.getParent().hKey(); // Start forming this table's full hkey by including all of the parent hkey columns, but replacing // columns participating in the join (to this table) by columns from this table. for (HKeySegment parentHKeySegment : parentHKey.segments()) { HKeySegment segment = hKey.addSegment(parentHKeySegment.table()); for (HKeyColumn parentHKeyColumn : parentHKeySegment.columns()) { Column columnInChild = join.getMatchingChild(parentHKeyColumn.column()); Column segmentColumn = columnInChild == null ? parentHKeyColumn.column() : columnInChild; segment.addColumn(segmentColumn); hKeyColumns.add(segmentColumn); } } } // This table's hkey also includes any PK columns not already included. HKeySegment newSegment = hKey.addSegment(this); for (Column pkColumn : getPrimaryKeyIncludingInternal().getColumns()) { if (!hKeyColumns.contains(pkColumn)) { newSegment.addColumn(pkColumn); } } // Determine whether the table contains its own hkey, i.e., whether all hkey columns come from this table. 
containsOwnHKey = true; for (HKeySegment segment : hKey().segments()) { for (HKeyColumn hKeyColumn : segment.columns()) { if (hKeyColumn.column().getTable() != this) { containsOwnHKey = false; } } } } private TableIndex createAkibanPrimaryKeyIndex() { // Create a column for a PK Column pkColumn = Column.create(this, Column.AKIBAN_PK_NAME, getColumns().size(), Types.BIGINT); // adds column to table pkColumn.setNullable(false); // Create an index for the PK column // Starting index should be id 1 int maxIndexId = 0; for (Index index : getIndexes()) { if (index.getIndexId() > maxIndexId) { maxIndexId = index.getIndexId(); } } TableIndex pkIndex = TableIndex.create(ais, this, Index.PRIMARY_KEY_CONSTRAINT, maxIndexId + 1, true, Index.PRIMARY_KEY_CONSTRAINT); IndexColumn pkIndexColumn = new IndexColumn(pkIndex, pkColumn, 0, true, null); pkIndex.addColumn(pkIndexColumn); return pkIndex; } private static Collection<TableIndex> removeInternalColumnIndexes(Collection<TableIndex> indexes) { Collection<TableIndex> declaredIndexes = new ArrayList<TableIndex>(indexes); for (Iterator<TableIndex> iterator = declaredIndexes.iterator(); iterator.hasNext();) { TableIndex index = iterator.next(); List<IndexColumn> indexColumns = index.getColumns(); if (indexColumns.size() == 1 && indexColumns.get(0).getColumn().isAkibanPKColumn()) { iterator.remove(); } } return declaredIndexes; } // State private int size; private List<Join> candidateParentJoins = new ArrayList<Join>(); private List<Join> candidateChildJoins = new ArrayList<Join>(); private PrimaryKey primaryKey; private transient HKey hKey; private transient boolean containsOwnHKey; private transient HKey branchHKey; private transient List<Column> allHKeyColumns; private transient Integer depth = null; // consts private static final Comparator<Column> COLUMNS_BY_TABLE_DEPTH = new Comparator<Column>() { @Override public int compare(Column o1, Column o2) { return o1.getUserTable().getDepth() - o2.getUserTable().getDepth(); } }; }
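/*
 * A hedged helper sketch built only from methods visible above (parentTable() walks
 * leaf-to-root via the parent join): it collects a table's ancestry and reverses it into
 * the root-to-leaf order used elsewhere in the class, e.g. by matchingColumns(). How the
 * UserTable instance is obtained is assumed to happen elsewhere.
 */
class UserTableAncestrySketch {
    static java.util.List<UserTable> ancestry(UserTable table) {
        java.util.List<UserTable> rootToLeaf = new java.util.ArrayList<UserTable>();
        for (UserTable current = table; current != null; current = current.parentTable()) {
            rootToLeaf.add(current);
        }
        // parentTable() visits leaf first, so reverse to get root-to-leaf order.
        java.util.Collections.reverse(rootToLeaf);
        return rootToLeaf;
    }
}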
package com.clementscode.mmi.swing; import java.awt.BorderLayout; import java.awt.Color; import java.awt.GridLayout; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.KeyEvent; import java.awt.image.BufferedImage; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.net.URL; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Properties; import java.util.Queue; import java.util.concurrent.ConcurrentLinkedQueue; import javax.sound.sampled.UnsupportedAudioFileException; import javax.swing.ImageIcon; import javax.swing.JButton; import javax.swing.JCheckBox; import javax.swing.JComponent; import javax.swing.JFileChooser; import javax.swing.JFrame; import javax.swing.JMenu; import javax.swing.JMenuBar; import javax.swing.JMenuItem; import javax.swing.JOptionPane; import javax.swing.JPanel; import javax.swing.JTextField; import javax.swing.KeyStroke; import javax.swing.Timer; import junk.ExtractFileSubDirectories; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.codehaus.jackson.map.AnnotationIntrospector; import org.codehaus.jackson.map.ObjectMapper; import org.codehaus.jackson.xc.JaxbAnnotationIntrospector; import com.clementscode.mmi.MainGui; import com.clementscode.mmi.res.CategoryItem; import com.clementscode.mmi.res.Session; import com.clementscode.mmi.res.SessionConfig; import com.clementscode.mmi.sound.SoundUtility; import com.clementscode.mmi.util.Shuffler; import com.clementscode.mmi.util.Utils; public class Gui implements ActionListener { private ImageIcon imgIconCenter; private JButton centerButton; private Queue<CategoryItem> itemQueue = null; private Session session = null; private Timer timer; protected Log log = LogFactory.getLog(this.getClass()); private JCheckBox attending; private JFrame frame; private String frameTitle = Messages.getString("Gui.FrameTitle"); //$NON-NLS-1$ private ActionRecorder attendingAction; private ActionRecorder independentAction; private ActionRecorder verbalAction; private ActionRecorder modelingAction; private ActionRecorder noAnswerAction; private ActionRecorder quitAction; private ActionRecorder timerAction; private ActionRecorder openAction; private JPanel mainPanel; private Mediator mediator; private ActionRecorder openHttpAction; private File tmpDir; private ArrayList<JComponent> lstButtons; private ImageIcon iiSmilingFace, iiSmilingFaceClickToBegin; private JTextField tfSessionName; private JTextField tfSessionDataFile; private JButton clickToStartButton; private List<String> lstTempDirectories; public Gui() { String tmpDirStr = "/tmp/mmi"; tmpDir = new File(tmpDirStr); tmpDir.mkdirs(); mediator = new Mediator(this); setupActions(mediator); mainPanel = setupMainPanel(); // TODO: Check to see if there's a logic bug here.... frame = new JFrame(frameTitle); frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); frame.getContentPane().add(mainPanel); setupMenus(); disableButtons(); lstTempDirectories = new ArrayList<String>(); // Register a shutdown thread Runtime.getRuntime().addShutdownHook(new Thread() { // This method is called during shutdown public void run() { // Do shutdown work ... 
Utils.deleteTempDirectories(lstTempDirectories); } }); } private void disableButtons() { for (JComponent jc : lstButtons) { jc.setEnabled(false); } } private void enableButtons() { for (JComponent jc : lstButtons) { jc.setEnabled(true); } } private void setupMenus() { // Create the menu bar. JMenuBar menuBar = new JMenuBar(); // Build the first menu. JMenu menu = new JMenu(Messages.getString("Gui.File")); //$NON-NLS-1$ menu.setMnemonic(KeyEvent.VK_A); menuBar.add(menu); // a group of JMenuItems JMenuItem menuItem = new JMenuItem(openAction); menu.add(menuItem); menuItem = new JMenuItem(openHttpAction); menu.add(menuItem); /* * menuItem = new JMenuItem(crudAction); menu.add(menuItem); */ menuItem = new JMenuItem(quitAction); menuItem.setMnemonic(KeyEvent.VK_B); menu.add(menuItem); menuBar.add(menu); JMenu buttonMenu = new JMenu(Messages.getString("Gui.Buttons")); //$NON-NLS-1$ menuItem = new JMenuItem(attendingAction); buttonMenu.add(menuItem); menuItem = new JMenuItem(independentAction); buttonMenu.add(menuItem); menuItem = new JMenuItem(verbalAction); buttonMenu.add(menuItem); menuItem = new JMenuItem(modelingAction); buttonMenu.add(menuItem); menuItem = new JMenuItem(noAnswerAction); buttonMenu.add(menuItem); menuBar.add(buttonMenu); frame.setJMenuBar(menuBar); frame.pack(); frame.setVisible(true); } private JPanel setupMainPanel() { JPanel panel = new JPanel(); panel.setLayout(new BorderLayout()); JPanel southPanel = new JPanel(); attending = new JCheckBox(attendingAction); southPanel.add(attending); lstButtons = new ArrayList<JComponent>(); lstButtons.add(attending); addButton(southPanel, independentAction); addButton(southPanel, verbalAction); addButton(southPanel, modelingAction); addButton(southPanel, noAnswerAction); JPanel belowSouthPanel = new JPanel(); belowSouthPanel.setLayout(new GridLayout(0, 1)); tfSessionName = new JTextField(30); if (null != session) { tfSessionName.setText(session.getSessionName()); } belowSouthPanel.add(new LabelAndField("Session Name: ", tfSessionName)); tfSessionDataFile = new JTextField(30); try { tfSessionDataFile.setText(session.getSessionDataFile() .getCanonicalPath()); } catch (Exception e1) { // TODO Auto-generated catch block e1.printStackTrace(); } JPanel midBelowSouthPanel = new JPanel(); midBelowSouthPanel.add(new LabelAndField("Session Data File: ", tfSessionDataFile)); JButton browse = new JButton("Browse..."); // browse.setActionCommand(BROWSE_IMAGE_FILE); // browse.addActionListener(this); midBelowSouthPanel.add(browse); belowSouthPanel.add(midBelowSouthPanel); clickToStartButton = new JButton("Click to Start"); belowSouthPanel.add(clickToStartButton); clickToStartButton.setEnabled(false); clickToStartButton.addActionListener(this); // response value. This can be 1 of 4 things: independent (child // answered before the prompt audio), verbal (child answered after the // prompt but before the answer), modeling (child answered anytime after // the answer audio) or the child did not answer. 
JPanel southContainerPanel = new JPanel(); southContainerPanel.setLayout(new GridLayout(0, 1)); southContainerPanel.add(southPanel); southContainerPanel.add(belowSouthPanel); panel.add(southContainerPanel, BorderLayout.SOUTH); byte[] imageData = null; byte[] imageDataClickToBegin = null; try { imageData = readImageDataFromClasspath("images/a-happy-face.jpg", 17833); imageDataClickToBegin = readImageDataFromClasspath( "images/a-happy-face-click-to-begin.jpg", 30055); } catch (Exception e) { // TODO Auto-generated catch block System.out.println("Could not find image from classpath..."); e.printStackTrace(); } iiSmilingFace = null; if (null != imageData) { iiSmilingFace = new ImageIcon(imageData); iiSmilingFaceClickToBegin = new ImageIcon(imageDataClickToBegin); } if (null == imageData) { try { iiSmilingFace = new ImageIcon(new URL( "http://MattPayne.org/mmi/happy-face.jpg")); iiSmilingFaceClickToBegin = new ImageIcon(new URL( "http://MattPayne.org/mmi/happy-face.jpg")); } catch (Exception e) { // TODO Auto-generated catch block e.printStackTrace(); } } centerButton = new JButton(iiSmilingFace); centerButton.addActionListener(this); panel.add(centerButton, BorderLayout.CENTER); return panel; } public void backToStartScreen() { frame.setTitle(frameTitle); centerButton.setIcon(iiSmilingFace); refreshGui(); disableButtons(); } private void addButton(JPanel southPanel, ActionRecorder independentAction2) { JButton responseButton = new JButton(independentAction2); southPanel.add(responseButton); lstButtons.add(responseButton); } private byte[] readImageDataFromClasspath(String fileName, int lazy) throws IOException { // Do it this way and no relative path huha is needed. InputStream in = this.getClass().getClassLoader() .getResourceAsStream(fileName); return readImageDataFromInputStream(in, lazy); } private byte[] readImageDataFromInputStream(InputStream in, int lazy) throws IOException { byte[] imageData = new byte[lazy]; int numBytesRead = 0, totalBytesRead = 0; // Yes, I feel dirty for not finding the size of the file by hand // here. // I'm in a hurry. // Yes, I know I'll burn in hell. Unless Jesus saves me. Which he // has. Thanks! while (totalBytesRead < lazy) { /* Cap each read at the space remaining and stop at end-of-stream so a short file cannot overrun the buffer or loop forever. */ numBytesRead = in.read(imageData, totalBytesRead, lazy - totalBytesRead); if (numBytesRead < 0) { break; } totalBytesRead += numBytesRead; } in.close(); return imageData; } public void setupTimer() { if (null != timer) { timer.stop(); // fix for issue } timer = new Timer(session.getTimeDelayAnswer() * 1000, timerAction); timer.setInitialDelay(session.getTimeDelayPrompt() * 1000); timer.setRepeats(true); timer.start(); } public void setupCenterButton() { // TODO: Call this when we get a new session file read in.... CategoryItem first = itemQueue.remove(); try { imgIconCenter = new ImageIcon(first.getImgFile().getCanonicalPath()); } catch (IOException e) { log.error( "Odd, this error should not happen.
Can't find the first image", e); e.printStackTrace(); } // centerButton = new JButton(imgIconCenter); centerButton.setIcon(imgIconCenter); centerButton.setPreferredSize(session.getMaxDimensions()); } private void setupActions(MediatorListener mediator) { // TODO: Fix bug that control A does not toggle the checkbox attendingAction = new ActionRecorder( Messages.getString("Gui.Attending"), null, //$NON-NLS-1$ Messages.getString("Gui.AttendingDescription"), new Integer( //$NON-NLS-1$ KeyEvent.VK_F1), KeyStroke.getKeyStroke("control A"), Mediator.ATTENDING, mediator); independentAction = new ActionRecorder( Messages.getString("Gui.Independent"), null, //$NON-NLS-1$ Messages.getString("Gui.IndependentDescription"), new Integer( //$NON-NLS-1$ KeyEvent.VK_F2), KeyStroke.getKeyStroke("control I"), Mediator.INDEPENDENT, mediator); verbalAction = new ActionRecorder( Messages.getString("Gui.Verbal"), null, //$NON-NLS-1$ Messages.getString("Gui.VerbalDescription"), //$NON-NLS-1$ new Integer(KeyEvent.VK_F3), KeyStroke.getKeyStroke("control V"), Mediator.VERBAL, mediator); modelingAction = new ActionRecorder( Messages.getString("Gui.Modeling"), null, //$NON-NLS-1$ Messages.getString("Gui.ModelingDescriptin"), new Integer( //$NON-NLS-1$ KeyEvent.VK_F4), KeyStroke.getKeyStroke("control M"), Mediator.MODELING, mediator); noAnswerAction = new ActionRecorder( Messages.getString("Gui.NoAnswer"), null, //$NON-NLS-1$ Messages.getString("Gui.NoAnswerDescription"), new Integer(KeyEvent.VK_F5), //$NON-NLS-1$ KeyStroke.getKeyStroke("control N"), Mediator.NO_ANSWER, mediator); quitAction = new ActionRecorder(Messages.getString("Gui.Quit"), null, //$NON-NLS-1$ Messages.getString("Gui.QuitDescriptino"), new Integer(KeyEvent.VK_L), //$NON-NLS-1$ KeyStroke.getKeyStroke("control Q"), Mediator.QUIT, mediator); timerAction = new ActionRecorder( Messages.getString("Gui.TimerSwing"), null, //$NON-NLS-1$ "Quit (Exit) the program", new Integer(KeyEvent.VK_L), //$NON-NLS-1$ KeyStroke.getKeyStroke("control F2"), Mediator.TIMER, mediator); openAction = new ActionRecorder(Messages.getString("Gui.Open"), null, //$NON-NLS-1$ Messages.getString("Gui.OpenDescription"), //$NON-NLS-1$ new Integer(KeyEvent.VK_L), KeyStroke.getKeyStroke("control O"), Mediator.OPEN, mediator); openHttpAction = new ActionRecorder( Messages.getString("Gui.Open.Http"), null, //$NON-NLS-1$ Messages.getString("Gui.OpenHttpDescription"), //$NON-NLS-1$ new Integer(KeyEvent.VK_L), KeyStroke.getKeyStroke("control H"), Mediator.OPEN_HTTP, mediator); } public void run(Session session) { } public void playSound(File file) { try { SoundUtility.playSound(file); // "src/test/resources/bc/animals/fooduck/answer.wav")); } catch (UnsupportedAudioFileException e) { log.error("Problem with playSound: " + file, e); e.printStackTrace(); } catch (IOException e) { log.error("Problem with playSound: " + file, e); e.printStackTrace(); } } public Queue<CategoryItem> getItemQueue() { return itemQueue; } public void switchImage(File file) { try { switchImage(new ImageIcon(file.getCanonicalPath())); } catch (IOException e) { log.error( String.format("Problem switching image to file='%s'", file), e); e.printStackTrace(); } } public void switchImage(ImageIcon ii) { setFrameTitle(); centerButton.setIcon(ii); } /* * They use bmp images which don't display with the program right now. I * looked at the code and noticed that you don't use the 'img' field of the * CategoryItem. You use the image file to get a path to the image and read * it in again using an icon. 
I suspect that it will work better if you use * the image that ImageIO already read into memory. */ public void switchImage(BufferedImage img) { ImageIcon ii = new ImageIcon(img); switchImage(ii); } private void setFrameTitle() { frame.setTitle(frameTitle + String.format("%d of %d", itemQueue.size() + 1, //$NON-NLS-1$ session.getItems().length)); } public Session getSession() { return session; } public void setSession(Session session) { this.session = session; } public Timer getTimer() { return timer; } public void setTimer(Timer timer) { this.timer = timer; } public JCheckBox getAttending() { return attending; } public void setAttending(JCheckBox attending) { this.attending = attending; } public void setVisble(boolean b) { frame.setVisible(b); } public void openSession() { File file; // TODO: Remove hard coded directory. // TODO: Get application to remember the last place we opened this... JFileChooser chooser = new JFileChooser(new File( "/Users/mgpayne/MMI/src/test/resources")); int returnVal = chooser.showOpenDialog(frame); if (returnVal == JFileChooser.APPROVE_OPTION) { file = chooser.getSelectedFile(); try { readSessionFile(file); } catch (Exception e) { JOptionPane.showMessageDialog(frame, String.format( "Problem reading %s exception was %s", file, e)); e.printStackTrace(); } } displayClickToBegin(); } public void useNewSession() { centerButton.removeActionListener(this); clickToStartButton.setEnabled(false); clickToStartButton.setForeground(Color.white); // centerButton.setText(""); if (null != session) { CategoryItem[] copy = Arrays.copyOf(session.getItems(), session.getItems().length); for (int i = 0; i < session.getShuffleCount(); ++i) { Shuffler.shuffle(copy); } itemQueue = new ConcurrentLinkedQueue<CategoryItem>(); for (CategoryItem item : copy) { // TODO: Is there a collections add all I could use here? itemQueue.add(item); } mediator.setSession(session); setupCenterButton(); setFrameTitle(); refreshGui(); setupTimer(); enableButtons(); } } private void readSessionFile(File file) throws Exception { readSessionFile(file, null); } private void readSessionFile(File file, String newItemBase) throws Exception { Properties props = new Properties(); // Do it this way and no relative path huha is needed. InputStream in = this.getClass().getClassLoader() .getResourceAsStream(MainGui.propFile); props.load(new InputStreamReader(in)); String[] sndExts = props.getProperty(MainGui.sndKey).split(","); ObjectMapper mapper = new ObjectMapper(); AnnotationIntrospector introspector = new JaxbAnnotationIntrospector(); mapper.getDeserializationConfig().withAnnotationIntrospector( introspector); mapper.getSerializationConfig() .withAnnotationIntrospector(introspector); SessionConfig config = mapper.readValue(new FileInputStream(file), SessionConfig.class); if (null != newItemBase) { config.setItemBase(newItemBase); config.setPrompt(newItemBase + "/prompt.wav"); } session = new Session(config, sndExts); } void refreshGui() { mainPanel.revalidate(); frame.pack(); } public void openHttpSession() { // Started with clues from Object[] possibilities = { "http://MattPayne.org/mmi/demo1.zip", "http://MattPayne.org/mmi/mp.zip", "http://MattPayne.org/mmi/bc.zip", "more to come later..." 
}; String s = (String) JOptionPane.showInputDialog(frame, "Complete the sentence:\n" + "\"Green eggs and...\"", "Customized Dialog", JOptionPane.PLAIN_MESSAGE, null, possibilities, "ham"); System.out.println("s=" + s); unpackToTempDirectory(s); } private void unpackToTempDirectory(String strUrl) { try { File tempZipFile = fetchViaHttp(strUrl); File tmp = File.createTempFile("mmi", "", tmpDir); String zipPath = tmp.getAbsolutePath() + ".dir"; tmp.delete(); lstTempDirectories.add(zipPath); ExtractFileSubDirectories.unzip(zipPath, tempZipFile.getAbsolutePath()); tempZipFile.delete(); readSessionFile(new File(zipPath + "/session.txt"), zipPath); displayClickToBegin(); } catch (Exception e) { // TODO Auto-generated catch block e.printStackTrace(); } } private void displayClickToBegin() { centerButton.setEnabled(true); // centerButton.addActionListener(this); // fix for issue centerButton.setIcon(iiSmilingFaceClickToBegin); // centerButton.setText("Click to Begin"); centerButton.invalidate(); clickToStartButton.setEnabled(true); clickToStartButton.setForeground(Color.red); refreshGui(); } private File fetchViaHttp(String strUrl) throws IOException { // Since Mac's OS X make f-ed up temp directories like this: // Extracting // /var/folders/IL/ILL0adgsGq89FHBGBZvCF++++TI/-Tmp-/mmi7319452195262685629.dir/food/foobar/redbar.jpg // We're going to specify the temp directory File tempZipFile = File.createTempFile("mmiSession", "zip", tmpDir); URL url = new URL(strUrl); InputStream in = url.openStream(); byte[] chunk = new byte[8 * 1024]; OutputStream out = new FileOutputStream(tempZipFile); int numBytesRead = 0; while (-1 != (numBytesRead = in.read(chunk))) { out.write(chunk, 0, numBytesRead); } out.close(); in.close(); return tempZipFile; } public void actionPerformed(ActionEvent e) { if (clickToStartButton == e.getSource()) { useNewSession(); } } public void populateSessionName() { session.setSessionName(tfSessionName.getText()); } public void populateSessionDataFile() { String fileName = System.getProperty("user.home") + "/brian.csv"; String str = tfSessionDataFile.getText(); str = "".equals(str) ? fileName : str; session.setSessionDataFile(new File(str)); } }
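/*
 * A hedged launcher sketch: the Gui() constructor and setVisble(boolean) are public in
 * the class above, and Swing components should be created on the Event Dispatch Thread.
 * Whether the real entry point (MainGui) performs additional setup is not shown here.
 */
class GuiLaunchSketch {
    public static void main(String[] args) {
        javax.swing.SwingUtilities.invokeLater(new Runnable() {
            public void run() {
                new Gui().setVisble(true);
            }
        });
    }
}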
package com.jetbrains.edu.learning.checker; import com.intellij.execution.process.CapturingProcessHandler; import com.intellij.execution.process.ProcessOutput; import com.intellij.ide.projectView.ProjectView; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.progress.ProgressIndicator; import com.intellij.openapi.project.Project; import com.intellij.openapi.ui.MessageType; import com.intellij.openapi.util.Pair; import com.intellij.openapi.util.Ref; import com.intellij.openapi.vfs.VirtualFile; import com.jetbrains.edu.learning.StudySettings; import com.jetbrains.edu.learning.StudyState; import com.jetbrains.edu.learning.StudyTaskManager; import com.jetbrains.edu.learning.StudyUtils; import com.jetbrains.edu.learning.core.EduNames; import com.jetbrains.edu.learning.core.EduUtils; import com.jetbrains.edu.learning.courseFormat.Course; import com.jetbrains.edu.learning.courseFormat.StudyStatus; import com.jetbrains.edu.learning.courseFormat.tasks.ChoiceTask; import com.jetbrains.edu.learning.courseFormat.tasks.Task; import com.jetbrains.edu.learning.courseFormat.tasks.TheoryTask; import com.jetbrains.edu.learning.stepic.EduAdaptiveStepicConnector; import com.jetbrains.edu.learning.stepic.EduStepicConnector; import com.jetbrains.edu.learning.stepic.StepicUser; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; public class StudyCheckTask extends com.intellij.openapi.progress.Task.Backgroundable { private static final Logger LOG = Logger.getInstance(StudyCheckTask.class); private final Project myProject; protected final StudyState myStudyState; protected final Task myTask; protected final VirtualFile myTaskDir; protected final StudyTaskManager myTaskManger; private final StudyStatus myStatusBeforeCheck; private final Ref<Boolean> myCheckInProcess; private final Process myTestProcess; private final String myCommandLine; private static final String FAILED_CHECK_LAUNCH = "Failed to launch checking"; public StudyCheckTask(Project project, StudyState studyState, Ref<Boolean> checkInProcess, Process testProcess, String commandLine) { super(project, "Checking Task"); myProject = project; myStudyState = studyState; myCheckInProcess = checkInProcess; myTestProcess = testProcess; myCommandLine = commandLine; myTask = studyState.getTask(); myTaskDir = studyState.getTaskDir(); myTaskManger = StudyTaskManager.getInstance(myProject); myStatusBeforeCheck = myTask.getStatus(); } @Override public void onSuccess() { StudyUtils.updateToolWindows(myProject); StudyCheckUtils.drawAllPlaceholders(myProject, myTask); ProjectView.getInstance(myProject).refresh(); clearState(); } protected void clearState() { EduUtils.deleteWindowDescriptions(myTask, myTaskDir); myCheckInProcess.set(false); } @Override public void onCancel() { myTask.setStatus(myStatusBeforeCheck); clearState(); } @Override public void run(@NotNull ProgressIndicator indicator) { final Course course = StudyTaskManager.getInstance(myProject).getCourse(); if (course != null) { if (course.isAdaptive()) { checkForAdaptiveCourse(indicator); } else { checkForEduCourse(indicator); } runAfterTaskCheckedActions(); } } private void checkForEduCourse(@NotNull ProgressIndicator indicator) { final StudyTestsOutputParser.TestsOutput testsOutput = getTestOutput(indicator); if (testsOutput != null) { if (testsOutput.isSuccess()) { onTaskSolved(testsOutput.getMessage()); } else { onTaskFailed(testsOutput.getMessage()); } final Course course = 
StudyTaskManager.getInstance(myProject).getCourse(); if (course != null && EduNames.STUDY.equals(course.getCourseMode())) { StepicUser user = StudySettings.getInstance().getUser(); if (user != null) { EduStepicConnector.postSolution(myTask, testsOutput.isSuccess(), myProject); } } } } @Nullable private StudyTestsOutputParser.TestsOutput getTestOutput(@NotNull ProgressIndicator indicator) { final CapturingProcessHandler handler = new CapturingProcessHandler(myTestProcess, null, myCommandLine); final ProcessOutput output = handler.runProcessWithProgressIndicator(indicator); if (indicator.isCanceled()) { ApplicationManager.getApplication().invokeLater( () -> StudyCheckUtils.showTestResultPopUp("Check cancelled", MessageType.WARNING.getPopupBackground(), myProject)); } final Course course = StudyTaskManager.getInstance(myProject).getCourse(); if (course != null) { final StudyTestsOutputParser.TestsOutput testsOutput = StudyTestsOutputParser.getTestsOutput(output, course.isAdaptive()); String stderr = output.getStderr(); if (!stderr.isEmpty() && output.getStdout().isEmpty()) { //log error output of tests LOG.info("#educational " + stderr); return new StudyTestsOutputParser.TestsOutput(false, stderr); } return testsOutput; } return null; } private void checkForAdaptiveCourse(@NotNull ProgressIndicator indicator) { final StepicUser user = StudySettings.getInstance().getUser(); if (user == null) { LOG.warn("User is null"); ApplicationManager.getApplication().invokeLater(() -> StudyUtils.showErrorPopupOnToolbar(myProject, "Failed to launch checking: you're not authorized")); return; } if (myTask instanceof ChoiceTask) { final Pair<Boolean, String> result = EduAdaptiveStepicConnector.checkChoiceTask(myProject, (ChoiceTask)myTask, user); processStepicCheckOutput(result); } else if (myTask instanceof TheoryTask) { myTask.setStatus(StudyStatus.Solved); } else { final StudyTestsOutputParser.TestsOutput testOutput = getTestOutput(indicator); if (testOutput != null) { // As tests in adaptive courses are created from // samples and stored in task, to disable it we should ignore local testing results if (StudySettings.getInstance().isEnableTestingFromSamples() && !testOutput.isSuccess()) { onTaskFailed(testOutput.getMessage()); } else { final Pair<Boolean, String> pair = EduAdaptiveStepicConnector.checkCodeTask(myProject, myTask, user); processStepicCheckOutput(pair); } } } } private void processStepicCheckOutput(@Nullable Pair<Boolean, String> pair) { if (pair != null && pair.getFirst() != null) { if (pair.getFirst()) { onTaskSolved("Congratulations! Remote tests passed."); } else { final String checkMessage = pair.getSecond(); onTaskFailed(checkMessage); } } else { ApplicationManager.getApplication().invokeLater(() -> { String message = pair == null ? FAILED_CHECK_LAUNCH : pair.getSecond(); StudyCheckUtils.showTestResultPopUp(message, MessageType.WARNING .getPopupBackground(), myProject); }); } } protected void onTaskFailed(@NotNull String message) { myTask.setStatus(StudyStatus.Failed); myTask.getChecker(myProject).onTaskFailed(message); } protected void onTaskSolved(@NotNull String message) { myTask.setStatus(StudyStatus.Solved); myTask.getChecker(myProject).onTaskSolved(message); } private void runAfterTaskCheckedActions() { for (StudyCheckListener listener : StudyCheckListener.EP_NAME.getExtensions()) { listener.afterCheck(myProject, myTask); } } }
package com.fishercoder.solutions; import java.util.HashMap; import java.util.Map; /** * 205. Isomorphic Strings * * Given two strings s and t, determine if they are isomorphic. Two strings are isomorphic if the characters in s can be replaced to get t. All occurrences of a character must be replaced with another character while preserving the order of characters. No two characters may map to the same character but a character may map to itself. For example, Given "egg", "add", return true. Given "foo", "bar", return false. Given "paper", "title", return true. Note: You may assume both s and t have the same length.*/ public class _205 { /**space should be O(1) since it only has alphabetic letters which are capped at 52.*/ public static class Solution1 { public boolean isIsomorphic(String s, String t) { if (s == null || s.length() == 0) { return (t == null || t.length() == 0); } if (t == null || t.length() == 0) { return (s == null || s.length() == 0); } char[] schar = s.toCharArray(); char[] tchar = t.toCharArray(); Map<Character, Character> map = new HashMap(); if (s.length() != t.length()) { return false; } for (int i = 0; i < s.length(); i++) { if (map.containsKey(schar[i])) { if (map.get(schar[i]) != tchar[i]) { return false; } } else { if (map.containsValue(tchar[i])) { return false;//this line is necessary for this case: ("ab", "aa") } map.put(schar[i], tchar[i]); } } return true; } } }
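A minimal, hypothetical driver for the Solution1 class above (not part of the original file; it assumes the same com.fishercoder.solutions package). The inputs and expected results come from the problem statement in the Javadoc, plus the ("ab", "aa") case mentioned in the inline comment.

public class _205Demo {
    public static void main(String[] args) {
        _205.Solution1 solution = new _205.Solution1();
        System.out.println(solution.isIsomorphic("egg", "add"));     // true
        System.out.println(solution.isIsomorphic("foo", "bar"));     // false
        System.out.println(solution.isIsomorphic("paper", "title")); // true
        // two different characters may not map to the same character:
        System.out.println(solution.isIsomorphic("ab", "aa"));       // false
    }
}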
package org.opens.tanaguru.analyser; import java.math.BigDecimal; import java.math.RoundingMode; import java.util.*; import org.opens.tanaguru.entity.audit.Audit; import org.opens.tanaguru.entity.audit.ProcessResult; import org.opens.tanaguru.entity.audit.TestSolution; import org.opens.tanaguru.entity.factory.audit.ProcessResultFactory; import org.opens.tanaguru.entity.parameterization.Parameter; import org.opens.tanaguru.entity.reference.Criterion; import org.opens.tanaguru.entity.reference.Test; import org.opens.tanaguru.entity.reference.Theme; import org.opens.tanaguru.entity.service.audit.AuditDataService; import org.opens.tanaguru.entity.service.audit.ProcessResultDataService; import org.opens.tanaguru.entity.service.statistics.CriterionStatisticsDataService; import org.opens.tanaguru.entity.service.statistics.TestStatisticsDataService; import org.opens.tanaguru.entity.service.statistics.ThemeStatisticsDataService; import org.opens.tanaguru.entity.service.statistics.WebResourceStatisticsDataService; import org.opens.tanaguru.entity.statistics.CriterionStatistics; import org.opens.tanaguru.entity.statistics.TestStatistics; import org.opens.tanaguru.entity.statistics.ThemeStatistics; import org.opens.tanaguru.entity.statistics.WebResourceStatistics; import org.opens.tanaguru.entity.subject.Page; import org.opens.tanaguru.entity.subject.Site; import org.opens.tanaguru.entity.subject.WebResource; /** * * @author jkowalczyk */ public class AnalyserImpl implements Analyser { /** * The webResource used to extract statistics */ private WebResource webResource; /** * The audit used to extract statistics */ private Audit audit; /** * The auditStatisticsDataService instance needed to retrieve and save * auditStatistics instances */ private WebResourceStatisticsDataService webResourceStatisticsDataService; public WebResourceStatisticsDataService getWebResourceStatisticsDataService() { return webResourceStatisticsDataService; } /** * The ThemeStatisticsDataService instance */ private ThemeStatisticsDataService themeStatisticsDataService; /** * The CriterionStatisticsDataService instance */ private CriterionStatisticsDataService criterionStatisticsDataService; /** * THe testStatisticsDataService instance */ private TestStatisticsDataService testStatisticsDataService; /** * The auditDataService instance */ private AuditDataService auditDataService; /** * The ProcessResultDataService instance */ private ProcessResultDataService processResultDataService; /** * The ProcessResultFactory instance */ private ProcessResultFactory processResultFactory; private Map<Criterion, Integer> criterionMap; private Map<Theme, Integer> themeMap; private Collection<Test> testSet; private Map<Criterion, CriterionStatistics> csMap; private Map<Theme, ThemeStatistics> tsMap; private Collection<ProcessResult> netResultList; @Override public List<ProcessResult> getNetResultList() { throw new UnsupportedOperationException("Not supported yet."); } @Override public void setNetResultList(final List<ProcessResult> netResultList) { throw new UnsupportedOperationException("Not supported yet."); } /** * This attribute is used to compute the number of not tested tests. In case * of Page audit, this value is supposed to be 1 and thus, the not tested * value is computed from the difference between the total number of tests * for the given level and the qualified results. In case of Site audit, the * total number of tests (or criterions) has to multiplied by this value * before substracting the qualified results. 
* */ private int nbOfWr = 0; /** * the set of audit parameters that handles some overridden values for test * weight (needed to compute the raw mark) */ private Collection<Parameter> paramSet; private static BigDecimal ZERO = BigDecimal.valueOf(Double.valueOf(0.0)); public AnalyserImpl( AuditDataService auditDataService, TestStatisticsDataService testStatisticsDataService, ThemeStatisticsDataService themeStatisticsDataService, WebResourceStatisticsDataService webResourceStatisticsDataService, CriterionStatisticsDataService criterionStatisticsDataService, ProcessResultDataService processResultDataService, ProcessResultFactory processResultFactory, WebResource webResource, Collection<Parameter> paramSet, int nbOfWr) { this.auditDataService = auditDataService; this.testStatisticsDataService = testStatisticsDataService; this.themeStatisticsDataService = themeStatisticsDataService; this.webResourceStatisticsDataService = webResourceStatisticsDataService; this.criterionStatisticsDataService = criterionStatisticsDataService; this.processResultDataService = processResultDataService; this.processResultFactory = processResultFactory; this.setWebResource(webResource); this.paramSet = paramSet; this.nbOfWr = nbOfWr; } @Override public float getResult() { throw new UnsupportedOperationException("Not supported yet."); } @Override public void run() { WebResourceStatistics wrStats = webResourceStatisticsDataService.create(); // Regardind the webResource type the computation of the statitics is // done in memory or through the db if (webResource instanceof Page) { extractTestSet(false); netResultList = getProcessResultWithNotTested( testSet, processResultDataService.getNetResultFromAuditAndWebResource(audit, webResource)); wrStats = computeAuditStatisticsFromPrList(wrStats); wrStats = computeHttpStatusCode(wrStats); } else if (webResource instanceof Site) { extractTestSet(true); wrStats = computeAuditStatisticsFromDb(wrStats); wrStats = computeCriterionStatisticsFromDb(wrStats); wrStats = computeTestStatisticsFromDb(wrStats); wrStats = computeThemeStatisticsFromDb(wrStats); } wrStats = computeMark(wrStats); wrStats = computeRawMark(wrStats); wrStats = computeNumberOfFailedOccurrences(wrStats); wrStats.setAudit(audit); wrStats.setWebResource(webResource); webResourceStatisticsDataService.saveOrUpdate(wrStats); } @Override public final void setWebResource(final WebResource webResource) { this.webResource = webResource; if (webResource instanceof Site) { this.audit = webResource.getAudit(); } else if (webResource instanceof Page) { if (webResource.getAudit() != null) { this.audit = webResource.getAudit(); } else if (webResource.getParent() != null) { this.audit = webResource.getParent().getAudit(); } } } @Override public WebResource getWebResource() { return webResource; } /** * Gather the Http status code for a given web resource. * * @param wrStatistics * @return */ private WebResourceStatistics computeHttpStatusCode(WebResourceStatistics wrStatistics) { wrStatistics.setHttpStatusCode(webResourceStatisticsDataService.getHttpStatusCodeByWebResource(webResource.getId())); return wrStatistics; } /** * To avoid multiple count requests to the db, the audits statistics are * computing by iterating through the ProcessResult list. 
The criterion * statistics and the theme statistics are collected on the fly while * parsing the collection of ProcessResult * * @param wrStatistics * @return */ private WebResourceStatistics computeAuditStatisticsFromPrList(WebResourceStatistics wrStatistics) { int nbOfPassed = 0; int nbOfFailed = 0; int nbOfNmi = 0; int nbOfNa = 0; int nbOfDetected = 0; int nbOfSuspected = 0; int nbOfNt = 0; for (ProcessResult pr : netResultList) { TestSolution prResult = (TestSolution) pr.getValue(); switch (prResult) { case PASSED: nbOfPassed++; break; case FAILED: nbOfFailed++; break; case NOT_APPLICABLE: nbOfNa++; break; case NEED_MORE_INFO: case DETECTED: case SUSPECTED_FAILED: case SUSPECTED_PASSED: nbOfNmi++; break; case NOT_TESTED: nbOfNt++; break; } addResultToCriterionCounterMap(prResult, pr.getTest().getCriterion(), wrStatistics); addResultToThemeCounterMap(prResult, pr.getTest().getCriterion().getTheme(), wrStatistics); } // if no test have been processed for any reason, mostly cause the source // code couldn't have been adapted, all theses values are set to -1 if (nbOfFailed + nbOfNa + nbOfNmi + nbOfPassed + nbOfDetected + nbOfSuspected == 0) { nbOfFailed = nbOfNa = nbOfNmi = nbOfPassed = nbOfSuspected = nbOfDetected = -1; } wrStatistics.setNbOfFailed(nbOfFailed); wrStatistics.setNbOfInvalidTest(nbOfFailed); wrStatistics.setNbOfPassed(nbOfPassed); wrStatistics.setNbOfNmi(nbOfNmi); wrStatistics.setNbOfNa(nbOfNa); wrStatistics.setNbOfDetected(nbOfDetected); wrStatistics.setNbOfSuspected(nbOfSuspected); wrStatistics.setNbOfNotTested(nbOfNt); setWeightedResult(wrStatistics); // Compute criterion Result for each criterion and link each // criterionStatistics to the current webResourceStatistics for (CriterionStatistics cs : csMap.values()) { computeCriterionResult(cs); wrStatistics.addCriterionStatistics(cs); } // Link each themeStatistics to the current webResourceStatistics for (ThemeStatistics ts : tsMap.values()) { wrStatistics.addThemeStatistics(ts); } wrStatistics.setAudit(audit); return wrStatistics; } /** * * @param testSolution * @param criterion * @param wrs */ private void addResultToCriterionCounterMap( TestSolution testSolution, Criterion criterion, WebResourceStatistics wrs) { if (csMap == null) { csMap = new HashMap<Criterion, CriterionStatistics>(); } if (csMap.containsKey(criterion)) { CriterionStatistics cs = csMap.get(criterion); incrementCriterionCounterFromTestSolution(cs, testSolution); } else { CriterionStatistics cs = criterionStatisticsDataService.create(); cs.setCriterion(criterion); incrementCriterionCounterFromTestSolution(cs, testSolution); csMap.put(criterion, cs); } } /** * * @param cs * @param testSolution */ private void incrementCriterionCounterFromTestSolution( CriterionStatistics cs, TestSolution testSolution) { switch (testSolution) { case PASSED: cs.setNbOfPassed(cs.getNbOfPassed() + 1); break; case FAILED: cs.setNbOfFailed(cs.getNbOfFailed() + 1); break; case NOT_APPLICABLE: cs.setNbOfNa(cs.getNbOfNa() + 1); break; case NEED_MORE_INFO: case DETECTED: case SUSPECTED_FAILED: case SUSPECTED_PASSED: cs.setNbOfNmi(cs.getNbOfNmi() + 1); break; case NOT_TESTED: cs.setNbOfNotTested(cs.getNbOfNotTested() + 1); break; } } /** * * @param testSolution * @param criterion * @param wrs */ private void addResultToThemeCounterMap( TestSolution testSolution, Theme theme, WebResourceStatistics wrs) { if (tsMap == null) { tsMap = new HashMap<Theme, ThemeStatistics>(); } if (tsMap.containsKey(theme)) { ThemeStatistics ts = tsMap.get(theme); 
incrementThemeCounterFromTestSolution(ts, testSolution); } else { ThemeStatistics ts = themeStatisticsDataService.create(); ts.setTheme(theme); incrementThemeCounterFromTestSolution(ts, testSolution); tsMap.put(theme, ts); } } /** * * @param ts * @param testSolution */ private void incrementThemeCounterFromTestSolution( ThemeStatistics ts, TestSolution testSolution) { switch (testSolution) { case PASSED: ts.setNbOfPassed(ts.getNbOfPassed() + 1); break; case FAILED: ts.setNbOfFailed(ts.getNbOfFailed() + 1); break; case NOT_APPLICABLE: ts.setNbOfNa(ts.getNbOfNa() + 1); break; case NEED_MORE_INFO: case DETECTED: case SUSPECTED_FAILED: case SUSPECTED_PASSED: ts.setNbOfNmi(ts.getNbOfNmi() + 1); break; case NOT_TESTED: ts.setNbOfNotTested(ts.getNbOfNotTested() + 1); break; } } /** * Gather the audit statistics informations : - Number of passed results - * Number of failed results - Number of need_more_information results - * Number of not applicable results - Number of failed tests * * @param wrStatistics * @return */ private WebResourceStatistics computeAuditStatisticsFromDb(WebResourceStatistics wrStatistics) { int nbOfPassed = webResourceStatisticsDataService.getResultCountByResultType(webResource.getId(), TestSolution.PASSED).intValue(); int nbOfFailed = webResourceStatisticsDataService.getResultCountByResultType(webResource.getId(), TestSolution.FAILED).intValue(); int nbOfNmi = webResourceStatisticsDataService.getResultCountByResultType(webResource.getId(), TestSolution.NEED_MORE_INFO).intValue(); int nbOfNa = webResourceStatisticsDataService.getResultCountByResultType(webResource.getId(), TestSolution.NOT_APPLICABLE).intValue(); int nbOfDetected = webResourceStatisticsDataService.getResultCountByResultType(webResource.getId(), TestSolution.DETECTED).intValue(); int nbOfSuspected = webResourceStatisticsDataService.getResultCountByResultType(webResource.getId(), TestSolution.SUSPECTED_FAILED).intValue() + webResourceStatisticsDataService.getResultCountByResultType(webResource.getId(), TestSolution.SUSPECTED_PASSED).intValue(); // if no test have been processed for any reason, mostly cause the source // code couldn't have been adapted, all theses values are set to -1 if (nbOfFailed + nbOfNa + nbOfNmi + nbOfPassed + nbOfDetected + nbOfSuspected == 0) { nbOfFailed = nbOfNa = nbOfNmi = nbOfPassed = nbOfSuspected = nbOfDetected = -1; } wrStatistics.setNbOfFailed(nbOfFailed); wrStatistics.setNbOfInvalidTest(nbOfFailed); wrStatistics.setNbOfPassed(nbOfPassed); wrStatistics.setNbOfNmi(nbOfNmi); wrStatistics.setNbOfNa(nbOfNa); wrStatistics.setNbOfDetected(nbOfDetected); wrStatistics.setNbOfSuspected(nbOfSuspected); wrStatistics.setNbOfNotTested( testSet.size() * nbOfWr - nbOfDetected - nbOfSuspected - nbOfFailed - nbOfNa - nbOfNmi - nbOfPassed); setWeightedResult(wrStatistics); wrStatistics.setAudit(audit); return wrStatistics; } /** * * @param wrStatistics */ private void setWeightedResult(WebResourceStatistics wrStatistics) { BigDecimal weightedPassed = webResourceStatisticsDataService.getWeightedResultByResultType( webResource.getId(), paramSet, TestSolution.PASSED, false); BigDecimal weightedFailed = webResourceStatisticsDataService.getWeightedResultByResultType( webResource.getId(), paramSet, TestSolution.FAILED, false); BigDecimal weightedNa = webResourceStatisticsDataService.getWeightedResultByResultType( webResource.getId(), paramSet, TestSolution.NOT_APPLICABLE, false); BigDecimal weightedNmi = webResourceStatisticsDataService.getWeightedResultByResultType( webResource.getId(), paramSet, 
TestSolution.NEED_MORE_INFO, false); wrStatistics.setWeightedFailed(weightedFailed); wrStatistics.setWeightedPassed(weightedPassed); wrStatistics.setWeightedNmi(weightedNmi); wrStatistics.setWeightedNa(weightedNa); } /** * This method computes the mark of the audit. Here is the algorithm formula * : ((1-ratioNMI) * passed/(passed+failed) + ratioNMI * * needMoreInfo/(passed+failed+needMoreInfo)) *100f where ratioNMI = * needMoreInfo / (passed+failed+needMoreInfo) * * @param wrStatistics * @return */ public WebResourceStatistics computeMark(WebResourceStatistics wrStatistics) { float passed = wrStatistics.getNbOfPassed(); // page on error, mark set to -1 if (passed == -1) { wrStatistics.setRawMark(Float.valueOf(-1)); return wrStatistics; } float failed = wrStatistics.getNbOfFailed(); float needMoreInfo = wrStatistics.getNbOfNmi(); if (failed == 0 && passed == 0) { wrStatistics.setMark(Float.valueOf(0)); return wrStatistics; } float ratioNMI = needMoreInfo / (passed + failed + needMoreInfo); float result = ((1 - ratioNMI) * passed / (passed + failed) + ratioNMI * needMoreInfo / (passed + failed + needMoreInfo)) * 100f; wrStatistics.setMark(result); return wrStatistics; } /** * This method computes the raw mark of the audit. Here is the algorithm * formula : passed/(passed+failed) * * @param wrStatistics * @return */ public WebResourceStatistics computeRawMark(WebResourceStatistics wrStatistics) { float passed = wrStatistics.getNbOfPassed(); // page on error, mark set to -1 if (passed == -1) { wrStatistics.setRawMark(Float.valueOf(-1)); return wrStatistics; } BigDecimal weightedPassed = wrStatistics.getWeightedPassed(); BigDecimal weightedFailed = wrStatistics.getWeightedFailed(); if ((weightedFailed.equals(BigDecimal.ZERO) || weightedFailed.equals(ZERO)) && (weightedPassed.equals(BigDecimal.ZERO) || weightedPassed.equals(ZERO))) { wrStatistics.setRawMark(Float.valueOf(0)); return wrStatistics; } float result = weightedPassed.divide(weightedPassed.add(weightedFailed), 4, RoundingMode.HALF_UP).floatValue() * 100f; wrStatistics.setRawMark(result); return wrStatistics; } /** * Gather the number of failed occurrences for a given web resource. 
* * @param wrStatistics * @return */ private WebResourceStatistics computeNumberOfFailedOccurrences(WebResourceStatistics wrStatistics) { int nbOfFailedOccurences = webResourceStatisticsDataService.getNumberOfOccurrencesByWebResourceAndResultType( webResource.getId(), TestSolution.FAILED, false).intValue(); wrStatistics.setNbOfFailedOccurences(nbOfFailedOccurences); return wrStatistics; } /** * * @param wrStatistics * @return */ private WebResourceStatistics computeThemeStatisticsFromDb(WebResourceStatistics wrStatistics) { for (Theme theme : themeMap.keySet()) { ThemeStatistics themeStatistics = themeStatisticsDataService.create(); themeStatistics.setTheme(theme); int nbOfFailed = themeStatisticsDataService.getResultCountByResultTypeAndTheme(webResource, TestSolution.FAILED, theme).intValue(); themeStatistics.setNbOfFailed(nbOfFailed); int nbOfPassed = themeStatisticsDataService.getResultCountByResultTypeAndTheme(webResource, TestSolution.PASSED, theme).intValue(); themeStatistics.setNbOfPassed(nbOfPassed); int nbOfNa = themeStatisticsDataService.getResultCountByResultTypeAndTheme(webResource, TestSolution.NOT_APPLICABLE, theme).intValue(); themeStatistics.setNbOfNa(nbOfNa); int nbOfNmi = themeStatisticsDataService.getResultCountByResultTypeAndTheme(webResource, TestSolution.NEED_MORE_INFO, theme).intValue(); nbOfNmi += themeStatisticsDataService.getResultCountByResultTypeAndTheme(webResource, TestSolution.SUSPECTED_FAILED, theme).intValue(); nbOfNmi += themeStatisticsDataService.getResultCountByResultTypeAndTheme(webResource, TestSolution.SUSPECTED_PASSED, theme).intValue(); themeStatistics.setNbOfNmi(nbOfNmi); int themeTestListSize = themeMap.get(theme); themeStatistics.setNbOfNotTested( themeTestListSize * nbOfWr - nbOfFailed - nbOfNa - nbOfNmi - nbOfPassed); wrStatistics.addThemeStatistics(themeStatistics); } return wrStatistics; } /** * * @param wrStatistics * @return */ private WebResourceStatistics computeCriterionStatisticsFromDb(WebResourceStatistics wrStatistics) { for (Criterion cr : criterionMap.keySet()) { CriterionStatistics criterionStatistics = criterionStatisticsDataService.create(); criterionStatistics.setCriterion(cr); int nbOfFailed = criterionStatisticsDataService.getResultCountByResultTypeAndCriterion(webResource, TestSolution.FAILED, cr).intValue(); criterionStatistics.setNbOfFailed(nbOfFailed); int nbOfNa = criterionStatisticsDataService.getResultCountByResultTypeAndCriterion(webResource, TestSolution.NOT_APPLICABLE, cr).intValue(); criterionStatistics.setNbOfNa(nbOfNa); int nbOfPassed = criterionStatisticsDataService.getResultCountByResultTypeAndCriterion(webResource, TestSolution.PASSED, cr).intValue(); criterionStatistics.setNbOfPassed(nbOfPassed); int nbOfNmi = criterionStatisticsDataService.getResultCountByResultTypeAndCriterion(webResource, TestSolution.NEED_MORE_INFO, cr).intValue(); nbOfNmi += criterionStatisticsDataService.getResultCountByResultTypeAndCriterion(webResource, TestSolution.SUSPECTED_FAILED, cr).intValue(); nbOfNmi += criterionStatisticsDataService.getResultCountByResultTypeAndCriterion(webResource, TestSolution.SUSPECTED_PASSED, cr).intValue(); nbOfNmi += criterionStatisticsDataService.getResultCountByResultTypeAndCriterion(webResource, TestSolution.DETECTED, cr).intValue(); criterionStatistics.setNbOfNmi(nbOfNmi); int criterionTestListSize = criterionMap.get(cr); criterionStatistics.setNbOfNotTested( criterionTestListSize * nbOfWr - nbOfFailed - nbOfNa - nbOfNmi - nbOfPassed); computeCriterionResult(criterionStatistics); 
wrStatistics.addCriterionStatistics(criterionStatistics); } return wrStatistics; } /** * * Gather the following statistics informations for each theme: - Number * of passed results - Number of failed results - Number of * need_more_information results - Number of not applicable results - Number * of failed tests * * @param wrStatistics * @return */ private WebResourceStatistics computeTestStatisticsFromDb(WebResourceStatistics wrStatistics) { for (Test test : testSet) { TestStatistics testStatistics = testStatisticsDataService.create(); testStatistics.setTest(test); int nbOfFailed = testStatisticsDataService.getResultCountByResultTypeAndTest(webResource, TestSolution.FAILED, test).intValue(); testStatistics.setNbOfFailed(nbOfFailed); int nbOfPassed = testStatisticsDataService.getResultCountByResultTypeAndTest(webResource, TestSolution.PASSED, test).intValue(); testStatistics.setNbOfPassed(nbOfPassed); int nbOfNmi = testStatisticsDataService.getResultCountByResultTypeAndTest(webResource, TestSolution.NEED_MORE_INFO, test).intValue(); testStatistics.setNbOfNmi(nbOfNmi); int nbOfNa = testStatisticsDataService.getResultCountByResultTypeAndTest(webResource, TestSolution.NOT_APPLICABLE, test).intValue(); testStatistics.setNbOfNa(nbOfNa); testStatistics.setNbOfNotTested(nbOfWr - nbOfFailed - nbOfPassed - nbOfNmi - nbOfNa); wrStatistics.addTestStatistics(testStatistics); } return wrStatistics; } /** * This method extracts a collection of themes for a given audit * * @return */ private void extractThemeAndCriterionSet() { themeMap = new HashMap<Theme, Integer>(); criterionMap = new HashMap<Criterion, Integer>(); for (Test test : testSet) { //Collect criterions given the set of tests for the audit, and keep // the number of tests for each criterion (needed to calculate the // not tested Criterion criterion = test.getCriterion(); if (criterionMap.containsKey(criterion)) { Integer testCounter = criterionMap.get(criterion) + 1; criterionMap.put(criterion, testCounter); } else { criterionMap.put(criterion, Integer.valueOf(1)); } //Collect themes given the set of tests for the audit, and keep // the number of tests for each criterion (needed to calculate the // not tested Theme theme = criterion.getTheme(); if (themeMap.containsKey(theme)) { Integer testCounter = themeMap.get(theme) + 1; themeMap.put(theme, testCounter); } else { themeMap.put(theme, Integer.valueOf(1)); } } } /** * This method extracts a collection of tests for a given audit * * @return */ private void extractTestSet(boolean extractThemeAndCriterion) { testSet = new HashSet<Test>(); testSet.addAll(auditDataService.getAuditWithTest(this.audit.getId()).getTestList()); if (extractThemeAndCriterion) { extractThemeAndCriterionSet(); } } /** * This computation is based on the priority of the results : - priority 1 : * Failed - priority 2 : NMI - priority 3 : Not Tested - priority 4 : Passed * - priority 5 : NA * * If at least one of the result type is found regarding the priority * definition, the criterion result is the result type * * @param crs * @param criterionTestListSize */ private void computeCriterionResult(CriterionStatistics crs) { if (crs.getNbOfFailed() > 0) { // at least one test is failed, the criterion is failed crs.setCriterionResult(TestSolution.FAILED); } else if (crs.getNbOfNmi() > 0) { // at least one test is nmi and no failed test encountered, the criterion is nmi crs.setCriterionResult(TestSolution.NEED_MORE_INFO); } else if (crs.getNbOfNotTested() > 0) { crs.setCriterionResult(TestSolution.NOT_TESTED); } else if 
(crs.getNbOfPassed() > 0) { crs.setCriterionResult(TestSolution.PASSED); } else if (crs.getNbOfNa() > 0) { crs.setCriterionResult(TestSolution.NOT_APPLICABLE); } else { crs.setCriterionResult(TestSolution.NEED_MORE_INFO); } } /** * Some tests may have not ProcessResult, but are needed to be displayed as * not tested test. For each test without ProcessResult, we create a new * ProcessResult with NOT_TESTED as the result. * * @param testList * @param themeCode * @param netResultList * @return */ private Collection<ProcessResult> getProcessResultWithNotTested( Collection<Test> testList, Collection<ProcessResult> netResultList) { Collection<Test> testedTestList = new ArrayList<Test>(); for (ProcessResult pr : netResultList) { testedTestList.add(pr.getTest()); } Collection<ProcessResult> fullProcessResultList = new ArrayList<ProcessResult>(); fullProcessResultList.addAll(netResultList); for (Test test : testList) { // if the test has no ProcessResult and its theme is part of the user // selection, a NOT_TESTED result ProcessRemark is created if (!testedTestList.contains(test)) { ProcessResult pr = processResultFactory.createDefiniteResult(); pr.setTest(test); pr.setValue(TestSolution.NOT_TESTED); fullProcessResultList.add(pr); } } return fullProcessResultList; } }
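The mark formula documented on computeMark above is easier to check with concrete numbers. The sketch below is an illustrative, standalone restatement of that formula; it is not part of AnalyserImpl and the sample counters are hypothetical.

public class MarkFormulaExample {
    // Same formula as AnalyserImpl#computeMark, expressed over plain counters.
    static float mark(float passed, float failed, float needMoreInfo) {
        if (failed == 0 && passed == 0) {
            return 0f;
        }
        float ratioNMI = needMoreInfo / (passed + failed + needMoreInfo);
        return ((1 - ratioNMI) * passed / (passed + failed)
                + ratioNMI * needMoreInfo / (passed + failed + needMoreInfo)) * 100f;
    }

    public static void main(String[] args) {
        // 8 passed, 2 failed, 0 NMI: ratioNMI = 0, mark = 8 / 10 * 100 = 80.0
        System.out.println(mark(8, 2, 0));
        // 6 passed, 2 failed, 2 NMI: ratioNMI = 0.2, mark = (0.8 * 0.75 + 0.2 * 0.2) * 100 = 64.0
        System.out.println(mark(6, 2, 2));
    }
}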
package com.fishercoder.solutions; import java.util.ArrayList; import java.util.List; /** * 216. Combination Sum III * * Find all possible combinations of k numbers that add up to a number n, * given that only numbers from 1 to 9 can be used and each combination should be a unique set of numbers. Example 1: Input: k = 3, n = 7 Output: [[1,2,4]] Example 2: Input: k = 3, n = 9 Output: [[1,2,6], [1,3,5], [2,3,4]]*/ public class _216 { public List<List<Integer>> combinationSum3(int k, int n) { List<List<Integer>> result = new ArrayList(); int[] nums = new int[]{1, 2, 3, 4, 5, 6, 7, 8, 9}; backtracking(k, n, nums, 0, new ArrayList(), result); return result; } void backtracking(int k, int n, int[] nums, int start, List<Integer> curr, List<List<Integer>> result) { if (n > 0) { for (int i = start; i < nums.length; i++) { curr.add(nums[i]); backtracking(k, n - nums[i], nums, i + 1, curr, result); curr.remove(curr.size() - 1); } } else if (n == 0 && curr.size() == k) { result.add(new ArrayList(curr)); } } }
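A short hypothetical driver for the class above (not part of the original file; assumes the same package). The expected outputs are the two examples given in the Javadoc.

public class _216Demo {
    public static void main(String[] args) {
        _216 solution = new _216();
        System.out.println(solution.combinationSum3(3, 7)); // [[1, 2, 4]]
        System.out.println(solution.combinationSum3(3, 9)); // [[1, 2, 6], [1, 3, 5], [2, 3, 4]]
    }
}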
package com.fishercoder.solutions; import java.util.Arrays; import java.util.List; public class _315 { public static class Solution1 { class Node { int val; int sum; int dup = 1; Node left; Node right; public Node(int v, int s) { this.val = v; this.sum = s; } } public List<Integer> countSmaller(int[] nums) { Integer[] ans = new Integer[nums.length]; Node root = null; for (int i = nums.length - 1; i >= 0; i--) { root = insertNode(nums[i], root, i, 0, ans); } return Arrays.asList(ans); } Node insertNode(int val, Node node, int i, int prevSum, Integer[] ans) { if (node == null) { node = new Node(val, 0); ans[i] = prevSum; } else if (val == node.val) { node.dup += 1; ans[i] = prevSum + node.sum; } else if (val > node.val) { node.right = insertNode(val, node.right, i, prevSum + node.sum + node.dup, ans); } else { node.sum += 1; node.left = insertNode(val, node.left, i, prevSum, ans); } return node; } } }
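A hypothetical driver for the binary-search-tree solution above (not part of the original file; assumes the same package). The input and expected counts are the standard example for this problem, not taken from the file itself.

public class _315Demo {
    public static void main(String[] args) {
        _315.Solution1 solution = new _315.Solution1();
        // counts of smaller elements to the right of each position
        System.out.println(solution.countSmaller(new int[]{5, 2, 6, 1})); // [2, 1, 1, 0]
    }
}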
package com.fishercoder.solutions; public class _357 { public static class Solution1 { public int countNumbersWithUniqueDigits(int n) { if (n == 0) { return 1; } int res = 10; int uniqueDigits = 9; int availableNumber = 9; while (n-- > 1 && availableNumber > 0) { uniqueDigits = uniqueDigits * availableNumber; res += uniqueDigits; availableNumber--; } return res; } } }
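A hypothetical driver showing the arithmetic the loop above performs (not part of the original file; assumes the same package): starting from 10 one-digit numbers, each extra digit multiplies the count of new unique-digit numbers by the remaining digit choices.

public class _357Demo {
    public static void main(String[] args) {
        _357.Solution1 solution = new _357.Solution1();
        System.out.println(solution.countNumbersWithUniqueDigits(0)); // 1
        System.out.println(solution.countNumbersWithUniqueDigits(2)); // 91  = 10 + 9 * 9
        System.out.println(solution.countNumbersWithUniqueDigits(3)); // 739 = 91 + 9 * 9 * 8
    }
}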
package com.fishercoder.solutions; import java.util.Stack; public class _394 { public static class Solution1 { public String decodeString(String s) { Stack<Integer> count = new Stack<>(); Stack<String> str = new Stack<>(); int idx = 0; str.push(""); while (idx < s.length()) { if (s.charAt(idx) >= '0' && s.charAt(idx) <= '9') { int start = idx; while (s.charAt(idx + 1) >= '0' && s.charAt(idx + 1) <= '9') { idx++; } count.push(Integer.parseInt(s.substring(start, idx + 1))); } else if (s.charAt(idx) == '[') { str.push(""); } else if (s.charAt(idx) == ']') { String st = str.pop(); StringBuilder sb = new StringBuilder(); int n = count.pop(); for (int j = 0; j < n; j++) { sb.append(st); } str.push(str.pop() + sb.toString()); } else { str.push(str.pop() + s.charAt(idx)); } idx++; } return str.pop(); } } }
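A hypothetical driver for the stack-based decoder above (not part of the original file; assumes the same package). The expected strings follow from tracing the count and string stacks by hand.

public class _394Demo {
    public static void main(String[] args) {
        _394.Solution1 solution = new _394.Solution1();
        System.out.println(solution.decodeString("3[a]2[bc]")); // aaabcbc
        System.out.println(solution.decodeString("3[a2[c]]"));  // accaccacc
    }
}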
package com.fishercoder.solutions; import java.util.Arrays; public class _908 { public static class Solution1 { public int smallestRangeI(int[] A, int K) { Arrays.sort(A); int smallestPlus = A[0] + K; int biggestMinus = A[A.length - 1] - K; int diff = biggestMinus - smallestPlus; if (diff > 0) { return diff; } else { return 0; } } } public static class Solution2 { public int smallestRangeI(int[] A, int K) { int min = A[0]; int max = A[0]; for (int k : A) { min = Math.min(min, k); max = Math.max(max, k); } return Math.max(max - min - 2 * K, 0); } } }
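A hypothetical driver comparing the two solutions above on the same inputs (not part of the original file; assumes the same package). Both should agree, since max(max - min - 2K, 0) is also what the sort-based version computes.

public class _908Demo {
    public static void main(String[] args) {
        int[] a = {0, 10};
        int k = 2;
        System.out.println(new _908.Solution1().smallestRangeI(a, k)); // 6
        System.out.println(new _908.Solution2().smallestRangeI(a, k)); // 6
        System.out.println(new _908.Solution2().smallestRangeI(new int[]{1}, 0)); // 0
    }
}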
package org.apache.felix.ipojo.everest.core; import org.apache.felix.ipojo.annotations.*; import org.apache.felix.ipojo.everest.impl.DefaultReadOnlyResource; import org.apache.felix.ipojo.everest.managers.everest.EverestRootResource; import org.apache.felix.ipojo.everest.services.*; import org.osgi.service.event.Event; import org.osgi.service.event.EventAdmin; import java.util.*; /** * Everest Core. */ @Component @Instantiate @Provides(specifications = EverestService.class) public class Everest extends DefaultReadOnlyResource implements EverestService { /** * The system property used to send events synchronously. */ public static final String SYNCHRONOUS_PROCESSING = "everest.processing.synchronous"; /** * The system property used to dump requests to System.out. */ public static final String DEBUG_REQUEST = "everest.debug.request"; private Map<Path, Resource> resources = new LinkedHashMap<Path, Resource>(); private List<ResourceExtender> extenders = new ArrayList<ResourceExtender>(); public Everest() { super(Path.from("/")); // Add the everest domain resources.put(Path.from("/everest"), new EverestRootResource(this)); } /** * The EventAdmin service, or {@code null} if it's not present. */ private static volatile EventAdmin eventAdmin; @Bind(optional = true, aggregate = true) public void bindRootResource(Resource resource) { synchronized (this) { resources.put(resource.getCanonicalPath(), resource); } } @Unbind public void unbindRootResource(Resource resource) { synchronized (this) { resources.remove(resource.getCanonicalPath()); } } @Bind(optional = true, aggregate = true) public void bindExtender(ResourceExtender extender) { synchronized (this) { extenders.add(extender); } } @Unbind public void unbindExtender(ResourceExtender extender) { synchronized (this) { extenders.remove(extender); } } public synchronized Map<Path, Resource> getEverestResources() { return new TreeMap<Path, Resource>(resources); } public synchronized List<Resource> getResources() { return new ArrayList<Resource>(resources.values()); } public synchronized List<ResourceExtender> getExtenders() { return new ArrayList<ResourceExtender>(extenders); } public Resource process(Request request) throws IllegalActionOnResourceException, ResourceNotFoundException { // We can't extend when the original action fails. Resource result = super.process(request); // Extensions // We must update the resulted resource with the extensions for (ResourceExtender extender : getExtenders()) { if (extender.getFilter().accept(result)) { result = extender.extend(request, result); } } return result; } @Bind(optional = true, proxy = false) public void bindEventAdmin(EventAdmin ea) { eventAdmin = ea; } @Unbind(optional = true, proxy = false) public void unbindEventAdmin(EventAdmin ea) { eventAdmin = null; } /** * Post (asynchronously) the state of the given resource. * <p> * The topic of the sent event is the complete canonical path of the resource ({@code /everest/...}). * </p> * * @param eventType type of posted resource event * @param resource concerned resource * @return true if event is posted to event admin, else false. 
*/ public static boolean postResource(ResourceEvent eventType, Resource resource) { EventAdmin ea = eventAdmin; if (ea == null || !resource.isObservable()) { return false; } Map<String, Object> map = new LinkedHashMap<String, Object>(); map.put("eventType", eventType.toString()); map.put("canonicalPath", resource.getCanonicalPath().toString()); map.put("metadata", resource.getMetadata()); map.put("relations", resource.getRelations()); Event e = new Event(topicFromPath(resource.getCanonicalPath()), map); String mode = System.getProperty(SYNCHRONOUS_PROCESSING); try { if (mode != null && mode.equalsIgnoreCase("true")) { // Sync mode ea.sendEvent(e); } else { // Async mode (default) ea.postEvent(e); } } catch (SecurityException ex) { return false; } catch (IllegalStateException ex) { // The EventAdmin may be shutting down... return false; } return true; } /** * Transforms a path to event admin topic * * @param path resource path * @return topic string */ public static String topicFromPath(Path path) { String pathString = path.toString(); pathString = "everest".concat(pathString); pathString = pathString.replaceAll("\\.", "-"); return pathString; } }
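An illustrative fragment showing the topic naming used by postResource above. The concrete path is hypothetical, and this assumes Path.toString() yields the canonical slash-separated path (as Path.from("/") in the constructor suggests).

// e.g. inside some component that already has a resource path at hand:
Path examplePath = Path.from("/ipojo/instance/my.component");
String topic = Everest.topicFromPath(examplePath);
// topic is "everest/ipojo/instance/my-component": the path is prefixed with "everest"
// and '.' is replaced by '-', presumably because dots are not valid in EventAdmin topic tokens.
System.out.println(topic);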
package com.forana.please; import java.io.IOException; import java.io.InputStream; import java.io.PrintStream; import java.util.Arrays; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.TreeMap; import org.apache.http.Header; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.impl.client.CloseableHttpClient; import org.codehaus.jackson.JsonNode; import org.codehaus.jackson.map.ObjectMapper; import com.forana.please.exceptions.HTTPResponseException; /** * Object symbolizing an HTTP response. * * @author forana */ public class HTTPResponse { private final CloseableHttpResponse response; private final CloseableHttpClient client; /** * Should not be called directly. * * @param response The response that this wraps. */ protected HTTPResponse(CloseableHttpResponse response, CloseableHttpClient client) { this.response = response; this.client = client; } /** * Closes the response. Should be called if there is a body whose contents are being streamed. * * @throws IOException */ public void close() throws IOException { response.close(); client.close(); } @Override protected final void finalize() throws Throwable { try { close(); } finally { super.finalize(); } } /** * Retrieve the status code of the response. */ public int getStatus() { return response.getStatusLine().getStatusCode(); } /** * Retrieve the textual status (reason) for the response. */ public String getStatusText() { return response.getStatusLine().getReasonPhrase(); } /** * Convenience helper that checks if the response is within the 200 (acceptable) range. */ public boolean isOk() { return getStatus() >= 200 && getStatus() < 300; } /** * Retrieves the content-type of the response body (mime), or null if that header isn't set. */ public String getContentType() { Header header = response.getEntity().getContentType(); return header == null ? null : header.getValue(); } /** * Convenience helper that checks if the response has a body. */ public boolean hasBody() { return response.getEntity() != null; } /** * Retrieve the body as an {@link java.io.InputStream}. The user is then responsible for closing * the stream when finished. * * @throws HTTPResponseException If there's a general I/O error. */ public InputStream getBody() throws HTTPResponseException { try { return response.getEntity().getContent(); } catch (IOException e) { throw new HTTPResponseException(e); } } /** * Retrieve the body as a <code>byte[]</code>. The stream will be closed automatically. * * This method will not be spectacularly performant - if you're willing to add a third-party * dependency, look at commons-io's <code>IOUtils.toByteArray(InputStream)</code>. * * @throws HTTPResponseException If there's a general I/O error. */ public byte[] getBytes() throws HTTPResponseException { try { InputStream stream = getBody(); List<byte[]> byteArrays = new LinkedList<>(); final int CHUNK_SIZE = 1024; int lastRead = -1; int totalSize = 0; do { byte[] buffer = new byte[CHUNK_SIZE]; lastRead = stream.read(buffer); if (lastRead != -1) { totalSize += lastRead; byteArrays.add(Arrays.copyOf(buffer, lastRead)); } } while (lastRead > 0); stream.close(); byte[] result = new byte[totalSize]; int i = 0; for (byte[] array : byteArrays) { for (byte b : array) { result[i] = b; i++; } } return result; } catch (IOException e) { throw new HTTPResponseException(e); } } /** * Convenience method to return the body as a string. 
* * @throws HTTPResponseException */ public String getBodyText() throws HTTPResponseException { byte[] bytes = getBytes(); return new String(bytes); } /** * Retrieve the JSON body as a {@link org.codehaus.jackson.JsonNode}. * * @throws HTTPResponseException If there's an error reading or parsing the body. */ public JsonNode getJSON() throws HTTPResponseException { try { InputStream stream = getBody(); ObjectMapper mapper = new ObjectMapper(); return mapper.readTree(mapper.getJsonFactory() .createJsonParser(stream)); } catch (IOException e) { throw new HTTPResponseException(e); } } /** * Retrieve the value of a header in the response if it's set, or <code>null</code> if it's not. * * If there's more than one header with that name, the first will be returned. */ public String getHeader(String name) { Header header = response.getFirstHeader(name); return header == null ? null : header.getValue(); } /** * Retrieve all headers in the response. */ public Map<String, String> getHeaders() { Map<String, String> headers = new TreeMap<>(); // so that entrySet() is ordered for (Header header : response.getAllHeaders()) { headers.put(header.getName(), header.getValue()); } return headers; } /** * Print the status, reason, and headers to System.out. Useful for debugging. * * @return this */ public HTTPResponse dump() { return dump(System.out); } /** * Print the status, reason, and headers. Useful for debugging. * * @return this */ public HTTPResponse dump(PrintStream out) { out.println(String.format("HTTP %d: %s", getStatus(), getStatusText())); for (Map.Entry<String, String> entry : getHeaders().entrySet()) { out.println(String.format("%s: %s", entry.getKey(), entry.getValue())); } return this; } }
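A hypothetical handling fragment for an already-obtained HTTPResponse (how the response is created is library-specific and not shown in this file, so the response parameter is simply assumed to exist).

void handle(HTTPResponse response) throws IOException, HTTPResponseException {
    response.dump(); // print status line and headers, useful while debugging
    if (!response.isOk()) {
        response.close();
        throw new IllegalStateException("Request failed with status " + response.getStatus());
    }
    if (response.hasBody() && "application/json".equals(response.getContentType())) {
        System.out.println(response.getJSON()); // parsed org.codehaus.jackson.JsonNode
    } else if (response.hasBody()) {
        System.out.println(response.getBodyText());
    }
    response.close(); // releases the underlying connection and client
}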
package org.intermine.util; import java.io.OutputStream; import java.io.PrintStream; import java.util.List; import java.util.Iterator; import java.util.StringTokenizer; /** * Utility methods for dealing with text files. * * @author Kim Rutherford */ public abstract class TextFileUtil { /** * Write a list of lists using tab characters to delimit the fields. * @param os the OutputStream to write to * @param listOfLists the table to write * @param columnOrder the real order of the column in the output - a map from the column index * in the output to the column index in the listOfLists * @param columnVisible an array mapping from columns in listOfLists to their visibility * @param maxRows the maximum number of rows to output - read only range 0..maxRows-1 from * listOfLists */ public static void writeTabDelimitedTable(OutputStream os, List listOfLists, int [] columnOrder, boolean [] columnVisible, int maxRows) { writeDelimitedTable(os, listOfLists, columnOrder, columnVisible, maxRows, '\t'); } /** * Write a list of lists using comma characters to delimit the fields. * @param os the OutputStream to write to * @param listOfLists the table to write * @param columnOrder the real order of the column in the output - a map from the column index * in the output to the column index in the listOfLists * @param columnVisible an array mapping from columns in listOfLists to their visibility * @param maxRows the maximum number of rows to output - read only range 0..maxRows-1 from * listOfLists */ public static void writeCSVTable(OutputStream os, List listOfLists, int [] columnOrder, boolean [] columnVisible, int maxRows) { writeDelimitedTable(os, listOfLists, columnOrder, columnVisible, maxRows, ','); } /** * Write a list of lists using the given delimiter character to delimit the fields. * @param os the OutputStream to write to * @param listOfLists the table to write * @param columnOrder the real order of the column in the output - a map from the column index * in the output to the column index in the listOfLists * @param columnVisible an array mapping from columns in listOfLists to their visibility * @param delimiter the character to use to separate the fields in the output * @param maxRows the maximum number of rows to output - read only range 0..maxRows-1 from * listOfLists */ public static void writeDelimitedTable(OutputStream os, List listOfLists, int [] columnOrder, boolean [] columnVisible, int maxRows, char delimiter) { PrintStream printStream = new PrintStream(os); String delimiters = "" + delimiter; // a count of the columns that are invisble - used to get the correct columnIndex int invisibleColumns = 0; for (int columnIndex = 0; columnIndex < columnVisible.length; columnIndex++) { if (!columnVisible[columnIndex]) { invisibleColumns++; } } int rowCount = 0; Iterator rowIterator = listOfLists.iterator(); while (rowIterator.hasNext()) { if (rowCount == maxRows) { break; } List row = (List) rowIterator.next(); for (int columnIndex = 0; columnIndex < row.size(); columnIndex++) { int realColumnIndex = columnOrder[columnIndex]; Object o = row.get(realColumnIndex); if (!columnVisible[columnIndex]) { continue; } if (o instanceof Number) { writeUnQuoted(printStream, o); } else { writeQuoted(printStream, o); } if (columnIndex < row.size () - 1 - invisibleColumns) { printStream.print(delimiter); } } printStream.println(); rowCount++; } printStream.flush(); } private static final String QUOTE = "\""; /** * Write an Object as a String to an OutputStream with quoting. 
Any double-quote characters * are quoted by doubling them. The output will be surrounded by double quotes. * i.e. fred"eric -> "fred""eric" * @param printStream the PrintStream to write to * @param o the Object to write */ public static void writeQuoted(PrintStream printStream, Object o) { // don't use toString() in case o is null String objectString = "" + o; StringBuffer buffer = new StringBuffer(); final StringTokenizer tokeniser = new StringTokenizer (objectString, QUOTE, true); buffer.append(QUOTE); while (tokeniser.hasMoreTokens ()) { final String tokenValue = tokeniser.nextToken (); if (tokenValue.equals(QUOTE)) { // quotes are quoted by doubling buffer.append(tokenValue); buffer.append(tokenValue); } else { buffer.append(tokenValue); } } buffer.append('"'); printStream.print(buffer); } /** * Write an Object as a String to an OutputStream without quoting special characters * @param printStream the PrintStream to write to * @param o the Object to write */ public static void writeUnQuoted(PrintStream printStream, Object o) { printStream.print(o.toString()); } }
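A small hypothetical driver for the table writer above (not part of the original file; package and import details are assumed). It hides the middle column and mixes strings with a number to show the quoting rules.

import java.util.Arrays;
import java.util.List;
import org.intermine.util.TextFileUtil;

public class TextFileUtilDemo {
    public static void main(String[] args) {
        // two rows, three columns; the middle column is marked invisible
        List listOfLists = Arrays.asList(
                Arrays.<Object>asList("alpha", "hidden", 1),
                Arrays.<Object>asList("be\"ta", "hidden", 2));
        int[] columnOrder = {0, 1, 2};               // output columns in their natural order
        boolean[] columnVisible = {true, false, true};
        // prints:
        //   "alpha",1
        //   "be""ta",2
        // strings are quoted (internal quotes doubled), numbers are written unquoted
        TextFileUtil.writeCSVTable(System.out, listOfLists, columnOrder, columnVisible, 10);
    }
}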
package brooklyn.demo; import javax.jms.Connection; import javax.jms.MessageConsumer; import javax.jms.Queue; import javax.jms.Session; import javax.jms.TextMessage; import org.apache.qpid.client.AMQConnectionFactory; import org.apache.qpid.configuration.ClientProperties; import com.google.common.base.Preconditions; import com.google.common.base.Throwables; /** Receives messages from a queue on a Qpid broker at a given URL. */ public class Subscribe { public static final String QUEUE = "'amq.direct'/'testQueue'; { node: { type: queue } }"; private static final long MESSAGE_TIMEOUT_MILLIS = 15000L; private static final int MESSAGE_COUNT = 100; public static void main(String...argv) throws Exception { Preconditions.checkElementIndex(0, argv.length, "Must specify broker URL"); String url = argv[0]; // Set Qpid client properties System.setProperty(ClientProperties.AMQP_VERSION, "0-10"); System.setProperty(ClientProperties.DEST_SYNTAX, "ADDR"); // Connect to the broker AMQConnectionFactory factory = new AMQConnectionFactory(url); Connection connection = factory.createConnection(); connection.start(); Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE); System.out.printf("Waiting up to %s milliseconds to receive %s messages\n", MESSAGE_TIMEOUT_MILLIS, MESSAGE_COUNT); try { // Create a consumer for the queue Queue destination = session.createQueue(QUEUE); MessageConsumer messageConsumer = session.createConsumer(destination); // Try to receive MESSAGE_COUNT messages for (int n = 0; n < MESSAGE_COUNT; n++) { TextMessage msg = (TextMessage) messageConsumer.receive(MESSAGE_TIMEOUT_MILLIS); if (msg == null) { System.out.printf("No message received in %s milliseconds, exiting", MESSAGE_TIMEOUT_MILLIS); break; } System.out.printf("Got message %d: '%s'\n", n+1, msg.getText()); } } catch (Exception e) { System.err.printf("Error while receiving - %s\n", e.getMessage()); System.err.printf("Cause: %s\n", Throwables.getStackTraceAsString(e)); } finally { session.close(); connection.close(); } } }
package de.roamingthings.expenses.user.domain; import lombok.Data; import lombok.EqualsAndHashCode; import lombok.NoArgsConstructor; import lombok.ToString; import org.hibernate.validator.constraints.NotEmpty; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.Id; /** * @author Alexander Sparkowsky [info@roamingthings.de] * @version 2017/05/07 */ @Entity @Data @NoArgsConstructor @ToString @EqualsAndHashCode public class Role { @Id @GeneratedValue private Long id; @NotEmpty @Column(nullable = false, unique = true) private String role; public Role(String role) { this.role = role; } }
package com.hearthsim.util; import java.util.ArrayList; import java.util.HashMap; import java.util.Random; import java.util.function.Predicate; import com.hearthsim.card.Card; import com.hearthsim.card.Deck; import com.hearthsim.card.ImplementedCardList; import com.hearthsim.card.ImplementedCardList.ImplementedCard; /** * This class provides a mechanism for generating random decks. * * @author dyllonmgagnier * */ public class DeckFactory { private ArrayList<ImplementedCard> cards; private boolean limitCopies; private Random gen; /** * This method initializes a new DeckFactory. * * @param filter * Any card for which this returns true will be removed from the * potential card pool. * @param limitCopies * If true, then any deck will contain no more than two copies of * any card and no more than one copy of any legendary. */ protected DeckFactory(Predicate<ImplementedCard> filter, boolean limitCopies) { cards = ImplementedCardList.getInstance().getCardList(); cards.removeIf(filter); this.limitCopies = limitCopies; gen = new Random(); } /** * This method generates a new random deck as specified by the builder. The * decks are completely random so shuffling is unnecessary. * * @return */ public Deck generateRandomDeck() { Card[] result = new Card[30]; if (limitCopies) { HashMap<ImplementedCard, Integer> cardsInDeck = new HashMap<ImplementedCard, Integer>(); for (int i = 0; i < 30; i++) { ImplementedCard toAdd; // Keep going until a card is found that can be added to the // deck. while (true) { toAdd = cards.get(gen.nextInt(cards.size())); if (!cardsInDeck.containsKey(toAdd)) { cardsInDeck.put(toAdd, 1); break; } else if (cardsInDeck.get(toAdd).equals(1) && !toAdd.rarity_.equals("legendary")) { cardsInDeck.put(toAdd, 2); break; } } result[i] = toAdd.createCardInstance(); } } else { for (int i = 0; i < 30; i++) { result[i] = cards.get(gen.nextInt(cards.size())) .createCardInstance(); } } return new Deck(result); } /** * This class builds a DeckFactory and allows for various options to be * selected for the factory. * * @author dyllonmgagnier * */ public static class DeckFactoryBuilder { private Predicate<ImplementedCard> filter; private boolean limitCopies; private boolean allowUncollectible; /** * Constructs the default builder which does not allow for uncollectible * cards and will limit the number of copies of any card to no more than * two and limits the number of copies of any particular legendary to no * more than one. */ public DeckFactoryBuilder() { filter = (card) -> false; limitCopies = true; allowUncollectible = false; } /** * Limits the card pool to only those specified by the given * rarities. * * @param rarities */ public void filterByRarity(String... rarities) { filter = filter.or((card) -> { boolean result = true; for (String rarity : rarities) result = result && !card.rarity_.equals(rarity); return result; }); } /** * Only select cards usable by the input character class (i.e. warlock, * priest, mage, rogue, etc.). * * @param characterClass * The class to filter by. */ public void filterByHero(String characterClass) { filter = filter.or((card) -> !characterClass.equals(card.charClass_)); } /** * This method allows for uncollectible cards to be in the card pool. */ public void allowUncollectible() { allowUncollectible = true; } /** * This method generates a DeckFactory based on the previously selected * options. * * @return A DeckFactory limited by the various options. 
*/ public DeckFactory createDeckFactory() { if (!allowUncollectible) filter = filter.or((card) -> !card.collectible); return new DeckFactory(filter, limitCopies); } /** * This method only allows for cards between the minimum and maximum * mana cost. * * @param minimumCost * The minimum mana cost allowed. * @param maximumCost * The maximum mana cost allowed. */ public void filterByManaCost(int minimumCost, int maximumCost) { filter = filter.or((card) -> card.mana_ < minimumCost || card.mana_ > maximumCost); } /** * This method allows for unlimited copies of cards to be used (i.e. * like in Arena). */ public void allowUnlimitedCopiesOfCards() { limitCopies = false; } } }
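A hypothetical usage sketch for the builder above (not part of the original sources). The hero and mana-cost values are illustrative; whatever ImplementedCardList actually contains determines the real card pool, and the class name passed to filterByHero must match its naming.

import com.hearthsim.card.Deck;
import com.hearthsim.util.DeckFactory;

public class DeckFactoryDemo {
    public static void main(String[] args) {
        DeckFactory.DeckFactoryBuilder builder = new DeckFactory.DeckFactoryBuilder();
        builder.filterByHero("mage");   // keep only cards this hero can use
        builder.filterByManaCost(1, 6); // drop zero-cost and very expensive cards
        DeckFactory factory = builder.createDeckFactory();
        Deck deck = factory.generateRandomDeck(); // 30 cards, at most 2 copies each (1 per legendary)
        System.out.println(deck);
    }
}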
package com.librato.metrics; import java.util.HashMap; import java.util.Map; /** * Contains the status of a batch */ public class PostResult { private final Integer statusCode; private final Exception exception; private final Map<String,Object> data = new HashMap<String, Object>(); private final String response; public PostResult(Map<String, Object> chunk, int statusCode, String response) { this.data.putAll(chunk); this.statusCode = statusCode; this.exception = null; this.response = response; } public PostResult(Map<String, Object> chunk, Exception e) { this.data.putAll(chunk); this.exception = e; this.statusCode = null; this.response = null; } public boolean success() { return statusCode != null && statusCode / 100 == 2; } public Integer getStatusCode() { return statusCode; } public Exception getException() { return exception; } public Map<String, Object> getData() { return data; } public String getResponse() { return response; } @Override public String toString() { return "PostResult{" + "statusCode=" + statusCode + ", exception=" + exception + ", data=" + data + ", response='" + response + '\'' + '}'; } }
package com.mad.backend.models; import javax.persistence.*; @Entity public class Product { @Id @GeneratedValue(strategy = GenerationType.AUTO) @Column(nullable = false) private Integer id; @Column(nullable = false) private Integer price; @Column(nullable = false) private Integer quantity; @ManyToOne(fetch = FetchType.LAZY) @JoinColumn(name = "supplier_id", nullable = false) private Supplier supplier; @Column(nullable = false) private String name; @Column(nullable = false) private String description; public Product() { // JPA constructor } public Integer getId() { return id; } public void setId(Integer id) { this.id = id; } public Integer getPrice() { return price; } public void setPrice(Integer price) { this.price = price; } public Integer getQuantity() { return quantity; } public void setQuantity(Integer quantity) { this.quantity = quantity; } public Supplier getSupplier() { return supplier; } public void setSupplier(Supplier supplier) { this.supplier = supplier; } public String getName() { return name; } public void setName(String name) { this.name = name; } public String getDescription() { return description; } public void setDescription(String description) { this.description = description; } }
package com.nerodesk.takes.doc; import com.jcabi.log.Logger; import com.nerodesk.om.Base; import com.nerodesk.om.Doc; import com.nerodesk.takes.RqDisposition; import com.nerodesk.takes.RqUser; import java.io.IOException; import java.io.InputStream; import org.takes.Request; import org.takes.Response; import org.takes.Take; import org.takes.facets.flash.RsFlash; import org.takes.facets.forward.RsForward; import org.takes.rq.RqLengthAware; import org.takes.rq.RqMultipart; /** * Write file content. * * @author Yegor Bugayenko (yegor@teamed.io) * @version $Id$ * @since 0.2 * @todo #151:30min This class should work with a batch of documents, not just * with a single document. Use the new interface Batch in place of the current * Doc interface. */ final class TkWrite implements Take { /** * Base. */ private final transient Base base; /** * Ctor. * @param bse Base */ TkWrite(final Base bse) { this.base = bse; } @Override public Response act(final Request req) throws IOException { final RqMultipart multi = new RqMultipart.Base(req); Logger.info( this, "%d bytes received and parsed", new RqLengthAware(req).body().available() ); final Request part = multi.part("file").iterator().next(); final Doc doc = new RqUser(req, this.base).user().docs().doc( new RqDisposition(part).filename() ); final InputStream body = part.body(); doc.write(body, body.available()); return new RsForward(new RsFlash("file uploaded")); } }
package edu.umd.cs.findbugs.ba.type; import java.util.Collections; import java.util.IdentityHashMap; import java.util.List; import java.util.Set; import org.apache.bcel.Constants; import org.apache.bcel.classfile.Attribute; import org.apache.bcel.classfile.Field; import org.apache.bcel.classfile.Signature; import org.apache.bcel.generic.*; import edu.umd.cs.findbugs.OpcodeStack.Item; import edu.umd.cs.findbugs.ba.AbstractFrameModelingVisitor; import edu.umd.cs.findbugs.ba.AnalysisContext; import edu.umd.cs.findbugs.ba.DataflowAnalysisException; import edu.umd.cs.findbugs.ba.Debug; import edu.umd.cs.findbugs.ba.FieldSummary; import edu.umd.cs.findbugs.ba.Hierarchy; import edu.umd.cs.findbugs.ba.Hierarchy2; import edu.umd.cs.findbugs.ba.InvalidBytecodeException; import edu.umd.cs.findbugs.ba.ObjectTypeFactory; import edu.umd.cs.findbugs.ba.SignatureParser; import edu.umd.cs.findbugs.ba.XField; import edu.umd.cs.findbugs.ba.XMethod; import edu.umd.cs.findbugs.ba.ch.Subtypes2; import edu.umd.cs.findbugs.ba.generic.GenericObjectType; import edu.umd.cs.findbugs.ba.generic.GenericSignatureParser; import edu.umd.cs.findbugs.ba.generic.GenericUtilities; import edu.umd.cs.findbugs.ba.vna.ValueNumber; import edu.umd.cs.findbugs.ba.vna.ValueNumberDataflow; import edu.umd.cs.findbugs.ba.vna.ValueNumberFrame; import edu.umd.cs.findbugs.util.Util; /** * Visitor to model the effects of bytecode instructions on the types of the * values (local and operand stack) in Java stack frames. This visitor does not * verify that the types are sensible for the bytecodes executed. In other * words, this isn't a bytecode verifier, although it wouldn't be too hard to * turn it into something vaguely verifier-like. * * @author David Hovemeyer * @see TypeFrame * @see TypeAnalysis */ public class TypeFrameModelingVisitor extends AbstractFrameModelingVisitor<Type, TypeFrame> implements Constants, Debug { static private final ObjectType COLLECTION_TYPE = ObjectTypeFactory.getInstance("java.util.Collection"); private ValueNumberDataflow valueNumberDataflow; // Fields for precise modeling of instanceof instructions. private boolean instanceOfFollowedByBranch; private ReferenceType instanceOfType; private ValueNumber instanceOfValueNumber; private FieldSummary fieldSummary; private FieldStoreTypeDatabase database; private Set<ReferenceType> typesComputedFromGenerics = Util.newSetFromMap(new IdentityHashMap<ReferenceType, Boolean>()); /** * Constructor. * * @param cpg * the ConstantPoolGen of the method whose instructions we are * examining * @param typesComputerFromGenerics TODO */ public TypeFrameModelingVisitor(ConstantPoolGen cpg) { super(cpg); fieldSummary = AnalysisContext.currentAnalysisContext().getFieldSummary(); } /** * Set ValueNumberDataflow for the method being analyzed. This is optional; * if set, we will use the information to more accurately model the effects * of instanceof instructions. * * @param valueNumberDataflow * the ValueNumberDataflow */ public void setValueNumberDataflow(ValueNumberDataflow valueNumberDataflow) { this.valueNumberDataflow = valueNumberDataflow; } /** * Return whether an instanceof instruction was followed by a branch. The * TypeAnalysis may use this to get more precise types in the resulting * frame. * * @return true if an instanceof instruction was followed by a branch, false * if not */ public boolean isInstanceOfFollowedByBranch() { return instanceOfFollowedByBranch; } /** * Get the type of the most recent instanceof instruction modeled. 
The * TypeAnalysis may use this to get more precise types in the resulting * frame. * * @return the Type checked by the most recent instanceof instruction */ public Type getInstanceOfType() { return instanceOfType; } /** * Get the value number of the most recent instanceof instruction modeled. * The TypeAnalysis may use this to get more precise types in the resulting * frame. * * @return the ValueNumber checked by the most recent instanceof instruction */ public ValueNumber getInstanceOfValueNumber() { return instanceOfValueNumber; } /** * Set the field store type database. We can use this to get more accurate * types for values loaded from fields. * * @param database * the FieldStoreTypeDatabase */ public void setFieldStoreTypeDatabase(FieldStoreTypeDatabase database) { this.database = database; } @Override public Type getDefaultValue() { return TypeFrame.getBottomType(); } boolean sawEffectiveInstanceOf; boolean previousWasEffectiveInstanceOf; @Override public void analyzeInstruction(Instruction ins) throws DataflowAnalysisException { instanceOfFollowedByBranch = false; sawEffectiveInstanceOf = false; super.analyzeInstruction(ins); previousWasEffectiveInstanceOf = sawEffectiveInstanceOf; } /** * This method must be called at the beginning of modeling a basic block in * order to clear information cached for instanceof modeling. */ public void startBasicBlock() { instanceOfType = null; instanceOfValueNumber = null; } /** * Consume stack. This is a convenience method for instructions where the * types of popped operands can be ignored. */ protected void consumeStack(Instruction ins) { ConstantPoolGen cpg = getCPG(); TypeFrame frame = getFrame(); int numWordsConsumed = ins.consumeStack(cpg); if (numWordsConsumed == Constants.UNPREDICTABLE) throw new InvalidBytecodeException("Unpredictable stack consumption for " + ins); try { while (numWordsConsumed-- > 0) { frame.popValue(); } } catch (DataflowAnalysisException e) { throw new InvalidBytecodeException("Stack underflow for " + ins + ": " + e.getMessage()); } } /** * Work around some weirdness in BCEL (inherited from JVM Spec 1): BCEL * considers long and double types to consume two slots on the stack. This * method ensures that we push two types for each double or long value. */ protected void pushValue(Type type) { if (type.getType() == T_VOID) throw new IllegalArgumentException("Can't push void"); TypeFrame frame = getFrame(); if (type.getType() == T_LONG) { frame.pushValue(Type.LONG); frame.pushValue(TypeFrame.getLongExtraType()); } else if (type.getType() == T_DOUBLE) { frame.pushValue(Type.DOUBLE); frame.pushValue(TypeFrame.getDoubleExtraType()); } else frame.pushValue(type); } /** * Helper for pushing the return type of an invoke instruction. */ protected void pushReturnType(InvokeInstruction ins) { ConstantPoolGen cpg = getCPG(); Type type = ins.getType(cpg); if (type.getType() != T_VOID) pushValue(type); } /** * This is overridden only to ensure that we don't rely on the base class to * handle instructions that produce stack operands. */ @Override public void modelNormalInstruction(Instruction ins, int numWordsConsumed, int numWordsProduced) { if (VERIFY_INTEGRITY) { if (numWordsProduced > 0) throw new InvalidBytecodeException("missing visitor method for " + ins); } super.modelNormalInstruction(ins, numWordsConsumed, numWordsProduced); } // Instruction visitor methods // NOTES: // - Instructions that only consume operands need not be overridden, // because the base class visit methods handle them correctly.
// - Instructions that simply move values around in the frame, // such as DUP, xLOAD, etc., do not need to be overridden because // the base class handles them. // - Instructions that consume and produce should call // consumeStack(Instruction) and then explicitly push produced operands. @Override public void visitATHROW(ATHROW obj) { // do nothing. The same value remains on the stack (but we jump to a new // location) } @Override public void visitACONST_NULL(ACONST_NULL obj) { pushValue(TypeFrame.getNullType()); } @Override public void visitDCONST(DCONST obj) { pushValue(Type.DOUBLE); } @Override public void visitFCONST(FCONST obj) { pushValue(Type.FLOAT); } @Override public void visitICONST(ICONST obj) { pushValue(Type.INT); } @Override public void visitLCONST(LCONST obj) { pushValue(Type.LONG); } @Override public void visitLDC(LDC obj) { pushValue(obj.getType(getCPG())); } @Override public void visitLDC2_W(LDC2_W obj) { pushValue(obj.getType(getCPG())); } @Override public void visitBIPUSH(BIPUSH obj) { pushValue(Type.INT); } @Override public void visitSIPUSH(SIPUSH obj) { pushValue(Type.INT); } @Override public void visitGETSTATIC(GETSTATIC obj) { modelFieldLoad(obj); } @Override public void visitGETFIELD(GETFIELD obj) { modelFieldLoad(obj); } public void modelFieldLoad(FieldInstruction obj) { consumeStack(obj); Type loadType = obj.getType(getCPG()); Type originalLoadType = loadType; try { // Check the field store type database to see if we can // get a more precise type for this load. XField xfield = Hierarchy.findXField(obj, getCPG()); if (xfield != null) { if (database != null && (loadType instanceof ReferenceType)) { FieldStoreType property = database.getProperty(xfield.getFieldDescriptor()); if (property != null) { loadType = property.getLoadType((ReferenceType) loadType); } } Item summary = fieldSummary.getSummary(xfield); if (xfield.isFinal() && summary.isNull()) { pushValue(TypeFrame.getNullType()); return; } if (loadType == originalLoadType && summary != null && !summary.getSignature().equals("Ljava/lang/Object;")) { loadType = Type.getType(summary.getSignature()); } // [Added: Support for Generics] // XXX If the loadType was not changed by the // FieldStoreTypeDatabase, then // we can assume, that the signature for obj is still relevant. 
// This should // be updated by inserting generic information in the // FieldStoreTypeDatabase // find the field and its signature Field field = Hierarchy.findField(xfield.getClassName(), xfield.getName()); String signature = null; for (Attribute a : field.getAttributes()) { if (a instanceof Signature) { signature = ((Signature) a).getSignature(); break; } } // replace loadType with information from field signature // (conservative) if (signature != null && (loadType instanceof ObjectType)) { loadType = GenericUtilities.merge(GenericUtilities.getType(signature), (ObjectType) loadType); } } } catch (ClassNotFoundException e) { AnalysisContext.reportMissingClass(e); } catch (RuntimeException e) { } // degrade gracefully pushValue(loadType); } @Override public void visitINVOKESTATIC(INVOKESTATIC obj) { String methodName = obj.getMethodName(cpg); String signature = obj.getSignature(cpg); String className = obj.getClassName(cpg); if (methodName.equals("asList") && className.equals("java.util.Arrays") && signature.equals("([Ljava/lang/Object;)Ljava/util/List;")) { consumeStack(obj); Type returnType = Type.getType("Ljava/util/Arrays$ArrayList;"); pushValue(returnType); return; } consumeStack(obj); pushReturnType(obj); } @Override public void visitINVOKESPECIAL(INVOKESPECIAL obj) { consumeStack(obj); pushReturnType(obj); } @Override public void visitINVOKEINTERFACE(INVOKEINTERFACE obj) { visitInvokeInstructionCommon(obj); } @Override public void visitINVOKEVIRTUAL(INVOKEVIRTUAL obj) { visitInvokeInstructionCommon(obj); } private boolean getResultTypeFromGenericType(TypeFrame frame, int index, int expectedParameters) { try { Type mapType = frame.getStackValue(0); if (mapType instanceof GenericObjectType) { GenericObjectType genericMapType = (GenericObjectType) mapType; List<? extends ReferenceType> parameters = genericMapType.getParameters(); if (parameters != null && parameters.size() == expectedParameters) { ReferenceType resultType = parameters.get(index); if (resultType instanceof GenericObjectType) resultType = ((GenericObjectType)resultType).produce(); typesComputedFromGenerics.add(resultType); frame.popValue(); frame.pushValue(resultType); return true; } } } catch (DataflowAnalysisException e) { AnalysisContext.logError("oops", e); } return false; } private boolean handleGetMapView(TypeFrame frame, String typeName, int index, int expectedNumberOfTypeParameters) { try { Type mapType = frame.getStackValue(0); if (mapType instanceof GenericObjectType) { GenericObjectType genericMapType = (GenericObjectType) mapType; List<? 
extends ReferenceType> parameters = genericMapType.getParameters(); if (parameters == null) return false; if (parameters.size() == expectedNumberOfTypeParameters) { ReferenceType keyType = parameters.get(index); frame.popValue(); typesComputedFromGenerics.add(keyType); GenericObjectType keySetType = GenericUtilities.getType(typeName, Collections.singletonList(keyType)); typesComputedFromGenerics.add(keySetType); frame.pushValue(keySetType); return true; } } } catch (DataflowAnalysisException e) { AnalysisContext.logError("oops", e); } return false; } public void visitInvokeInstructionCommon(InvokeInstruction obj) { TypeFrame frame = getFrame(); String methodName = obj.getMethodName(cpg); String signature = obj.getSignature(cpg); String className = obj.getClassName(cpg); String returnValueSignature = new SignatureParser(signature).getReturnTypeSignature(); if (returnValueSignature.equals("V")) { consumeStack(obj); return; } if (methodName.equals("cast") && className.equals("java.lang.Class")) { try { Type resultType = frame.popValue(); frame.popValue(); frame.pushValue(resultType); } catch (DataflowAnalysisException e) { AnalysisContext.logError("oops", e); } return; } if (methodName.equals("get") && signature.equals("(Ljava/lang/Object;)Ljava/lang/Object;") && className.endsWith("Map")) { try { Type mapType = frame.getStackValue(1); if (mapType instanceof GenericObjectType) { GenericObjectType genericMapType = (GenericObjectType) mapType; List<? extends ReferenceType> parameters = genericMapType.getParameters(); if (parameters != null && parameters.size() == 2) { ReferenceType valueType = parameters.get(1); consumeStack(obj); frame.pushValue(valueType); return; } } } catch (DataflowAnalysisException e) { AnalysisContext.logError("oops", e); } } if (className.equals("java.util.Map$Entry")) if (methodName.equals("getKey") && getResultTypeFromGenericType(frame, 0, 2) || methodName.equals("getValue") && getResultTypeFromGenericType(frame, 1, 2)) return; if (methodName.equals("entrySet") && signature.equals("()Ljava/util/Set;") && className.startsWith("java.util") && className.endsWith("Map")) { Type argType; try { argType = frame.popValue(); } catch (DataflowAnalysisException e) { AnalysisContext.logError("oops", e); return; } ObjectType mapType = (ObjectType) Type.getType("Ljava/util/Map$Entry;"); if (argType instanceof GenericObjectType) { GenericObjectType genericArgType = (GenericObjectType) argType; List<? 
extends ReferenceType> parameters = genericArgType.getParameters(); if (parameters != null && parameters.size() == 2) mapType = GenericUtilities.getType("java.util.Map$Entry", parameters); } GenericObjectType entrySetType = GenericUtilities.getType("java.util.Set", Collections.singletonList(mapType)); frame.pushValue(entrySetType); return; } if (className.startsWith("java.util") && className.endsWith("Map")) if (methodName.equals("keySet") && signature.equals("()Ljava/util/Set;") && handleGetMapView(frame, "java.util.Set", 0,2 ) || methodName.equals("values") && signature.equals("()Ljava/util/Collection;") && handleGetMapView(frame, "java.util.Collection", 1,2 )) return; if (methodName.equals("iterator") && signature.equals("()Ljava/util/Iterator;") && className.startsWith("java.util") && handleGetMapView(frame, "java.util.Iterator", 0,1 )) return; if (className.equals("java.util.Iterator") &&methodName.equals("next") && signature.equals("()Ljava/lang/Object;") && getResultTypeFromGenericType(frame, 0, 1)) return; if (methodName.equals("isInstance")) { if (className.equals("java.lang.Class") && valueNumberDataflow != null) { // Record the value number of the value checked by this // instruction, // and the type the value was compared to. try { ValueNumberFrame vnaFrame = valueNumberDataflow.getFactAtLocation(getLocation()); if (vnaFrame.isValid()) { ValueNumber stackValue = vnaFrame.getStackValue(1); if (stackValue.hasFlag(ValueNumber.CONSTANT_CLASS_OBJECT)) { String c = valueNumberDataflow.getClassName(stackValue); if (c != null) { if (c.charAt(0) != '[' && !c.endsWith(";")) c = "L" + c.replace('.', '/') + ";"; Type type = Type.getType(c); if (type instanceof ReferenceType) { instanceOfValueNumber = vnaFrame.getTopValue(); instanceOfType = (ReferenceType) type; sawEffectiveInstanceOf = true; } } } } } catch (DataflowAnalysisException e) { // Ignore } } } if (methodName.equals("initCause") && signature.equals("(Ljava/lang/Throwable;)Ljava/lang/Throwable;") && className.endsWith("Exception")) { try { frame.popValue(); return; } catch (DataflowAnalysisException e) { AnalysisContext.logError("Ooops", e); } } if (handleToArray(obj)) return; try { Set<XMethod> targets = Hierarchy2.resolveMethodCallTargets(obj, frame, cpg); if (targets.size() == 1) { XMethod m = targets.iterator().next(); XMethod m2 = m.bridgeTo(); if (m2 != null) m = m2; if (m.getSourceSignature() != null) { GenericSignatureParser p = new GenericSignatureParser(m.getSourceSignature()); String rv = p.getReturnTypeSignature(); if (rv.charAt(0) != 'T') { Type t = GenericUtilities.getType(rv); consumeStack(obj); assert t.getType() != T_VOID; pushValue(t); return; } } SignatureParser p = new SignatureParser(m.getSignature()); String rv = p.getReturnTypeSignature(); Type t = Type.getType(rv); consumeStack(obj); assert t.getType() != T_VOID; pushValue(t); return; } for(XMethod m : targets) { if (m.getSourceSignature() != null) { GenericSignatureParser p = new GenericSignatureParser(m.getSourceSignature()); String rv = p.getReturnTypeSignature(); if (rv.charAt(0) != 'T') { Type t = GenericUtilities.getType(rv); consumeStack(obj); pushValue(t); return; } } } } catch (DataflowAnalysisException e) { AnalysisContext.logError("Ooops", e); } catch (ClassNotFoundException e) { AnalysisContext.logError("Ooops", e); } consumeStack(obj); pushReturnType(obj); } private boolean handleToArray(InvokeInstruction obj) { try { TypeFrame frame = getFrame(); Type topValue = frame.getTopValue(); if (obj.getName(getCPG()).equals("toArray")) { ReferenceType 
target = obj.getReferenceType(getCPG()); String signature = obj.getSignature(getCPG()); if (signature.equals("([Ljava/lang/Object;)[Ljava/lang/Object;") && isCollection(target)) { boolean topIsExact = frame.isExact(frame.getStackLocation(0)); Type resultType = frame.popValue(); frame.popValue(); frame.pushValue(resultType); frame.setExact(frame.getStackLocation(0), topIsExact); return true; } else if (signature.equals("()[Ljava/lang/Object;") && isCollection(target) && !topValue.getSignature().equals("Ljava/util/Arrays$ArrayList;")) { consumeStack(obj); pushReturnType(obj); frame.setExact(frame.getStackLocation(0), true); return true; } } return false; } catch (DataflowAnalysisException e) { return false; } catch (ClassNotFoundException e) { AnalysisContext.reportMissingClass(e); return false; } } @Override public void handleStoreInstruction(StoreInstruction obj) { int numConsumed = obj.consumeStack(cpg); if (numConsumed == 1) { try { boolean isExact = isTopOfStackExact(); TypeFrame frame = getFrame(); int index = obj.getIndex(); Type value = frame.popValue(); frame.setValue(index, value); frame.setExact(index, isExact); } catch (DataflowAnalysisException e) { throw new InvalidBytecodeException(e.toString()); } } else super.handleStoreInstruction(obj); } /** * Handler for all instructions which load values from a local variable and * push them on the stack. Note that two locals are loaded for long and * double loads. */ @Override public void handleLoadInstruction(LoadInstruction obj) { int numProduced = obj.produceStack(cpg); if (numProduced == Constants.UNPREDICTABLE) throw new InvalidBytecodeException("Unpredictable stack production"); if (numProduced != 1) { super.handleLoadInstruction(obj); return; } int index = obj.getIndex(); TypeFrame frame = getFrame(); Type value = frame.getValue(index); boolean isExact = frame.isExact(index); frame.pushValue(value); if (isExact) setTopOfStackIsExact(); } private boolean isCollection(ReferenceType target) throws ClassNotFoundException { if (Subtypes2.ENABLE_SUBTYPES2) { Subtypes2 subtypes2 = AnalysisContext.currentAnalysisContext().getSubtypes2(); return subtypes2.isSubtype(target, COLLECTION_TYPE); } else { return target.isAssignmentCompatibleWith(COLLECTION_TYPE); } } @Override public void visitCHECKCAST(CHECKCAST obj) { try { Type t = getFrame().popValue(); if (t instanceof NullType) pushValue(t); else pushValue(obj.getType(getCPG())); } catch (DataflowAnalysisException e) { throw new InvalidBytecodeException("Stack underflow for " + obj + ": " + e.getMessage()); } } @Override public void visitINSTANCEOF(INSTANCEOF obj) { if (valueNumberDataflow != null) { // Record the value number of the value checked by this instruction, // and the type the value was compared to. try { ValueNumberFrame vnaFrame = valueNumberDataflow.getFactAtLocation(getLocation()); if (vnaFrame.isValid()) { final Type type = obj.getType(getCPG()); if (type instanceof ReferenceType) { instanceOfValueNumber = vnaFrame.getTopValue(); instanceOfType = (ReferenceType) type; sawEffectiveInstanceOf = true; } } } catch (DataflowAnalysisException e) { // Ignore } } consumeStack(obj); pushValue(Type.INT); } @Override public void visitIFNULL(IFNULL obj) { if (valueNumberDataflow != null) { // Record the value number of the value checked by this instruction, // and the type the value was compared to. 
try { ValueNumberFrame vnaFrame = valueNumberDataflow.getFactAtLocation(getLocation()); if (vnaFrame.isValid()) { instanceOfValueNumber = vnaFrame.getTopValue(); instanceOfType = NullType.instance(); instanceOfFollowedByBranch = true; } } catch (DataflowAnalysisException e) { // Ignore } } consumeStack(obj); } @Override public void visitIFNONNULL(IFNONNULL obj) { if (valueNumberDataflow != null) { // Record the value number of the value checked by this instruction, // and the type the value was compared to. try { ValueNumberFrame vnaFrame = valueNumberDataflow.getFactAtLocation(getLocation()); if (vnaFrame.isValid()) { instanceOfValueNumber = vnaFrame.getTopValue(); instanceOfType = NullType.instance(); instanceOfFollowedByBranch = true; } } catch (DataflowAnalysisException e) { // Ignore } } consumeStack(obj); } @Override public void visitFCMPL(FCMPL obj) { consumeStack(obj); pushValue(Type.INT); } @Override public void visitFCMPG(FCMPG obj) { consumeStack(obj); pushValue(Type.INT); } @Override public void visitDCMPL(DCMPL obj) { consumeStack(obj); pushValue(Type.INT); } @Override public void visitDCMPG(DCMPG obj) { consumeStack(obj); pushValue(Type.INT); } @Override public void visitLCMP(LCMP obj) { consumeStack(obj); pushValue(Type.INT); } @Override public void visitD2F(D2F obj) { consumeStack(obj); pushValue(Type.FLOAT); } @Override public void visitD2I(D2I obj) { consumeStack(obj); pushValue(Type.INT); } @Override public void visitD2L(D2L obj) { consumeStack(obj); pushValue(Type.LONG); } @Override public void visitF2D(F2D obj) { consumeStack(obj); pushValue(Type.DOUBLE); } @Override public void visitF2I(F2I obj) { consumeStack(obj); pushValue(Type.INT); } @Override public void visitF2L(F2L obj) { consumeStack(obj); pushValue(Type.LONG); } @Override public void visitI2B(I2B obj) { consumeStack(obj); pushValue(Type.BYTE); } @Override public void visitI2C(I2C obj) { consumeStack(obj); pushValue(Type.CHAR); } @Override public void visitI2D(I2D obj) { consumeStack(obj); pushValue(Type.DOUBLE); } @Override public void visitI2F(I2F obj) { consumeStack(obj); pushValue(Type.FLOAT); } @Override public void visitI2L(I2L obj) { consumeStack(obj); pushValue(Type.LONG); } @Override public void visitI2S(I2S obj) { } // no change @Override public void visitL2D(L2D obj) { consumeStack(obj); pushValue(Type.DOUBLE); } @Override public void visitL2F(L2F obj) { consumeStack(obj); pushValue(Type.FLOAT); } @Override public void visitL2I(L2I obj) { consumeStack(obj); pushValue(Type.INT); } @Override public void visitIAND(IAND obj) { consumeStack(obj); pushValue(Type.INT); } @Override public void visitLAND(LAND obj) { consumeStack(obj); pushValue(Type.LONG); } @Override public void visitIOR(IOR obj) { consumeStack(obj); pushValue(Type.INT); } @Override public void visitLOR(LOR obj) { consumeStack(obj); pushValue(Type.LONG); } @Override public void visitIXOR(IXOR obj) { consumeStack(obj); pushValue(Type.INT); } @Override public void visitLXOR(LXOR obj) { consumeStack(obj); pushValue(Type.LONG); } @Override public void visitISHR(ISHR obj) { consumeStack(obj); pushValue(Type.INT); } @Override public void visitIUSHR(IUSHR obj) { consumeStack(obj); pushValue(Type.INT); } @Override public void visitLSHR(LSHR obj) { consumeStack(obj); pushValue(Type.LONG); } @Override public void visitLUSHR(LUSHR obj) { consumeStack(obj); pushValue(Type.LONG); } @Override public void visitISHL(ISHL obj) { consumeStack(obj); pushValue(Type.INT); } @Override public void visitLSHL(LSHL obj) { consumeStack(obj); pushValue(Type.LONG); } 
@Override public void visitDADD(DADD obj) { consumeStack(obj); pushValue(Type.DOUBLE); } @Override public void visitFADD(FADD obj) { consumeStack(obj); pushValue(Type.FLOAT); } @Override public void visitIADD(IADD obj) { consumeStack(obj); pushValue(Type.INT); } @Override public void visitLADD(LADD obj) { consumeStack(obj); pushValue(Type.LONG); } @Override public void visitDSUB(DSUB obj) { consumeStack(obj); pushValue(Type.DOUBLE); } @Override public void visitDUP(DUP obj) { try { TypeFrame frame = getFrame(); boolean isExact = isTopOfStackExact(); Type value = frame.popValue(); frame.pushValue(value); if (isExact) setTopOfStackIsExact(); frame.pushValue(value); if (isExact) setTopOfStackIsExact(); } catch (DataflowAnalysisException e) { throw new InvalidBytecodeException(e.toString()); } } @Override public void visitFSUB(FSUB obj) { consumeStack(obj); pushValue(Type.FLOAT); } @Override public void visitISUB(ISUB obj) { consumeStack(obj); pushValue(Type.INT); } @Override public void visitLSUB(LSUB obj) { consumeStack(obj); pushValue(Type.LONG); } @Override public void visitDMUL(DMUL obj) { consumeStack(obj); pushValue(Type.DOUBLE); } @Override public void visitFMUL(FMUL obj) { consumeStack(obj); pushValue(Type.FLOAT); } @Override public void visitIMUL(IMUL obj) { consumeStack(obj); pushValue(Type.INT); } @Override public void visitLMUL(LMUL obj) { consumeStack(obj); pushValue(Type.LONG); } @Override public void visitDDIV(DDIV obj) { consumeStack(obj); pushValue(Type.DOUBLE); } @Override public void visitFDIV(FDIV obj) { consumeStack(obj); pushValue(Type.FLOAT); } @Override public void visitIDIV(IDIV obj) { consumeStack(obj); pushValue(Type.INT); } @Override public void visitLDIV(LDIV obj) { consumeStack(obj); pushValue(Type.LONG); } @Override public void visitDREM(DREM obj) { consumeStack(obj); pushValue(Type.DOUBLE); } @Override public void visitFREM(FREM obj) { consumeStack(obj); pushValue(Type.FLOAT); } @Override public void visitIREM(IREM obj) { consumeStack(obj); pushValue(Type.INT); } @Override public void visitLREM(LREM obj) { consumeStack(obj); pushValue(Type.LONG); } @Override public void visitIINC(IINC obj) { } // no change to types of stack or locals @Override public void visitDNEG(DNEG obj) { } // no change @Override public void visitFNEG(FNEG obj) { } // no change @Override public void visitINEG(INEG obj) { consumeStack(obj); pushValue(Type.INT); } @Override public void visitLNEG(LNEG obj) { } // no change @Override public void visitARRAYLENGTH(ARRAYLENGTH obj) { consumeStack(obj); pushValue(Type.INT); } @Override public void visitAALOAD(AALOAD obj) { // To determine the type pushed on the stack, // we look at the type of the array reference which was // popped off of the stack. 
TypeFrame frame = getFrame(); try { frame.popValue(); // index Type arrayType = frame.popValue(); // arrayref if (arrayType instanceof ArrayType) { ArrayType arr = (ArrayType) arrayType; pushValue(arr.getElementType()); } else { pushValue(TypeFrame.getBottomType()); } } catch (DataflowAnalysisException e) { throw new InvalidBytecodeException("Stack underflow: " + e.getMessage()); } } @Override public void visitBALOAD(BALOAD obj) { consumeStack(obj); pushValue(Type.BYTE); } @Override public void visitCALOAD(CALOAD obj) { consumeStack(obj); pushValue(Type.CHAR); } @Override public void visitDALOAD(DALOAD obj) { consumeStack(obj); pushValue(Type.DOUBLE); } @Override public void visitFALOAD(FALOAD obj) { consumeStack(obj); pushValue(Type.FLOAT); } @Override public void visitIALOAD(IALOAD obj) { consumeStack(obj); pushValue(Type.INT); } @Override public void visitLALOAD(LALOAD obj) { consumeStack(obj); pushValue(Type.LONG); } @Override public void visitSALOAD(SALOAD obj) { consumeStack(obj); pushValue(Type.SHORT); } // The various xASTORE instructions only consume stack. @Override public void visitNEW(NEW obj) { // FIXME: type is technically "uninitialized" // However, we don't model that yet. pushValue(obj.getType(getCPG())); // We now have an exact type for this value. setTopOfStackIsExact(); } @Override public void visitNEWARRAY(NEWARRAY obj) { consumeStack(obj); Type elementType = obj.getType(); pushValue(elementType); // We now have an exact type for this value. setTopOfStackIsExact(); } @Override public void visitANEWARRAY(ANEWARRAY obj) { consumeStack(obj); Type elementType = obj.getType(getCPG()); pushValue(new ArrayType(elementType, 1)); // We now have an exact type for this value. setTopOfStackIsExact(); } @Override public void visitMULTIANEWARRAY(MULTIANEWARRAY obj) { consumeStack(obj); Type elementType = obj.getType(getCPG()); pushValue(elementType); // We now have an exact type for this value. setTopOfStackIsExact(); } private void setTopOfStackIsExact() { TypeFrame frame = getFrame(); frame.setExact(frame.getNumSlots() - 1, true); } private boolean isTopOfStackExact() { TypeFrame frame = getFrame(); return frame.isExact(frame.getNumSlots() - 1); } @Override public void visitJSR(JSR obj) { pushValue(ReturnaddressType.NO_TARGET); } @Override public void visitJSR_W(JSR_W obj) { pushValue(ReturnaddressType.NO_TARGET); } @Override public void visitRET(RET obj) { } // no change @Override public void visitIFEQ(IFEQ obj) { if (previousWasEffectiveInstanceOf) instanceOfFollowedByBranch = true; super.visitIFEQ(obj); } @Override public void visitIFGT(IFGT obj) { if (previousWasEffectiveInstanceOf) instanceOfFollowedByBranch = true; super.visitIFGT(obj); } @Override public void visitIFLE(IFLE obj) { if (previousWasEffectiveInstanceOf) instanceOfFollowedByBranch = true; super.visitIFLE(obj); } @Override public void visitIFNE(IFNE obj) { if (previousWasEffectiveInstanceOf) instanceOfFollowedByBranch = true; super.visitIFNE(obj); } public boolean isImpliedByGenericTypes(ReferenceType t) { return typesComputedFromGenerics.contains(t); } } // vim:ts=4
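// --- Simplified illustration (not FindBugs code) ---
// TypeFrameModelingVisitor above models each bytecode's effect on a frame of
// types: constants push their type, conversions replace the top entry, and
// binary operations consume two operands and push a result type. This sketch
// shows the same push/pop discipline on a plain stack, ignoring locals,
// two-slot long/double values and the exact-type tracking done above.
import java.util.ArrayDeque;
import java.util.Deque;

class TypeStackSketch {
    enum SimpleType { INT, LONG, FLOAT, DOUBLE, REFERENCE }

    public static void main(String[] args) {
        Deque<SimpleType> stack = new ArrayDeque<SimpleType>();

        stack.push(SimpleType.INT);    // ICONST_1: push an int constant
        stack.push(SimpleType.INT);    // ICONST_2: push another int constant

        stack.pop();                   // IADD: consume two ints...
        stack.pop();
        stack.push(SimpleType.INT);    // ...and produce one int

        stack.pop();                   // I2D: consume the int...
        stack.push(SimpleType.DOUBLE); // ...and produce a double

        System.out.println(stack.peek()); // DOUBLE
    }
}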
package com.ociweb.gl.impl; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.locks.ReentrantLock; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.ociweb.gl.api.ArgumentParser; import com.ociweb.gl.api.Behavior; import com.ociweb.gl.api.Builder; import com.ociweb.gl.api.ClientHostPortInstance; import com.ociweb.gl.api.GreenCommandChannel; import com.ociweb.gl.api.HTTPClientConfig; import com.ociweb.gl.api.HTTPRequestReader; import com.ociweb.gl.api.HTTPServerConfig; import com.ociweb.gl.api.ListenerTransducer; import com.ociweb.gl.api.MsgCommandChannel; import com.ociweb.gl.api.MsgRuntime; import com.ociweb.gl.api.NetResponseWriter; import com.ociweb.gl.api.TelemetryConfig; import com.ociweb.gl.api.TimeTrigger; import com.ociweb.gl.api.transducer.HTTPResponseListenerTransducer; import com.ociweb.gl.api.transducer.PubSubListenerTransducer; import com.ociweb.gl.api.transducer.RestListenerTransducer; import com.ociweb.gl.api.transducer.StateChangeListenerTransducer; import com.ociweb.gl.impl.http.client.HTTPClientConfigImpl; import com.ociweb.gl.impl.http.server.HTTPResponseListenerBase; import com.ociweb.gl.impl.http.server.HTTPServerConfigImpl; import com.ociweb.gl.impl.mqtt.MQTTConfigImpl; import com.ociweb.gl.impl.schema.IngressMessages; import com.ociweb.gl.impl.schema.MessagePrivate; import com.ociweb.gl.impl.schema.MessagePubSub; import com.ociweb.gl.impl.schema.MessageSubscription; import com.ociweb.gl.impl.schema.TrafficAckSchema; import com.ociweb.gl.impl.schema.TrafficOrderSchema; import com.ociweb.gl.impl.schema.TrafficReleaseSchema; import com.ociweb.gl.impl.stage.HTTPClientRequestTrafficStage; import com.ociweb.gl.impl.stage.MessagePubSubStage; import com.ociweb.gl.impl.stage.ReactiveListenerStage; import com.ociweb.gl.impl.stage.ReactiveManagerPipeConsumer; import com.ociweb.gl.impl.stage.ReactiveOperators; import com.ociweb.gl.impl.stage.TrafficCopStage; import com.ociweb.gl.impl.telemetry.TelemetryConfigImpl; import com.ociweb.json.JSONExtractorCompleted; import com.ociweb.pronghorn.network.ClientCoordinator; import com.ociweb.pronghorn.network.NetGraphBuilder; import com.ociweb.pronghorn.network.TLSCertificates; import com.ociweb.pronghorn.network.config.HTTPContentTypeDefaults; import com.ociweb.pronghorn.network.config.HTTPHeaderDefaults; import com.ociweb.pronghorn.network.config.HTTPRevisionDefaults; import com.ociweb.pronghorn.network.config.HTTPSpecification; import com.ociweb.pronghorn.network.config.HTTPVerbDefaults; import com.ociweb.pronghorn.network.http.CompositePath; import com.ociweb.pronghorn.network.http.HTTP1xRouterStageConfig; import com.ociweb.pronghorn.network.schema.ClientHTTPRequestSchema; import com.ociweb.pronghorn.network.schema.HTTPRequestSchema; import com.ociweb.pronghorn.network.schema.NetPayloadSchema; import com.ociweb.pronghorn.network.schema.NetResponseSchema; import com.ociweb.pronghorn.network.schema.ServerResponseSchema; import com.ociweb.pronghorn.pipe.DataInputBlobReader; import com.ociweb.pronghorn.pipe.DataOutputBlobWriter; import com.ociweb.pronghorn.pipe.MessageSchema; import com.ociweb.pronghorn.pipe.Pipe; import com.ociweb.pronghorn.pipe.PipeConfig; import com.ociweb.pronghorn.pipe.PipeConfigManager; import com.ociweb.pronghorn.pipe.PipeWriter; import 
com.ociweb.pronghorn.pipe.util.hash.IntHashTable; import com.ociweb.pronghorn.stage.PronghornStage; import com.ociweb.pronghorn.stage.route.ReplicatorStage; import com.ociweb.pronghorn.stage.scheduling.GraphManager; import com.ociweb.pronghorn.stage.scheduling.StageScheduler; import com.ociweb.pronghorn.util.Appendables; import com.ociweb.pronghorn.util.Blocker; import com.ociweb.pronghorn.util.TrieParser; import com.ociweb.pronghorn.util.TrieParserReader; public class BuilderImpl implements Builder { protected static final int MINIMUM_TLS_BLOB_SIZE = 1<<15; protected long timeTriggerRate; protected long timeTriggerStart; private Blocker channelBlocker; public final GraphManager gm; public final ArgumentParser args; private int threadLimit = -1; private boolean threadLimitHard = false; private boolean hasPrivateTopicsChecked = false; private boolean isAllPrivateTopics = false; private static final int DEFAULT_LENGTH = 16; protected static final long MS_TO_NS = 1_000_000; private static final Logger logger = LoggerFactory.getLogger(BuilderImpl.class); public final PipeConfigManager pcm = new PipeConfigManager(); public Enum<?> beginningState; private int parallelismTracks = 1;//default is one private static final int BehaviorMask = 1<<31;//high bit on //all non shutdown listening reactors will be shutdown only after the listeners have finished. public AtomicInteger liveShutdownListeners = new AtomicInteger(); public AtomicInteger totalLiveReactors = new AtomicInteger(); public AtomicBoolean shutdownRequsted = new AtomicBoolean(false); public Runnable lastCall; //TODO: group these into an object for ReactiveListenerStage to use... ///Pipes for initial startup declared subscriptions. (Not part of graph) //TODO: should be zero unless startup is used. private final int maxStartupSubs = 256; //TODO: make a way to adjust this outside??? private final int maxTopicLengh = 128; private Pipe<MessagePubSub> tempPipeOfStartupSubscriptions; private long defaultSleepRateNS = 5_000;// should normally be between 900 and 20_000; private final int shutdownTimeoutInSeconds = 1; protected ReentrantLock devicePinConfigurationLock = new ReentrantLock(); private MQTTConfigImpl mqtt = null; private HTTPServerConfigImpl server = null; private TelemetryConfigImpl telemetry = null; private HTTPClientConfigImpl client = null; private ClientCoordinator ccm; protected int IDX_MSG = -1; protected int IDX_NET = -1; //These topics are enforced so that they only go from one producer to a single consumer //No runtime subscriptions can pick them up //They do not go though the public router //private topics never share a pipe so the topic is not sent on the pipe only the payload //private topics are very performant and much more secure than pub/sub. //private topics have their own private pipe and can not be "imitated" by public messages //WARNING: private topics do not obey traffic cops and allow for immediate communications. 
//private String[] privateTopics = null; ///gather and store the server module pipes private ArrayList<Pipe<HTTPRequestSchema>>[][] collectedHTTPRequstPipes; private ArrayList<Pipe<ServerResponseSchema>>[] collectedServerResponsePipes; //support for REST modules and routing public final HTTPSpecification<HTTPContentTypeDefaults, HTTPRevisionDefaults, HTTPVerbDefaults, HTTPHeaderDefaults> httpSpec = HTTPSpecification.defaultSpec(); private HTTP1xRouterStageConfig<HTTPContentTypeDefaults, HTTPRevisionDefaults, HTTPVerbDefaults, HTTPHeaderDefaults> routerConfig; public void usePrivateTopicsExclusively() { if (hasPrivateTopicsChecked) { throw new UnsupportedOperationException("Must set in declare configuration section before startup"); } isAllPrivateTopics = true; } public boolean isAllPrivateTopics() { hasPrivateTopicsChecked = true; return isAllPrivateTopics; } public final ReactiveOperators operators; private final Set<String> behaviorNames = new HashSet<String>(); //will throw if a duplicate stage name is detected. public String validateUniqueName(String behaviorName, int trackId) { String fullName = behaviorName; //confirm stage name is not found.. if (behaviorNames.contains(behaviorName)) { throw new UnsupportedOperationException("Duplicate name detected: "+behaviorName); } if (trackId>=0) { fullName = behaviorName+"."+trackId; //additional check for name+"."+trackId if (behaviorNames.contains(fullName)) { throw new UnsupportedOperationException("Duplicate name detected: "+fullName); } //add the name+"."+name behaviorNames.add(fullName);//never add the root since we are watching that no one else did. } else { //add the stage name behaviorNames.add(behaviorName); } return fullName; } private IntHashTable netPipeLookup = new IntHashTable(7);//Initial default size public void registerHTTPClientId(int routeId, int pipeIdx) { if ( (IntHashTable.count(netPipeLookup)<<1) >= IntHashTable.size(netPipeLookup) ) { //must grow first since we are adding many entries netPipeLookup = IntHashTable.doubleSize(netPipeLookup); } //TODO: netPipeLookup is the entry point for JSON extraction?? // we need to stroe extracor so its done when we do the lookup. 
boolean addedItem = IntHashTable.setItem(netPipeLookup, routeId, pipeIdx); if (!addedItem) { logger.warn("The route {} has already been assigned to a listener and can not be assigned to another.\n" + "Check that each HTTP Client consumer does not share an Id with any other.",routeId); } } public int lookupHTTPClientPipe(int routeId) { return IntHashTable.getItem(netPipeLookup, routeId); } public int pubSubIndex() { return IDX_MSG; } public int netIndex() { return IDX_NET; } public void releasePubSubTraffic(int count, MsgCommandChannel<?> gcc) { MsgCommandChannel.publishGo(count, IDX_MSG, gcc); } public ClientCoordinator getClientCoordinator() { return ccm; } @Override public HTTPServerConfig useHTTP1xServer(int bindPort) { if (server != null) throw new RuntimeException("Server already enabled"); this.server = new HTTPServerConfigImpl(bindPort,pcm); server.beginDeclarations(); return server; } public final HTTPServerConfig getHTTPServerConfig() { return this.server; } public int behaviorId(Behavior b) { return BehaviorMask | System.identityHashCode(b); } public final HTTP1xRouterStageConfig<HTTPContentTypeDefaults, HTTPRevisionDefaults, HTTPVerbDefaults, HTTPHeaderDefaults> routerConfig() { if (null==routerConfig) { routerConfig = new HTTP1xRouterStageConfig<HTTPContentTypeDefaults, HTTPRevisionDefaults, HTTPVerbDefaults, HTTPHeaderDefaults>(httpSpec); } return routerConfig; } public final boolean appendPipeMappingIncludingGroupIds(Pipe<HTTPRequestSchema> pipe, int parallelId, int ... groupIds) { lazyCreatePipeLookupMatrix(); return routerConfig().appendPipeIdMappingForIncludedGroupIds(pipe, parallelId, collectedHTTPRequstPipes, groupIds); } public final boolean appendPipeMappingExcludingGroupIds(Pipe<HTTPRequestSchema> pipe, int parallelId, int ... groupIds) { lazyCreatePipeLookupMatrix(); return routerConfig().appendPipeIdMappingForExcludedGroupIds(pipe, parallelId, collectedHTTPRequstPipes, groupIds); } public final boolean appendPipeMappingAllGroupIds(Pipe<HTTPRequestSchema> pipe, int parallelId) { lazyCreatePipeLookupMatrix(); return routerConfig().appendPipeIdMappingForAllGroupIds(pipe, parallelId, collectedHTTPRequstPipes); } final ArrayList<Pipe<HTTPRequestSchema>>[][] targetPipeMapping() { lazyCreatePipeLookupMatrix(); return collectedHTTPRequstPipes; } public final ArrayList<Pipe<HTTPRequestSchema>> buildFromRequestArray(int r, int p) { assert(r<collectedHTTPRequstPipes.length); assert(p<collectedHTTPRequstPipes[r].length) : "p "+p+" vs "+collectedHTTPRequstPipes[r].length; return null!=collectedHTTPRequstPipes ? collectedHTTPRequstPipes[r][p] : new ArrayList<Pipe<HTTPRequestSchema>>(); } private void lazyCreatePipeLookupMatrix() { if (null==collectedHTTPRequstPipes) { int parallelism = parallelTracks(); int routesCount = routerConfig().totalPathsCount(); assert(parallelism>=1); assert(routesCount>-1); //for catch all route since we have no specific routes. 
if (routesCount==0) { routesCount = 1; } collectedHTTPRequstPipes = (ArrayList<Pipe<HTTPRequestSchema>>[][]) new ArrayList[parallelism][routesCount]; int p = parallelism; while (--p>=0) { int r = routesCount; while (--r>=0) { collectedHTTPRequstPipes[p][r] = new ArrayList(); } } } } public final void recordPipeMapping(Pipe<ServerResponseSchema> netResponse, int parallelInstanceId) { if (null == collectedServerResponsePipes) { int parallelism = parallelTracks(); collectedServerResponsePipes = (ArrayList<Pipe<ServerResponseSchema>>[]) new ArrayList[parallelism]; int p = parallelism; while (--p>=0) { collectedServerResponsePipes[p] = new ArrayList(); } } collectedServerResponsePipes[parallelInstanceId].add(netResponse); } public final Pipe<ServerResponseSchema>[] buildToOrderArray(int r) { if (null==collectedServerResponsePipes || collectedServerResponsePipes.length==0) { return new Pipe[0]; } else { ArrayList<Pipe<ServerResponseSchema>> list = collectedServerResponsePipes[r]; return (Pipe<ServerResponseSchema>[]) list.toArray(new Pipe[list.size()]); } } public final Pipe<ServerResponseSchema> newNetResponsePipe(PipeConfig<ServerResponseSchema> config, int parallelInstanceId) { Pipe<ServerResponseSchema> pipe = new Pipe<ServerResponseSchema>(config) { @SuppressWarnings("unchecked") @Override protected DataOutputBlobWriter<ServerResponseSchema> createNewBlobWriter() { return new NetResponseWriter(this); } }; recordPipeMapping(pipe, parallelInstanceId); return pipe; } public BuilderImpl(GraphManager gm, String[] args) { this.operators = ReactiveListenerStage.reactiveOperators(); this.gm = gm; this.getTempPipeOfStartupSubscriptions().initBuffers(); this.args = new ArgumentParser(args); int requestQueue = 4; this.pcm.addConfig(new PipeConfig<NetPayloadSchema>(NetPayloadSchema.instance, requestQueue, MINIMUM_TLS_BLOB_SIZE)); int maxMessagesQueue = 8; int maxMessageSize = 256; this.pcm.addConfig(new PipeConfig<MessageSubscription>(MessageSubscription.instance, maxMessagesQueue, maxMessageSize)); this.pcm.addConfig(new PipeConfig<TrafficReleaseSchema>(TrafficReleaseSchema.instance, DEFAULT_LENGTH)); this.pcm.addConfig(new PipeConfig<TrafficAckSchema>(TrafficAckSchema.instance, DEFAULT_LENGTH)); int defaultCommandChannelLength = 16; int defaultCommandChannelHTTPMaxPayload = 1<<14; //must be at least 32K for TLS support this.pcm.addConfig(new PipeConfig<NetResponseSchema>(NetResponseSchema.instance, defaultCommandChannelLength, defaultCommandChannelHTTPMaxPayload)); //for MQTT ingress int maxMQTTMessagesQueue = 8; int maxMQTTMessageSize = 1024; this.pcm.addConfig(new PipeConfig(IngressMessages.instance, maxMQTTMessagesQueue, maxMQTTMessageSize)); } public final <E extends Enum<E>> boolean isValidState(E state) { if (null!=beginningState) { return beginningState.getClass()==state.getClass(); } return false; } public final <E extends Enum<E>> Builder startStateMachineWith(E state) { beginningState = state; return this; } public final Builder setTimerPulseRate(long rateInMS) { timeTriggerRate = rateInMS; timeTriggerStart = System.currentTimeMillis()+rateInMS; return this; } public final Builder setTimerPulseRate(TimeTrigger trigger) { long period = trigger.getRate(); timeTriggerRate = period; long now = System.currentTimeMillis(); long soFar = (now % period); timeTriggerStart = (now - soFar) + period; return this; } @Override public final HTTPClientConfig useNetClient() { return useNetClient(TLSCertificates.defaultCerts); } @Override public final HTTPClientConfig useInsecureNetClient() { return 
useNetClient((TLSCertificates) null); } @Override public HTTPClientConfigImpl useNetClient(TLSCertificates certificates) { if (client != null) throw new RuntimeException("Client already enabled"); this.client = new HTTPClientConfigImpl(certificates); this.client.beginDeclarations(); return client; } public final HTTPClientConfig getHTTPClientConfig() { return this.client; } public final long getTriggerRate() { return timeTriggerRate; } public final long getTriggerStart() { return timeTriggerStart; } public <R extends ReactiveListenerStage> R createReactiveListener(GraphManager gm, Behavior listener, Pipe<?>[] inputPipes, Pipe<?>[] outputPipes, ArrayList<ReactiveManagerPipeConsumer> consumers, int parallelInstance, String nameId) { assert(null!=listener); return (R) new ReactiveListenerStage(gm, listener, inputPipes, outputPipes, consumers, this, parallelInstance, nameId); } public <G extends MsgCommandChannel> G newCommandChannel( int features, int parallelInstanceId, PipeConfigManager pcm ) { return (G) new GreenCommandChannel(gm, this, features, parallelInstanceId, pcm); } public <G extends MsgCommandChannel> G newCommandChannel( int parallelInstanceId, PipeConfigManager pcm ) { return (G) new GreenCommandChannel(gm, this, 0, parallelInstanceId, pcm); } static final boolean debug = false; public void shutdown() { if (null!=ccm) { ccm.shutdown(); } //can be overridden by specific hardware impl if shutdown is supported. } protected void initChannelBlocker(int maxGoPipeId) { channelBlocker = new Blocker(maxGoPipeId+1); } protected final boolean useNetClient(Pipe<ClientHTTPRequestSchema>[] netRequestPipes) { return (netRequestPipes.length!=0); } protected final void createMessagePubSubStage( MsgRuntime<?,?> runtime, IntHashTable subscriptionPipeLookup, Pipe<IngressMessages>[] ingressMessagePipes, Pipe<MessagePubSub>[] messagePubSub, Pipe<TrafficReleaseSchema>[] masterMsggoOut, Pipe<TrafficAckSchema>[] masterMsgackIn, Pipe<MessageSubscription>[] subscriptionPipes) { new MessagePubSubStage(this.gm, runtime, subscriptionPipeLookup, this, ingressMessagePipes, messagePubSub, masterMsggoOut, masterMsgackIn, subscriptionPipes); } public StageScheduler createScheduler(final MsgRuntime runtime) { final StageScheduler scheduler = runtime.builder.threadLimit>0 ? 
StageScheduler.defaultScheduler(gm, runtime.builder.threadLimit, runtime.builder.threadLimitHard) : StageScheduler.defaultScheduler(gm); Runtime.getRuntime().addShutdownHook(new Thread() { public void run() { scheduler.shutdown(); scheduler.awaitTermination(getShutdownSeconds(), TimeUnit.SECONDS); } }); return scheduler; } private final int getShutdownSeconds() { return shutdownTimeoutInSeconds; } protected final ChildClassScannerVisitor deepListener = new ChildClassScannerVisitor<ListenerTransducer>() { @Override public boolean visit(ListenerTransducer child, Object topParent) { return false; } }; public final boolean isListeningToSubscription(Behavior listener) { //NOTE: we only call for scan if the listener is not already of this type return listener instanceof PubSubMethodListenerBase || listener instanceof StateChangeListenerBase<?> || !ChildClassScanner.visitUsedByClass(listener, deepListener, PubSubListenerTransducer.class) || !ChildClassScanner.visitUsedByClass(listener, deepListener, StateChangeListenerTransducer.class); } public final boolean isListeningToHTTPResponse(Object listener) { return listener instanceof HTTPResponseListenerBase || //will return false if HTTPResponseListenerBase was encountered !ChildClassScanner.visitUsedByClass(listener, deepListener, HTTPResponseListenerTransducer.class); } public final boolean isListeningHTTPRequest(Object listener) { return listener instanceof RestMethodListenerBase || //will return false if RestListenerBase was encountered !ChildClassScanner.visitUsedByClass(listener, deepListener, RestListenerTransducer.class); } /** * access to system time. This method is required so it can be monitored and simulated by unit tests. */ public long currentTimeMillis() { return System.currentTimeMillis(); } public final void blockChannelUntil(int channelId, long timeInMillis) { channelBlocker.until(channelId, timeInMillis); } public final boolean isChannelBlocked(int channelId) { if (null != channelBlocker) { return channelBlocker.isBlocked(channelId); } else { return false; } } public final long releaseChannelBlocks(long now) { if (null != channelBlocker) { channelBlocker.releaseBlocks(now); return channelBlocker.durationToNextRelease(now, -1); } else { return -1; //was not init so there are no possible blocked channels. } } public final long nanoTime() { return System.nanoTime(); } public final Enum[] getStates() { return null==beginningState? new Enum[0] : beginningState.getClass().getEnumConstants(); } public final void addStartupSubscription(CharSequence topic, int systemHash, int parallelInstance) { Pipe<MessagePubSub> pipe = getTempPipeOfStartupSubscriptions(); if (PipeWriter.tryWriteFragment(pipe, MessagePubSub.MSG_SUBSCRIBE_100)) { DataOutputBlobWriter<MessagePubSub> output = PipeWriter.outputStream(pipe); output.openField(); output.append(topic); //this is in a track amay need a suffix. if (parallelInstance>=0) { if (BuilderImpl.hasNoUnscopedTopics()) { //add suffix.. 
output.append('/'); Appendables.appendValue(output, parallelInstance); } else { if (BuilderImpl.notUnscoped(reader, output)) { //add suffix output.append('/'); Appendables.appendValue(output, parallelInstance); } } } output.closeHighLevelField(MessagePubSub.MSG_SUBSCRIBE_100_FIELD_TOPIC_1); //PipeWriter.writeUTF8(pipe, MessagePubSub.MSG_SUBSCRIBE_100_FIELD_TOPIC_1, topic); PipeWriter.writeInt(pipe, MessagePubSub.MSG_SUBSCRIBE_100_FIELD_SUBSCRIBERIDENTITYHASH_4, systemHash); PipeWriter.publishWrites(pipe); } else { throw new UnsupportedOperationException("Limited number of startup subscriptions "+maxStartupSubs+" encountered."); } } private final Pipe<MessagePubSub> getTempPipeOfStartupSubscriptions() { if (null==tempPipeOfStartupSubscriptions) { final PipeConfig<MessagePubSub> messagePubSubConfig = new PipeConfig<MessagePubSub>(MessagePubSub.instance, maxStartupSubs, maxTopicLengh); tempPipeOfStartupSubscriptions = new Pipe<MessagePubSub>(messagePubSubConfig); } return tempPipeOfStartupSubscriptions; } public final Pipe<MessagePubSub> consumeStartupSubscriptions() { Pipe<MessagePubSub> result = tempPipeOfStartupSubscriptions; tempPipeOfStartupSubscriptions = null;//no longer needed return result; } @Override public final void limitThreads(int threadLimit) { if (telemetry != null && threadLimit>0 && threadLimit<64) { //must ensure telemetry has the threads it needs. threadLimit+=2; } this.threadLimit = threadLimit; this.threadLimitHard = true; } @Override public void limitThreads() { this.threadLimit = idealThreadCount(); this.threadLimitHard = true; } private int idealThreadCount() { return Runtime.getRuntime().availableProcessors()*4; } @Override public final int parallelTracks() { return parallelismTracks; } @Override public final void parallelTracks(int trackCount) { assert(trackCount>0); parallelismTracks = trackCount; } private final TrieParserReader localReader = new TrieParserReader(0, true); @Override public long fieldId(int routeId, byte[] fieldName) { return TrieParserReader.query(localReader, this.routeExtractionParser(routeId), fieldName, 0, fieldName.length, Integer.MAX_VALUE); } @Override public final CompositePath defineRoute(JSONExtractorCompleted extractor, byte[] ... headers) { return routerConfig().registerCompositeRoute(extractor, headers); } @Override public final CompositePath defineRoute(byte[] ... headers) { return routerConfig().registerCompositeRoute(headers); } @Override public final int defineRoute(CharSequence route, JSONExtractorCompleted extractor, byte[] ... headers) { return routerConfig().registerCompositeRoute(extractor,headers).path(route).routeId(); } @Override public final int defineRoute(CharSequence route, byte[] ... 
headers) { return routerConfig().registerRoute(route, headers); } public final TrieParser routeExtractionParser(int route) { return routerConfig().extractionParser(route).getRuntimeParser(); } public final int routeExtractionParserIndexCount(int route) { return routerConfig().extractionParser(route).getIndexCount(); } public IntHashTable routeHeaderToPositionTable(int routeId) { return routerConfig().headerToPositionTable(routeId); } public TrieParser routeHeaderTrieParser(int routeId) { return routerConfig().httpSpec.headerParser(); } public final Pipe<HTTPRequestSchema> newHTTPRequestPipe(PipeConfig<HTTPRequestSchema> restPipeConfig) { final boolean hasNoRoutes = (0==routerConfig().totalPathsCount()); Pipe<HTTPRequestSchema> pipe = new Pipe<HTTPRequestSchema>(restPipeConfig) { @SuppressWarnings("unchecked") @Override protected DataInputBlobReader<HTTPRequestSchema> createNewBlobReader() { return new HTTPRequestReader(this, hasNoRoutes); } }; return pipe; } @Override public TelemetryConfig enableTelemetry() { return enableTelemetry(null, TelemetryConfig.defaultTelemetryPort); } @Override public TelemetryConfig enableTelemetry(int port) { return enableTelemetry(null, port); } @Override public TelemetryConfig enableTelemetry(String host) { return enableTelemetry(host, TelemetryConfig.defaultTelemetryPort); } @Override public TelemetryConfig enableTelemetry(String host, int port) { if (telemetry != null) { throw new RuntimeException("Telemetry already enabled"); } this.telemetry = new TelemetryConfigImpl(host, port); this.telemetry.beginDeclarations(); if (threadLimit>0 && threadLimit>0 && threadLimit<64) { //we must increase the thread limit to ensure telemetry is not started threadLimit += 2; } return this.telemetry; } public TelemetryConfig getTelemetryConfig() { return this.telemetry; } public final long getDefaultSleepRateNS() { return defaultSleepRateNS; } @Override public final void setDefaultRate(long ns) { //new Exception("setting new rate "+ns).printStackTrace(); defaultSleepRateNS = Math.max(ns, 2000); //protect against too small } public void buildStages(MsgRuntime runtime) { IntHashTable subscriptionPipeLookup2 = MsgRuntime.getSubPipeLookup(runtime); GraphManager gm = MsgRuntime.getGraphManager(runtime); Pipe<NetResponseSchema>[] httpClientResponsePipes = GraphManager.allPipesOfTypeWithNoProducer(gm, NetResponseSchema.instance); Pipe<MessageSubscription>[] subscriptionPipes = GraphManager.allPipesOfTypeWithNoProducer(gm, MessageSubscription.instance); Pipe<TrafficOrderSchema>[] orderPipes = GraphManager.allPipesOfTypeWithNoConsumer(gm, TrafficOrderSchema.instance); Pipe<ClientHTTPRequestSchema>[] httpClientRequestPipes = GraphManager.allPipesOfTypeWithNoConsumer(gm, ClientHTTPRequestSchema.instance); Pipe<MessagePubSub>[] messagePubSub = GraphManager.allPipesOfTypeWithNoConsumer(gm, MessagePubSub.instance); Pipe<IngressMessages>[] ingressMessagePipes = GraphManager.allPipesOfTypeWithNoConsumer(gm, IngressMessages.instance); //TODO: no longer right now that we have no cops.. int commandChannelCount = orderPipes.length; int eventSchemas = 0; IDX_MSG = (IntHashTable.isEmpty(subscriptionPipeLookup2) && subscriptionPipes.length==0 && messagePubSub.length==0) ? -1 : eventSchemas++; IDX_NET = useNetClient(httpClientRequestPipes) ? 
eventSchemas++ : -1; long timeout = 20_000; //20 seconds int maxGoPipeId = 0; Pipe<TrafficReleaseSchema>[][] masterGoOut = new Pipe[eventSchemas][0]; Pipe<TrafficAckSchema>[][] masterAckIn = new Pipe[eventSchemas][0]; if (IDX_MSG >= 0) { masterGoOut[IDX_MSG] = new Pipe[messagePubSub.length]; masterAckIn[IDX_MSG] = new Pipe[messagePubSub.length]; } if (IDX_NET >= 0) { masterGoOut[IDX_NET] = new Pipe[httpClientResponsePipes.length]; masterAckIn[IDX_NET] = new Pipe[httpClientResponsePipes.length]; } int copGoAck = commandChannelCount; //logger.info("command channel count to be checked {}",copGoAck); while (--copGoAck>=0) { Pipe<TrafficReleaseSchema>[] goOut = new Pipe[eventSchemas]; Pipe<TrafficAckSchema>[] ackIn = new Pipe[eventSchemas]; //only setup the go and in pipes if the cop is used. if (null != orderPipes[copGoAck]) { int features = getFeatures(gm, orderPipes[copGoAck]); boolean hasConnections = false; if ((features&Behavior.DYNAMIC_MESSAGING) != 0) { hasConnections = true; maxGoPipeId = populateGoAckPipes(maxGoPipeId, masterGoOut, masterAckIn, goOut, ackIn, IDX_MSG); } if ((features&Behavior.NET_REQUESTER) != 0) { hasConnections = true; maxGoPipeId = populateGoAckPipes(maxGoPipeId, masterGoOut, masterAckIn, goOut, ackIn, IDX_NET); } TrafficCopStage.newInstance(gm, timeout, orderPipes[copGoAck], ackIn, goOut, runtime, this); } else { logger.info("oops get features skipped since no cops but needed for private topics"); } // if (true | hasConnections) { // TrafficCopStage trafficCopStage = new TrafficCopStage(gm, // timeout, orderPipes[t], // ackIn, goOut, // runtime, this); // } else { // //this optimization can no longer be done due to the use of shutdown on command channel. // // revisit this later... // //TODO: we can reintroduce this as long has we have a stage here which does shutdown on -1; // PipeCleanerStage.newInstance(gm, orderPipes[t]); } initChannelBlocker(maxGoPipeId); buildHTTPClientGraph(runtime, httpClientResponsePipes, httpClientRequestPipes, masterGoOut, masterAckIn); //always create the pub sub and state management stage? //TODO: only create when subscriptionPipeLookup is not empty and subscriptionPipes has zero length. 
if (IDX_MSG<0) { logger.trace("saved some resources by not starting up the unused pub sub service."); } else { if (!isAllPrivateTopics) { //logger.info("builder created pub sub"); createMessagePubSubStage(runtime, subscriptionPipeLookup2, ingressMessagePipes, messagePubSub, masterGoOut[IDX_MSG], masterAckIn[IDX_MSG], subscriptionPipes); } } } public void buildHTTPClientGraph( MsgRuntime<?,?> runtime, Pipe<NetResponseSchema>[] netResponsePipes, Pipe<ClientHTTPRequestSchema>[] netRequestPipes, Pipe<TrafficReleaseSchema>[][] masterGoOut, Pipe<TrafficAckSchema>[][] masterAckIn) { //create the network client stages if (useNetClient(netRequestPipes)) { int maxPartialResponses = Math.max(2,ClientHostPortInstance.getSessionCount()); int connectionsInBits = (int)Math.ceil(Math.log(maxPartialResponses)/Math.log(2)); int netResponseCount = 8; int responseQueue = 10; //must be adjusted together int outputsCount = 8; //Multipler per session for total connections ,count of pipes to channel writer int clientWriters = 1; //count of channel writer stages PipeConfig<NetPayloadSchema> clientNetRequestConfig = pcm.getConfig(NetPayloadSchema.class); //BUILD GRAPH ccm = new ClientCoordinator(connectionsInBits, maxPartialResponses, this.client.getCertificates()); Pipe<NetPayloadSchema>[] clientRequests = new Pipe[outputsCount]; int r = outputsCount; while (--r>=0) { clientRequests[r] = new Pipe<NetPayloadSchema>(clientNetRequestConfig); } //TODO: if the go pipes are not used then create the simpler stage // new HTTPClientRequestStage(gm, ccm, netRequestPipes, clientRequests); new HTTPClientRequestTrafficStage( gm, runtime, this, ccm, netRequestPipes, masterGoOut[IDX_NET], masterAckIn[IDX_NET], clientRequests); int releaseCount = 1024; int writeBufferMultiplier = 30; int responseUnwrapCount = 2; int clientWrapperCount = 2; NetGraphBuilder.buildHTTPClientGraph(gm, ccm, responseQueue, clientRequests, netResponsePipes, netResponseCount, releaseCount, writeBufferMultiplier, responseUnwrapCount, clientWrapperCount, clientWriters); } } protected int populateGoAckPipes(int maxGoPipeId, Pipe<TrafficReleaseSchema>[][] masterGoOut, Pipe<TrafficAckSchema>[][] masterAckIn, Pipe<TrafficReleaseSchema>[] goOut, Pipe<TrafficAckSchema>[] ackIn, int p) { if (p>=0) { addToLastNonNull(masterGoOut[p], goOut[p] = new Pipe<TrafficReleaseSchema>(this.pcm.getConfig(TrafficReleaseSchema.class))); maxGoPipeId = Math.max(maxGoPipeId, goOut[p].id); addToLastNonNull(masterAckIn[p], ackIn[p] = new Pipe<TrafficAckSchema>(this.pcm.getConfig(TrafficAckSchema.class))); } return maxGoPipeId; } private <S extends MessageSchema<S>> void addToLastNonNull(Pipe<S>[] pipes, Pipe<S> pipe) { int i = pipes.length; while (--i>=0) { if (null == pipes[i]) { pipes[i] = pipe; return; } } } protected int getFeatures(GraphManager gm, Pipe<TrafficOrderSchema> orderPipe) { PronghornStage producer = GraphManager.getRingProducer(gm, orderPipe.id); assert(producer instanceof ReactiveListenerStage) : "TrafficOrderSchema must only come from Reactor stages but was "+producer.getClass().getSimpleName(); return ((ReactiveListenerStage)producer).getFeatures(orderPipe); } @Override public MQTTConfigImpl useMQTT(CharSequence host, int port, CharSequence clientId) { return useMQTT(host, port, clientId, MQTTConfigImpl.DEFAULT_MAX_MQTT_IN_FLIGHT, MQTTConfigImpl.DEFAULT_MAX__MQTT_MESSAGE); } @Override public MQTTConfigImpl useMQTT(CharSequence host, int port, CharSequence clientId, int maxInFlight) { return useMQTT(host, port, clientId, maxInFlight, 
MQTTConfigImpl.DEFAULT_MAX__MQTT_MESSAGE); } @Override public MQTTConfigImpl useMQTT(CharSequence host, int port, CharSequence clientId, int maxInFlight, int maxMessageLength) { ClientCoordinator.registerDomain(host); if (maxInFlight>(1<<15)) { throw new UnsupportedOperationException("Does not suppport more than "+(1<<15)+" in flight"); } if (maxMessageLength>(256*(1<<20))) { throw new UnsupportedOperationException("Specification does not support values larger than 256M"); } pcm.ensureSize(MessageSubscription.class, maxInFlight, maxMessageLength); //all these use a smaller rate to ensure MQTT can stay ahead of the internal message passing long rate = defaultSleepRateNS>200_000?defaultSleepRateNS/4:defaultSleepRateNS; MQTTConfigImpl mqttBridge = new MQTTConfigImpl(host, port, clientId, this, rate, (short)maxInFlight, maxMessageLength); mqtt = mqttBridge; mqttBridge.beginDeclarations(); return mqtt; } //fields supporting private topics private final TrieParser privateTopicSource = new TrieParser(); private final TrieParser privateTopicTarget = new TrieParser(); private final List<List<PrivateTopic>> privateSourceTopics = new ArrayList<List<PrivateTopic>>(); private final List<List<PrivateTopic>> privateTargetTopics = new ArrayList<List<PrivateTopic>>(); private final TrieParserReader reader = new TrieParserReader(); private final List<String> dynamicTopicPublishers = new ArrayList<String>(); private final List<String> dynamicTopicSubscribers = new ArrayList<String>(); //TODO: MUST HAVE ARRAY OF TOPICS TO LOOK UP BY PIPE? public List<PrivateTopic> getPrivateTopicsFromSource(String source) { int sourceId = (int)TrieParserReader.query(reader, privateTopicSource, source); List<PrivateTopic> result = (sourceId<0) ? Collections.EMPTY_LIST : privateSourceTopics.get(sourceId); return result; } public List<PrivateTopic> getPrivateTopicsFromTarget(String target) { int targetId = (int)TrieParserReader.query(reader, privateTopicTarget, target); List<PrivateTopic> result = (targetId<0) ? Collections.EMPTY_LIST: privateTargetTopics.get(targetId); return result; } public static TrieParser unScopedTopics = null; //package protected static, all unscoped topics @Override public void defineUnScopedTopic(String topic) { if (null == unScopedTopics) { unScopedTopics = new TrieParser(); } unScopedTopics.setUTF8Value(topic, 1); } @Override public void definePrivateTopic(String topic, String source, String ... targets) { definePrivateTopic(10, 10000, topic, source, targets); } @Override public void definePrivateTopic(int queueLength, int maxMessageSize, String topic, String source, String ... 
targets) { if (targets.length<=1) { throw new UnsupportedOperationException("only call this with multiple targets"); } boolean hideTopics = false; PrivateTopic sourcePT = new PrivateTopic(topic, queueLength, maxMessageSize, hideTopics, this); List<PrivateTopic> localSourceTopics = null; int sourceId = (int)TrieParserReader.query(reader, privateTopicSource, source); if (sourceId<0) { localSourceTopics = new ArrayList<PrivateTopic>(); privateTopicSource.setUTF8Value(source, privateSourceTopics.size()); privateSourceTopics.add(localSourceTopics); } else { localSourceTopics = privateSourceTopics.get(sourceId); } localSourceTopics.add(sourcePT); int pt = parallelismTracks<1?1:parallelismTracks; while (--pt>=0) { Pipe<MessagePrivate> src = sourcePT.getPipe(pt); PipeConfig<MessagePrivate> trgtConfig = src.config().grow2x(); int t = targets.length; Pipe[] trgts = new Pipe[t]; PrivateTopic[] trgtTopics = new PrivateTopic[t]; while (--t>=0) { trgtTopics[t] = new PrivateTopic(topic, trgtConfig, this); trgts[t] = trgtTopics[t].getPipe(pt); List<PrivateTopic> localTargetTopics = null; int targetId = (int)TrieParserReader.query(reader, privateTopicTarget, targets[t]); if (targetId<0) { localTargetTopics = new ArrayList<PrivateTopic>(); privateTopicTarget.setUTF8Value( targets[t], privateTargetTopics.size()); privateTargetTopics.add(localTargetTopics); } else { localTargetTopics = privateTargetTopics.get(targetId); } localTargetTopics.add(trgtTopics[t]); } ReplicatorStage.newInstance(gm, src, trgts); } } @Override public void definePrivateTopic(String topic, String source, String target) { definePrivateTopic(10, 10000, topic, source, target); } @Override public void definePrivateTopic(int queueLength, int maxMessageSize, String topic, String source, String target) { boolean hideTopics = false; PrivateTopic obj = new PrivateTopic(topic, queueLength, maxMessageSize, hideTopics, this); List<PrivateTopic> localSourceTopics = null; int sourceId = (int)TrieParserReader.query(reader, privateTopicSource, source); if (sourceId<0) { localSourceTopics = new ArrayList<PrivateTopic>(); //logger.info("record source '{}'",source); privateTopicSource.setUTF8Value(source, privateSourceTopics.size()); privateSourceTopics.add(localSourceTopics); } else { localSourceTopics = privateSourceTopics.get(sourceId); } localSourceTopics.add(obj); List<PrivateTopic> localTargetTopics = null; int targetId = (int)TrieParserReader.query(reader, privateTopicTarget, target); if (targetId<0) { localTargetTopics = new ArrayList<PrivateTopic>(); //logger.info("record target '{}'",target); privateTopicTarget.setUTF8Value(target, privateTargetTopics.size()); privateTargetTopics.add(localTargetTopics); } else { localTargetTopics = privateTargetTopics.get(targetId); } localTargetTopics.add(obj); } @Override public void enableDynamicTopicPublish(String id) { dynamicTopicPublishers.add(id); } @Override public void enableDynamicTopicSubscription(String id) { dynamicTopicSubscribers.add(id); } @Override public String[] args() { return args.args(); } @Override public boolean hasArgument(String longName, String shortName) { return args.hasArgument(longName, shortName); } @Override public String getArgumentValue(String longName, String shortName, String defaultValue) { return args.getArgumentValue(longName, shortName, defaultValue); } @Override public Boolean getArgumentValue(String longName, String shortName, Boolean defaultValue) { return args.getArgumentValue(longName, shortName, defaultValue); } @Override public Character getArgumentValue(String 
longName, String shortName, Character defaultValue) { return args.getArgumentValue(longName, shortName, defaultValue); } @Override public Byte getArgumentValue(String longName, String shortName, Byte defaultValue) { return args.getArgumentValue(longName, shortName, defaultValue); } @Override public Short getArgumentValue(String longName, String shortName, Short defaultValue) { return args.getArgumentValue(longName, shortName, defaultValue); } @Override public Long getArgumentValue(String longName, String shortName, Long defaultValue) { return args.getArgumentValue(longName, shortName, defaultValue); } @Override public Integer getArgumentValue(String longName, String shortName, Integer defaultValue) { return args.getArgumentValue(longName, shortName, defaultValue); } public void blockChannelDuration(long durationNanos, int pipeId) { final long durationMills = durationNanos/1_000_000; final long remaningNanos = durationNanos%1_000_000; if (remaningNanos>0) { try { long limit = System.nanoTime()+remaningNanos; Thread.sleep(0L, (int)remaningNanos); long dif; while ((dif = (limit-System.nanoTime()))>0) { if (dif>100) { Thread.yield(); } } } catch (InterruptedException e) { Thread.currentThread().interrupt(); return; } } if (durationMills>0) { //now pull the current time and wait until ms have passed blockChannelUntil(pipeId, currentTimeMillis() + durationMills ); } } /** * Enables the child classes to modify which schemas are used. * For the pi this allows for using i2c instead of digital or analog in transducers. * * @param schema */ public MessageSchema schemaMapper(MessageSchema schema) { return schema; } public void finalizeDeclareConnections() { // two of these are recalculating the same local host address when host is null if (server != null) { server.finalizeDeclareConnections(); } if (client != null) { client.finalizeDeclareConnections(); } if (telemetry != null) { telemetry.finalizeDeclareConnections(); } } public static boolean notUnscoped(TrieParserReader reader, DataOutputBlobWriter<MessagePubSub> output){ return (-1 == output.startsWith(reader, BuilderImpl.unScopedTopics )); } public static boolean hasNoUnscopedTopics() { return null==BuilderImpl.unScopedTopics; } }
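// --- Illustrative sketch (not part of the original sources): a minimal example of driving the
// --- declaration-phase API of the BuilderImpl shown above. The port number, topic names and
// --- behavior-id strings are assumptions for illustration only; package and import lines are
// --- omitted because they are not visible in this excerpt.
class BuilderImplUsageSketch {
    // Assumes a BuilderImpl instance is handed to the application during its declaration phase.
    static void declare(BuilderImpl builder) {
        builder.parallelTracks(2);                  // run two parallel tracks of the graph
        builder.limitThreads();                     // cap scheduler threads at the "ideal" count
        builder.setDefaultRate(20_000);             // default stage sleep rate in ns (floored at 2000 by setDefaultRate)
        builder.enableTelemetry(8098);              // hypothetical port; bumps the thread limit so telemetry can run
        builder.defineUnScopedTopic("global/shutdown");                    // topic visible across all tracks
        builder.definePrivateTopic("alerts", "producerId", "consumerId");  // direct pipe between two behaviors
    }
}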
package com.ociweb.iot.grove; import com.ociweb.iot.hardware.IODevice; import com.ociweb.pronghorn.iot.schema.GroveResponseSchema; import com.ociweb.pronghorn.pipe.Pipe; public enum GroveTwig implements IODevice { UVSensor(){ @Override public void writeInt(Pipe<GroveResponseSchema> responsePipe, int connector, long time, int intValue, int average) { int size = Pipe.addMsgIdx(responsePipe, GroveResponseSchema.MSG_ANALOGSAMPLE_30); Pipe.addIntValue(connector, responsePipe); Pipe.addLongValue(time, responsePipe); Pipe.addIntValue(intValue, responsePipe); Pipe.addIntValue(average, responsePipe); Pipe.publishWrites(responsePipe); Pipe.confirmLowLevelWrite(responsePipe, size); } @Override public boolean isInput() { return true; } }, LightSensor(){ @Override public void writeInt(Pipe<GroveResponseSchema> responsePipe, int connector, long time, int intValue, int average) { int size = Pipe.addMsgIdx(responsePipe, GroveResponseSchema.MSG_ANALOGSAMPLE_30); Pipe.addIntValue(connector, responsePipe); Pipe.addLongValue(time, responsePipe); Pipe.addIntValue(intValue, responsePipe); Pipe.addIntValue(average, responsePipe); Pipe.publishWrites(responsePipe); Pipe.confirmLowLevelWrite(responsePipe, size); } @Override public boolean isInput() { return true; } }, SoundSensor(){ @Override public void writeInt(Pipe<GroveResponseSchema> responsePipe, int connector, long time, int intValue, int average) { int size = Pipe.addMsgIdx(responsePipe, GroveResponseSchema.MSG_ANALOGSAMPLE_30); Pipe.addIntValue(connector, responsePipe); Pipe.addLongValue(time, responsePipe); Pipe.addIntValue(intValue, responsePipe); Pipe.addIntValue(average, responsePipe); Pipe.publishWrites(responsePipe); Pipe.confirmLowLevelWrite(responsePipe, size); } @Override public boolean isInput() { return true; } }, AngleSensor(){ @Override public void writeInt(Pipe<GroveResponseSchema> responsePipe, int connector, long time, int intValue, int average) { int size = Pipe.addMsgIdx(responsePipe, GroveResponseSchema.MSG_ANALOGSAMPLE_30); Pipe.addIntValue(connector, responsePipe); Pipe.addLongValue(time, responsePipe); Pipe.addIntValue(intValue, responsePipe); Pipe.addIntValue(average, responsePipe); Pipe.publishWrites(responsePipe); Pipe.confirmLowLevelWrite(responsePipe, size); } @Override public boolean isInput() { return true; } }, MoistureSensor(){ @Override public void writeInt(Pipe<GroveResponseSchema> responsePipe, int connector, long time, int intValue, int average) { int size = Pipe.addMsgIdx(responsePipe, GroveResponseSchema.MSG_ANALOGSAMPLE_30); Pipe.addIntValue(connector, responsePipe); Pipe.addLongValue(time, responsePipe); Pipe.addIntValue(intValue, responsePipe); Pipe.addIntValue(average, responsePipe); Pipe.publishWrites(responsePipe); Pipe.confirmLowLevelWrite(responsePipe, size); } @Override public boolean isInput() { return true; } }, Button() { @Override public void writeBit(Pipe<GroveResponseSchema> responsePipe, int connector, long time, int bitValue) { int size = Pipe.addMsgIdx(responsePipe, GroveResponseSchema.MSG_DIGITALSAMPLE_20); Pipe.addIntValue(connector, responsePipe); Pipe.addLongValue(time, responsePipe); Pipe.addIntValue(bitValue, responsePipe); Pipe.publishWrites(responsePipe); Pipe.confirmLowLevelWrite(responsePipe, size); } @Override public boolean isInput() { return true; } }, MotionSensor(){ @Override public void writeBit(Pipe<GroveResponseSchema> responsePipe, int connector, long time, int bitValue) { int size = Pipe.addMsgIdx(responsePipe,GroveResponseSchema.MSG_DIGITALSAMPLE_20); Pipe.addIntValue(connector, 
responsePipe); Pipe.addLongValue(time, responsePipe); Pipe.addIntValue(bitValue, responsePipe); Pipe.publishWrites(responsePipe); Pipe.confirmLowLevelWrite(responsePipe, size); } @Override public boolean isInput() { return true; } }, RotaryEncoder() { @Override public void writeRotation(Pipe<GroveResponseSchema> responsePipe, int connector, long time, int value, int delta, int speed) { int size = Pipe.addMsgIdx(responsePipe, GroveResponseSchema.MSG_ENCODER_70); Pipe.addIntValue(connector, responsePipe); Pipe.addLongValue(time, responsePipe); Pipe.addIntValue(value, responsePipe); Pipe.addIntValue(delta, responsePipe); Pipe.addIntValue(speed, responsePipe); Pipe.publishWrites(responsePipe); Pipe.confirmLowLevelWrite(responsePipe, size); } @Override public boolean isInput() { return true; } }, Buzzer() { @Override public boolean isOutput() { return true; } }, LED() { @Override public boolean isOutput() { return true; } @Override public boolean isPWM() { return true; } }, Relay() { @Override public boolean isOutput() { return true; } }, Servo() { @Override public boolean isOutput() { return true; } }, I2C() { @Override public boolean isInput() { return true; } @Override public boolean isOutput() { return true; } }, Nunchuck() { @Override public boolean isInput() { return true; } }, TempHumid() { @Override public boolean isInput(){ return true; } }; public void writeBit(Pipe<GroveResponseSchema> responsePipe, int connector, long time, int bitValue) { System.err.println(this); throw new UnsupportedOperationException(); } public void writeInt(Pipe<GroveResponseSchema> responsePipe, int connector, long time, int intValue, int average) { System.err.println(this); throw new UnsupportedOperationException(); } public void writeRotation(Pipe<GroveResponseSchema> responsePipe, int connector, long time, int value, int delta, int speed) { System.err.println(this); throw new UnsupportedOperationException(); } public boolean isInput() { return false; } public boolean isOutput() { return false; } public boolean isPWM() { return false; } public boolean isI2C(){ return false; } public byte[] getReadMessage(){ return null; } public int pwmRange() { return 256; } public boolean isGrove(){ return true; } }
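// --- Illustrative sketch (not part of the original sources): publishing one analog and one digital
// --- sample through the GroveTwig enum above. The pipe variable and the connector/value numbers are
// --- assumptions for illustration; imports for Pipe and GroveResponseSchema are omitted here, and
// --- any room-for-write checks required by the caller are left out.
class GroveTwigUsageSketch {
    static void publishSamples(Pipe<GroveResponseSchema> responsePipe) {
        long now = System.currentTimeMillis();
        // Analog twigs (UV, light, sound, angle, moisture) publish MSG_ANALOGSAMPLE_30 messages.
        GroveTwig.LightSensor.writeInt(responsePipe, /*connector*/ 0, now, /*value*/ 512, /*average*/ 500);
        // Digital twigs (button, motion sensor) publish MSG_DIGITALSAMPLE_20 messages.
        GroveTwig.Button.writeBit(responsePipe, /*connector*/ 2, now, /*bitValue*/ 1);
    }
}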
package com.restfb; import static com.restfb.logging.RestFBLogger.HTTP_LOGGER; import java.io.Closeable; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.HttpURLConnection; import java.net.URL; import java.util.*; import java.util.function.BiConsumer; import com.restfb.util.StringUtils; import com.restfb.util.UrlUtils; public class DefaultWebRequestor implements WebRequestor { /** * Arbitrary unique boundary marker for multipart {@code POST}s. */ private static final String MULTIPART_BOUNDARY = "**boundarystringwhichwill**neverbeencounteredinthewild**"; /** * Line separator for multipart {@code POST}s. */ private static final String MULTIPART_CARRIAGE_RETURN_AND_NEWLINE = "\r\n"; /** * Hyphens for multipart {@code POST}s. */ private static final String MULTIPART_TWO_HYPHENS = "--"; /** * Default buffer size for multipart {@code POST}s. */ private static final int MULTIPART_DEFAULT_BUFFER_SIZE = 8192; /** * By default, how long should we wait for a response (in ms)? */ private static final int DEFAULT_READ_TIMEOUT_IN_MS = 180000; private Map<String, List<String>> currentHeaders; private DebugHeaderInfo debugHeaderInfo; /** * By default this is true, to prevent breaking existing usage */ private boolean autocloseBinaryAttachmentStream = true; protected enum HttpMethod { GET, DELETE, POST } @Override public Response executeGet(String url) throws IOException { return execute(url, HttpMethod.GET); } @Override public Response executePost(String url, String parameters) throws IOException { return executePost(url, parameters, null); } @Override public Response executePost(String url, String parameters, List<BinaryAttachment> binaryAttachments) throws IOException { binaryAttachments = Optional.ofNullable(binaryAttachments).orElse(new ArrayList<>()); if (HTTP_LOGGER.isDebugEnabled()) { HTTP_LOGGER.debug("Executing a POST to " + url + " with parameters " + (!binaryAttachments.isEmpty() ? "" : "(sent in request body): ") + UrlUtils.urlDecode(parameters) + (!binaryAttachments.isEmpty() ? " and " + binaryAttachments.size() + " binary attachment[s]." : "")); } HttpURLConnection httpUrlConnection = null; OutputStream outputStream = null; try { httpUrlConnection = openConnection(new URL(url + (!binaryAttachments.isEmpty() ? "?" + parameters : ""))); httpUrlConnection.setReadTimeout(DEFAULT_READ_TIMEOUT_IN_MS); // Allow subclasses to customize the connection if they'd like to - set // their own headers, timeouts, etc. customizeConnection(httpUrlConnection); httpUrlConnection.setRequestMethod(HttpMethod.POST.name()); httpUrlConnection.setDoOutput(true); httpUrlConnection.setUseCaches(false); if (!binaryAttachments.isEmpty()) { httpUrlConnection.setRequestProperty("Connection", "Keep-Alive"); httpUrlConnection.setRequestProperty("Content-Type", "multipart/form-data;boundary=" + MULTIPART_BOUNDARY); } httpUrlConnection.connect(); outputStream = httpUrlConnection.getOutputStream(); // If we have binary attachments, the body is just the attachments and the // other parameters are passed in via the URL. // Otherwise the body is the URL parameter string.
if (!binaryAttachments.isEmpty()) { for (BinaryAttachment binaryAttachment : binaryAttachments) { StringBuilder stringBuilder = new StringBuilder(); stringBuilder.append(MULTIPART_TWO_HYPHENS).append(MULTIPART_BOUNDARY) .append(MULTIPART_CARRIAGE_RETURN_AND_NEWLINE).append("Content-Disposition: form-data; name=\"") .append(createFormFieldName(binaryAttachment)).append("\"; filename=\"") .append(binaryAttachment.getFilename()).append("\""); stringBuilder.append(MULTIPART_CARRIAGE_RETURN_AND_NEWLINE).append("Content-Type: ") .append(binaryAttachment.getContentType()); stringBuilder.append(MULTIPART_CARRIAGE_RETURN_AND_NEWLINE).append(MULTIPART_CARRIAGE_RETURN_AND_NEWLINE); outputStream.write(stringBuilder.toString().getBytes(StringUtils.ENCODING_CHARSET)); write(binaryAttachment.getData(), outputStream, MULTIPART_DEFAULT_BUFFER_SIZE); outputStream.write((MULTIPART_CARRIAGE_RETURN_AND_NEWLINE + MULTIPART_TWO_HYPHENS + MULTIPART_BOUNDARY + MULTIPART_TWO_HYPHENS + MULTIPART_CARRIAGE_RETURN_AND_NEWLINE).getBytes(StringUtils.ENCODING_CHARSET)); } } else { outputStream.write(parameters.getBytes(StringUtils.ENCODING_CHARSET)); } HTTP_LOGGER.debug("Response headers: {}", httpUrlConnection.getHeaderFields()); fillHeaderAndDebugInfo(httpUrlConnection); Response response = fetchResponse(httpUrlConnection); HTTP_LOGGER.debug("Facebook responded with {}", response); return response; } finally { if (autocloseBinaryAttachmentStream && !binaryAttachments.isEmpty()) { binaryAttachments.forEach(binaryAttachment -> closeQuietly(binaryAttachment.getData())); } closeQuietly(outputStream); closeQuietly(httpUrlConnection); } } /** * Given a {@code url}, opens and returns a connection to it. * <p> * If you'd like to pipe your connection through a proxy, this is the place to do so. * * @param url * The URL to connect to. * @return A connection to the URL. * @throws IOException * If an error occurs while establishing the connection. * @since 1.6.3 */ protected HttpURLConnection openConnection(URL url) throws IOException { return (HttpURLConnection) url.openConnection(); } protected void customizeConnection(HttpURLConnection connection) { // This implementation is a no-op } /** * Attempts to cleanly close a resource, swallowing any exceptions that might occur since there's no way to recover * anyway. * <p> * It's OK to pass {@code null} in, this method will no-op in that case. * * @param closeable * The resource to close. */ protected void closeQuietly(Closeable closeable) { if (closeable == null) { return; } try { closeable.close(); } catch (Exception t) { HTTP_LOGGER.warn("Unable to close {}: ", closeable, t); } } /** * Attempts to cleanly close an {@code HttpURLConnection}, swallowing any exceptions that might occur since there's no * way to recover anyway. * <p> * It's OK to pass {@code null} in, this method will no-op in that case. * * @param httpUrlConnection * The connection to close. */ protected void closeQuietly(HttpURLConnection httpUrlConnection) { try { Optional.ofNullable(httpUrlConnection).ifPresent(HttpURLConnection::disconnect); } catch (Exception t) { HTTP_LOGGER.warn("Unable to disconnect {}: ", httpUrlConnection, t); } } /** * Writes the contents of the {@code source} stream to the {@code destination} stream using the given * {@code bufferSize}. * * @param source * The source stream to copy from. * @param destination * The destination stream to copy to. * @param bufferSize * The size of the buffer to use during the copy operation. 
* @throws IOException * If an error occurs when reading from {@code source} or writing to {@code destination}. * @throws NullPointerException * If either {@code source} or @{code destination} is {@code null}. */ protected void write(InputStream source, OutputStream destination, int bufferSize) throws IOException { if (source == null || destination == null) { throw new IllegalArgumentException("Must provide non-null source and destination streams."); } int read; byte[] chunk = new byte[bufferSize]; while ((read = source.read(chunk)) > 0) destination.write(chunk, 0, read); } /** * Creates the form field name for the binary attachment filename by stripping off the file extension - for example, * the filename "test.png" would return "test". * * @param binaryAttachment * The binary attachment for which to create the form field name. * @return The form field name for the given binary attachment. */ protected String createFormFieldName(BinaryAttachment binaryAttachment) { if (binaryAttachment.getFieldName() != null) { return binaryAttachment.getFieldName(); } String name = binaryAttachment.getFilename(); return Optional.ofNullable(name).filter(f -> f.contains(".")).map(f -> f.substring(0, f.lastIndexOf('.'))).orElse(name); } /** * returns if the binary attachment stream is closed automatically * * @since 1.7.0 * @return {@code true} if the binary stream should be closed automatically, {@code false} otherwise */ public boolean isAutocloseBinaryAttachmentStream() { return autocloseBinaryAttachmentStream; } /** * define if the binary attachment stream is closed automatically after sending the content to facebook * * @since 1.7.0 * @param autocloseBinaryAttachmentStream * {@code true} if the {@link BinaryAttachment} stream should be closed automatically, {@code false} * otherwise */ public void setAutocloseBinaryAttachmentStream(boolean autocloseBinaryAttachmentStream) { this.autocloseBinaryAttachmentStream = autocloseBinaryAttachmentStream; } /** * access to the current response headers * * @return the current reponse header map */ public Map<String, List<String>> getCurrentHeaders() { return currentHeaders; } @Override public Response executeDelete(String url) throws IOException { return execute(url, HttpMethod.DELETE); } @Override public DebugHeaderInfo getDebugHeaderInfo() { return debugHeaderInfo; } private Response execute(String url, HttpMethod httpMethod) throws IOException { HTTP_LOGGER.debug("Making a {} request to {}", httpMethod.name(), url); HttpURLConnection httpUrlConnection = null; try { httpUrlConnection = openConnection(new URL(url)); httpUrlConnection.setReadTimeout(DEFAULT_READ_TIMEOUT_IN_MS); httpUrlConnection.setUseCaches(false); httpUrlConnection.setRequestMethod(httpMethod.name()); // Allow subclasses to customize the connection if they'd like to - set // their own headers, timeouts, etc. 
customizeConnection(httpUrlConnection); httpUrlConnection.connect(); HTTP_LOGGER.trace("Response headers: {}", httpUrlConnection.getHeaderFields()); fillHeaderAndDebugInfo(httpUrlConnection); Response response = fetchResponse(httpUrlConnection); HTTP_LOGGER.debug("Facebook responded with {}", response); return response; } finally { closeQuietly(httpUrlConnection); } } protected void fillHeaderAndDebugInfo(HttpURLConnection httpUrlConnection) { currentHeaders = Collections.unmodifiableMap(httpUrlConnection.getHeaderFields()); String usedApiVersion = StringUtils.trimToEmpty(httpUrlConnection.getHeaderField("facebook-api-version")); HTTP_LOGGER.debug("Facebook used the API {} to answer your request", usedApiVersion); Version usedVersion = Version.getVersionFromString(usedApiVersion); DebugHeaderInfo.DebugHeaderInfoFactory factory = DebugHeaderInfo.DebugHeaderInfoFactory.create().setVersion(usedVersion); Arrays.stream(FbHeaderField.values()).forEach(f -> f.getPutHeader().accept(httpUrlConnection, factory)); debugHeaderInfo = factory.build(); } protected Response fetchResponse(HttpURLConnection httpUrlConnection) throws IOException { InputStream inputStream = null; try { inputStream = httpUrlConnection.getResponseCode() != HttpURLConnection.HTTP_OK ? httpUrlConnection.getErrorStream() : httpUrlConnection.getInputStream(); } catch (IOException e) { HTTP_LOGGER.warn("An error occurred while making a {} request to {}:", httpUrlConnection.getRequestMethod(), httpUrlConnection.getURL(), e); } return new Response(httpUrlConnection.getResponseCode(), StringUtils.fromInputStream(inputStream)); } private enum FbHeaderField { X_FB_TRACE_ID((c, f) -> f.setTraceId(getHeaderOrEmpty(c,"x-fb-trace-id"))), X_FB_REV((c, f) -> f.setRev(getHeaderOrEmpty(c,"x-fb-rev"))), X_FB_DEBUG((c, f) -> f.setDebug(getHeaderOrEmpty(c,"x-fb-debug"))), X_APP_USAGE((c, f) -> f.setAppUsage(getHeaderOrEmpty(c,"x-app-usage"))), X_PAGE_USAGE((c, f) -> f.setPageUsage(getHeaderOrEmpty(c,"x-page-usage"))), X_AD_ACCOUNT_USAGE((c, f) -> f.setAdAccountUsage(getHeaderOrEmpty(c,"x-ad-account-usage"))), X_BUSINESS_USE_CASE_USAGE((c, f) -> f.setBusinessUseCaseUsage(getHeaderOrEmpty(c,"x-business-use-case-usage"))); private final BiConsumer<HttpURLConnection, DebugHeaderInfo.DebugHeaderInfoFactory> putHeader; FbHeaderField(BiConsumer<HttpURLConnection, DebugHeaderInfo.DebugHeaderInfoFactory> headerFunction) { this.putHeader = headerFunction; } public BiConsumer<HttpURLConnection, DebugHeaderInfo.DebugHeaderInfoFactory> getPutHeader() { return putHeader; } private static String getHeaderOrEmpty(HttpURLConnection connection, String fieldName) { return StringUtils.trimToEmpty(connection.getHeaderField(fieldName)); } } }
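// --- Illustrative sketch (not part of the original sources): DefaultWebRequestor above exposes
// --- openConnection() and customizeConnection() as extension points, so a subclass can route
// --- requests through a proxy and add headers. The proxy host/port, timeout and header values
// --- below are assumptions for illustration only.
class ProxiedWebRequestorSketch extends DefaultWebRequestor {
    private final java.net.Proxy proxy =
        new java.net.Proxy(java.net.Proxy.Type.HTTP, new java.net.InetSocketAddress("proxy.example.com", 3128));

    @Override
    protected java.net.HttpURLConnection openConnection(java.net.URL url) throws java.io.IOException {
        // Route every request through the configured HTTP proxy.
        return (java.net.HttpURLConnection) url.openConnection(proxy);
    }

    @Override
    protected void customizeConnection(java.net.HttpURLConnection connection) {
        // Shorten the read timeout and tag requests before they are sent.
        connection.setReadTimeout(30_000);
        connection.setRequestProperty("User-Agent", "restfb-sketch/1.0");
    }
}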
package com.scylladb.jmx.api; import java.io.StringReader; import java.net.InetAddress; import java.net.UnknownHostException; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import javax.json.Json; import javax.json.JsonArray; import javax.json.JsonObject; import javax.json.JsonReader; import javax.json.JsonReaderFactory; import javax.json.JsonString; import javax.management.openmbean.TabularData; import javax.management.openmbean.TabularDataSupport; import javax.ws.rs.client.Client; import javax.ws.rs.client.ClientBuilder; import javax.ws.rs.client.Entity; import javax.ws.rs.client.Invocation; import javax.ws.rs.client.WebTarget; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.MultivaluedMap; import javax.ws.rs.core.Response; import org.glassfish.jersey.client.ClientConfig; import com.scylladb.jmx.utils.EstimatedHistogram; import com.scylladb.jmx.utils.SnapshotDetailsTabularData; import com.yammer.metrics.core.HistogramValues; public class APIClient { Map<String, CacheEntry> cache = new HashMap<String, CacheEntry>(); String getCacheKey(String key, MultivaluedMap<String, String> param, long duration) { if (duration <= 0) { return null; } if (param != null) { StringBuilder sb = new StringBuilder(key); sb.append("?"); for (String k : param.keySet()) { sb.append(k).append('=').append(param.get(k)).append('&'); } return sb.toString(); } return key; } String getStringFromCache(String key, long duration) { if (key == null) { return null; } CacheEntry value = cache.get(key); return (value!= null && value.valid(duration))? value.stringValue() : null; } EstimatedHistogram getEstimatedHistogramFromCache(String key, long duration) { if (key == null) { return null; } CacheEntry value = cache.get(key); return (value!= null && value.valid(duration))? 
value.getEstimatedHistogram() : null; } JsonReaderFactory factory = Json.createReaderFactory(null); private static final java.util.logging.Logger logger = java.util.logging.Logger .getLogger(APIClient.class.getName()); public static String getBaseUrl() { return APIConfig.getBaseUrl(); } public Invocation.Builder get(String path, MultivaluedMap<String, String> queryParams) { Client client = ClientBuilder.newClient( new ClientConfig()); WebTarget webTarget = client.target(getBaseUrl()).path(path); if (queryParams != null) { for (Entry<String, List<String>> qp : queryParams.entrySet()) { for (String e : qp.getValue()) { webTarget = webTarget.queryParam(qp.getKey(), e); } } } return webTarget.request(MediaType.APPLICATION_JSON); } public Invocation.Builder get(String path) { return get(path, null); } public Response post(String path, MultivaluedMap<String, String> queryParams) { Response response = get(path, queryParams).post(Entity.entity(null, MediaType.TEXT_PLAIN)); if (response.getStatus() != Response.Status.OK.getStatusCode() ) { throw getException(response.readEntity(String.class)); } return response; } public void post(String path) { post(path, null); } public RuntimeException getException(String txt) { JsonReader reader = factory.createReader(new StringReader(txt)); JsonObject res = reader.readObject(); return new RuntimeException(res.getString("message")); } public String postGetVal(String path, MultivaluedMap<String, String> queryParams) { return post(path, queryParams).readEntity(String.class); } public int postInt(String path, MultivaluedMap<String, String> queryParams) { return Integer.parseInt(postGetVal(path, queryParams)); } public int postInt(String path) { return postInt(path, null); } public void delete(String path, MultivaluedMap<String, String> queryParams) { if (queryParams != null) { get(path, queryParams).delete(); return; } get(path).delete(); } public void delete(String path) { delete(path, null); } public String getRawValue(String string, MultivaluedMap<String, String> queryParams, long duration) { if (string.equals("")) { return ""; } String key = getCacheKey(string, queryParams, duration); String res = getStringFromCache(key, duration); if (res != null) { return res; } Response response = get(string, queryParams).get(Response.class); if (response.getStatus() != Response.Status.OK.getStatusCode() ) { // TBD // We are currently not caching errors, // it should be reconsider. 
throw getException(response.readEntity(String.class)); } res = response.readEntity(String.class); if (duration > 0) { cache.put(key, new CacheEntry(res)); } return res; } public String getRawValue(String string, MultivaluedMap<String, String> queryParams) { return getRawValue(string, queryParams, 0); } public String getRawValue(String string, long duration) { return getRawValue(string, null, duration); } public String getRawValue(String string) { return getRawValue(string, null, 0); } public String getStringValue(String string, MultivaluedMap<String, String> queryParams) { return getRawValue(string, queryParams).replaceAll("^\"|\"$", ""); } public String getStringValue(String string, MultivaluedMap<String, String> queryParams, long duration) { return getRawValue(string, queryParams, duration).replaceAll("^\"|\"$", ""); } public String getStringValue(String string) { return getStringValue(string, null); } public JsonReader getReader(String string, MultivaluedMap<String, String> queryParams) { return factory.createReader(new StringReader(getRawValue(string, queryParams))); } public JsonReader getReader(String string) { return getReader(string, null); } public String[] getStringArrValue(String string) { List<String> val = getListStrValue(string); return val.toArray(new String[val.size()]); } public int getIntValue(String string, MultivaluedMap<String, String> queryParams) { return Integer.parseInt(getRawValue(string, queryParams)); } public int getIntValue(String string) { return getIntValue(string, null); } public boolean getBooleanValue(String string) { return Boolean.parseBoolean(getRawValue(string)); } public double getDoubleValue(String string) { return Double.parseDouble(getRawValue(string)); } public List<String> getListStrValue(String string, MultivaluedMap<String, String> queryParams) { JsonReader reader = getReader(string, queryParams); JsonArray arr = reader.readArray(); List<String> res = new ArrayList<String>(arr.size()); for (int i = 0; i < arr.size(); i++) { res.add(arr.getString(i)); } reader.close(); return res; } public List<String> getListStrValue(String string) { return getListStrValue(string, null); } public static List<String> listStrFromJArr(JsonArray arr) { List<String> res = new ArrayList<String>(); for (int i = 0; i < arr.size(); i++) { res.add(arr.getString(i)); } return res; } public static Map<String, String> mapStrFromJArr(JsonArray arr) { Map<String, String> res = new HashMap<String, String>(); for (int i = 0; i < arr.size(); i++) { JsonObject obj = arr.getJsonObject(i); if (obj.containsKey("key") && obj.containsKey("value")) { res.put(obj.getString("key"), obj.getString("value")); } } return res; } public static String join(String[] arr, String joiner) { String res = ""; if (arr != null) { for (String name : arr) { if (name != null && !name.equals("")) { if (!res.equals("")) { res = res + ","; } res = res + name; } } } return res; } public static String join(String[] arr) { return join(arr, ","); } public static String mapToString(Map<String, String> mp, String pairJoin, String joiner) { String res = ""; if (mp != null) { for (String name : mp.keySet()) { if (!res.equals("")) { res = res + joiner; } res = res + name + pairJoin + mp.get(name); } } return res; } public static String mapToString(Map<String, String> mp) { return mapToString(mp, "=", ","); } public static boolean set_query_param( MultivaluedMap<String, String> queryParams, String key, String value) { if (queryParams != null && key != null && value != null && !value.equals("")) { queryParams.add(key, 
value); return true; } return false; } public static boolean set_bool_query_param( MultivaluedMap<String, String> queryParams, String key, boolean value) { if (queryParams != null && key != null && value) { queryParams.add(key, "true"); return true; } return false; } public Map<String, List<String>> getMapStringListStrValue(String string, MultivaluedMap<String, String> queryParams) { if (string.equals("")) { return null; } JsonReader reader = getReader(string, queryParams); JsonArray arr = reader.readArray(); Map<String, List<String>> map = new HashMap<String, List<String>>(); for (int i = 0; i < arr.size(); i++) { JsonObject obj = arr.getJsonObject(i); if (obj.containsKey("key") && obj.containsKey("value")) { map.put(obj.getString("key"), listStrFromJArr(obj.getJsonArray("value"))); } } reader.close(); return map; } public Map<String, List<String>> getMapStringListStrValue(String string) { return getMapStringListStrValue(string, null); } public Map<List<String>, List<String>> getMapListStrValue(String string, MultivaluedMap<String, String> queryParams) { if (string.equals("")) { return null; } JsonReader reader = getReader(string, queryParams); JsonArray arr = reader.readArray(); Map<List<String>, List<String>> map = new HashMap<List<String>, List<String>>(); for (int i = 0; i < arr.size(); i++) { JsonObject obj = arr.getJsonObject(i); if (obj.containsKey("key") && obj.containsKey("value")) { map.put(listStrFromJArr(obj.getJsonArray("key")), listStrFromJArr(obj.getJsonArray("value"))); } } reader.close(); return map; } public Map<List<String>, List<String>> getMapListStrValue(String string) { return getMapListStrValue(string, null); } public Set<String> getSetStringValue(String string, MultivaluedMap<String, String> queryParams) { JsonReader reader = getReader(string, queryParams); JsonArray arr = reader.readArray(); Set<String> res = new HashSet<String>(); for (int i = 0; i < arr.size(); i++) { res.add(arr.getString(i)); } reader.close(); return res; } public Set<String> getSetStringValue(String string) { return getSetStringValue(string, null); } public Map<String, String> getMapStrValue(String string, MultivaluedMap<String, String> queryParams) { if (string.equals("")) { return null; } JsonReader reader = getReader(string, queryParams); JsonArray arr = reader.readArray(); Map<String, String> map = new HashMap<String, String>(); for (int i = 0; i < arr.size(); i++) { JsonObject obj = arr.getJsonObject(i); if (obj.containsKey("key") && obj.containsKey("value")) { map.put(obj.getString("key"), obj.getString("value")); } } reader.close(); return map; } public Map<String, String> getMapStrValue(String string) { return getMapStrValue(string, null); } public List<InetAddress> getListInetAddressValue(String string, MultivaluedMap<String, String> queryParams) { List<String> vals = getListStrValue(string, queryParams); List<InetAddress> res = new ArrayList<InetAddress>(); for (String val : vals) { try { res.add(InetAddress.getByName(val)); } catch (UnknownHostException e) { // TODO Auto-generated catch block e.printStackTrace(); } } return res; } public List<InetAddress> getListInetAddressValue(String string) { return getListInetAddressValue(string, null); } public Map<String, TabularData> getMapStringTabularDataValue(String string) { // TODO Auto-generated method stub return null; } private TabularDataSupport getSnapshotData(String key, JsonArray arr) { TabularDataSupport data = new TabularDataSupport( SnapshotDetailsTabularData.TABULAR_TYPE); for (int i = 0; i < arr.size(); i++) { JsonObject 
obj = arr.getJsonObject(i); if (obj.containsKey("ks") && obj.containsKey("cf")) { SnapshotDetailsTabularData.from(key, obj.getString("ks"), obj.getString("cf"), obj.getInt("total"), obj.getInt("live"), data); } } return data; } public Map<String, TabularData> getMapStringSnapshotTabularDataValue( String string, MultivaluedMap<String, String> queryParams) { if (string.equals("")) { return null; } JsonReader reader = getReader(string, queryParams); JsonArray arr = reader.readArray(); Map<String, TabularData> map = new HashMap<>(); for (int i = 0; i < arr.size(); i++) { JsonObject obj = arr.getJsonObject(i); if (obj.containsKey("key") && obj.containsKey("value")) { String key = obj.getString("key"); map.put(key, getSnapshotData(key, obj.getJsonArray("value"))); } } reader.close(); return map; } public long getLongValue(String string) { return Long.parseLong(getRawValue(string)); } public Map<InetAddress, Float> getMapInetAddressFloatValue(String string, MultivaluedMap<String, String> queryParams) { Map<InetAddress, Float> res = new HashMap<InetAddress, Float>(); JsonReader reader = getReader(string, queryParams); JsonArray arr = reader.readArray(); JsonObject obj = null; for (int i = 0; i < arr.size(); i++) { try { obj = arr.getJsonObject(i); res.put(InetAddress.getByName(obj.getString("key")), Float.parseFloat(obj.getString("value"))); } catch (UnknownHostException e) { logger.warning("Bad formatted address " + obj.getString("key")); } } return res; } public Map<InetAddress, Float> getMapInetAddressFloatValue(String string) { return getMapInetAddressFloatValue(string, null); } public Map<String, Long> getMapStringLongValue(String string, MultivaluedMap<String, String> queryParams) { Map<String, Long> res = new HashMap<String, Long>(); JsonReader reader = getReader(string, queryParams); JsonArray arr = reader.readArray(); JsonObject obj = null; for (int i = 0; i < arr.size(); i++) { obj = arr.getJsonObject(i); res.put(obj.getString("key"), obj.getJsonNumber("value").longValue()); } return res; } public Map<String, Long> getMapStringLongValue(String string) { return getMapStringLongValue(string, null); } public long[] getLongArrValue(String string, MultivaluedMap<String, String> queryParams) { JsonReader reader = getReader(string, queryParams); JsonArray arr = reader.readArray(); long[] res = new long[arr.size()]; for (int i = 0; i < arr.size(); i++) { res[i] = arr.getJsonNumber(i).longValue(); } reader.close(); return res; } public long[] getLongArrValue(String string) { return getLongArrValue(string, null); } public Map<String, Integer> getMapStringIntegerValue(String string, MultivaluedMap<String, String> queryParams) { Map<String, Integer> res = new HashMap<String, Integer>(); JsonReader reader = getReader(string, queryParams); JsonArray arr = reader.readArray(); JsonObject obj = null; for (int i = 0; i < arr.size(); i++) { obj = arr.getJsonObject(i); res.put(obj.getString("key"), obj.getInt("value")); } return res; } public Map<String, Integer> getMapStringIntegerValue(String string) { return getMapStringIntegerValue(string, null); } public int[] getIntArrValue(String string, MultivaluedMap<String, String> queryParams) { JsonReader reader = getReader(string, queryParams); JsonArray arr = reader.readArray(); int[] res = new int[arr.size()]; for (int i = 0; i < arr.size(); i++) { res[i] = arr.getInt(i); } reader.close(); return res; } public int[] getIntArrValue(String string) { return getIntArrValue(string, null); } public Map<String, Long> getListMapStringLongValue(String string, 
MultivaluedMap<String, String> queryParams) { if (string.equals("")) { return null; } JsonReader reader = getReader(string, queryParams); JsonArray arr = reader.readArray(); Map<String, Long> map = new HashMap<String, Long>(); for (int i = 0; i < arr.size(); i++) { JsonObject obj = arr.getJsonObject(i); Iterator<String> it = obj.keySet().iterator(); String key = ""; long val = -1; while (it.hasNext()) { String k = it.next(); if (obj.get(k) instanceof JsonString) { key = obj.getString(k); } else { val = obj.getJsonNumber(k).longValue(); } } if (val > 0 && !key.equals("")) { map.put(key, val); } } reader.close(); return map; } public Map<String, Long> getListMapStringLongValue(String string) { return getListMapStringLongValue(string, null); } public JsonArray getJsonArray(String string, MultivaluedMap<String, String> queryParams) { if (string.equals("")) { return null; } JsonReader reader = getReader(string, queryParams); JsonArray res = reader.readArray(); reader.close(); return res; } public JsonArray getJsonArray(String string) { return getJsonArray(string, null); } public List<Map<String, String>> getListMapStrValue(String string, MultivaluedMap<String, String> queryParams) { JsonArray arr = getJsonArray(string, queryParams); List<Map<String, String>> res = new ArrayList<Map<String, String>>(); for (int i = 0; i < arr.size(); i++) { res.add(mapStrFromJArr(arr.getJsonArray(i))); } return res; } public List<Map<String, String>> getListMapStrValue(String string) { return getListMapStrValue(string, null); } public TabularData getCQLResult(String string) { // TODO Auto-generated method stub return null; } public JsonObject getJsonObj(String string, MultivaluedMap<String, String> queryParams) { if (string.equals("")) { return null; } JsonReader reader = getReader(string, queryParams); JsonObject res = reader.readObject(); reader.close(); return res; } public HistogramValues getHistogramValue(String url, MultivaluedMap<String, String> queryParams) { HistogramValues res = new HistogramValues(); JsonObject obj = getJsonObj(url, queryParams); res.count = obj.getJsonNumber("count").longValue(); res.max = obj.getJsonNumber("max").longValue(); res.min = obj.getJsonNumber("min").longValue(); res.sum = obj.getJsonNumber("sum").longValue(); res.variance = obj.getJsonNumber("variance").doubleValue(); res.mean = obj.getJsonNumber("mean").doubleValue(); JsonArray arr = obj.getJsonArray("sample"); if (arr != null) { res.sample = new long[arr.size()]; for (int i = 0; i < arr.size(); i++) { res.sample[i] = arr.getJsonNumber(i).longValue(); } } return res; } public HistogramValues getHistogramValue(String url) { return getHistogramValue(url, null); } public EstimatedHistogram getEstimatedHistogram(String string, MultivaluedMap<String, String> queryParams, long duration) { String key = getCacheKey(string, queryParams, duration); EstimatedHistogram res = getEstimatedHistogramFromCache(key, duration); if (res != null) { return res; } res = new EstimatedHistogram(getEstimatedHistogramAsLongArrValue(string, queryParams)); if (duration > 0) { cache.put(key, new CacheEntry(res)); } return res; } public long[] getEstimatedHistogramAsLongArrValue(String string, MultivaluedMap<String, String> queryParams) { JsonObject obj = getJsonObj(string, queryParams); JsonArray arr = obj.getJsonArray("buckets"); if (arr == null) { return new long[0]; } long res[] = new long[arr.size()]; for (int i = 0; i< arr.size(); i++) { res[i] = arr.getJsonNumber(i).longValue(); } return res; } public long[] 
getEstimatedHistogramAsLongArrValue(String string) { return getEstimatedHistogramAsLongArrValue(string, null); } public Map<String, Double> getMapStringDouble(String string, MultivaluedMap<String, String> queryParams) { if (string.equals("")) { return null; } JsonReader reader = getReader(string, queryParams); JsonArray arr = reader.readArray(); Map<String, Double> map = new HashMap<String, Double>(); for (int i = 0; i < arr.size(); i++) { JsonObject obj = arr.getJsonObject(i); Iterator<String> it = obj.keySet().iterator(); String key = ""; double val = -1; while (it.hasNext()) { String k = it.next(); if (obj.get(k) instanceof JsonString) { key = obj.getString(k); } else { val = obj.getJsonNumber(k).doubleValue(); } } if (!key.equals("")) { map.put(key, val); } } reader.close(); return map; } public Map<String, Double> getMapStringDouble(String string) { return getMapStringDouble(string, null); } }
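// --- Illustrative sketch (not part of the original sources): a few typical reads through the
// --- APIClient above. The REST paths used here are hypothetical examples, not documented Scylla
// --- endpoints, and imports (javax.ws.rs.core.MultivaluedHashMap, java.util collections) are
// --- omitted in this excerpt.
class APIClientUsageSketch {
    static void sample(APIClient client) {
        // Simple scalar and list reads; values are fetched as JSON and converted locally.
        String version = client.getStringValue("/hypothetical/version");
        List<String> keyspaces = client.getListStrValue("/hypothetical/keyspaces");

        // Query parameters are passed as a MultivaluedMap; set_query_param skips null/empty values.
        MultivaluedMap<String, String> params = new MultivaluedHashMap<>();
        APIClient.set_query_param(params, "cf", "users");
        Map<String, String> settings = client.getMapStrValue("/hypothetical/settings", params);

        // Cached read: a positive duration lets repeated calls reuse the previous response
        // (duration semantics are defined by CacheEntry, which is not shown in this excerpt).
        String cached = client.getStringValue("/hypothetical/version", null, 10_000);
    }
}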
package net.fortuna.ical4j.model; import java.text.DateFormat; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.Calendar; import junit.framework.TestCase; import net.fortuna.ical4j.util.CompatibilityHints; import net.fortuna.ical4j.util.TimeZones; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; /** * @author Ben Fortuna * */ public class DateTimeTest extends TestCase { private static Log log = LogFactory.getLog(DateTimeTest.class); private TimeZoneRegistry registry; /** * Default constructor. */ public DateTimeTest() { registry = TimeZoneRegistryFactory.getInstance().createRegistry(); } /* (non-Javadoc) * @see junit.framework.TestCase#setUp() */ protected void setUp() throws Exception { super.setUp(); // ensure relaxing parsing is disabled for these tests.. CompatibilityHints.setHintEnabled(CompatibilityHints.KEY_RELAXED_PARSING, false); } /* * Class under test for void DateTime(long) */ public void testDateTimelong() { DateTime dt = new DateTime(0); // dt.setTimeZone(TimeZoneRegistryFactory.getInstance().createRegistry().getTimeZone(TimeZones.GMT_ID)); // assertEquals("19700101T000000", dt.toString()); dt.setUtc(true); assertEquals("19700101T000000Z", dt.toString()); } /* * Class under test for void DateTime(Date) */ public void testDateTimeDate() { Calendar cal = Calendar.getInstance(); //TimeZone.getTimeZone("GMT")); cal.set(Calendar.YEAR, 1984); // months are zero-based.. cal.set(Calendar.MONTH, 3); cal.set(Calendar.DAY_OF_MONTH, 17); cal.set(Calendar.HOUR_OF_DAY, 3); cal.set(Calendar.MINUTE, 15); cal.set(Calendar.SECOND, 34); assertEquals("19840417T031534", new DateTime(cal.getTime()).toString()); } /* * Class under test for void DateTime(String) */ public void testDateTimeString() throws Exception { try { new DateTime("20050630"); fail("Should throw ParseException"); } catch (ParseException pe) { log.info("Exception occurred: " + pe.getMessage()); } assertEquals("20000827T020000", new DateTime("20000827T020000").toString()); assertEquals("20070101T080000", new DateTime("20070101T080000").toString()); assertEquals("20050630T093000", new DateTime("20050630T093000").toString()); assertEquals("20050630T093000Z", new DateTime("20050630T093000Z").toString()); try { new DateTime("20000402T020000", registry.getTimeZone("America/Los_Angeles")); fail("Should throw ParseException"); } catch (ParseException pe) { log.info("Exception occurred: " + pe.getMessage()); } assertEquals("20000402T020000", new DateTime("20000402T020000", registry.getTimeZone("Australia/Melbourne")).toString()); assertEquals("20000402T020000", new DateTime("20000402T020000").toString()); DateFormat df = new SimpleDateFormat("yyyyMMdd'T'HHmmss"); Calendar cal = Calendar.getInstance(); //java.util.TimeZone.getTimeZone("America/Los_Angeles")); cal.clear(); cal.set(2000, 0, 1, 2, 0, 0); for (int i = 0; i < 365; i++) { String dateString = df.format(cal.getTime()); assertEquals(dateString, new DateTime(dateString).toString()); cal.add(Calendar.DAY_OF_YEAR, 1); } } /** * Test equality of DateTime instances created using different constructors. 
* @throws ParseException */ public void testDateTimeEquals() throws ParseException { DateTime date1 = new DateTime("20050101T093000"); Calendar calendar = Calendar.getInstance(); //TimeZone.getTimeZone("Etc/UTC")); calendar.clear(); calendar.set(2005, 0, 1, 9, 30, 00); calendar.set(Calendar.MILLISECOND, 1); DateTime date2 = new DateTime(calendar.getTime()); assertEquals(date1.toString(), date2.toString()); assertEquals(date1, date2); } /** * Test UTC date-times. */ public void testUtc() throws ParseException { // ordinary date.. DateTime date1 = new DateTime("20050101T093000"); assertFalse(date1.isUtc()); // UTC date.. DateTime date2 = new DateTime(true); assertTrue(date2.isUtc()); TimeZone utcTz = registry.getTimeZone(TimeZones.UTC_ID); utcTz.setID(TimeZones.UTC_ID); // UTC timezone, but not UTC.. DateTime date3 = new DateTime("20050101T093000", utcTz); // date3.setUtc(false); assertFalse(date3.isUtc()); DateTime date4 = new DateTime(); date4.setUtc(true); assertTrue(date4.isUtc()); date4.setUtc(false); assertFalse(date4.isUtc()); DateTime date5 = new DateTime(false); date5.setTimeZone(utcTz); assertFalse(date5.isUtc()); } }
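// --- Illustrative sketch (not part of the original sources): one more JUnit 3 style test in the
// --- same pattern as DateTimeTest above, exercising the UTC flag round-trip. The class and method
// --- names are assumptions for illustration; it mirrors assertions already made in testUtc() and
// --- testDateTimeString() above.
class DateTimeUtcRoundTripSketch extends junit.framework.TestCase {
    public void testUtcFlagRoundTrip() throws java.text.ParseException {
        // A date-time parsed with a trailing 'Z' keeps that form when printed back.
        assertEquals("20050630T093000Z", new DateTime("20050630T093000Z").toString());

        // Toggling the UTC flag is reflected by isUtc(), as in the existing testUtc() checks.
        DateTime dt = new DateTime("20050101T093000");
        assertFalse(dt.isUtc());
        dt.setUtc(true);
        assertTrue(dt.isUtc());
        dt.setUtc(false);
        assertFalse(dt.isUtc());
    }
}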
package net.pgp2p.networkhandler; import java.math.BigInteger; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashSet; import java.util.List; import net.jxta.endpoint.Message; import net.jxta.endpoint.MessageElement; import net.jxta.endpoint.StringMessageElement; public class PGP2PMessage extends Message { /** * Generated serial version UID */ private static final long serialVersionUID = -3521104394888893998L; private static final String NAMESPACE = PGP2PService.NAMESPACE; public static final String SOURCE_USER_ID_FIELD = "SOURCE_USER_ID"; public static final String FINAL_USER_ID_FIELD = "FINAL_USER_ID"; public static final String FROM_USER_ID_FIELD = "FROM_USER_ID"; public static final String KEY_ID_FIELD = "KEY_ID"; public static final String PUBLIC_KEY_FIELD = "PUBLIC_KEY"; public static final String TYPE_FIELD = "TYPE"; public static final String STATUS_FIELD = "STATUS"; public static final String TRACK_FIELD = "TRACK"; public static final String IS_FROM_CONNECT_FIELD = "IS_FROM_CONNECT"; private String sourceUserID; private String finalUserID; private String fromUserID; private long keyID; private String armoredPublicKey; private int type; private int status; private Collection<String> track = new HashSet<String>(); private boolean isFromConnect = false; public PGP2PMessage fromMessage(Message message) { String fromUserID = message.getMessageElement(NAMESPACE, FROM_USER_ID_FIELD).toString(); long keyID = new BigInteger(message.getMessageElement(NAMESPACE, KEY_ID_FIELD).toString(), 16).longValue(); String publicKey = message.getMessageElement(NAMESPACE, PUBLIC_KEY_FIELD).toString(); int type = Integer.valueOf(message.getMessageElement(NAMESPACE, TYPE_FIELD).toString()); this.setFromUserID(fromUserID) .setKeyID(keyID) .setArmoredPublicKey(publicKey) .setType(type); int status; if (message.getMessageElement(NAMESPACE, STATUS_FIELD) != null ) { status = Integer.valueOf(message.getMessageElement(NAMESPACE, STATUS_FIELD).toString()); this.setStatus(status); } String sourceUserID; if (message.getMessageElement(NAMESPACE, SOURCE_USER_ID_FIELD) != null ) { sourceUserID = message.getMessageElement(NAMESPACE, SOURCE_USER_ID_FIELD).toString(); this.setSourceUserID(sourceUserID); } String finalUserID; if (message.getMessageElement(NAMESPACE, FINAL_USER_ID_FIELD) != null ) { finalUserID = message.getMessageElement(NAMESPACE, FINAL_USER_ID_FIELD).toString(); this.setFinalUserID(finalUserID); } List<String> tracks = new ArrayList<String>(); if (message.getMessageElement(NAMESPACE, TRACK_FIELD) != null ) { String element = message.getMessageElement(NAMESPACE, TRACK_FIELD).toString(); tracks.addAll(Arrays.asList( element.split( ", " ) )); this.track.clear(); this.addTrack(tracks); } boolean isFromConnect = false; if ( message.getMessageElement(NAMESPACE, IS_FROM_CONNECT_FIELD) != null ) { isFromConnect = Boolean.valueOf(message.getMessageElement(NAMESPACE, IS_FROM_CONNECT_FIELD).toString()); this.setFromConnect(isFromConnect); } return this; } public PGP2PMessage() { super(); } public PGP2PMessage setKeyID(Long keyID) { this.keyID = keyID; MessageElement elemKeyID = new StringMessageElement(KEY_ID_FIELD, Long.toHexString(keyID), null); replaceMessageElement(NAMESPACE, elemKeyID); return this; } public long getKeyID() { return this.keyID; } public PGP2PMessage setArmoredPublicKey(String publicKey) { this.armoredPublicKey = publicKey; MessageElement elemArmoredPublicKey = new StringMessageElement(PUBLIC_KEY_FIELD, publicKey, null); 
replaceMessageElement(NAMESPACE, elemArmoredPublicKey); return this; } public String getArmoredPublicKey() { return this.armoredPublicKey; } public PGP2PMessage setType(int messageType) { this.type = messageType; MessageElement elemMessageType = new StringMessageElement(TYPE_FIELD, String.valueOf(messageType), null); replaceMessageElement(NAMESPACE, elemMessageType); return this; } public int getType() { return this.type; } public PGP2PMessage setStatus(int status) { this.status = status; MessageElement elemStatus = new StringMessageElement(STATUS_FIELD, String.valueOf(status), null); replaceMessageElement(NAMESPACE, elemStatus); return this; } public int getStatus() { return this.status; } public PGP2PMessage setSourceUserID(String sourceUserID) { this.sourceUserID = sourceUserID; MessageElement elemSourceUserID = new StringMessageElement(SOURCE_USER_ID_FIELD, sourceUserID, null); replaceMessageElement(NAMESPACE, elemSourceUserID); return this; } public String getSourceUserID() { return this.sourceUserID; } public PGP2PMessage setFinalUserID(String finalUserID) { this.finalUserID = finalUserID; MessageElement elemFinalUserID = new StringMessageElement(FINAL_USER_ID_FIELD, finalUserID, null); replaceMessageElement(NAMESPACE, elemFinalUserID); return this; } public String getFinalUserID() { return this.finalUserID; } public PGP2PMessage setFromUserID(String fromUserID) { this.fromUserID = fromUserID; MessageElement elemFromUserID = new StringMessageElement(FROM_USER_ID_FIELD, fromUserID, null); replaceMessageElement(NAMESPACE, elemFromUserID); return this; } public String getFromUserID() { return this.fromUserID; } public PGP2PMessage addTrack(String track) { this.track.add(track); MessageElement elemTrack = new StringMessageElement(TRACK_FIELD, this.track.toString().replace("[", "").replace("]", ""), null); replaceMessageElement(NAMESPACE, elemTrack); return this; } public PGP2PMessage addTrack(Collection<String> tracks) { this.track.addAll(tracks); MessageElement elemTrack = new StringMessageElement(TRACK_FIELD, this.track.toString().replace("[", "").replace("]", ""), null); replaceMessageElement(NAMESPACE, elemTrack); return this; } public Collection<String> getTrack() { return this.track; } public PGP2PMessage setFromConnect(boolean isFromConnect) { this.isFromConnect = isFromConnect; MessageElement elemIsFromConnect = new StringMessageElement(IS_FROM_CONNECT_FIELD, String.valueOf(isFromConnect), null); replaceMessageElement(NAMESPACE, elemIsFromConnect); return this; } public boolean isFromConnect() { return this.isFromConnect; } }
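The sketch below is an illustrative usage example, not part of the project: it shows how the fluent setters and fromMessage() are meant to round-trip a message. Each setter writes its value as a StringMessageElement under the PGP2PService namespace, and fromMessage() reads the mandatory FROM_USER_ID, KEY_ID, PUBLIC_KEY and TYPE elements back out. The user ID, key ID, key text and type code used here are made-up placeholder values.

// Hypothetical usage sketch for PGP2PMessage (placeholder values only).
PGP2PMessage request = new PGP2PMessage()
        .setFromUserID("alice@example.org")                          // placeholder sender ID
        .setKeyID(0x1234ABCD5678EF90L)                               // placeholder PGP key ID
        .setArmoredPublicKey("-----BEGIN PGP PUBLIC KEY BLOCK----- ...") // placeholder key text
        .setType(0);                                                 // placeholder message-type code

// Receiving side: rehydrate the raw JXTA Message into the typed wrapper.
PGP2PMessage received = new PGP2PMessage().fromMessage(request);
System.out.println(received.getFromUserID());            // alice@example.org
System.out.println(Long.toHexString(received.getKeyID())); // 1234abcd5678ef90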
package com.st.maven.apt; import java.io.BufferedInputStream; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.security.MessageDigest; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.TimeZone; import java.util.zip.GZIPInputStream; import java.util.zip.GZIPOutputStream; import org.apache.commons.codec.binary.Hex; import org.apache.commons.compress.archivers.ArchiveInputStream; import org.apache.commons.compress.archivers.ArchiveStreamFactory; import org.apache.commons.compress.archivers.ar.ArArchiveEntry; import org.apache.commons.compress.archivers.tar.TarArchiveEntry; import org.apache.commons.compress.utils.IOUtils; import org.apache.maven.artifact.Artifact; import org.apache.maven.artifact.repository.ArtifactRepository; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.plugin.MojoFailureException; import org.apache.maven.plugin.gpg.AbstractGpgSigner; import org.apache.maven.plugin.gpg.GpgMojo; import org.apache.maven.plugins.annotations.Component; import org.apache.maven.plugins.annotations.LifecyclePhase; import org.apache.maven.plugins.annotations.Mojo; import org.apache.maven.plugins.annotations.Parameter; import org.apache.maven.project.MavenProject; import org.apache.maven.wagon.ConnectionException; import org.apache.maven.wagon.ResourceDoesNotExistException; import org.apache.maven.wagon.TransferFailedException; import org.apache.maven.wagon.Wagon; import org.apache.maven.wagon.authentication.AuthenticationInfo; import org.apache.maven.wagon.authorization.AuthorizationException; import org.apache.maven.wagon.repository.Repository; import org.codehaus.plexus.PlexusContainer; import org.codehaus.plexus.component.repository.exception.ComponentLookupException; @Mojo(name = "deploy", defaultPhase = LifecyclePhase.DEPLOY, threadSafe = false) public class AptDeployMojo extends GpgMojo { @Parameter(defaultValue = "${maven.deploy.skip}", readonly = true) private boolean skip; @Parameter(defaultValue = "${project}", readonly = true, required = true) private MavenProject project; @Component private PlexusContainer container; @Parameter(defaultValue = "${maven.apt.file}", readonly = true) private String file; @Parameter(readonly = true, required = true) private String codename; @Parameter(readonly = true, required = true) private String component; @Parameter(property = "gpg.sign", readonly = true) private boolean sign; @Override public void execute() throws MojoExecutionException, MojoFailureException { if (skip) { getLog().info("Skipping artifact deployment"); return; } List<File> deb = getDebFiles(); if (deb.isEmpty()) { getLog().info("\"deb\" artifacts not found. 
skipping"); return; } AbstractGpgSigner signer = null; if (sign) { signer = newSigner(project); } ArtifactRepository repository = project.getDistributionManagementArtifactRepository(); if (repository == null) { throw new MojoExecutionException("no repository found for distribution"); } Wagon w = null; Repository repositoryForWagon = new Repository(repository.getId(), repository.getUrl()); AuthenticationInfo info = null; if (repository.getAuthentication() != null) { info = new AuthenticationInfo(); info.setUserName(repository.getAuthentication().getUsername()); info.setPassword(repository.getAuthentication().getPassword()); } try { w = container.lookup(Wagon.class, repository.getProtocol()); } catch (ComponentLookupException e) { throw new MojoExecutionException("unable to find wagon", e); } if (w == null) { throw new MojoExecutionException("unable to find wagon for: " + repository.getProtocol()); } try { w.connect(repositoryForWagon, info); Map<Architecture, Packages> packagesPerArch = new HashMap<Architecture, Packages>(); for (File f : deb) { ControlFile controlFile = readControl(f); if (controlFile == null) { throw new MojoExecutionException("invalid .deb format. Missing control file: " + f.getAbsolutePath()); } String path = "pool/" + component + "/" + controlFile.getPackageName().charAt(0) + "/" + controlFile.getPackageName() + "/" + controlFile.getPackageName() + "_" + controlFile.getVersion() + "_" + controlFile.getArch() + ".deb"; try { FileInfo fileInfo = getFileInfo(f); controlFile.append("Filename: " + path); controlFile.append("Size: " + fileInfo.getSize()); controlFile.append("MD5sum: " + fileInfo.getMd5()); controlFile.append("SHA1: " + fileInfo.getSha1()); controlFile.append("SHA256: " + fileInfo.getSha256()); } catch (Exception e) { throw new MojoExecutionException("unable to calculate checksum for: " + f.getAbsolutePath(), e); } if (controlFile.getArch().isWildcard()) { for (Architecture cur : Architecture.values()) { if (cur.isWildcard()) { continue; } addControlFile(w, cur, controlFile, packagesPerArch); } } else { addControlFile(w, controlFile.getArch(), controlFile, packagesPerArch); } getLog().info("uploading: " + f.getAbsolutePath()); w.put(f, path); } Release release = loadRelease(w); // retain old fileinfo Map<String, FileInfo> fileinfoByFilename = new HashMap<>(); for (FileInfo cur : release.getFiles()) { fileinfoByFilename.put(cur.getFilename(), cur); } // add and override with new fileinfo for (Packages cur : packagesPerArch.values()) { for (FileInfo resultInfo : uploadPackages(w, cur)) { fileinfoByFilename.put(resultInfo.getFilename(), resultInfo); } } release.setFiles(new HashSet<>(fileinfoByFilename.values())); File releaseFile = File.createTempFile("apt", "releaseFile"); uploadRelease(w, releaseFile, release); if (signer != null) { File releaseSignature = signer.generateSignatureForArtifact(releaseFile); getLog().info("uploading: Release.gpg"); w.put(releaseSignature, getReleasePath() + ".gpg"); signer.setArgs(Collections.singletonList("--clearsign")); File clearsigned = signer.generateSignatureForArtifact(releaseFile); getLog().info("uploading: InRelease"); w.put(clearsigned, "dists/" + codename + "/InRelease"); } } catch (Exception e) { throw new MojoExecutionException("unable to process", e); } finally { try { w.disconnect(); } catch (ConnectionException e) { getLog().error("unable to disconnect", e); } } } private void addControlFile(Wagon w, Architecture arch, ControlFile file, Map<Architecture, Packages> packagesPerArch) throws MojoExecutionException { 
Packages curPackages = packagesPerArch.get(arch); if (curPackages == null) { curPackages = loadPackages(w, arch); packagesPerArch.put(arch, curPackages); } curPackages.add(file); } private List<FileInfo> uploadPackages(Wagon w, Packages packages) throws MojoExecutionException, TransferFailedException, ResourceDoesNotExistException, AuthorizationException { OutputStream fos = null; List<FileInfo> result = new ArrayList<FileInfo>(); File file; try { file = File.createTempFile("apt", packages.getArchitecture().name()); fos = new FileOutputStream(file); packages.save(fos); } catch (Exception e) { throw new MojoExecutionException("unable to write packages", e); } finally { if (fos != null) { try { fos.close(); } catch (IOException e) { getLog().error("unable to close cursor", e); } } } try { FileInfo fileInfo = getFileInfo(file); fileInfo.setFilename(getPackagesBasePath(packages.getArchitecture())); result.add(fileInfo); } catch (Exception e) { throw new MojoExecutionException("unable to calculate checksum for: " + file.getAbsolutePath(), e); } String path = getPackagesPath(packages.getArchitecture()); getLog().info("uploading: " + path); w.put(file, path); // gzipped try { file = File.createTempFile("apt", packages.getArchitecture().name()); fos = new GZIPOutputStream(new FileOutputStream(file)); packages.save(fos); } catch (Exception e) { throw new MojoExecutionException("unable to write packages", e); } finally { if (fos != null) { try { fos.close(); } catch (IOException e) { getLog().error("unable to close cursor", e); } } } try { FileInfo fileInfo = getFileInfo(file); fileInfo.setFilename(getPackagesBasePath(packages.getArchitecture()) + ".gz"); result.add(fileInfo); } catch (Exception e) { throw new MojoExecutionException("unable to calculate checksum for: " + file.getAbsolutePath(), e); } getLog().info("uploading: " + path + ".gz"); w.put(file, path + ".gz"); return result; } private Packages loadPackages(Wagon w, Architecture architecture) throws MojoExecutionException { Packages packages = new Packages(); packages.setArchitecture(architecture); InputStream fis = null; try { File tempFile = File.createTempFile("packages", architecture.name()); w.get(getPackagesPath(architecture) + ".gz", tempFile); fis = new GZIPInputStream(new FileInputStream(tempFile)); packages.load(fis); } catch (ResourceDoesNotExistException e) { getLog().info(packages.getArchitecture() + "/Packages.gz do not exist. 
creating..."); } catch (Exception e) { throw new MojoExecutionException("unable to load " + getPackagesPath(architecture), e); } finally { if (fis != null) { try { fis.close(); } catch (IOException e) { getLog().error("unable to close cursor", e); } } } return packages; } private void uploadRelease(Wagon w, File releaseFile, Release release) throws MojoExecutionException, TransferFailedException, ResourceDoesNotExistException, AuthorizationException { OutputStream fos = null; try { fos = new FileOutputStream(releaseFile); release.save(fos); } catch (Exception e) { throw new MojoExecutionException("unable to write releases", e); } finally { if (fos != null) { try { fos.close(); } catch (IOException e) { getLog().error("unable to close cursor", e); } } } getLog().info("uploading: Release"); w.put(releaseFile, getReleasePath()); } private Release loadRelease(Wagon w) throws MojoExecutionException { InputStream fis = null; Release release = new Release(); try { File tempFile = File.createTempFile("release", "file"); w.get(getReleasePath(), tempFile); fis = new FileInputStream(tempFile); release.load(fis); } catch (ResourceDoesNotExistException e) { getLog().info("Release do not exist. creating..."); release.setArchitectures("amd64"); release.setCodename(codename); release.setComponents(component); release.setLabel(codename); release.setOrigin(codename); } catch (Exception e) { throw new MojoExecutionException("unable to read Release from: " + getReleasePath(), e); } finally { if (fis != null) { try { fis.close(); } catch (IOException e) { getLog().error("unable to close cursor", e); } } } SimpleDateFormat sdf = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss zzz"); sdf.setTimeZone(TimeZone.getTimeZone("UTC")); release.setDate(sdf.format(new Date())); return release; } private String getPackagesBasePath(Architecture architecture) { String packagesBaseFilename = component + "/binary-" + architecture.name() + "/Packages"; return packagesBaseFilename; } private String getPackagesPath(Architecture architecture) { String packagesFilename = "dists/" + codename + "/" + getPackagesBasePath(architecture); return packagesFilename; } private String getReleasePath() { String releaseFilename = "dists/" + codename + "/Release"; return releaseFilename; } private List<File> getDebFiles() throws MojoExecutionException { List<Artifact> attachedArtefacts = project.getAttachedArtifacts(); List<File> deb = new ArrayList<File>(); for (Artifact cur : attachedArtefacts) { if (cur.getType().equals("deb")) { deb.add(cur.getFile()); } } if (file != null && file.trim().length() != 0) { File f = new File(file); if (!f.exists()) { throw new MojoExecutionException("specified file not found: " + f.getAbsolutePath()); } deb.add(f); } return deb; } private static FileInfo getFileInfo(File f) throws Exception { FileInfo result = new FileInfo(); result.setSize(String.valueOf(f.length())); BufferedInputStream bis = null; try { bis = new BufferedInputStream(new FileInputStream(f)); MessageDigest md5Alg = MessageDigest.getInstance("MD5"); md5Alg.reset(); MessageDigest sha1 = MessageDigest.getInstance("SHA-1"); sha1.reset(); MessageDigest sha256 = MessageDigest.getInstance("SHA-256"); sha256.reset(); byte[] buf = new byte[2048]; int curByte = -1; while ((curByte = bis.read(buf)) != -1) { md5Alg.update(buf, 0, curByte); sha1.update(buf, 0, curByte); sha256.update(buf, 0, curByte); } result.setMd5(new String(Hex.encodeHex(md5Alg.digest()))); result.setSha1(new String(Hex.encodeHex(sha1.digest()))); result.setSha256(new 
String(Hex.encodeHex(sha256.digest()))); } finally { if (bis != null) { bis.close(); } } return result; } private ControlFile readControl(File deb) throws MojoExecutionException { ArArchiveEntry entry; TarArchiveEntry controlEntry; ArchiveInputStream debStream = null; try { debStream = new ArchiveStreamFactory().createArchiveInputStream("ar", new FileInputStream(deb)); while ((entry = (ArArchiveEntry) debStream.getNextEntry()) != null) { if (entry.getName().equals("control.tar.gz")) { try (ArchiveInputStream controlTgz = new ArchiveStreamFactory().createArchiveInputStream("tar", new GZIPInputStream(debStream))) { while ((controlEntry = (TarArchiveEntry) controlTgz.getNextEntry()) != null) { getLog().debug("control entry: " + controlEntry.getName()); if (controlEntry.getName().equals("./control") || controlEntry.getName().equals("control")) { ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); IOUtils.copy(controlTgz, outputStream); String contentString = outputStream.toString("UTF-8"); outputStream.close(); ControlFile controlFile = new ControlFile(); controlFile.load(contentString); return controlFile; } } } } } return null; } catch (Exception e) { throw new MojoExecutionException("invalid .deb. unable to find control at: " + deb.getAbsolutePath(), e); } finally { if (debStream != null) { try { debStream.close(); } catch (IOException e) { getLog().error("unable to close .deb", e); } } } } }
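The getFileInfo helper above streams each .deb exactly once and feeds the same buffer into the MD5, SHA-1 and SHA-256 digests. The following self-contained sketch shows that single-pass pattern using only the JDK (no commons-codec); the file name is a placeholder.

import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.InputStream;
import java.security.MessageDigest;

public class MultiDigestExample {
    public static void main(String[] args) throws Exception {
        String path = args.length > 0 ? args[0] : "example.deb"; // placeholder file name
        MessageDigest md5 = MessageDigest.getInstance("MD5");
        MessageDigest sha256 = MessageDigest.getInstance("SHA-256");
        try (InputStream in = new BufferedInputStream(new FileInputStream(path))) {
            byte[] buf = new byte[8192];
            int read;
            // Single pass over the file: every chunk updates all digests.
            while ((read = in.read(buf)) != -1) {
                md5.update(buf, 0, read);
                sha256.update(buf, 0, read);
            }
        }
        System.out.println("MD5sum: " + toHex(md5.digest()));
        System.out.println("SHA256: " + toHex(sha256.digest()));
    }

    // Plain-JDK stand-in for the Hex.encodeHex call used by the mojo.
    private static String toHex(byte[] digest) {
        StringBuilder sb = new StringBuilder(digest.length * 2);
        for (byte b : digest) {
            sb.append(String.format("%02x", b));
        }
        return sb.toString();
    }
}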
package com.netflix.spinnaker.front50.config; import com.amazonaws.ClientConfiguration; import com.amazonaws.Protocol; import com.amazonaws.auth.AWSCredentialsProvider; import com.amazonaws.regions.Region; import com.amazonaws.regions.Regions; import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.AmazonS3Client; import com.amazonaws.services.s3.S3ClientOptions; import com.fasterxml.jackson.databind.ObjectMapper; import com.netflix.awsobjectmapper.AmazonObjectMapperConfigurer; import com.netflix.spectator.api.Registry; import com.netflix.spinnaker.clouddriver.aws.bastion.BastionConfig; import com.netflix.spinnaker.clouddriver.aws.security.AmazonClientProvider; import com.netflix.spinnaker.front50.model.S3StorageService; import com.netflix.spinnaker.front50.model.StorageService; import com.netflix.spinnaker.front50.model.application.ApplicationDAO; import com.netflix.spinnaker.front50.model.application.ApplicationPermissionDAO; import com.netflix.spinnaker.front50.model.application.DefaultApplicationDAO; import com.netflix.spinnaker.front50.model.application.DefaultApplicationPermissionDAO; import com.netflix.spinnaker.front50.model.notification.DefaultNotificationDAO; import com.netflix.spinnaker.front50.model.notification.NotificationDAO; import com.netflix.spinnaker.front50.model.pipeline.DefaultPipelineDAO; import com.netflix.spinnaker.front50.model.pipeline.DefaultPipelineStrategyDAO; import com.netflix.spinnaker.front50.model.pipeline.PipelineDAO; import com.netflix.spinnaker.front50.model.pipeline.PipelineStrategyDAO; import com.netflix.spinnaker.front50.model.project.DefaultProjectDAO; import com.netflix.spinnaker.front50.model.project.ProjectDAO; import com.netflix.spinnaker.front50.model.serviceaccount.DefaultServiceAccountDAO; import com.netflix.spinnaker.front50.model.serviceaccount.ServiceAccountDAO; import com.netflix.spinnaker.front50.model.snapshot.DefaultSnapshotDAO; import com.netflix.spinnaker.front50.model.snapshot.SnapshotDAO; import com.netflix.spinnaker.front50.model.tag.DefaultEntityTagsDAO; import com.netflix.spinnaker.front50.model.tag.EntityTagsDAO; import org.springframework.boot.autoconfigure.condition.ConditionalOnExpression; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; import org.springframework.web.client.RestTemplate; import rx.schedulers.Schedulers; import java.util.Optional; import java.util.concurrent.Executors; @Configuration @ConditionalOnExpression("${spinnaker.s3.enabled:false}") @Import(BastionConfig.class) @EnableConfigurationProperties(S3Properties.class) public class S3Config { @Bean public AmazonClientProvider amazonClientProvider() { return new AmazonClientProvider(); } @Bean public AmazonS3 awsS3Client(AWSCredentialsProvider awsCredentialsProvider, S3Properties s3Properties) { ClientConfiguration clientConfiguration = new ClientConfiguration(); if (s3Properties.getProxyProtocol() != null) { if (s3Properties.getProxyProtocol().equalsIgnoreCase("HTTPS")) { clientConfiguration.setProtocol(Protocol.HTTPS); } else { clientConfiguration.setProtocol(Protocol.HTTP); } Optional.ofNullable(s3Properties.getProxyHost()) .ifPresent(clientConfiguration::setProxyHost); Optional.ofNullable(s3Properties.getProxyPort()) .map(Integer::parseInt) 
.ifPresent(clientConfiguration::setProxyPort); } AmazonS3Client client = new AmazonS3Client(awsCredentialsProvider, clientConfiguration); if (s3Properties.getEndpoint() != null) { client.setEndpoint(s3Properties.getEndpoint()); client.setS3ClientOptions(S3ClientOptions.builder().setPathStyleAccess(true).build()); } else { Optional.ofNullable(s3Properties.getRegion()) .map(Regions::fromName) .map(Region::getRegion) .ifPresent(client::setRegion); } return client; } @Bean @ConditionalOnMissingBean(RestTemplate.class) public RestTemplate restTemplate() { return new RestTemplate(); } @Bean public S3StorageService s3StorageService(AmazonS3 amazonS3, S3Properties s3Properties) { ObjectMapper awsObjectMapper = new ObjectMapper(); AmazonObjectMapperConfigurer.configure(awsObjectMapper); return new S3StorageService(awsObjectMapper, amazonS3, s3Properties.getBucket(), s3Properties.getRootFolder(), s3Properties.isFailoverEnabled()); } @Bean public ApplicationDAO applicationDAO(StorageService storageService, Registry registry) { return new DefaultApplicationDAO(storageService, Schedulers.from(Executors.newFixedThreadPool(20)), 15000, registry); } @Bean public ApplicationPermissionDAO applicationPermissionDAO(StorageService storageService, Registry registry) { return new DefaultApplicationPermissionDAO(storageService, Schedulers.from(Executors.newFixedThreadPool(20)), 45000, registry); } @Bean public ServiceAccountDAO serviceAccountDAO(StorageService storageService, Registry registry) { return new DefaultServiceAccountDAO(storageService, Schedulers.from(Executors.newFixedThreadPool(20)), 30000, registry); } @Bean public ProjectDAO projectDAO(StorageService storageService, Registry registry) { return new DefaultProjectDAO(storageService, Schedulers.from(Executors.newFixedThreadPool(20)), 30000, registry); } @Bean public NotificationDAO notificationDAO(StorageService storageService, Registry registry) { return new DefaultNotificationDAO(storageService, Schedulers.from(Executors.newFixedThreadPool(20)), 30000, registry); } @Bean public PipelineStrategyDAO pipelineStrategyDAO(StorageService storageService, Registry registry) { return new DefaultPipelineStrategyDAO(storageService, Schedulers.from(Executors.newFixedThreadPool(20)), 20000, registry); } @Bean public PipelineDAO pipelineDAO(StorageService storageService, Registry registry) { return new DefaultPipelineDAO(storageService, Schedulers.from(Executors.newFixedThreadPool(25)), 10000, registry); } @Bean public SnapshotDAO snapshotDAO(StorageService storageService, Registry registry) { return new DefaultSnapshotDAO(storageService, Schedulers.from(Executors.newFixedThreadPool(20)), 60000, registry); } @Bean public EntityTagsDAO entityTagsDAO(StorageService storageService) { return new DefaultEntityTagsDAO(storageService, null, -1); } }
package com.wikigraph.algorithms;

import com.google.common.base.Stopwatch;
import com.wikigraph.graph.Article;

import java.util.ArrayDeque;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Queue;
import java.util.Set;

import static java.util.concurrent.TimeUnit.MILLISECONDS;

/* Collection of static helper functions regarding the graph */
public class Algos {

  /* returns : map from (article) -> (depth, children) */
  public static GraphVertex getSubGraph(Article start, int maxDepth, int maxDegree, int maxArticles) {
    GraphVertex root = new GraphVertex(start.getTitle(), start.getId());
    Queue<NodeDepth> frontier = new ArrayDeque<>();
    Map<Article, GraphVertex> seen = new HashMap<>();
    frontier.add(NodeDepth.of(start, 0, root));
    int articlesLeft = maxArticles - 1; // Includes articles in the frontier in amount subtracted
    while (!frontier.isEmpty()) {
      NodeDepth nd = frontier.remove();
      int depth = nd.depth;
      Article article = nd.article;
      GraphVertex vertex = nd.graphVertex;
      Collection<Article> children;
      if (depth < maxDepth) {
        children = article.getOutgoingLinks(Math.min(articlesLeft, maxDegree));
        for (Article child : children) {
          GraphVertex childVertex = new GraphVertex(child.getTitle(), child.getId());
          vertex.children.add(childVertex);
          if (!seen.containsKey(child)) {
            seen.put(child, childVertex);
            frontier.add(NodeDepth.of(child, depth + 1, childVertex));
            articlesLeft--;
          }
        }
      }
    }
    return root;
  }

  public static Path shortestPath(Article start, Article end) {
    Queue<Path> frontier = new ArrayDeque<>();
    Set<Article> seen = new HashSet<>();
    frontier.add(Path.of(0, start, null));
    int depth = 0;
    int searched = 0;
    Stopwatch s = new Stopwatch().start();
    while (!frontier.isEmpty()) {
      Path path = frontier.remove();
      searched++;
      if (searched % 1000 == 0)
        System.out.printf("\r%d : Searched %,d/%,d (%,d) in %,dms", depth, searched, seen.size(),
            frontier.size(), s.elapsed(MILLISECONDS));
      if (path.depth != depth) {
        depth = path.depth;
      }
      for (Article child : path.end.getOutgoingLinks(-1)) {
        Path newPath = Path.of(path.depth + 1, child, path);
        if (child.equals(end)) {
          System.out.println();
          return newPath;
        }
        if (!seen.contains(child)) {
          frontier.add(newPath);
          seen.add(child);
        }
      }
    }
    return null;
  }

  public static class Path {
    public int depth;
    public Article end;
    public Path previous;

    public static Path of(int depth, Article end, Path previous) {
      Path path = new Path();
      path.depth = depth;
      path.previous = previous;
      path.end = end;
      return path;
    }

    public String toString() {
      if (previous == null) {
        return end.getTitle();
      }
      return previous + " -> " + end.getTitle();
    }
  }

  /* Convenience value classes for algorithms. */
  public static class NodeDepth {
    public int depth;
    public Article article;
    public GraphVertex graphVertex;

    public static NodeDepth of(Article article, int depth, GraphVertex graphVertex) {
      NodeDepth entry = new NodeDepth();
      entry.article = article;
      entry.depth = depth;
      entry.graphVertex = graphVertex;
      return entry;
    }
  }
}
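shortestPath above is a breadth-first search that stores each frontier entry as a Path node linked back to its predecessor, so the full route can be printed as soon as the target is generated. The self-contained sketch below (illustration only, Java 9+) shows the same predecessor-chain BFS on a plain String adjacency map so it runs without the project's Article/GraphVertex types; the graph contents are made up.

import java.util.ArrayDeque;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.Set;

public class BfsPathExample {
    // One BFS node: the vertex reached plus a link back to how we got there.
    static final class Path {
        final String end;
        final Path previous;
        Path(String end, Path previous) { this.end = end; this.previous = previous; }
        @Override public String toString() {
            return previous == null ? end : previous + " -> " + end;
        }
    }

    static Path shortestPath(Map<String, List<String>> graph, String start, String goal) {
        Queue<Path> frontier = new ArrayDeque<>();
        Set<String> seen = new HashSet<>();
        frontier.add(new Path(start, null));
        seen.add(start);
        while (!frontier.isEmpty()) {
            Path path = frontier.remove();
            for (String child : graph.getOrDefault(path.end, List.of())) {
                Path next = new Path(child, path);
                if (child.equals(goal)) {
                    return next;        // first time the goal is generated = fewest hops
                }
                if (seen.add(child)) {
                    frontier.add(next); // only enqueue vertices not seen before
                }
            }
        }
        return null;                    // goal unreachable from start
    }

    public static void main(String[] args) {
        // Tiny made-up link graph standing in for article outgoing links.
        Map<String, List<String>> graph = Map.of(
                "A", List.of("B", "C"),
                "B", List.of("D"),
                "C", List.of("D", "E"),
                "D", List.of("E"));
        System.out.println(shortestPath(graph, "A", "E")); // prints A -> C -> E
    }
}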
package tb.client.render.item; import java.awt.Color; import java.lang.reflect.Field; import org.lwjgl.opengl.GL11; import tb.common.item.ItemCastingBracelet; import thaumcraft.client.lib.UtilsFX; import thaumcraft.client.renderers.block.BlockRenderer; import thaumcraft.client.renderers.models.gear.ModelWand; import thaumcraft.common.Thaumcraft; import thaumcraft.common.items.wands.ItemWandCasting; import net.minecraft.client.Minecraft; import net.minecraft.client.entity.EntityClientPlayerMP; import net.minecraft.client.model.ModelRenderer; import net.minecraft.client.renderer.ItemRenderer; import net.minecraft.client.renderer.OpenGlHelper; import net.minecraft.client.renderer.RenderBlocks; import net.minecraft.client.renderer.Tessellator; import net.minecraft.client.renderer.entity.Render; import net.minecraft.client.renderer.entity.RenderManager; import net.minecraft.client.renderer.entity.RenderPlayer; import net.minecraft.client.renderer.texture.TextureMap; import net.minecraft.entity.EntityLivingBase; import net.minecraft.init.Blocks; import net.minecraft.item.ItemStack; import net.minecraft.util.IIcon; import net.minecraft.util.MathHelper; import net.minecraft.util.ResourceLocation; import net.minecraft.util.Timer; import net.minecraftforge.client.IItemRenderer; import net.minecraftforge.client.model.AdvancedModelLoader; import net.minecraftforge.client.model.IModelCustom; public class CastingBraceletRenderer implements IItemRenderer{ public static final IModelCustom bracelet = AdvancedModelLoader.loadModel(new ResourceLocation("thaumicbases","models/bracelet/bracelet.obj")); public static final RenderBlocks renderBlocks = new RenderBlocks(); public static final ModelWand wand = new ModelWand(); public static ModelRenderer getWandFociModel() { try { Class<ModelWand> wandModelClass = ModelWand.class; Field foci = wandModelClass.getDeclaredField("Focus");//<- If only that field had been declared as public... Oh well. boolean accessible = foci.isAccessible(); if(!accessible) foci.setAccessible(true); ModelRenderer model = ModelRenderer.class.cast(foci.get(wand)); if(!accessible) foci.setAccessible(false); return model; } catch(Exception e) { return null; } } @Override public boolean handleRenderType(ItemStack item, ItemRenderType type) { return true; } @Override public boolean shouldUseRenderHelper(ItemRenderType type, ItemStack item,ItemRendererHelper helper) { if (helper == IItemRenderer.ItemRendererHelper.BLOCK_3D) return false; return true; } public static Timer getMCTimer() { try { Class<Minecraft> mcClass = Minecraft.class; Field timerField = mcClass.getDeclaredFields()[16]; if(!timerField.isAccessible()) timerField.setAccessible(true); return Timer.class.cast(timerField.get(Minecraft.getMinecraft())); } catch(Exception e) { return null; } } @Override public void renderItem(ItemRenderType type, ItemStack item, Object... 
data) { EntityClientPlayerMP entityclientplayermp = Minecraft.getMinecraft().thePlayer; ItemWandCasting wand = (ItemWandCasting)item.getItem(); ItemStack focusStack = wand.getFocusItem(item); //float renderPartialTicks = getMCTimer().renderPartialTicks; if(type == ItemRenderType.EQUIPPED_FIRST_PERSON) { GL11.glPushMatrix(); Minecraft.getMinecraft().getTextureManager().bindTexture(entityclientplayermp.getLocationSkin()); GL11.glTranslated(0.5D, 1, 1); GL11.glRotated(90, 0, 0, 1); GL11.glRotated(220, 1, 0, 0); if(entityclientplayermp.isUsingItem()) { GL11.glTranslated(-0.5D, 0D, 0); GL11.glRotated(-45, 0, 0, 1); } Render render = RenderManager.instance.getEntityRenderObject(Minecraft.getMinecraft().thePlayer); RenderPlayer renderplayer = (RenderPlayer)render; float f10 = 3.0F; GL11.glScalef(f10, f10, f10); renderplayer.renderFirstPersonArm(Minecraft.getMinecraft().thePlayer); GL11.glPopMatrix(); GL11.glRotated(-45, 0, 1, 0); GL11.glTranslated(1.2D, 0.1D, -1); GL11.glScaled(0.6D, 0.6D, 0.6D); if(entityclientplayermp.isUsingItem()) { GL11.glTranslated(0D, 1D, 0); GL11.glRotated(35, 1, 0, 0); GL11.glTranslated(0D, 0D, -0.7D); GL11.glScaled(0.8D, 0.8D, 0.8D); } } if(type == ItemRenderType.INVENTORY) { GL11.glScaled(0.5D, 0.5D, 0.5D); } if(type == ItemRenderType.EQUIPPED) { GL11.glScaled(0.25D, 0.25D, 0.25D); GL11.glRotated(-45, 0, 1, 0); GL11.glTranslated(2.85D, 2.9D, 1.3D); EntityLivingBase wielder = (EntityLivingBase) data[1]; if(wielder.isInvisible()) return; } if(type == ItemRenderType.ENTITY) GL11.glScaled(0.5D, 0.5D, 0.5D); double ds = 0.2D; GL11.glScaled(ds, ds, ds); Minecraft.getMinecraft().renderEngine.bindTexture(ItemCastingBracelet.braceletTextures[Math.min(ItemCastingBracelet.braceletTextures.length-1, item.getItemDamage())]); GL11.glPushMatrix(); if(wand.getRod(item) != null && wand.getRod(item).isGlowing()) { int j = (int)(200.0F + MathHelper.sin(entityclientplayermp.ticksExisted) * 5.0F + 5.0F); int k = j % 65536; int l = j / 65536; OpenGlHelper.setLightmapTextureCoords(OpenGlHelper.lightmapTexUnit, k / 1.0F, l / 1.0F); } bracelet.renderAll(); GL11.glPopMatrix(); if(focusStack != null) { if (wand.getFocus(item).getOrnament(focusStack) != null) { GL11.glPushMatrix(); GL11.glScaled(5, 5, 5); GL11.glTranslated(0, 0.7D, 0); Tessellator tessellator = Tessellator.instance; IIcon icon = wand.getFocus(item).getOrnament(focusStack); float minU = icon.getMinU(); float minV = icon.getMinV(); float maxU = icon.getMaxU(); float maxV = icon.getMaxV(); Minecraft.getMinecraft().renderEngine.bindTexture(TextureMap.locationItemsTexture); GL11.glPushMatrix(); GL11.glTranslatef(-0.25F, -0.1F, 0.0275F); GL11.glScaled(0.5D, 0.5D, 0.5D); ItemRenderer.renderItemIn2D(tessellator, maxU, minV, minU, maxV, icon.getIconWidth(), icon.getIconHeight(), 0.1F); GL11.glPopMatrix(); GL11.glPushMatrix(); GL11.glRotatef(90.0F, 0.0F, 1.0F, 0.0F); GL11.glTranslatef(-0.25F, -0.1F, 0.0275F); GL11.glScaled(0.5D, 0.5D, 0.5D); ItemRenderer.renderItemIn2D(tessellator, maxU, minV, minU, maxV, icon.getIconWidth(), icon.getIconHeight(), 0.1F); GL11.glPopMatrix(); GL11.glPopMatrix(); } float alpha = 0.95F; if (wand.getFocus(item).getFocusDepthLayerIcon(focusStack) != null) { GL11.glPushMatrix(); GL11.glScaled(5, 5, 5); GL11.glTranslated(0, 0.87D, 0); GL11.glTranslatef(0.0F, -0.15F, 0.0F); GL11.glScaled(0.165D, 0.1765D, 0.165D); //<- using the same numbers as Azanor, since the rendering should be identical to his. 
Minecraft.getMinecraft().renderEngine.bindTexture(TextureMap.locationItemsTexture); renderBlocks.setRenderBoundsFromBlock(Blocks.glass); BlockRenderer.drawFaces(renderBlocks, null, wand.getFocus(item).getFocusDepthLayerIcon(focusStack), false); alpha = 0.6F; GL11.glPopMatrix(); } if (Thaumcraft.isHalloween) UtilsFX.bindTexture("textures/models/spec_h.png"); else UtilsFX.bindTexture("textures/models/wand.png"); GL11.glPushMatrix(); GL11.glDisable(GL11.GL_ALPHA_TEST); GL11.glEnable(GL11.GL_BLEND); GL11.glBlendFunc(GL11.GL_SRC_ALPHA, GL11.GL_ONE_MINUS_SRC_ALPHA); GL11.glScaled(5, 5, 5); GL11.glTranslated(0, 0.87D, 0); GL11.glTranslatef(0.0F, -0.0475F, 0.0F); GL11.glScaled(0.525D, 0.5525D, 0.525D); Color c = new Color(wand.getFocus(item).getFocusColor(focusStack)); GL11.glColor4f(c.getRed() / 255.0F, c.getGreen() / 255.0F, c.getBlue() / 255.0F, alpha); int j = (int)(195.0F + MathHelper.sin(entityclientplayermp.ticksExisted / 3.0F) * 10.0F + 10.0F); int k = j % 65536; int l = j / 65536; OpenGlHelper.setLightmapTextureCoords(OpenGlHelper.lightmapTexUnit, k / 1.0F, l / 1.0F); getWandFociModel().render(0.0625F); GL11.glDisable(GL11.GL_BLEND); GL11.glEnable(GL11.GL_ALPHA_TEST); GL11.glPopMatrix(); } } }
package nu.validator.servlet; import java.io.IOException; import java.io.OutputStreamWriter; import java.io.StringReader; import java.io.Writer; import java.net.MalformedURLException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import nu.validator.gnu.xml.aelfred2.SAXDriver; import nu.validator.htmlparser.common.Heuristics; import nu.validator.htmlparser.common.XmlViolationPolicy; import nu.validator.io.BoundedInputStream; import nu.validator.io.StreamBoundException; import nu.validator.xml.ContentTypeParser; import nu.validator.xml.NullEntityResolver; import nu.validator.xml.PrudentHttpEntityResolver; import nu.validator.xml.TypedInputSource; import org.xml.sax.SAXException; import org.xml.sax.XMLReader; import com.hp.hpl.jena.iri.IRIException; import com.hp.hpl.jena.iri.IRIFactory; public class ParseTreePrinter { private static final String FORM_HTML = "<!DOCTYPE html><title>Parse Tree Dump</title><form><p><input type='url' name='doc' id='doc' pattern='(?:https?: private static final long SIZE_LIMIT = Integer.parseInt(System.getProperty( "nu.validator.servlet.max-file-size", "2097152")); private final HttpServletRequest request; private final HttpServletResponse response; /** * @param request * @param response */ public ParseTreePrinter(final HttpServletRequest request, final HttpServletResponse response) { this.request = request; this.response = response; } private String scrubUrl(String urlStr) { if (urlStr == null) { return null; } try { return IRIFactory.iriImplementation().construct(urlStr).toASCIIString(); } catch (IRIException e) { return null; } catch (MalformedURLException e) { return null; } } public void service() throws IOException { request.setCharacterEncoding("utf-8"); String content = null; String document = scrubUrl(request.getParameter("doc")); document = ("".equals(document)) ? null : document; Writer writer = new OutputStreamWriter(response.getOutputStream(), "UTF-8"); if (document == null && methodIsGet() && (content = request.getParameter("content")) == null) { response.setContentType("text/html; charset=utf-8"); writer.write(FORM_HTML); writer.flush(); writer.close(); return; } else { response.setContentType("text/plain; charset=utf-8"); try { PrudentHttpEntityResolver entityResolver = new PrudentHttpEntityResolver( 2048 * 1024, false, null); entityResolver.setAllowGenericXml(false); entityResolver.setAcceptAllKnownXmlTypes(false); entityResolver.setAllowHtml(true); entityResolver.setAllowXhtml(true); TypedInputSource documentInput; if (methodIsGet()) { if (content == null) { documentInput = (TypedInputSource) entityResolver.resolveEntity( null, document); } else { documentInput = new TypedInputSource(new StringReader(content)); if ("xml".equals(request.getParameter("parser"))) { documentInput.setType("application/xhtml+xml"); } else { documentInput.setType("text/html"); } } } else { // POST String postContentType = request.getContentType(); if (postContentType == null) { response.sendError(HttpServletResponse.SC_BAD_REQUEST, "Content-Type missing"); return; } else if (postContentType.trim().toLowerCase().startsWith( "application/x-www-form-urlencoded")) { response.sendError( HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE, "application/x-www-form-urlencoded not supported. 
Please use multipart/form-data."); return; } long len = request.getContentLength(); if (len > SIZE_LIMIT) { throw new StreamBoundException("Resource size exceeds limit."); } ContentTypeParser contentTypeParser = new ContentTypeParser(null, false); contentTypeParser.setAllowGenericXml(false); contentTypeParser.setAcceptAllKnownXmlTypes(false); contentTypeParser.setAllowHtml(true); contentTypeParser.setAllowXhtml(true); documentInput = contentTypeParser.buildTypedInputSource(document, null, postContentType); documentInput.setByteStream(len < 0 ? new BoundedInputStream( request.getInputStream(), SIZE_LIMIT, document) : request.getInputStream()); documentInput.setSystemId(request.getHeader("Content-Location")); } String type = documentInput.getType(); XMLReader parser; if ("text/html".equals(type)) { writer.write("HTML parser\n\n#document\n"); parser = new nu.validator.htmlparser.sax.HtmlParser(); parser.setProperty("http://validator.nu/properties/heuristics", Heuristics.ALL); parser.setProperty("http://validator.nu/properties/xml-policy", XmlViolationPolicy.ALLOW); } else if ("application/xhtml+xml".equals(type)) { writer.write("XML parser\n\n#document\n"); parser = new SAXDriver(); parser.setFeature( "http://xml.org/sax/features/external-general-entities", false); parser.setFeature( "http://xml.org/sax/features/external-parameter-entities", false); parser.setEntityResolver(new NullEntityResolver()); } else { writer.write("Unsupported content type.\n"); writer.flush(); writer.close(); return; } TreeDumpContentHandler treeDumpContentHandler = new TreeDumpContentHandler(writer, false); ListErrorHandler listErrorHandler = new ListErrorHandler(); parser.setContentHandler(treeDumpContentHandler); parser.setProperty("http://xml.org/sax/properties/lexical-handler", treeDumpContentHandler); parser.setErrorHandler(listErrorHandler); parser.parse(documentInput); writer.write("#errors\n"); for (String err : listErrorHandler.getErrors()) { writer.write(err); writer.write('\n'); } } catch (SAXException e) { writer.write("SAXException:\n"); writer.write(e.getMessage()); writer.write("\n"); } catch (IOException e) { writer.write("IOException:\n"); writer.write(e.getMessage()); writer.write("\n"); } finally { writer.flush(); writer.close(); } } } private boolean methodIsGet() { return "GET".equals(request.getMethod()) || "HEAD".equals(request.getMethod()); } }
package opendap.threddsHandler; import net.sf.saxon.s9api.SaxonApiException; import opendap.PathBuilder; import opendap.bes.BESError; import opendap.bes.BadConfigurationException; import opendap.bes.dap2Responders.BesApi; import opendap.coreServlet.RequestCache; import opendap.coreServlet.Scrub; import opendap.namespaces.THREDDS; import opendap.ncml.NcmlManager; import opendap.ppt.PPTException; import org.jdom.Document; import org.jdom.Element; import org.jdom.JDOMException; import org.jdom.filter.ElementFilter; import org.jdom.output.Format; import org.jdom.output.XMLOutputter; import org.slf4j.Logger; import java.io.IOException; import java.util.Iterator; import java.util.Vector; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.locks.ReentrantLock; public class CatalogManager { private static Logger _log; private static String _contentPath; private static String _catalogIngestTransformFilename; private static String _besCatalogToThreddsCatalogTransformFilename; private static BesApi _besApi; private static ConcurrentHashMap<String, DatasetScan> _datasetScans = new ConcurrentHashMap<>(); private static ConcurrentHashMap<String, Catalog> _catalogs = new ConcurrentHashMap<>(); private static ConcurrentHashMap<String, String[]> _children = new ConcurrentHashMap<>(); private static ReentrantLock _catalogLock = new ReentrantLock(); private static boolean _isInitialized = false; public static final String DEFAULT_CATALOG_NAME = "catalog.xml"; public static void init(String contentPath, String catalogIngestTransformFilename, String besCatalogToThreddsCatalogTransformFilename, BesApi besApi) { _log = org.slf4j.LoggerFactory.getLogger(CatalogManager.class); _log.debug("Configuring..."); if (_isInitialized) { _log.error(" Configuration has already been done. isInitialized(): " + _isInitialized); return; } _contentPath = contentPath; _catalogIngestTransformFilename = catalogIngestTransformFilename; _besCatalogToThreddsCatalogTransformFilename = besCatalogToThreddsCatalogTransformFilename; _besApi = besApi; _isInitialized = true; } public static void addCatalog(String pathPrefix, String urlPrefix, String fname, boolean cacheCatalogFileContent) throws Exception { LocalFileCatalog catalog = null; try { catalog = new LocalFileCatalog(pathPrefix, urlPrefix, fname, _catalogIngestTransformFilename, cacheCatalogFileContent); } catch (Exception e){ _log.error("Failed to build catalog from file: "+fname); } if(catalog !=null){ try{ _catalogLock.lock(); _log.debug("addCatalog(): Catalog locked."); addCatalog(catalog, cacheCatalogFileContent); } finally { _catalogLock.unlock(); _log.debug("addCatalog(): Catalog unlocked."); } } } private static void addCatalog(Catalog catalog, boolean cacheCatalogFileContent) throws Exception { String catalogKey = catalog.getCatalogKey(); // If this catalog has already been added, then don't mess with it. if (_catalogs.containsKey(catalogKey)){ _log.warn("The catalog '"+catalogKey+"' is already in the collection. It must be removed (purgeCatalog()) " + "before it can be added again."); return; } if (_children.containsKey(catalogKey)) { String msg = "addCatalog() Invalid State! 
Although the list of catalogs does not contain a " + "reference to the catalog '" + catalogKey + "' the list of children does!!!"; _log.error(msg); throw new Exception(msg); } Document catDoc = catalog.getRawCatalogDocument(); if(catDoc==null){ String msg = "FAILED to get catalog Document object for the catalog associated with file "+catalog.getFileName()+"'"; _log.error("addCatalog() - {}", msg); throw new BadConfigurationException(msg); } _log.debug("addCatalog() - Loaded Catalog document: \n{}",new XMLOutputter(Format.getPrettyFormat()).outputString(catDoc)); Element catRef; String href, catFname, thisUrlPrefix, thisPathPrefix; // Get all of the catalogRef elements in the catalog document. Iterator i = catDoc.getRootElement().getDescendants(new ElementFilter(THREDDS.CATALOG_REF, opendap.namespaces.THREDDS.NS)); Vector<String> catalogChildren = new Vector<String>(); while (i.hasNext()) { // For each one of them... catRef = (Element) i.next(); // get the URL of the catalog. href = catRef.getAttributeValue(XLINK.HREF, XLINK.NS); if (href.startsWith("http: _log.info("Found catalogRef that references an external " + "catalog: '"+href+"' Target catalog not processed. The catalogRef element " + "will remain in the catalog and will not be cached."); // @todo Add remote catalog caching support? } else if (href.startsWith("/")) { _log.info("Found thredds:catalogRef whose xlink:href attribute " + "begins with a \"/\" character: '" + href +"' "+ "This may mean that the catalog is pointing " + "to another catalog service. Also, it is not an href " + "expressed in terms of the relative content path. " + "Target catalog not processed as a file. " + "The catalogRef element " + "will remain in the catalog. This will allow it to " + "appear correctly in thredds catalog output. But it's contents " + "will not be cached."); // @todo Add support for catalog caching within the local server? Mabye not. } else { // Since it's not a remote catalog, or an absolute path (starting with '/') then // we will conclude that it is a static THREDDS catalog file. Let's slurp it up into // a LocalFileCatalog object. thisUrlPrefix = catalog.getUrlPrefix() + href.substring(0, href.length() - Util.basename(href).length()); thisPathPrefix = catalog.getPathPrefix() + href; catFname = Util.basename(thisPathPrefix); thisPathPrefix = thisPathPrefix.substring(0, thisPathPrefix.lastIndexOf(catFname)); LocalFileCatalog thisCatalog = null; try { thisCatalog = new LocalFileCatalog(thisPathPrefix, thisUrlPrefix, catFname, _catalogIngestTransformFilename, cacheCatalogFileContent); addCatalog(thisCatalog, cacheCatalogFileContent); String thisCatalogIndex = thisCatalog.getCatalogKey(); catalogChildren.add(thisCatalogIndex); } catch( Exception e){ _log.error("addCatalog() - Failed to build catalog. Dropping. File: "+thisPathPrefix + catFname+ " Msg: "+e.getMessage()); } } } if (!catalogChildren.isEmpty()) { String[] s = new String[catalogChildren.size()]; _children.put(catalogKey, catalogChildren.toArray(s)); } Document rawCatalog = catalog.getRawCatalogDocument(); // Get all of the datasetScan elements in the catalog document. i = rawCatalog.getRootElement().getDescendants(new ElementFilter(THREDDS.DATASET_SCAN, THREDDS.NS)); while (i.hasNext()) { // For each one of them... 
Element dssElem = (Element) i.next(); addDatasetScan(catalog,dssElem); } _log.debug("Ingesting inherited metadata (if any) for catalog '"+catalog.getName()+"'"); InheritedMetadataManager.ingestInheritedMetadata(catalog); _log.debug("Ingesting NcML datasets (if any) for catalog '"+catalog.getName()+"'"); NcmlManager.ingestNcml(catalog); _catalogs.put(catalogKey, catalog); } private static void addDatasetScan(Catalog catalog,Element dssElem) throws BadConfigurationException{ DatasetScan ds = new DatasetScan(catalog, dssElem, _besCatalogToThreddsCatalogTransformFilename, _besApi); PathBuilder pb = new PathBuilder(); pb.append(catalog.getPathPrefix()).append(ds.getPath()); _datasetScans.put(pb.toString(),ds); } public static Catalog getCatalog(String catalogKey) throws JDOMException, BadConfigurationException, PPTException, IOException, SaxonApiException, BESError { Catalog cat = getCatalogAndUpdateIfRequired(catalogKey); if(cat == null){ Catalog datasetScanCatalog = (Catalog) RequestCache.get(catalogKey); if(datasetScanCatalog == null ){ DatasetScan datasetScan = null; for(DatasetScan ds : _datasetScans.values()) { if(ds.matches(catalogKey)){ _log.info("Found DatasetScan matching catalogKey '{}' datasetScan: \n'{}'",catalogKey,ds); if(datasetScan==null){ datasetScan = ds; } } } if(datasetScan==null) return null; datasetScanCatalog = datasetScan.getCatalog(catalogKey); if(datasetScanCatalog==null){ String msg = "FAILED to retrieve the catalog identified by the key '{}' from the DatasetScan instance "+datasetScan.getName(); _log.error("getCatalog() - {}",msg); throw new BadConfigurationException(msg); } RequestCache.put(catalogKey,datasetScanCatalog); } return datasetScanCatalog; } return cat; } public static long getLastModified(String catalogKey) throws JDOMException, BadConfigurationException, PPTException, IOException { Catalog cat; try { cat = getCatalog(catalogKey); if (cat != null) return cat.getLastModified(); } catch(Exception e){ _log.info("No such catalog: {}",catalogKey); } return -1; } /** * * @param catalogKey Is the catalogKeyIntoThe * @return */ private static Catalog getCatalogAndUpdateIfRequired(String catalogKey) { if (catalogKey == null) return null; try { _catalogLock.lock(); _log.debug("getCatalogAndUpdateIfRequired(): Catalog locked."); Catalog c = _catalogs.get(catalogKey); if (c == null) return null; if (c.needsRefresh()) { _log.debug("getCatalogAndUpdateIfRequired(): Catalog '" + catalogKey + "' needs to be updated."); LocalFileCatalog newCat; try { newCat = new LocalFileCatalog(c.getPathPrefix(), c.getUrlPrefix(), c.getFileName(), c.getIngestTransformFilename(), c.usesMemoryCache()); //Thread.sleep(10000); _log.debug("getCatalogAndUpdateIfRequired(): Purging catalog '" + catalogKey + "' and it's children from catalog collection."); purgeCatalog(catalogKey); _log.debug("getCatalogAndUpdateIfRequired(): Adding new catalog for catalogKey " + newCat.getCatalogKey() + " to _catalogs collection."); addCatalog(newCat, newCat.usesMemoryCache()); return newCat; } catch (Exception e) { _log.error("getCatalogAndUpdateIfRequired(): Could not update Catalog: " + c.getName()+ "Msg: "+e.getMessage()); return null; } } else { _log.debug("getCatalogAndUpdateIfRequired(): Catalog '" + catalogKey + "' does NOT need updated."); return c; } } finally { _catalogLock.unlock(); _log.debug("getCatalogAndUpdateIfRequired(): Catalog unlocked."); } } /** * Purges the THREDDS catalog connected graph from the system, starting at the catalog associated with the * passed catalogKey. 
* * @param catalogKey */ private static void purgeCatalog(String catalogKey) { Catalog catalog; String childCatalogKeys[]; if (catalogKey != null) { _log.debug("purgeCatalog(): Removing catalog: " + catalogKey); catalog = _catalogs.remove(catalogKey); if (catalog == null) { _log.warn("purgeCatalog(): Catalog '" + Scrub.urlContent(catalogKey) + "' not in catalog collection!!"); } childCatalogKeys = _children.get(catalogKey); if (childCatalogKeys != null) { _log.debug("purgeCatalog(): Purging the childCatalogKeys of catalog: " + catalogKey); for (String childCatalogKey : childCatalogKeys) { purgeCatalog(childCatalogKey); } _children.remove(catalogKey); } else { _log.info("purgeCatalog(): Catalog '" + Scrub.urlContent(catalogKey) + "' has no childCatalogKeys."); } _log.debug("purgeCatalog(): Purging inherited metadata (if any) for catalogKey: " +catalogKey); InheritedMetadataManager.purgeInheritedMetadata(catalogKey); _log.debug("purgeCatalog(): Purging NcML data sets (if any) for catalogKey: " +catalogKey); NcmlManager.purgeNcmlDatasets(catalog); // catalog.destroy(); _log.debug("purgeCatalog(): Purged catalog: " + catalogKey); } } public static void destroy() { for (Catalog c : _catalogs.values()) { c.destroy(); } _catalogs.clear(); _children.clear(); _log.debug("Destroyed"); } public String toString() { StringBuilder s = new StringBuilder("THREDDS Catalog Manager:\n"); s.append(" ContentPath: " + _contentPath + "\n"); for (Catalog c : _catalogs.values()) { s.append(" Catalog Name: ").append(c.getName()).append("\n"); s.append(" file: ").append(c.getFileName()).append("\n"); s.append(" pathPrefix: ").append(c.getPathPrefix()).append("\n"); s.append(" urlPrefix: ").append(c.getUrlPrefix()).append("\n"); } return s.toString(); } //private static Vector<Catalog> rootCatalogs = new Vector<Catalog>(); //private static Document config; /* public static void addRootCatalog(String pathPrefix,String urlPrefix, String fname, boolean cacheCatalogFileContent) throws Exception { Catalog catalog = new Catalog(pathPrefix,urlPrefix,fname,cacheCatalogFileContent); ReentrantReadWriteLock.WriteLock lock = _catalogsLock.writeLock(); try { lock.lock(); rootCatalogs.add(catalog); addCatalog(catalog,cacheCatalogFileContent); } finally { lock.unlock(); } } */ /* public static Document getTopLevelCatalogDocument() { Element catRef, catalogRoot; String href, title, name; Document catalog = new Document(new Element(THREDDS.CATALOG)); catalogRoot = catalog.getRootElement(); catalogRoot.setNamespace(Namespace.getNamespace(THREDDS.BES_NAMESPACE_STRING)); catalogRoot.addNamespaceDeclaration(XLINK.NS); catalogRoot.setAttribute(THREDDS.NAME, "HyraxThreddsHandler"); // We only need a read lock here because we are NOT going to reread // our configuration. So - All of these top level _catalogs can't change. // Their content can change, but we can't add or remove from the list. // If one of them has a change, then that will get loaded when the // changed catalog gets accessed. 
ReentrantReadWriteLock.ReadLock lock = _catalogsLock.readLock(); try { lock.lock(); for (Catalog cat : rootCatalogs) { catRef = new Element(THREDDS.CATALOG_REF,THREDDS.NS); href = cat.getUrlPrefix() + cat.getFileName(); catRef.setAttribute(XLINK.HREF,href,XLINK.NS); title = cat.getName(); catRef.setAttribute(XLINK.TITLE,title,XLINK.NS); name = cat.getName(); catRef.setAttribute(THREDDS.NAME,name); catalogRoot.addContent(catRef); } } finally { lock.unlock(); } return catalog; } */ /* public static XdmNode getTopLevelCatalogAsXdmNode(Processor proc) throws IOException, SaxonApiException { XdmNode source; InputStream is; Document tlcat = getTopLevelCatalogDocument(); XMLOutputter xmlo = new XMLOutputter(Format.getPrettyFormat()); byte[] buffer = xmlo.outputString(tlcat).getBytes(); is = new ByteArrayInputStream(buffer); _log.debug("getCatalogDocument(): Reading catalog from memory cache."); source = proc.newDocumentBuilder().build(new StreamSource(is)); return source; } */ }
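getCatalogAndUpdateIfRequired above serializes refreshes behind a single ReentrantLock: look up the cached catalog and, if it is stale, purge and rebuild it while still holding the lock. The sketch below is a generic, self-contained illustration of that check-then-rebuild-under-lock pattern; the Entry wrapper, the time-based staleness rule, and the build-on-miss behaviour are invented for the example and are not how CatalogManager itself behaves.

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.locks.ReentrantLock;
import java.util.function.Function;

public class RefreshingCacheExample<K, V> {
    // Invented value wrapper: the payload plus the time it was built.
    private static final class Entry<V> {
        final V value;
        final long builtAt = System.currentTimeMillis();
        Entry(V value) { this.value = value; }
    }

    private final ConcurrentHashMap<K, Entry<V>> cache = new ConcurrentHashMap<>();
    private final ReentrantLock lock = new ReentrantLock();
    private final Function<K, V> loader;
    private final long maxAgeMillis;

    public RefreshingCacheExample(Function<K, V> loader, long maxAgeMillis) {
        this.loader = loader;
        this.maxAgeMillis = maxAgeMillis;
    }

    public V get(K key) {
        lock.lock();                       // one refresh at a time, like the catalog lock
        try {
            Entry<V> entry = cache.get(key);
            if (entry == null || System.currentTimeMillis() - entry.builtAt > maxAgeMillis) {
                // Stale or missing: rebuild while still holding the lock,
                // analogous to purgeCatalog() followed by addCatalog() above.
                entry = new Entry<>(loader.apply(key));
                cache.put(key, entry);
            }
            return entry.value;
        } finally {
            lock.unlock();
        }
    }
}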
package jolie.net; import java.io.IOException; import java.io.InputStream; import java.net.URI; import java.nio.channels.SelectableChannel; import jolie.Interpreter; import jolie.net.protocols.CommProtocol; import jolie.runtime.TimeoutHandler; /** * This class implements the support for a selectable channel. * A channel implementation based on this class must provide * methods for accessing its receiving <code>InputStream</code> and * <code>SelectableChannel</code>. * @author Fabrizio Montesi */ public abstract class SelectableStreamingCommChannel extends StreamingCommChannel { private TimeoutHandler selectionTimeoutHandler = null; public SelectableStreamingCommChannel( URI location, CommProtocol protocol ) { super( location, protocol ); } protected void setSelectionTimeoutHandler( TimeoutHandler selectionTimeoutHandler ) { this.selectionTimeoutHandler = selectionTimeoutHandler; } protected TimeoutHandler selectionTimeoutHandler() { return selectionTimeoutHandler; } /** * Returns the receiving <code>InputStream</code> of this channel. * @return the receiving <code>InputStream</code> of this channel */ abstract public InputStream inputStream(); /** * Returns the receiving <code>SelectableChannel</code> of this channel. * @return the receiving <code>SelectableChannel</code> of this channel */ abstract public SelectableChannel selectableChannel(); @Override public final void send( CommMessage message ) throws IOException { if ( lock.isHeldByCurrentThread() ) { _send( message ); } else { lock.lock(); try { _send( message ); } finally { lock.unlock(); } } } private void _send( CommMessage message ) throws IOException { final CommCore commCore = Interpreter.getInstance().commCore(); if ( commCore.isSelecting( this ) ) { commCore.unregisterForSelection( this ); sendImpl( message ); commCore.registerForSelection( this ); } else { sendImpl( message ); } } private void _disposeForInputImpl() throws IOException { Interpreter.getInstance().commCore().registerForSelection( this ); } @Override protected void disposeForInputImpl() throws IOException { if ( lock.isHeldByCurrentThread() ) { _disposeForInputImpl(); } else { lock.lock(); try { _disposeForInputImpl(); } finally { lock.unlock(); } } } @Override protected void releaseImpl() throws IOException { if ( lock.isHeldByCurrentThread() ) { final CommCore commCore = Interpreter.getInstance().commCore(); if ( commCore.isSelecting( this ) == false ) { super.releaseImpl(); } } else { lock.lock(); try { final CommCore commCore = Interpreter.getInstance().commCore(); if ( commCore.isSelecting( this ) == false ) { super.releaseImpl(); } } finally { lock.unlock(); } } } }
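send(), disposeForInputImpl() and releaseImpl() above all share one structure: if the current thread already holds the channel lock, run the operation directly; otherwise acquire the lock, run it, and release in a finally block. ReentrantLock would tolerate re-locking anyway, so the check simply avoids a redundant acquire/release pair. The minimal sketch below illustrates that structure with a plain ReentrantLock and a Runnable; the class and method names are invented for the example and are not jolie API.

import java.util.concurrent.locks.ReentrantLock;

public class ReentrantGuardExample {
    private final ReentrantLock lock = new ReentrantLock();

    // Run the action under the lock, but skip the extra acquire when the
    // caller already holds it (mirrors the send()/disposeForInputImpl() shape).
    public void runGuarded(Runnable action) {
        if (lock.isHeldByCurrentThread()) {
            action.run();
        } else {
            lock.lock();
            try {
                action.run();
            } finally {
                lock.unlock();
            }
        }
    }

    public static void main(String[] args) {
        ReentrantGuardExample guard = new ReentrantGuardExample();
        // Nested call: the inner runGuarded sees the lock already held and runs directly.
        guard.runGuarded(() -> guard.runGuarded(() -> System.out.println("sent")));
    }
}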
package <%=packageName%>.service; <% if (databaseType == 'sql' || databaseType == 'mongodb') { %> import <%=packageName%>.domain.Authority;<% } %> import <%=packageName%>.domain.User;<% if (databaseType == 'sql' || databaseType == 'mongodb') { %> import <%=packageName%>.repository.AuthorityRepository;<% if (authenticationType == 'session') { %> import <%=packageName%>.repository.PersistentTokenRepository;<% } %><% } %> import <%=packageName%>.repository.UserRepository;<% if (searchEngine == 'elasticsearch') { %> import <%=packageName%>.repository.search.UserSearchRepository;<% } %> import <%=packageName%>.security.AuthoritiesConstants; import <%=packageName%>.security.SecurityUtils; import <%=packageName%>.service.util.RandomUtil; import <%=packageName%>.web.rest.vm.ManagedUserVM; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.scheduling.annotation.Scheduled; import org.springframework.security.crypto.password.PasswordEncoder; <%_ if (databaseType == 'sql' && authenticationType == 'oauth2') { _%> import org.springframework.security.oauth2.provider.token.store.JdbcTokenStore; <%_ } _%> import org.springframework.stereotype.Service;<% if (databaseType == 'sql') { %> import org.springframework.transaction.annotation.Transactional;<% } %> <%_ if ((databaseType == 'sql' || databaseType == 'mongodb') && authenticationType == 'session') { _%> import java.time.LocalDate; <%_ } _%> import java.time.ZonedDateTime; import javax.inject.Inject; import java.util.*; /** * Service class for managing users. */ @Service<% if (databaseType == 'sql') { %> @Transactional<% } %> public class UserService { private final Logger log = LoggerFactory.getLogger(UserService.class); <%_ if (enableSocialSignIn) { _%> @Inject private SocialService socialService; <%_ } _%> @Inject private PasswordEncoder passwordEncoder; <%_ if (databaseType == 'sql' && authenticationType == 'oauth2') { _%> @Inject public JdbcTokenStore jdbcTokenStore; <%_ } _%> @Inject private UserRepository userRepository;<% if (searchEngine == 'elasticsearch') { %> @Inject private UserSearchRepository userSearchRepository;<% } %><% if (databaseType == 'sql' || databaseType == 'mongodb') { %><% if (authenticationType == 'session') { %> @Inject private PersistentTokenRepository persistentTokenRepository;<% } %><% } %> <% if (databaseType == 'sql' || databaseType == 'mongodb') { %> @Inject private AuthorityRepository authorityRepository;<% } %> public Optional<User> activateRegistration(String key) { log.debug("Activating user for activation key {}", key); return userRepository.findOneByActivationKey(key) .map(user -> { // activate given user for the registration key. 
user.setActivated(true); user.setActivationKey(null); <%_ if (databaseType == 'mongodb' || databaseType == 'cassandra') { _%> userRepository.save(user); <%_ } _%> <%_ if (searchEngine == 'elasticsearch') { _%> userSearchRepository.save(user); <%_ } _%> log.debug("Activated user: {}", user); return user; }); } public Optional<User> completePasswordReset(String newPassword, String key) { log.debug("Reset user password for reset key {}", key); return userRepository.findOneByResetKey(key) .filter(user -> { ZonedDateTime oneDayAgo = ZonedDateTime.now().minusHours(24); return user.getResetDate()<% if (databaseType == 'sql' || databaseType == 'mongodb') { %>.isAfter(oneDayAgo);<% } %><% if (databaseType == 'cassandra') { %>.after(Date.from(oneDayAgo.toInstant()));<% } %> }) .map(user -> { user.setPassword(passwordEncoder.encode(newPassword)); user.setResetKey(null); user.setResetDate(null); <%_ if (databaseType == 'mongodb' || databaseType == 'cassandra') { _%> userRepository.save(user); <%_ } _%> return user; }); } public Optional<User> requestPasswordReset(String mail) { return userRepository.findOneByEmail(mail) .filter(User::getActivated) .map(user -> { user.setResetKey(RandomUtil.generateResetKey()); user.<% if (databaseType == 'sql' || databaseType == 'mongodb') { %>setResetDate(ZonedDateTime.now());<% } %><% if (databaseType == 'cassandra') { %>setResetDate(new Date());<% } %> <%_ if (databaseType == 'mongodb' || databaseType == 'cassandra') { _%> userRepository.save(user); <%_ } _%> return user; }); } public User createUser(String login, String password, String firstName, String lastName, String email, String langKey) { User newUser = new User();<% if (databaseType == 'sql' || databaseType == 'mongodb') { %> Authority authority = authorityRepository.findOne(AuthoritiesConstants.USER); Set<Authority> authorities = new HashSet<>();<% } %><% if (databaseType == 'cassandra') { %> newUser.setId(UUID.randomUUID().toString()); Set<String> authorities = new HashSet<>();<% } %> String encryptedPassword = passwordEncoder.encode(password); newUser.setLogin(login); // new user gets initially a generated password newUser.setPassword(encryptedPassword); newUser.setFirstName(firstName); newUser.setLastName(lastName); newUser.setEmail(email); newUser.setLangKey(langKey); // new user is not active newUser.setActivated(false); // new user gets registration key newUser.setActivationKey(RandomUtil.generateActivationKey());<% if (databaseType == 'sql' || databaseType == 'mongodb') { %> authorities.add(authority);<% } %><% if (databaseType == 'cassandra') { %> authorities.add(AuthoritiesConstants.USER);<% } %> newUser.setAuthorities(authorities); userRepository.save(newUser);<% if (searchEngine == 'elasticsearch') { %> userSearchRepository.save(newUser);<% } %> log.debug("Created Information for User: {}", newUser); return newUser; } public User createUser(ManagedUserVM managedUserVM) { User user = new User();<% if (databaseType == 'cassandra') { %> user.setId(UUID.randomUUID().toString());<% } %> user.setLogin(managedUserVM.getLogin()); user.setFirstName(managedUserVM.getFirstName()); user.setLastName(managedUserVM.getLastName()); user.setEmail(managedUserVM.getEmail()); if (managedUserVM.getLangKey() == null) { user.setLangKey("<%= nativeLanguage %>"); // default language } else { user.setLangKey(managedUserVM.getLangKey()); }<% if (databaseType == 'sql' || databaseType == 'mongodb') { %> if (managedUserVM.getAuthorities() != null) { Set<Authority> authorities = new HashSet<>(); 
managedUserVM.getAuthorities().stream().forEach( authority -> authorities.add(authorityRepository.findOne(authority)) ); user.setAuthorities(authorities); }<% } %><% if (databaseType == 'cassandra') { %> user.setAuthorities(managedUserVM.getAuthorities());<% } %> String encryptedPassword = passwordEncoder.encode(RandomUtil.generatePassword()); user.setPassword(encryptedPassword); user.setResetKey(RandomUtil.generateResetKey()); user.<% if (databaseType == 'sql' || databaseType == 'mongodb') { %>setResetDate(ZonedDateTime.now());<% } %><% if (databaseType == 'cassandra') { %>setResetDate(new Date());<% } %> user.setActivated(true); userRepository.save(user);<% if (searchEngine == 'elasticsearch') { %> userSearchRepository.save(user);<% } %> log.debug("Created Information for User: {}", user); return user; } public void updateUser(String firstName, String lastName, String email, String langKey) { userRepository.findOneByLogin(SecurityUtils.getCurrentUserLogin()).ifPresent(user -> { user.setFirstName(firstName); user.setLastName(lastName); user.setEmail(email); user.setLangKey(langKey); <%_ if (databaseType == 'mongodb' || databaseType == 'cassandra') { _%> userRepository.save(user); <%_ } _%> <%_ if (searchEngine == 'elasticsearch') { _%> userSearchRepository.save(user); <%_ } _%> log.debug("Changed Information for User: {}", user); }); } public void updateUser(<% if (databaseType == 'mongodb' || databaseType == 'cassandra') { %>String<% } else { %>Long<% } %> id, String login, String firstName, String lastName, String email, boolean activated, String langKey, Set<String> authorities) { Optional.of(userRepository .findOne(id)) .ifPresent(user -> { user.setLogin(login); user.setFirstName(firstName); user.setLastName(lastName); user.setEmail(email); user.setActivated(activated); user.setLangKey(langKey); <%_ if (databaseType == 'sql' || databaseType == 'mongodb') { _%> Set<Authority> managedAuthorities = user.getAuthorities(); managedAuthorities.clear(); authorities.stream().forEach( authority -> managedAuthorities.add(authorityRepository.findOne(authority)) ); <%_ } else { // Cassandra _%> user.setAuthorities(authorities); <%_ } _%> <%_ if (databaseType == 'mongodb' || databaseType == 'cassandra') { _%> userRepository.save(user); <%_ } _%> log.debug("Changed Information for User: {}", user); }); } public void deleteUser(String login) { <%_ if (databaseType == 'sql' && authenticationType == 'oauth2') { _%> jdbcTokenStore.findTokensByUserName(login).stream().forEach(token -> jdbcTokenStore.removeAccessToken(token)); <%_ } _%> userRepository.findOneByLogin(login).ifPresent(user -> { <%_ if (enableSocialSignIn) { _%> socialService.deleteUserSocialConnection(user.getLogin()); <%_ } _%> userRepository.delete(user); <%_ if (searchEngine == 'elasticsearch') { _%> userSearchRepository.delete(user); <%_ } _%> log.debug("Deleted User: {}", user); }); } public void changePassword(String password) { userRepository.findOneByLogin(SecurityUtils.getCurrentUserLogin()).ifPresent(user -> { String encryptedPassword = passwordEncoder.encode(password); user.setPassword(encryptedPassword); <%_ if (databaseType == 'mongodb' || databaseType == 'cassandra') { _%> userRepository.save(user); <%_ } _%> log.debug("Changed password for User: {}", user); }); } <%_ if (databaseType == 'sql') { _%> @Transactional(readOnly = true) <%_ } _%> public Optional<User> getUserWithAuthoritiesByLogin(String login) { <%_ if (databaseType == 'sql') { _%> return userRepository.findOneByLogin(login).map(user -> { 
user.getAuthorities().size(); return user; }); <%_ } else { // MongoDB and Cassandra _%> return userRepository.findOneByLogin(login); <%_ } _%> } <%_ if (databaseType == 'sql') { _%> @Transactional(readOnly = true) <%_ } _%> public User getUserWithAuthorities(<%= pkType %> id) { User user = userRepository.findOne(id); <%_ if (databaseType == 'sql') { _%> user.getAuthorities().size(); // eagerly load the association <%_ } _%> return user; } <%_ if (databaseType == 'sql') { _%> @Transactional(readOnly = true) <%_ } _%> public User getUserWithAuthorities() { Optional<User> optionalUser = userRepository.findOneByLogin(SecurityUtils.getCurrentUserLogin()); User user = null; if (optionalUser.isPresent()) { user = optionalUser.get(); <%_ if (databaseType == 'sql') { _%> user.getAuthorities().size(); // eagerly load the association <%_ } _%> } return user; } <%_ if ((databaseType == 'sql' || databaseType == 'mongodb') && authenticationType == 'session') { _%> /** * Persistent Token are used for providing automatic authentication, they should be automatically deleted after * 30 days. * <p> * This is scheduled to get fired everyday, at midnight. * </p> */ @Scheduled(cron = "0 0 0 * * ?") public void removeOldPersistentTokens() { LocalDate now = LocalDate.now(); persistentTokenRepository.findByTokenDateBefore(now.minusMonths(1)).stream().forEach(token -> { log.debug("Deleting token {}", token.getSeries());<% if (databaseType == 'sql') { %> User user = token.getUser(); user.getPersistentTokens().remove(token);<% } %> persistentTokenRepository.delete(token); }); }<% } %><% if (databaseType == 'sql' || databaseType == 'mongodb') { %> /** * Not activated users should be automatically deleted after 3 days. * <p> * This is scheduled to get fired everyday, at 01:00 (am). * </p> */ @Scheduled(cron = "0 0 1 * * ?") public void removeNotActivatedUsers() { ZonedDateTime now = ZonedDateTime.now(); List<User> users = userRepository.findAllByActivatedIsFalseAndCreatedDateBefore(now.minusDays(3)); for (User user : users) { log.debug("Deleting not activated user {}", user.getLogin()); userRepository.delete(user);<% if (searchEngine == 'elasticsearch') { %> userSearchRepository.delete(user);<% } %> } }<% } %> }
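// --- Illustrative sketch (not part of the template above) ---
// completePasswordReset() only accepts reset keys younger than 24 hours. For the
// sql/mongodb branches the template emits a ZonedDateTime comparison; the Cassandra
// branch does the same with java.util.Date. The self-contained snippet below shows
// just that expiry check, extracted for clarity.
import java.time.ZonedDateTime;

class ResetKeyExpirySketch {
    // returns true when the stored reset date is still within the 24 hour window
    static boolean resetKeyStillValid(ZonedDateTime resetDate) {
        ZonedDateTime oneDayAgo = ZonedDateTime.now().minusHours(24);
        return resetDate.isAfter(oneDayAgo);
    }

    public static void main(String[] args) {
        System.out.println(resetKeyStillValid(ZonedDateTime.now().minusHours(2)));  // true
        System.out.println(resetKeyStillValid(ZonedDateTime.now().minusHours(30))); // false
    }
}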
package cz.encircled.macl; import java.io.BufferedReader; import java.io.InputStreamReader; import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; import org.apache.maven.plugin.logging.Log; /** * @author Kisel on 22.6.2017. */ public class GitLogParser implements VCSLogParser { private final ChangelogConfiguration conf; private final static String command = "git log %s..HEAD"; public GitLogParser(ChangelogConfiguration conf) { this.conf = conf; } @Override public List<String> getNewMessages(Log log, String tagFrom) throws Exception { List<String> result = new ArrayList<>(); Process p = Runtime.getRuntime().exec(String.format(command, tagFrom)); Thread reader = new Thread(() -> { BufferedReader input = new BufferedReader(new InputStreamReader(p.getInputStream())); result.addAll(input.lines() .map(String::trim) .filter(l -> { boolean matches = conf.applicableCommitPattern.matcher(l).matches(); if (matches) { log.info("Message [" + l + "] included"); } else { log.info("Message [" + l + "] skipped"); } return matches; }) .map(s -> String.format(conf.commitFormat, s)) .collect(Collectors.toList())); }); reader.start(); p.waitFor(); /* join the reader thread so the git output is fully collected before returning */ reader.join(); return result; } }
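// --- Illustrative sketch (not part of GitLogParser above) ---
// getNewMessages() trims every line of `git log <tag>..HEAD`, keeps the lines matching
// conf.applicableCommitPattern and reformats them with conf.commitFormat. The standalone
// snippet below reproduces that filtering step with plain java.util.regex; the pattern
// and format strings here are made up for the example, the real values come from
// ChangelogConfiguration.
import java.util.List;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.Stream;

class CommitFilterSketch {
    public static void main(String[] args) {
        Pattern applicable = Pattern.compile("\\[\\w+-\\d+\\].*"); // hypothetical: only "[KEY-123] ..." commits
        String format = "* %s";                                    // hypothetical changelog bullet format
        List<String> messages = Stream.of("[MACL-1] Add git parser", "Merge branch 'master'", " [MACL-2] Fix NPE ")
            .map(String::trim)
            .filter(l -> applicable.matcher(l).matches())
            .map(s -> String.format(format, s))
            .collect(Collectors.toList());
        messages.forEach(System.out::println); // "* [MACL-1] Add git parser", "* [MACL-2] Fix NPE"
    }
}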
package de.braintags.netrelay; import java.io.IOException; import java.security.GeneralSecurityException; import java.util.List; import org.apache.commons.lang3.StringUtils; import de.braintags.netrelay.controller.BodyController; import de.braintags.netrelay.controller.CookieController; import de.braintags.netrelay.controller.FailureController; import de.braintags.netrelay.controller.FavIconController; import de.braintags.netrelay.controller.SessionController; import de.braintags.netrelay.controller.StaticController; import de.braintags.netrelay.controller.TimeoutController; import de.braintags.netrelay.init.MailClientSettings; import de.braintags.netrelay.init.Settings; import de.braintags.netrelay.mapping.NetRelayMapperFactory; import de.braintags.netrelay.mapping.NetRelayStoreObjectFactory; import de.braintags.netrelay.processor.ProcessorDefinition; import de.braintags.netrelay.routing.RouterDefinition; import de.braintags.netrelay.routing.RoutingInit; import de.braintags.vertx.jomnigate.IDataStore; import de.braintags.vertx.jomnigate.init.IDataStoreInit; import de.braintags.vertx.jomnigate.mapping.IMapperFactory; import de.braintags.vertx.jomnigate.mongo.init.MongoDataStoreInit; import de.braintags.vertx.util.exception.InitException; import de.braintags.vertx.util.security.JWTHandler; import de.braintags.vertx.util.security.JWTSettings; import io.vertx.core.AbstractVerticle; import io.vertx.core.AsyncResult; import io.vertx.core.Future; import io.vertx.core.Handler; import io.vertx.core.http.HttpServer; import io.vertx.core.http.HttpServerOptions; import io.vertx.core.net.PemKeyCertOptions; import io.vertx.core.net.PfxOptions; import io.vertx.core.net.SelfSignedCertificate; import io.vertx.ext.jwt.JWT; import io.vertx.ext.mail.MailClient; import io.vertx.ext.web.Router; /** * * * @author Michael Remme * */ public class NetRelay extends AbstractVerticle { private static final io.vertx.core.logging.Logger LOGGER = io.vertx.core.logging.LoggerFactory .getLogger(NetRelay.class); /** * The name of the property which is used to store the instance of NetRelay */ public static final String NETRELAY_PROPERTY = "NetRelay"; // to be able to handle multiple datastores, an IDatastoreCollection will come from pojo-mapper later private IDataStore<?, ?> datastore; private Settings settings; private Router router; private MailClient mailClient; private JWT jwt; /** * The mapper factory which translates between the browser and the server */ private NetRelayMapperFactory mapperFactory; private NetRelayStoreObjectFactory storeObjectFactory; /* * (non-Javadoc) * * @see io.vertx.core.AbstractVerticle#start() */ @Override public void start(final Future<Void> startFuture) { try { settings = initSettings(); initDataStore(dsInitResult -> { if (dsInitResult.failed()) { startFuture.fail(dsInitResult.cause()); } else { init(initResult -> { if (initResult.failed()) { startFuture.fail(initResult.cause()); } else { initComplete(startFuture); } }); } }); } catch (Exception e) { startFuture.fail(e); } } /** * Initialize all needed resources of NetRelay * * @param handler */ protected void init(final Handler<AsyncResult<Void>> handler) { try { router = Router.router(vertx); mapperFactory = new NetRelayMapperFactory(this); storeObjectFactory = new NetRelayStoreObjectFactory(this); initJwt(); initMailClient(); initController(router); initProcessors(); initHttpServer(router, res -> { if (res.failed()) { handler.handle(Future.failedFuture(res.cause())); } else { initHttpsServer(router, httpsResult -> { if 
(httpsResult.failed()) { handler.handle(Future.failedFuture(httpsResult.cause())); } else { handler.handle(Future.succeededFuture()); } }); } }); } catch (Exception e) { handler.handle(Future.failedFuture(e)); } } private void initJwt() { if (settings.getJwtSettings() != null) { this.jwt = JWTHandler.createJWT(getVertx(), settings.getJwtSettings()); } } /** * Set the future to be completed * * @param startFuture */ protected void initComplete(final Future<Void> startFuture) { startFuture.complete(); } private void initMailClient() { MailClientSettings ms = settings.getMailClientSettings(); initMailClientSettings(ms); if (ms.isActive()) { mailClient = MailClient.createShared(vertx, ms, ms.getName()); LOGGER.info("MailClient startet with configuration " + ms.toJson()); } else { LOGGER.info("MailClient NOT started, cause not activated in configuration"); } } private void initMailClientSettings(final MailClientSettings ms) { String mailUserName = System.getProperty(MailClientSettings.USERNAME_SYS_PROPERTY); if (mailUserName != null && mailUserName.hashCode() != 0) { ms.setUsername(mailUserName); } String mailClientPassword = System.getProperty(MailClientSettings.PASSWORD_SYS_PROPERTY); if (mailClientPassword != null && mailClientPassword.hashCode() != 0) { ms.setPassword(mailClientPassword); } String mailClientHost = System.getProperty(MailClientSettings.HOST_SYS_PROPERTY); if (mailClientHost != null && mailClientHost.hashCode() != 0) { ms.setHostname(mailClientHost); } String mailClientPort = System.getProperty(MailClientSettings.PORT_SYS_PROPERTY); if (mailClientPort != null && mailClientPort.hashCode() != 0) { ms.setPort(Integer.parseInt(mailClientPort)); } } /** * Retrive the {@link IMapperFactory} which translates between the mappers and the browser * * @return the {@link IMapperFactory} of NetRelay */ public NetRelayMapperFactory getNetRelayMapperFactory() { return mapperFactory; } /** * Get the StoreObjectFactory used by NetRelay * * @return */ public NetRelayStoreObjectFactory getStoreObjectFactory() { return storeObjectFactory; } /** * Get the router, which is used by NetRelay * * @return the router */ public Router getRouter() { return router; } /** * Init the definitions inside {@link Settings#getProcessorDefinitons()} * * @throws Exception */ protected void initProcessors() { List<ProcessorDefinition> rd = settings.getProcessorDefinitons().getProcessorDefinitions(); for (ProcessorDefinition def : rd) { def.initProcessorDefinition(vertx, this); } } /** * Init the definitions inside {@link Settings#getRouterDefinitions()} * * @throws Exception */ protected void initController(final Router router) throws Exception { List<RouterDefinition> rd = settings.getRouterDefinitions().getRouterDefinitions(); for (RouterDefinition def : rd) { RoutingInit.initRoutingDefinition(vertx, this, router, def); } } /** * Initialize the {@link Settings} which are used to init the current instance * * @return */ protected Settings initSettings() { try { Settings st = Settings.loadSettings(this, vertx, context); if (!st.isEdited()) { throw new InitException( "The settings are not yet edited. 
Change the value of property 'edited' to true inside the appropriate file"); } return st; } catch (Exception e) { LOGGER.error("", e); throw e; } } public final void initDataStore(final Handler<AsyncResult<Void>> handler) throws InstantiationException, IllegalAccessException { IDataStoreInit dsInit = settings.getDatastoreSettings().getDatastoreInit().newInstance(); dsInit.initDataStore(vertx, settings.getDatastoreSettings(), dsInitResult -> { if (dsInitResult.failed()) { handler.handle(Future.failedFuture(dsInitResult.cause())); } else { datastore = dsInitResult.result(); handler.handle(Future.succeededFuture()); } }); } private void initHttpServer(final Router router, final Handler<AsyncResult<Void>> handler) { HttpServerOptions options = new HttpServerOptions().setPort(settings.getServerPort()); HttpServer server = vertx.createHttpServer(options); server.requestHandler(router::accept).listen(result -> { if (result.failed()) { handler.handle(Future.failedFuture(result.cause())); } else { handler.handle(Future.succeededFuture()); } }); } private void initHttpsServer(final Router router, final Handler<AsyncResult<Void>> handler) { if (settings.getSslPort() > 0) { LOGGER.info("launching ssl server listening on port " + settings.getSslPort()); HttpServerOptions options = new HttpServerOptions().setPort(settings.getSslPort()); options.setSsl(true); try { handleSslCertificate(options, handler); HttpServer server = vertx.createHttpServer(options); server.requestHandler(router::accept).listen(result -> { if (result.failed()) { handler.handle(Future.failedFuture(result.cause())); } else { handler.handle(Future.succeededFuture()); } }); } catch (Exception e) { handler.handle(Future.failedFuture(e)); } } else { LOGGER.info("no ssl server is launched, cause ssl port is not set: " + settings.getSslPort()); handler.handle(Future.succeededFuture()); } } private void handleSslCertificate(final HttpServerOptions options, final Handler<AsyncResult<Void>> handler) throws GeneralSecurityException, IOException { if (settings.isCertificateSelfSigned()) { SelfSignedCertificate certificate = SelfSignedCertificate.create(); options.setSsl(true).setKeyCertOptions(certificate.keyCertOptions()).setTrustOptions(certificate.trustOptions()); } else if (settings.getCertificatePath() != null && settings.getCertificatePath().hashCode() != 0) { importCertificate(options); } else { handler.handle(Future.failedFuture(new UnsupportedOperationException( "ssl port is set, but no certificate path set and option certificateSelfSigned is not activated"))); } } private String validateSslPassword() { if (StringUtils.isEmpty(settings.getCertificatePassword())) { throw new IllegalArgumentException("The property 'certificatePassword' must be set in the settings of NetRelay"); } return settings.getCertificatePassword(); } private void importCertificate(final HttpServerOptions httpOpts) { String certPath = settings.getCertificatePath(); if (certPath.matches("^.*\\.(pem|PEM)$")) { // Use a PEM key/cert pair if (settings.getCertificateKeyPath() == null) { throw new IllegalArgumentException("The certificateKeyPath is not set for pem certificate"); } httpOpts.setPemKeyCertOptions( new PemKeyCertOptions().setCertPath(certPath).setKeyPath(settings.getCertificateKeyPath())); httpOpts.setSsl(true); } else if (certPath.matches("^.*\\.(P12|p12)$")) { String password = validateSslPassword(); httpOpts.setPfxKeyCertOptions(new PfxOptions().setPath(certPath).setPassword(password)); } else { throw new IllegalArgumentException( "Please specify the 
certificate as PEM file in the format pkcs8, or as p12 file in the Pfx format"); } } /* * (non-Javadoc) * * @see io.vertx.core.AbstractVerticle#stop(io.vertx.core.Future) */ @Override public void stop(final Future<Void> stopFuture) throws Exception { getDatastore().shutdown(result -> { if (result.failed()) { stopFuture.fail(new RuntimeException(result.cause())); } else { stopFuture.complete(); } }); } /** * The default instance is requested, when there was no saved instance found * * @return */ public Settings createDefaultSettings() { Settings st = new Settings(); addDefaultRouterDefinitions(st); addDefaultProcessorDefinitions(st); st.setDatastoreSettings(MongoDataStoreInit.createDefaultSettings()); return st; } protected void addDefaultProcessorDefinitions(final Settings settings) { ProcessorDefinition def = new ProcessorDefinition(); def.setActive(false); def.setName("dummyprocessor"); def.getProcessorProperties().put("demoKey", "demoValue"); def.setTimeDef("60000"); settings.getProcessorDefinitons().add(def); } protected void addDefaultRouterDefinitions(final Settings settings) { settings.getRouterDefinitions().add(FavIconController.createDefaultRouterDefinition()); settings.getRouterDefinitions().add(CookieController.createDefaultRouterDefinition()); settings.getRouterDefinitions().add(SessionController.createDefaultRouterDefinition()); settings.getRouterDefinitions().add(TimeoutController.createDefaultRouterDefinition()); settings.getRouterDefinitions().add(BodyController.createDefaultRouterDefinition()); settings.getRouterDefinitions().add(StaticController.createDefaultRouterDefinition()); settings.getRouterDefinitions().add(FailureController.createDefaultRouterDefinition()); } /** * Get the {@link IDataStore} for the current instance * * @return the datastore */ public final IDataStore<?, ?> getDatastore() { return datastore; } /** * Get the {@link Settings} which are configuring NetRelay * * @return the settings */ public Settings getSettings() { return settings; } /** * Resets and rebuilds the routes by using the {@link Settings#getRouterDefinitions()} * * @throws Exception */ public void resetRoutes() throws Exception { getRouter().clear(); initController(router); } /** * If {@link MailClientSettings#isActive()} from the {@link Settings}, then this will return * the initialized instance of {@link MailClient} * * @return the mailClient */ public final MailClient getMailClient() { return mailClient; } /** * If the {@link JWTSettings} inside the {@link Settings} are configured, this will return a JWT instance to de- and * encode JWTs. Otherwise, it will return null * * @return the jwt */ public JWT getJwt() { return jwt; } }
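// --- Illustrative sketch (not part of NetRelay above) ---
// NetRelay is an AbstractVerticle, so it is deployed like any other Vert.x verticle:
// start() loads the Settings, initializes the datastore, controllers, processors and the
// HTTP/HTTPS servers before the deployment completes. How the settings file is located
// and how errors are handled below is only sketched, not the project's official launcher.
import io.vertx.core.Vertx;

import de.braintags.netrelay.NetRelay;

class NetRelayLauncherSketch {
    public static void main(String[] args) {
        Vertx vertx = Vertx.vertx();
        vertx.deployVerticle(new NetRelay(), deployment -> {
            if (deployment.succeeded()) {
                System.out.println("NetRelay deployed: " + deployment.result());
            } else {
                deployment.cause().printStackTrace();
                vertx.close();
            }
        });
    }
}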
package org.clapper.util.config; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.IOException; import java.io.PrintStream; import java.io.PrintWriter; import java.util.ArrayList; import java.util.Collection; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.StringTokenizer; import java.net.MalformedURLException; import java.net.URL; import org.clapper.util.text.TextUtil; import org.clapper.util.text.UnixShellVariableSubstituter; import org.clapper.util.text.VariableDereferencer; import org.clapper.util.text.VariableNameChecker; import org.clapper.util.text.VariableSubstitutionException; import org.clapper.util.text.VariableSubstituter; import org.clapper.util.text.XStringBuffer; import org.clapper.util.io.FileUtil; public class Configuration implements VariableDereferencer, VariableNameChecker { private static final String COMMENT_CHARS = "#!"; private static final char SECTION_START = '['; private static final char SECTION_END = ']'; private static final String INCLUDE = "%include"; private static final int MAX_INCLUDE_NESTING_LEVEL = 50; private static final String SYSTEM_SECTION_NAME = "system"; private static final String PROGRAM_SECTION_NAME = "program"; /** * Line types */ enum LineType { COMMENT, INCLUDE, SECTION, VARIABLE, BLANK } /** * Contains one logical input line. */ static class Line { int number = 0; LineType type = LineType.COMMENT; XStringBuffer buffer = new XStringBuffer(); Line() { } void newLine() { buffer.setLength (0); } } /** * Context for variable substitution */ private class SubstitutionContext { Variable currentVariable; int totalSubstitutions = 0; SubstitutionContext (Variable v) { currentVariable = v; } } /** * Container for data used only during parsing. */ private class ParseContext { /** * Current section. Only set during parsing. */ private Section currentSection = null; /** * Current variable name being processed. Used during the variable * substitution parsing phase. */ private Variable currentVariable = null; /** * Current include file nesting level. Used as a fail-safe during * parsing. */ private int includeFileNestingLevel = 0; /** * Table of files/URLs currently open. Used during include * processing. */ private Set<String> openURLs = new HashSet<String>(); ParseContext() { } } /** * The URL of the configuration file, if available */ private URL configURL = null; /** * List of sections, in order encountered. Each element is a reference to * a Section object. */ private List<Section> sectionsInOrder = new ArrayList<Section>(); /** * Sections by name. Each index is a string. Each value is a reference to * a Section object. */ private Map<String, Section> sectionsByName = new HashMap<String, Section>(); /** * Special section for System.properties */ private static Section systemSection; /** * Special section for program properties */ private Section programSection; /** * Section ID values. */ private int nextSectionID = 1; /** * Construct an empty <tt>Configuration</tt> object. The object may * later be filled with configuration data via one of the <tt>load()</tt> * methods, or by calls to {@link #addSection addSection()} and * {@link #setVariable setVariable()}. 
*/ public Configuration() { } /** * Construct a <tt>Configuration</tt> object that parses data from * the specified file. * * @param f The <tt>File</tt> to open and parse * * @throws IOException can't open or read file * @throws ConfigurationException error in configuration data */ public Configuration (File f) throws IOException, ConfigurationException { load (f); } /** * Construct a <tt>Configuration</tt> object that parses data from * the specified file. * * @param path the path to the file to parse * * @throws FileNotFoundException specified file doesn't exist * @throws IOException can't open or read file * @throws ConfigurationException error in configuration data */ public Configuration (String path) throws FileNotFoundException, IOException, ConfigurationException { load (path); } /** * Construct a <tt>Configuration</tt> object that parses data from * the specified URL. * * @param url the URL to open and parse * * @throws IOException can't open or read URL * @throws ConfigurationException error in configuration data */ public Configuration (URL url) throws IOException, ConfigurationException { load (url); } /** * Construct a <tt>Configuration</tt> object that parses data from * the specified <tt>InputStream</tt>. * * @param iStream the <tt>InputStream</tt> * * @throws IOException can't read from <tt>InputStream</tt> * @throws ConfigurationException error in configuration data */ public Configuration (InputStream iStream) throws IOException, ConfigurationException { load (iStream); } /** * Add a new section to this configuration data. * * @param sectionName the name of the new section * * @throws SectionExistsException a section by that name already exists * * @see #containsSection * @see #getSectionNames * @see #setVariable */ public void addSection (String sectionName) throws SectionExistsException { if (sectionsByName.get (sectionName) != null) throw new SectionExistsException (sectionName); makeNewSection (sectionName); } /** * Clear this object of all configuration data. */ public void clear() { sectionsInOrder.clear(); sectionsByName.clear(); configURL = null; } /** * Determine whether this object contains a specified section. * * @param sectionName the section name * * @return <tt>true</tt> if the section exists in this configuration, * <tt>false</tt> if not. * * @see #getSectionNames * @see #addSection */ public boolean containsSection (String sectionName) { return (sectionsByName.get (sectionName) != null); } /** * Get the URL of the configuration file, if available. * * @return the URL of the configuration file, or null if the file * was parsed from an <tt>InputStream</tt> */ public URL getConfigurationFileURL() { return configURL; } /** * Get the names of the sections in this object, in the order they were * parsed and/or added. * * @param collection the <tt>Collection</tt> to which to add the section * names. The names are added in the order they were * parsed and/or added to this object; of course, the * <tt>Collection</tt> may reorder them. * * @return the <tt>collection</tt> parameter, for convenience * * @see #getVariableNames */ public Collection<String> getSectionNames (Collection<String> collection) { for (Section section : sectionsInOrder) collection.add (section.getName()); return collection; } /** * Get the names of the sections in this object, in the order they were * parsed and/or added. 
* * @return a new <tt>Collection</tt> of section names * * @see #getVariableNames */ public Collection<String> getSectionNames() { return getSectionNames (new ArrayList<String>()); } /** * Get the names of the all the variables in a section, in the order * they were parsed and/or added. * * @param sectionName the name of the section to access * @param collection the <tt>Collection</tt> to which to add the variable * names. The names are added in the order they were * parsed and/or added to this object; of course, the * <tt>Collection</tt> may reorder them. * * @return the <tt>collection</tt> parameter, for convenience * * @throws NoSuchSectionException no such section * * @see #getSectionNames * @see #containsSection * @see #getVariableValue */ public Collection<String> getVariableNames (String sectionName, Collection<String> collection) throws NoSuchSectionException { Section section = sectionsByName.get (sectionName); if (section == null) throw new NoSuchSectionException (sectionName); collection.addAll (section.getVariableNames()); return collection; } /** * Get the names of the all the variables in a section, in the order * they were parsed and/or added. * * @param sectionName the name of the section to access * * @return a new <tt>Collection</tt> of variable names * * @throws NoSuchSectionException no such section * * @see #getSectionNames * @see #containsSection * @see #getVariableValue */ public Collection<String> getVariableNames (String sectionName) throws NoSuchSectionException { return getVariableNames (sectionName, new ArrayList<String>()); } /** * Get the value for a variable. * * @param sectionName the name of the section containing the variable * @param variableName the variable name * * @return the value for the variable (which may be the empty string) * * @throws NoSuchSectionException the named section does not exist * @throws NoSuchVariableException the section has no such variable */ public String getConfigurationValue (String sectionName, String variableName) throws NoSuchSectionException, NoSuchVariableException { Section section = (Section) sectionsByName.get (sectionName); if (section == null) throw new NoSuchSectionException (sectionName); Variable variable = null; try { variable = section.getVariable (variableName); } catch (ConfigurationException ex) { } if (variable == null) throw new NoSuchVariableException (sectionName, variableName); return variable.getCookedValue(); } /** * Convenience method to get and convert an optional integer parameter. * The default value applies if the variable is missing or is there but * has an empty value. * * @param sectionName section name * @param variableName variable name * @param defaultValue default value if not found * * @return the value, or the default value if not found * * @throws NoSuchSectionException no such section * @throws ConfigurationException bad numeric value * * @see #getOptionalCardinalValue * @see #getRequiredIntegerValue */ public int getOptionalIntegerValue (String sectionName, String variableName, int defaultValue) throws NoSuchSectionException, ConfigurationException { try { return getRequiredIntegerValue (sectionName, variableName); } catch (NoSuchVariableException ex) { return defaultValue; } } /** * Convenience method to get and convert a required integer parameter. 
* * @param sectionName section name * @param variableName variable name * * @return the value * * @throws NoSuchSectionException no such section * @throws NoSuchVariableException no such variable * @throws ConfigurationException bad numeric value * * @see #getRequiredCardinalValue * @see #getOptionalIntegerValue */ public int getRequiredIntegerValue (String sectionName, String variableName) throws NoSuchSectionException, NoSuchVariableException, ConfigurationException { String sNum = getConfigurationValue (sectionName, variableName); try { return Integer.parseInt (sNum); } catch (NumberFormatException ex) { throw new ConfigurationException (Package.BUNDLE_NAME, "Configuration.badNumericValue", "Bad numeric value \"{0}\" for " + "variable \"{1}\" in section " + "\"{2}\"", new Object[] { sNum, variableName, sectionName }); } } public int getOptionalCardinalValue (String sectionName, String variableName, int defaultValue) throws NoSuchSectionException, ConfigurationException { assert (defaultValue >= 0); try { return getRequiredCardinalValue (sectionName, variableName); } catch (NoSuchVariableException ex) { return defaultValue; } } /** * Convenience method to get and convert a required integer parameter. * * @param sectionName section name * @param variableName variable name * * @return the value * * @throws NoSuchSectionException no such section * @throws NoSuchVariableException no such variable * @throws ConfigurationException bad numeric value * * @see #getOptionalCardinalValue * @see #getRequiredIntegerValue */ public int getRequiredCardinalValue (String sectionName, String variableName) throws NoSuchSectionException, NoSuchVariableException, ConfigurationException { String sNum = getConfigurationValue (sectionName, variableName); int i = getRequiredIntegerValue (sectionName, variableName); if (i < 0) { throw new ConfigurationException (Package.BUNDLE_NAME, "Configuration.negativeCardinalValue", "Bad netaive numeric value \"{0}\" " + "for variable \"{1}\" in section \"{2}\"", new Object[] { sNum, variableName, sectionName }); } return i; } /** * Convenience method to get and convert an optional floating point * numeric parameter. The default value applies if the variable is * missing or is there but has an empty value. * * @param sectionName section name * @param variableName variable name * @param defaultValue default value if not found * * @return the value, or the default value if not found * * @throws NoSuchSectionException no such section * @throws ConfigurationException bad numeric value */ public double getOptionalDoubleValue (String sectionName, String variableName, double defaultValue) throws NoSuchSectionException, ConfigurationException { try { return getRequiredDoubleValue (sectionName, variableName); } catch (NoSuchVariableException ex) { return defaultValue; } } /** * Convenience method to get and convert a required floating point * numeric parameter. 
* * @param sectionName section name * @param variableName variable name * * @return the value * * @throws NoSuchSectionException no such section * @throws NoSuchVariableException no such variable * @throws ConfigurationException bad numeric value */ public double getRequiredDoubleValue (String sectionName, String variableName) throws NoSuchSectionException, NoSuchVariableException, ConfigurationException { String sNum = getConfigurationValue (sectionName, variableName); try { return Double.parseDouble (sNum); } catch (NumberFormatException ex) { throw new ConfigurationException (Package.BUNDLE_NAME, "Configuration.badFloatValue", "Bad floating point value " + "\"{0}\" for variable \"{1}\" " + "in section \"{2}\"", new Object[] { sNum, variableName, sectionName }); } } /** * Convenience method to get and convert an optional boolean parameter. * The default value applies if the variable is missing or is there * but has an empty value. * * @param sectionName section name * @param variableName variable name * @param defaultValue default value if not found * * @return the value, or the default value if not found * * @throws NoSuchSectionException no such section * @throws ConfigurationException bad numeric value */ public boolean getOptionalBooleanValue (String sectionName, String variableName, boolean defaultValue) throws NoSuchSectionException, ConfigurationException { boolean result = defaultValue; try { String s = getConfigurationValue (sectionName, variableName); if (s.trim().length() == 0) result = defaultValue; else result = TextUtil.booleanFromString (s); } catch (NoSuchVariableException ex) { result = defaultValue; } catch (IllegalArgumentException ex) { throw new ConfigurationException (ex.getMessage()); } return result; } /** * Convenience method to get and convert a required boolean parameter. * * @param sectionName section name * @param variableName variable name * * @return the value * * @throws NoSuchSectionException no such section * @throws NoSuchVariableException no such variable * @throws ConfigurationException bad numeric value */ public boolean getRequiredBooleanValue (String sectionName, String variableName) throws NoSuchSectionException, ConfigurationException, NoSuchVariableException { return Boolean.valueOf (getConfigurationValue (sectionName, variableName)) .booleanValue(); } /** * Convenience method to get an optional string value. The default * value applies if the variable is missing or is there but has an * empty value. * * @param sectionName section name * @param variableName variable name * @param defaultValue default value if not found * * @return the value, or the default value if not found * * @throws NoSuchSectionException no such section * @throws ConfigurationException bad numeric value */ public String getOptionalStringValue (String sectionName, String variableName, String defaultValue) throws NoSuchSectionException, ConfigurationException { String result; try { result = getConfigurationValue (sectionName, variableName); if (result.trim().length() == 0) result = defaultValue; } catch (NoSuchVariableException ex) { result = defaultValue; } return result; } /** * Get the value associated with a given variable. Required by the * {@link VariableDereferencer} interface, this method is used during * parsing to handle variable substitutions (but also potentially * useful by other applications). See this class's documentation for * details on variable references. * * @param varName The name of the variable for which the value is * desired. 
* @param context a context object, passed through from the caller * to the dereferencer, or null if there isn't one. * For this class, the context object is a * SubstitutionContext variable. * * @return The variable's value. If the variable has no value, this * method must return the empty string (""). It is important * <b>not</b> to return null. * * @throws VariableSubstitutionException variable references itself */ public String getVariableValue (String varName, Object context) throws VariableSubstitutionException { int i; Section section = null; String sectionName; String value = null; SubstitutionContext substContext = (SubstitutionContext) context; Section variableParentSection; Variable currentVariable; try { checkVariableName (varName); } catch (ConfigurationException ex) { throw new VariableSubstitutionException (ex); } currentVariable = substContext.currentVariable; if (currentVariable.getName().equals (varName)) { throw new VariableSubstitutionException (Package.BUNDLE_NAME, "Configuration.recursiveSubst", "Attempt to substitute value for variable " + "\"{0}\" within itself.", new Object[] {varName}); } variableParentSection = substContext.currentVariable.getSection(); i = varName.indexOf (':'); if (i == -1) { // No section in the variable reference. Use the variable's // context. section = variableParentSection; } else { sectionName = varName.substring (0, i); varName = varName.substring (i + 1); if (sectionName.equals (SYSTEM_SECTION_NAME)) section = systemSection; else if (sectionName.equals (PROGRAM_SECTION_NAME)) section = programSection; else section = sectionsByName.get (sectionName); } if (section != null) { if (variableParentSection.getID() < section.getID()) { String parentSectionName = variableParentSection.getName(); String thisSectionName = section.getName(); throw new VariableSubstitutionException (Package.BUNDLE_NAME, "Configuration.badSectionRef", "Variable \"{0}\" in section \"{1}\" cannot substitute " + "the value of variable \"{2}\" from section \"{3}\", " + "because section \"{3}\" appears after section \"{1}\" " + "in the configuration file.", new Object[] { substContext.currentVariable.getName(), parentSectionName, varName, thisSectionName, thisSectionName, parentSectionName }); } Variable varToSubst; try { varToSubst = section.getVariable (varName); } catch (ConfigurationException ex) { throw new VariableSubstitutionException (ex.getMessage()); } if (varToSubst != null) value = varToSubst.getCookedValue(); } substContext.totalSubstitutions++; return (value == null) ? "" : value; } public boolean legalVariableCharacter (char c) { return ! (UnixShellVariableSubstituter.isVariableMetacharacter (c)); } /** * Load configuration from a <tt>File</tt>. Any existing data is * discarded. * * @param file the file * * @throws IOException read error * @throws ConfigurationException parse error */ public void load (File file) throws IOException, ConfigurationException { clear(); URL url = file.toURL(); parse (new FileInputStream (file), url); this.configURL = url; } /** * Load configuration from a file specified as a pathname. Any existing * data is discarded. 
* * @param path the path * * @throws FileNotFoundException specified file doesn't exist * @throws IOException can't open or read file * @throws ConfigurationException error in configuration data */ public void load (String path) throws FileNotFoundException, IOException, ConfigurationException { clear(); URL url = new File (path).toURL(); parse (new FileInputStream (path), url); this.configURL = url; } /** * Load configuration from a URL. Any existing data is discarded. * * @param url the URL * * @throws IOException read error * @throws ConfigurationException parse error */ public void load (URL url) throws IOException, ConfigurationException { clear(); parse (url.openStream(), url); this.configURL = url; } /** * Load configuration from an <tt>InputStream</tt>. Any existing data * is discarded. * * @param iStream the <tt>InputStream</tt> * * @throws IOException can't open or read URL * @throws ConfigurationException error in configuration data */ public void load (InputStream iStream) throws IOException, ConfigurationException { clear(); parse (iStream, null); } /** * Set a variable's value. If the variable does not exist, it is created. * If it does exist, its current value is overwritten with the new one. * Metacharacters and variable references are not expanded unless the * <tt>expand</tt> parameter is <tt>true</tt>. An <tt>expand</tt> value * of <tt>false</tt> is useful when creating new configuration data to * be written later. * * @param sectionName name of existing section to contain the variable * @param variableName name of variable to set * @param value variable's value * @param expand <tt>true</tt> to expand metacharacters and variable * references in the value, <tt>false</tt> to leave * the value untouched. * * @throws NoSuchSectionException section does not exist * @throws VariableSubstitutionException variable substitution error */ public void setVariable (String sectionName, String variableName, String value, boolean expand) throws NoSuchSectionException, VariableSubstitutionException { Section section = sectionsByName.get (sectionName); if (section == null) throw new NoSuchSectionException (sectionName); Variable variable = null; try { variable = section.getVariable (variableName); } catch (ConfigurationException ex) { throw new VariableSubstitutionException (ex.getMessage()); } if (variable != null) variable.setValue (value); else variable = section.addVariable (variableName, value); if (expand) { try { substituteVariables (variable, new UnixShellVariableSubstituter(), true); } catch (ConfigurationException ex) { throw new VariableSubstitutionException (ex.getMessage()); } } } /** * Writes the configuration data to a <tt>PrintWriter</tt>. The sections * and variables within the sections are written in the order they were * originally read from the file. Non-printable characters (and a few * others) are encoded into metacharacter sequences. Comments and * variable references are not propagated, since they are not retained * when the data is parsed. 
* * @param out where to write the configuration data * * @throws ConfigurationException on error * * @see XStringBuffer#encodeMetacharacters() */ public void write (PrintWriter out) throws ConfigurationException { XStringBuffer value = new XStringBuffer(); boolean firstSection = true; out.print (COMMENT_CHARS.charAt (0)); out.print (" Written by "); out.println (this.getClass().getName()); out.print (COMMENT_CHARS.charAt (0)); out.print (" on "); out.println (new Date().toString()); out.println(); for (Section section : sectionsInOrder) { if (! firstSection) out.println(); out.println (SECTION_START + section.getName() + SECTION_END); firstSection = false; for (String varName : section.getVariableNames()) { Variable var = section.getVariable (varName); value.setLength (0); value.append (var.getCookedValue()); value.encodeMetacharacters(); out.println (varName + ": " + value.toString()); } } } /** * Writes the configuration data to a <tt>PrintStream</tt>. The sections * and variables within the sections are written in the order they were * originally read from the file. Non-printable characters (and a few * others) are encoded into metacharacter sequences. Comments and * variable references are not propagated, since they are not retained * when the data is parsed. * * @param out where to write the configuration data * * @throws ConfigurationException on error * * @see XStringBuffer#encodeMetacharacters() */ public void write (PrintStream out) throws ConfigurationException { PrintWriter w = new PrintWriter (out); write (w); w.flush(); } /** * Parse configuration data from the specified stream. * * @param in the input stream * @param url the URL associated with the stream, or null if not known * * @throws ConfigurationException parse error */ private synchronized void parse (InputStream in, URL url) throws ConfigurationException { loadConfiguration (in, url, new ParseContext()); } /** * Load the configuration data into memory, without processing * metacharacters or variable substitution. Includes are processed, * though. * * @param in input stream * @param url URL associated with the stream, or null if not known * @param parseContext current parsing context * * @throws IOException read error * @throws ConfigurationException parse error */ private void loadConfiguration (InputStream in, URL url, ParseContext parseContext) throws ConfigurationException { BufferedReader r; Line line = new Line(); String sURL = url.toExternalForm(); // Now, create the phantom program and system sections. These MUST // be created first, or other sections won't be able to substitute // from them. (i.e., They must have the lowest IDs.) programSection = new ProgramSection (PROGRAM_SECTION_NAME, nextSectionID()); systemSection = new SystemSection (SYSTEM_SECTION_NAME, nextSectionID()); if (parseContext.openURLs.contains (sURL)) { throw new ConfigurationException (Package.BUNDLE_NAME, "Configuration.recursiveInclude", "{0}, line {1}: Attempt to include \"{2}\" " + "from itself, either directly or indirectly.", new Object[] { url.toExternalForm(), String.valueOf (line.number), sURL }); } parseContext.openURLs.add (sURL); // Parse the entire file into memory before doing variable // substitution and metacharacter expansion. 
r = new BufferedReader (new InputStreamReader (in)); while (readLogicalLine (r, line)) { try { switch (line.type) { case COMMENT: case BLANK: break; case INCLUDE: handleInclude (line, url, parseContext); break; case SECTION: parseContext.currentSection = handleNewSection (line, url); break; case VARIABLE: if (parseContext.currentSection == null) { throw new ConfigurationException (Package.BUNDLE_NAME, "Configuration.varBeforeSection", "{0}, line {1}: Variable assignment " + "before first section.", new Object[] { url.toExternalForm(), String.valueOf (line.number) }); } handleVariable (line, url, parseContext); break; default: throw new IllegalStateException ("Bug: line.type=" + line.type); } } catch (IOException ex) { throw new ConfigurationException (getExceptionPrefix (line, url) + ex.toString()); } } parseContext.openURLs.remove (sURL); } /** * Handle a new section. * * @param line line buffer * @param url URL currently being processed, or null if unknown * * @return a new Section object, which has been stored in the appropriate * places * * @throws ConfigurationException configuration error */ private Section handleNewSection (Line line, URL url) throws ConfigurationException { String s = line.buffer.toString().trim(); if (s.charAt (0) != SECTION_START) { throw new ConfigurationException (Package.BUNDLE_NAME, "Configuration.badSectionBegin", "{0}, line {1}: Section does not begin with \"{2}\"", new Object[] { url.toExternalForm(), String.valueOf (line.number), String.valueOf (SECTION_START) }); } else if (s.charAt (s.length() - 1) != SECTION_END) { throw new ConfigurationException (Package.BUNDLE_NAME, "Configuration.badSectionEnd", "{0}, line {1}: Section does not end with \"{2}\"", new Object[] { url.toExternalForm(), String.valueOf (line.number), String.valueOf (SECTION_END) }); } return makeNewSection (s.substring (1, s.length() - 1)); } /** * Handle a new variable during parsing. 
* * @param line line buffer * @param url URL currently being processed, or null if unknown * @param parseContext current parsing context * * @throws ConfigurationException configuration error */ private void handleVariable (Line line, URL url, ParseContext parseContext) throws ConfigurationException { char[] s = line.buffer.toString().toCharArray(); int iSep; for (iSep = 0; iSep < s.length; iSep++) { if ((s[iSep] == ':') || (s[iSep] == '=')) break; } if (iSep == s.length) { throw new ConfigurationException (Package.BUNDLE_NAME, "Configuration.missingAssignOp", "{0}, line {1}: Missing \"=\" " + "or \":\" for variable " + "definition.", new Object[] { url.toExternalForm(), String.valueOf (line.number) }); } if (iSep == 0) { throw new ConfigurationException (Package.BUNDLE_NAME, "Configuration.noVariablName", "{0}, line {1}: Missing " + "variable name for variable " + "definition.", new Object[] { url.toExternalForm(), String.valueOf (line.number) }); } int i = 0; int j = iSep - 1; while (Character.isWhitespace (s[i])) i++; while (Character.isWhitespace (s[j])) j--; if (i > j) { throw new ConfigurationException (Package.BUNDLE_NAME, "Configuration.noVariablName", "{0}, line {1}: Missing " + "variable name for variable " + "definition.", new Object[] { url.toExternalForm(), String.valueOf (line.number) }); } String varName = new String (s, i, j - i + 1); if (varName.length() == 0) { throw new ConfigurationException (Package.BUNDLE_NAME, "Configuration.noVariablName", "{0}, line {1}: Missing " + "variable name for variable " + "definition.", new Object[] { url.toExternalForm(), String.valueOf (line.number) }); } checkVariableName (varName); i = skipWhitespace (s, iSep + 1); j = s.length - i; Section currentSection = parseContext.currentSection; String value = new String (s, i, j); Variable existing = currentSection.getVariable (varName); if (existing != null) { throw new ConfigurationException (Package.BUNDLE_NAME, "Configuration.duplicateVar", "{0}, line {1}: Section \"{2}\" has a " + "duplicate definition for variable " + "\"{3}\". The first instance was defined " + "on line {4}.", new Object[] { url.toExternalForm(), String.valueOf (line.number), currentSection.getName(), varName, String.valueOf (existing.lineWhereDefined()) }); } Variable newVar = currentSection.addVariable (varName, value, line.number); // Expand the metacharacters and variable references in the variable. try { newVar.segmentValue(); VariableSubstituter sub = new UnixShellVariableSubstituter(); substituteVariables (newVar, sub, false); decodeMetacharacters (newVar); newVar.reassembleCookedValueFromSegments(); } catch (VariableSubstitutionException ex) { throw new ConfigurationException (ex.getMessage()); } } private void checkVariableName (String varName) throws ConfigurationException { char[] ch = varName.toCharArray(); for (int i = 0; i < ch.length; i++) { if (! legalVariableCharacter (ch[i])) { throw new ConfigurationException (Package.BUNDLE_NAME, "Configuration.badVariableName", "\"{0}\" is an illegal variable name", new Object[] {varName}); } } } /** * Handle an include directive. 
* * @param line line buffer * @param url URL currently being processed, or null if unknown * @param parseContext current parsing context * * @throws IOException I/O error opening or reading include * @throws ConfigurationException configuration error */ private void handleInclude (Line line, URL url, ParseContext parseContext) throws IOException, ConfigurationException { if (parseContext.includeFileNestingLevel >= MAX_INCLUDE_NESTING_LEVEL) { throw new ConfigurationException (Package.BUNDLE_NAME, "Configuration.maxNestedIncludeExceeded", "{0}, line {1}: Exceeded maximum nested " + "include level of {2}.", new Object[] { url.toExternalForm(), String.valueOf (line.number), String.valueOf (MAX_INCLUDE_NESTING_LEVEL) }); } parseContext.includeFileNestingLevel++; String s = line.buffer.toString(); // Parse the file name. String includeTarget = s.substring (INCLUDE.length() + 1).trim(); int len = includeTarget.length(); // Make sure double quotes surround the file or URL. if ((len < 2) || (! includeTarget.startsWith ("\"")) || (! includeTarget.endsWith ("\""))) { throw new ConfigurationException (Package.BUNDLE_NAME, "Configuration.malformedDirective", "{0}, line {1}: Malformed \"{2}\" directive", new Object[] { url.toExternalForm(), String.valueOf (line.number), INCLUDE }); } // Extract the file. includeTarget = includeTarget.substring (1, len - 1); if (includeTarget.length() == 0) { throw new ConfigurationException (Package.BUNDLE_NAME, "Configuration.includeMissingFile", "{0}, line {1}: Missing file name or URL in " + "\"{2}\" directive", new Object[] { url.toExternalForm(), String.valueOf (line.number), INCLUDE }); } // Process the include try { loadInclude (new URL (includeTarget), parseContext); } catch (MalformedURLException ex) { // Not obviously a URL. First, determine whether it has // directory information or not. If not, try to use the // parent's directory information. if (FileUtil.isAbsolutePath (includeTarget)) { loadInclude (new URL (url.getProtocol(), url.getHost(), url.getPort(), includeTarget), parseContext); } else { // It's relative to the parent. If the parent URL is not // specified, then we can't do anything except try to load // the include as is. It'll probably fail... if (url == null) { loadInclude (new File (includeTarget).toURL(), parseContext); } else { String parent = new File (url.getFile()).getParent(); if (parent == null) parent = ""; loadInclude (new URL (url.getProtocol(), url.getHost(), url.getPort(), parent + "/" + includeTarget), parseContext); } } } parseContext.includeFileNestingLevel--; } /** * Actually attempts to load an include reference. This is basically just * a simplified front-end to loadConfiguration(). * * @param url the URL to be included * @param parseContext current parsing context * * @throws IOException I/O error * @throws ConfigurationException configuration error */ private void loadInclude (URL url, ParseContext parseContext) throws IOException, ConfigurationException { loadConfiguration (url.openStream(), url, parseContext); } /** * Read the next logical line of input from a config file. * * @param r the reader * @param line where to store the line. The line number in this * object is incremented, the "buffer" field is updated, * and the "type" field is set appropriately. * * @return <tt>true</tt> if a line was read, <tt>false</tt> for EOF. 
 *
 * @throws ConfigurationException  read error
 */
private boolean readLogicalLine (BufferedReader r, Line line)
    throws ConfigurationException {
    boolean continued = false;
    boolean gotSomething = false;

    line.newLine();

    for (;;) {
        String s;
        try {
            s = r.readLine();
        }
        catch (IOException ex) {
            throw new ConfigurationException (ex.toString());
        }

        if (s == null)
            break;

        gotSomething = true;
        line.number++;

        // Strip leading white space on all lines.
        int i;
        char[] chars = s.toCharArray();
        i = skipWhitespace (chars, 0);
        if (i < chars.length)
            s = s.substring (i);
        else
            s = "";

        if (! continued) {
            // First line. Determine what it is.
            char firstChar;
            if (s.length() == 0)
                line.type = LineType.BLANK;
            else if (COMMENT_CHARS.indexOf (s.charAt (0)) != -1)
                line.type = LineType.COMMENT;
            else if (s.charAt (0) == SECTION_START)
                line.type = LineType.SECTION;
            else if (new StringTokenizer (s).nextToken().equals (INCLUDE))
                line.type = LineType.INCLUDE;
            else
                line.type = LineType.VARIABLE;
        }

        if ((line.type == LineType.VARIABLE) && (hasContinuationMark (s))) {
            continued = true;
            line.buffer.append (s.substring (0, s.length() - 1));
        }
        else {
            line.buffer.append (s);
            break;
        }
    }

    return gotSomething;
}

/**
 * Determine whether a line has a continuation mark or not.
 *
 * @param s  the line
 *
 * @return true if there's a continuation mark, false if not
 */
private boolean hasContinuationMark (String s) {
    boolean has = false;

    if (s.length() > 0) {
        char[] chars = s.toCharArray();
        if (chars[chars.length - 1] == '\\') {
            // Possibly. See if there are an odd number of them.
            int total = 0;
            for (int i = chars.length - 1; i >= 0; i--) {
                if (chars[i] != '\\')
                    break;
                total++;
            }

            has = ((total % 2) == 1);
        }
    }

    return has;
}

/**
 * Get an appropriate exception prefix (e.g., line number, etc.)
 *
 * @param line  line buffer
 * @param url   URL currently being processed, or null if unknown
 *
 * @return a suitable string
 */
private String getExceptionPrefix (Line line, URL url) {
    StringBuffer buf = new StringBuffer();

    if (url != null) {
        buf.append (url.toExternalForm());
        buf.append (", line ");
    }
    else {
        buf.append ("Line ");
    }

    buf.append (line.number);
    buf.append (": ");

    return buf.toString();
}

/**
 * Handle metacharacter substitution for a variable value.
 *
 * @param var  The current variable being processed
 *
 * @throws VariableSubstitutionException  variable substitution error
 * @throws ConfigurationException         some other configuration error
 */
private void decodeMetacharacters (Variable var)
    throws VariableSubstitutionException, ConfigurationException {
    ValueSegment[] segments = var.getCookedSegments();
    for (int i = 0; i < segments.length; i++) {
        ValueSegment segment = segments[i];
        if (segment.isLiteral)
            continue;

        segment.segmentBuf.decodeMetacharacters();
    }
}

/**
 * Handle variable substitution for a variable value.
* * @param var The current variable being processed * @param substituter VariableSubstituter to use * @param concatSegments Re-concatenate the segments * * @return the expanded result * * @throws VariableSubstitutionException variable substitution error * @throws ConfigurationException some other configuration error */ private void substituteVariables (Variable var, VariableSubstituter substituter, boolean concatSegments) throws VariableSubstitutionException, ConfigurationException { ValueSegment[] segments = var.getCookedSegments(); SubstitutionContext context = new SubstitutionContext (var); for (int i = 0; i < segments.length; i++) { // Keep substituting the current variable's value until there // no more substitutions are performed. This handles the case // where a dereferenced variable value contains its own // variable references. ValueSegment segment = segments[i]; if (segment.isLiteral) continue; String s = segment.segmentBuf.toString(); do { context.totalSubstitutions = 0; s = substituter.substitute (s, this, this, context); } while (context.totalSubstitutions > 0); segment.segmentBuf.setLength (0); segment.segmentBuf.append (s); } if (concatSegments) var.reassembleCookedValueFromSegments(); } /** * Get index of first non-whitespace character. * * @param s string to check * @param start starting point * * @return index of first non-whitespace character past "start", or -1 */ private int skipWhitespace (String s, int start) { return skipWhitespace (s.toCharArray(), start); } /** * Get index of first non-whitespace character. * * @param chars character array to check * @param start starting point * * @return index of first non-whitespace character past "start", or -1 */ private int skipWhitespace (char[] chars, int start) { while (start < chars.length) { if (! Character.isWhitespace (chars[start])) break; start++; } return start; } /** * Create and save a new Section. * * @param sectionName the name * * @return the Section object, which has been saved. */ private Section makeNewSection (String sectionName) { int id = nextSectionID(); Section section = new Section (sectionName, id); sectionsInOrder.add (section); sectionsByName.put (sectionName, section); return section; } /** * Get the next section ID * * @return the ID */ private synchronized int nextSectionID() { return ++nextSectionID; } }
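The substituteVariables method above keeps re-running the substituter on each non-literal segment until a pass performs no substitutions, so values whose expansion introduces further variable references are fully resolved. The clapper substituter classes are not shown in this file, so the following is only a minimal standalone sketch of that repeat-until-fixpoint idea using a plain map and ${name} syntax (it assumes definitions are not self-referential); it is not the library's VariableSubstituter API.

import java.util.HashMap;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class FixpointSubstitutionSketch {
    private static final Pattern REF = Pattern.compile("\\$\\{([^}]+)\\}");

    /** Expands ${name} references repeatedly until a pass performs no substitutions. */
    static String expand(String value, Map<String, String> vars) {
        int substitutions;
        do {
            substitutions = 0;
            Matcher m = REF.matcher(value);
            StringBuffer sb = new StringBuffer();
            while (m.find()) {
                String replacement = vars.get(m.group(1));
                if (replacement != null) {
                    m.appendReplacement(sb, Matcher.quoteReplacement(replacement));
                    substitutions++;
                } else {
                    // Unknown variable: leave the reference in place.
                    m.appendReplacement(sb, Matcher.quoteReplacement(m.group()));
                }
            }
            m.appendTail(sb);
            value = sb.toString();
        } while (substitutions > 0);
        return value;
    }

    public static void main(String[] args) {
        Map<String, String> vars = new HashMap<String, String>();
        vars.put("home", "/opt/app");
        vars.put("logDir", "${home}/logs");   // nested reference: needs a second pass
        System.out.println(expand("${logDir}/server.log", vars));  // /opt/app/logs/server.log
    }
}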
package cz.semecky.simplemilight.tools; import cz.semecky.simplemilight.core.Command; import cz.semecky.simplemilight.core.RGBW; import java.io.IOException; import java.util.Scanner; /** * Tool for scanning IP addresses in a local network to find which IP has the milight-hub. */ public class IpScanner { public static final String BASE_IP = "192.168.0."; public static final RGBW.Zone ZONE = RGBW.all; public static final int FROM_IP = 0; public static final int TO_IP = 255; public static void main(String[] args) throws IOException, InterruptedException { Scanner keyboardScanner = new Scanner(System.in); int fromIP = FROM_IP; int toIP = TO_IP; System.out.println("We will ask a couple of times whether you saw the light blinking."); while (fromIP != toIP) { int middlePoint = (fromIP + toIP) / 2; blinkLights(fromIP, middlePoint); System.out.print("Did the light blink? [Y | N]: "); String input = keyboardScanner.next(); if ("Y".equals(input.toUpperCase())) { toIP = middlePoint; } else if ("N".equals(input.toUpperCase())) { fromIP = middlePoint + 1; } else { System.out.println("Input was not understood, only enter 'Y' or 'N'. Start again."); return; } } assert fromIP == toIP; System.out.println("The IP address of the lights (if any) is '" + BASE_IP + toIP + "'."); rainbow(BASE_IP + toIP); } private static void blinkLights(int fromIP, int toIP) throws IOException, InterruptedException { scanAddressses(ZONE.off(), fromIP, toIP); Thread.sleep(500); scanAddressses(ZONE.on(), fromIP, toIP); } private static void rainbow(String ip) throws IOException, InterruptedException { ZONE.color(RGBW.COLOR_RED).send(ip); Thread.sleep(500); ZONE.color(RGBW.COLOR_GREEN).send(ip); Thread.sleep(500); ZONE.color(RGBW.COLOR_ROYAL_BLUE).send(ip); Thread.sleep(500); ZONE.white().send(ip); } private static void scanAddressses(Command command, int from, int to) throws IOException, InterruptedException { for (int i = from; i <= to; i++) { command.send(BASE_IP + i); } } }
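IpScanner above narrows the host byte by bisection: it blinks the lower half of the remaining range and asks whether the light reacted, halving the candidate set each round (at most 8 questions for 0-255). Below is a minimal sketch of the same narrowing logic with a simulated lamp instead of keyboard input and UDP commands; the real Command/RGBW classes are not used, and the "actual" address is a hypothetical placeholder.

public class BisectionSketch {
    public static void main(String[] args) {
        final int actualHostByte = 137;   // hypothetical device address, normally unknown
        int from = 0;
        int to = 255;
        int questions = 0;

        while (from != to) {
            int middle = (from + to) / 2;
            // "Blink" the range [from, middle]; the lamp reacts only if its address is inside.
            boolean blinked = actualHostByte >= from && actualHostByte <= middle;
            if (blinked) {
                to = middle;        // device is in the lower half
            } else {
                from = middle + 1;  // device is in the upper half
            }
            questions++;
        }
        System.out.println("Found 192.168.0." + from + " after " + questions + " questions");
    }
}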
package jsettlers.main; import java.util.LinkedList; import java.util.List; import jsettlers.common.ai.EWhatToDoAiType; import jsettlers.common.utils.collections.ChangingList; import jsettlers.graphics.startscreen.interfaces.ENetworkMessage; import jsettlers.graphics.startscreen.interfaces.IChatMessageListener; import jsettlers.graphics.startscreen.interfaces.IJoinPhaseMultiplayerGameConnector; import jsettlers.graphics.startscreen.interfaces.IJoiningGame; import jsettlers.graphics.startscreen.interfaces.IJoiningGameListener; import jsettlers.graphics.startscreen.interfaces.IMapDefinition; import jsettlers.graphics.startscreen.interfaces.IMultiplayerListener; import jsettlers.graphics.startscreen.interfaces.IMultiplayerPlayer; import jsettlers.graphics.startscreen.interfaces.IOpenMultiplayerGameInfo; import jsettlers.logic.map.save.MapList; import jsettlers.logic.map.save.loader.MapLoader; import jsettlers.logic.player.PlayerSetting; import jsettlers.main.datatypes.MultiplayerPlayer; import jsettlers.network.NetworkConstants; import jsettlers.network.client.interfaces.INetworkClient; import jsettlers.network.client.receiver.IPacketReceiver; import jsettlers.network.common.packets.ChatMessagePacket; import jsettlers.network.common.packets.MapInfoPacket; import jsettlers.network.common.packets.MatchInfoUpdatePacket; import jsettlers.network.common.packets.MatchStartPacket; import jsettlers.network.common.packets.PlayerInfoPacket; import jsettlers.network.infrastructure.channel.reject.RejectPacket; import jsettlers.network.server.match.EPlayerState; /** * * @author Andreas Eberle * */ public class MultiplayerGame { private final AsyncNetworkClientConnector networkClientFactory; private final ChangingList<IMultiplayerPlayer> playersList = new ChangingList<IMultiplayerPlayer>(); private INetworkClient networkClient; private IJoiningGameListener joiningGameListener; private IMultiplayerListener multiplayerListener; private IChatMessageListener chatMessageListener; private boolean iAmTheHost = false; public MultiplayerGame(AsyncNetworkClientConnector networkClientFactory) { this.networkClientFactory = networkClientFactory; } public IJoiningGame join(final String matchId) { new Thread("joinGameThread") { @Override public void run() { networkClient = networkClientFactory.getNetworkClient(); networkClient.joinMatch(matchId, generateMatchStartedListener(), generateMatchInfoUpdatedListener(), generateChatMessageReceiver()); } }.start(); return generateJoiningGame(); } public IJoiningGame openNewGame(final IOpenMultiplayerGameInfo gameInfo) { iAmTheHost = true; new Thread("openNewGameThread") { @Override public void run() { networkClient = networkClientFactory.getNetworkClient(); IMapDefinition mapDefintion = gameInfo.getMapDefinition(); MapInfoPacket mapInfo = new MapInfoPacket(mapDefintion.getMapId(), mapDefintion.getMapName(), "", "", mapDefintion.getMaxPlayers()); networkClient.openNewMatch(gameInfo.getMatchName(), gameInfo.getMaxPlayers(), mapInfo, 4711L, generateMatchStartedListener(), generateMatchInfoUpdatedListener(), generateChatMessageReceiver()); } }.start(); return generateJoiningGame(); } private IJoiningGame generateJoiningGame() { return new IJoiningGame() { @Override public void setListener(IJoiningGameListener joiningGameListener) { MultiplayerGame.this.joiningGameListener = joiningGameListener; if (joiningGameListener != null && networkClient != null && networkClient.getState() == EPlayerState.IN_MATCH) { joiningGameListener.gameJoined(generateJoinPhaseGameConnector()); } } @Override 
public void abort() { networkClient.leaveMatch(); } }; } private IPacketReceiver<ChatMessagePacket> generateChatMessageReceiver() { return new IPacketReceiver<ChatMessagePacket>() { @Override public void receivePacket(ChatMessagePacket packet) { if (chatMessageListener != null) { chatMessageListener.chatMessageReceived(packet.getAuthorId(), packet.getMessage()); } } }; } private IPacketReceiver<MatchStartPacket> generateMatchStartedListener() { return new IPacketReceiver<MatchStartPacket>() { @Override public void receivePacket(MatchStartPacket packet) { updatePlayersList(packet.getMatchInfo().getPlayers()); MapLoader mapLoader = MapList.getDefaultList().getMapById(packet.getMatchInfo().getMapInfo().getId()); long randomSeed = packet.getRandomSeed(); boolean[] availablePlayers = new boolean[mapLoader.getMaxPlayers()]; byte ownPlayerId = calculatePlayerInfos(availablePlayers); PlayerSetting[] playerSettings = determinePlayerSettings(availablePlayers); JSettlersGame game = new JSettlersGame(mapLoader, randomSeed, networkClient.getNetworkConnector(), ownPlayerId, playerSettings); multiplayerListener.gameIsStarting(game.start()); } }; } private PlayerSetting[] determinePlayerSettings(boolean[] availablePlayers) { PlayerSetting[] playerSettings = new PlayerSetting[availablePlayers.length]; byte i = 0; for (; i < playersList.getItems().size(); i++) { playerSettings[i] = new PlayerSetting(true); } EWhatToDoAiType aiType = iAmTheHost ? EWhatToDoAiType.ROMAN_VERY_HARD : null; for (; i < availablePlayers.length; i++) { playerSettings[i] = new PlayerSetting(true, aiType); } return playerSettings; } byte calculatePlayerInfos(boolean[] availablePlayers) { String myId = networkClient.getPlayerInfo().getId(); byte i = 0; byte ownPlayerId = -1; for (IMultiplayerPlayer currPlayer : playersList.getItems()) { availablePlayers[i] = true; if (currPlayer.getId().equals(myId)) { ownPlayerId = i; } i++; } for (byte ii = i; ii < availablePlayers.length; ii++) { availablePlayers[ii] = true; } if (ownPlayerId < 0) { throw new RuntimeException("Wasn't able to find my id!"); } else { return ownPlayerId; } } private IPacketReceiver<MatchInfoUpdatePacket> generateMatchInfoUpdatedListener() { return new IPacketReceiver<MatchInfoUpdatePacket>() { @Override public void receivePacket(MatchInfoUpdatePacket packet) { if (joiningGameListener != null) { joiningGameListener.gameJoined(generateJoinPhaseGameConnector()); joiningGameListener = null; } updatePlayersList(packet.getMatchInfo().getPlayers()); receiveSystemMessage(new MultiplayerPlayer(packet.getUpdatedPlayer()), getNetworkMessageById(packet.getUpdateReason())); } }; } void updatePlayersList(PlayerInfoPacket[] playerInfoPackets) { List<IMultiplayerPlayer> players = new LinkedList<IMultiplayerPlayer>(); for (PlayerInfoPacket playerInfoPacket : playerInfoPackets) { players.add(new MultiplayerPlayer(playerInfoPacket)); } playersList.setList(players); } private ENetworkMessage getNetworkMessageById(NetworkConstants.ENetworkMessage errorMessageId) { switch (errorMessageId) { case INVALID_STATE_ERROR: return ENetworkMessage.INVALID_STATE_ERROR; case NO_LISTENER_FOUND: return ENetworkMessage.UNKNOWN_ERROR; case NOT_ALL_PLAYERS_READY: return ENetworkMessage.NOT_ALL_PLAYERS_READY; case PLAYER_JOINED: return ENetworkMessage.PLAYER_JOINED; case PLAYER_LEFT: return ENetworkMessage.PLAYER_LEFT; case UNAUTHORIZED: return ENetworkMessage.UNAUTHORIZED; case READY_STATE_CHANGED: return ENetworkMessage.READY_STATE_CHANGED; case UNKNOWN_ERROR: default: return 
ENetworkMessage.UNKNOWN_ERROR; } } void receiveSystemMessage(IMultiplayerPlayer author, ENetworkMessage networkMessage) { if (chatMessageListener != null) { chatMessageListener.systemMessageReceived(author, networkMessage); } } private IJoinPhaseMultiplayerGameConnector generateJoinPhaseGameConnector() { networkClient.registerRejectReceiver(new IPacketReceiver<RejectPacket>() { @Override public void receivePacket(RejectPacket packet) { receiveSystemMessage(null, getNetworkMessageById(packet.getErrorMessageId())); System.out.println("Received reject packet: rejectedKey: " + packet.getRejectedKey() + " messageid: " + packet.getErrorMessageId()); } }); return new IJoinPhaseMultiplayerGameConnector() { @Override public void startGame() { networkClient.startMatch(); } @Override public void setReady(boolean ready) { networkClient.setReadyState(ready); } @Override public void setMultiplayerListener(IMultiplayerListener multiplayerListener) { MultiplayerGame.this.multiplayerListener = multiplayerListener; } @Override public ChangingList<IMultiplayerPlayer> getPlayers() { return playersList; } @Override public void abort() { networkClient.leaveMatch(); } @Override public void setChatListener(IChatMessageListener chatMessageListener) { MultiplayerGame.this.chatMessageListener = chatMessageListener; } @Override public void sendChatMessage(String chatMessage) { networkClient.sendChatMessage(chatMessage); } }; } }
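In MultiplayerGame above, determinePlayerSettings fills one slot per connected player first and then pads the remaining map slots, choosing a hard AI type only when this client is the host (non-hosts pass null and let the host decide). A minimal standalone sketch of that fill order, using strings in place of PlayerSetting so no jsettlers types are required; the slot counts are made-up example values.

import java.util.Arrays;

public class SlotFillSketch {
    public static void main(String[] args) {
        int connectedPlayers = 2;   // hypothetical lobby with two humans
        int mapSlots = 4;           // hypothetical map that supports four players
        boolean iAmTheHost = true;

        String[] slots = new String[mapSlots];
        int i = 0;
        for (; i < connectedPlayers; i++) {
            slots[i] = "human";                      // corresponds to new PlayerSetting(true)
        }
        String aiType = iAmTheHost ? "ROMAN_VERY_HARD" : "decided by host";
        for (; i < mapSlots; i++) {
            slots[i] = "AI: " + aiType;              // corresponds to new PlayerSetting(true, aiType)
        }
        System.out.println(Arrays.toString(slots));
        // [human, human, AI: ROMAN_VERY_HARD, AI: ROMAN_VERY_HARD]
    }
}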
package de.bwaldvogel.liblinear; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.Closeable; import java.io.EOFException; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.io.PrintStream; import java.io.Reader; import java.io.Writer; import java.nio.charset.Charset; import java.util.Formatter; import java.util.Locale; import java.util.Random; import java.util.regex.Pattern; public class Linear { static final Charset FILE_CHARSET = Charset.forName("ISO-8859-1"); static final Locale DEFAULT_LOCALE = Locale.ENGLISH; private static Object OUTPUT_MUTEX = new Object(); private static PrintStream DEBUG_OUTPUT = System.out; private static final long DEFAULT_RANDOM_SEED = 0L; static Random random = new Random(DEFAULT_RANDOM_SEED); /** * @param target predicted classes */ public static void crossValidation(Problem prob, Parameter param, int nr_fold, double[] target) { int i; int l = prob.l; int[] perm = new int[l]; if (nr_fold > l) { nr_fold = l; System.err.println("WARNING: # folds > # data. Will use # folds = # data instead (i.e., leave-one-out cross validation)"); } int[] fold_start = new int[nr_fold + 1]; for (i = 0; i < l; i++) perm[i] = i; for (i = 0; i < l; i++) { int j = i + random.nextInt(l - i); swap(perm, i, j); } for (i = 0; i <= nr_fold; i++) fold_start[i] = i * l / nr_fold; for (i = 0; i < nr_fold; i++) { int begin = fold_start[i]; int end = fold_start[i + 1]; int j, k; Problem subprob = new Problem(); subprob.bias = prob.bias; subprob.n = prob.n; subprob.l = l - (end - begin); subprob.x = new Feature[subprob.l][]; subprob.y = new double[subprob.l]; k = 0; for (j = 0; j < begin; j++) { subprob.x[k] = prob.x[perm[j]]; subprob.y[k] = prob.y[perm[j]]; ++k; } for (j = end; j < l; j++) { subprob.x[k] = prob.x[perm[j]]; subprob.y[k] = prob.y[perm[j]]; ++k; } Model submodel = train(subprob, param); for (j = begin; j < end; j++) target[perm[j]] = predict(submodel, prob.x[perm[j]]); } } /** used as complex return type */ private static class GroupClassesReturn { final int[] count; final int[] label; final int nr_class; final int[] start; GroupClassesReturn( int nr_class, int[] label, int[] start, int[] count ) { this.nr_class = nr_class; this.label = label; this.start = start; this.count = count; } } private static GroupClassesReturn groupClasses(Problem prob, int[] perm) { int l = prob.l; int max_nr_class = 16; int nr_class = 0; int[] label = new int[max_nr_class]; int[] count = new int[max_nr_class]; int[] data_label = new int[l]; int i; for (i = 0; i < l; i++) { int this_label = (int)prob.y[i]; int j; for (j = 0; j < nr_class; j++) { if (this_label == label[j]) { ++count[j]; break; } } data_label[i] = j; if (j == nr_class) { if (nr_class == max_nr_class) { max_nr_class *= 2; label = copyOf(label, max_nr_class); count = copyOf(count, max_nr_class); } label[nr_class] = this_label; count[nr_class] = 1; ++nr_class; } } // Labels are ordered by their first occurrence in the training set. // However, for two-class sets with -1/+1 labels and -1 appears first, // we swap labels to ensure that internally the binary SVM has positive data corresponding to the +1 instances. 
if (nr_class == 2 && label[0] == -1 && label[1] == 1) { swap(label, 0, 1); swap(count, 0, 1); for (i = 0; i < l; i++) { if (data_label[i] == 0) data_label[i] = 1; else data_label[i] = 0; } } int[] start = new int[nr_class]; start[0] = 0; for (i = 1; i < nr_class; i++) start[i] = start[i - 1] + count[i - 1]; for (i = 0; i < l; i++) { perm[start[data_label[i]]] = i; ++start[data_label[i]]; } start[0] = 0; for (i = 1; i < nr_class; i++) start[i] = start[i - 1] + count[i - 1]; return new GroupClassesReturn(nr_class, label, start, count); } static void info(String message) { synchronized (OUTPUT_MUTEX) { if (DEBUG_OUTPUT == null) return; DEBUG_OUTPUT.printf(message); DEBUG_OUTPUT.flush(); } } static void info(String format, Object... args) { synchronized (OUTPUT_MUTEX) { if (DEBUG_OUTPUT == null) return; DEBUG_OUTPUT.printf(format, args); DEBUG_OUTPUT.flush(); } } static double atof(String s) { if (s == null || s.length() < 1) throw new IllegalArgumentException("Can't convert empty string to integer"); double d = Double.parseDouble(s); if (Double.isNaN(d) || Double.isInfinite(d)) { throw new IllegalArgumentException("NaN or Infinity in input: " + s); } return (d); } static int atoi(String s) throws NumberFormatException { if (s == null || s.length() < 1) throw new IllegalArgumentException("Can't convert empty string to integer"); // Integer.parseInt doesn't accept '+' prefixed strings if (s.charAt(0) == '+') s = s.substring(1); return Integer.parseInt(s); } /** * Java5 'backport' of Arrays.copyOf */ public static double[] copyOf(double[] original, int newLength) { double[] copy = new double[newLength]; System.arraycopy(original, 0, copy, 0, Math.min(original.length, newLength)); return copy; } /** * Java5 'backport' of Arrays.copyOf */ public static int[] copyOf(int[] original, int newLength) { int[] copy = new int[newLength]; System.arraycopy(original, 0, copy, 0, Math.min(original.length, newLength)); return copy; } /** * Loads the model from inputReader. * It uses {@link java.util.Locale#ENGLISH} for number formatting. 
* * <p>Note: The inputReader is <b>NOT closed</b> after reading or in case of an exception.</p> */ public static Model loadModel(Reader inputReader) throws IOException { Model model = new Model(); model.label = null; Pattern whitespace = Pattern.compile("\\s+"); BufferedReader reader = null; if (inputReader instanceof BufferedReader) { reader = (BufferedReader)inputReader; } else { reader = new BufferedReader(inputReader); } String line = null; while ((line = reader.readLine()) != null) { String[] split = whitespace.split(line); if (split[0].equals("solver_type")) { SolverType solver = SolverType.valueOf(split[1]); if (solver == null) { throw new RuntimeException("unknown solver type"); } model.solverType = solver; } else if (split[0].equals("nr_class")) { model.nr_class = atoi(split[1]); Integer.parseInt(split[1]); } else if (split[0].equals("nr_feature")) { model.nr_feature = atoi(split[1]); } else if (split[0].equals("bias")) { model.bias = atof(split[1]); } else if (split[0].equals("w")) { break; } else if (split[0].equals("label")) { model.label = new int[model.nr_class]; for (int i = 0; i < model.nr_class; i++) { model.label[i] = atoi(split[i + 1]); } } else { throw new RuntimeException("unknown text in model file: [" + line + "]"); } } int w_size = model.nr_feature; if (model.bias >= 0) w_size++; int nr_w = model.nr_class; if (model.nr_class == 2 && model.solverType != SolverType.MCSVM_CS) nr_w = 1; model.w = new double[w_size * nr_w]; int[] buffer = new int[128]; for (int i = 0; i < w_size; i++) { for (int j = 0; j < nr_w; j++) { int b = 0; while (true) { int ch = reader.read(); if (ch == -1) { throw new EOFException("unexpected EOF"); } if (ch == ' ') { model.w[i * nr_w + j] = atof(new String(buffer, 0, b)); break; } else { buffer[b++] = ch; } } } } return model; } /** * Loads the model from the file with ISO-8859-1 charset. * It uses {@link java.util.Locale#ENGLISH} for number formatting. */ public static Model loadModel(File modelFile) throws IOException { BufferedReader inputReader = new BufferedReader(new InputStreamReader(new FileInputStream(modelFile), FILE_CHARSET)); try { return loadModel(inputReader); } finally { inputReader.close(); } } static void closeQuietly(Closeable c) { if (c == null) return; try { c.close(); } catch (Throwable t) {} } public static double predict(Model model, Feature[] x) { double[] dec_values = new double[model.nr_class]; return predictValues(model, x, dec_values); } public static double predictProbability(Model model, Feature[] x, double[] prob_estimates) throws IllegalArgumentException { if (!model.isProbabilityModel()) { StringBuilder sb = new StringBuilder("probability output is only supported for logistic regression"); sb.append(". This is currently only supported by the following solvers: "); int i = 0; for (SolverType solverType : SolverType.values()) { if (solverType.isLogisticRegressionSolver()) { if (i++ > 0) { sb.append(", "); } sb.append(solverType.name()); } } throw new IllegalArgumentException(sb.toString()); } int nr_class = model.nr_class; int nr_w; if (nr_class == 2) nr_w = 1; else nr_w = nr_class; double label = predictValues(model, x, prob_estimates); for (int i = 0; i < nr_w; i++) prob_estimates[i] = 1 / (1 + Math.exp(-prob_estimates[i])); if (nr_class == 2) // for binary classification prob_estimates[1] = 1. 
- prob_estimates[0]; else { double sum = 0; for (int i = 0; i < nr_class; i++) sum += prob_estimates[i]; for (int i = 0; i < nr_class; i++) prob_estimates[i] = prob_estimates[i] / sum; } return label; } public static double predictValues(Model model, Feature[] x, double[] dec_values) { int n; if (model.bias >= 0) n = model.nr_feature + 1; else n = model.nr_feature; double[] w = model.w; int nr_w; if (model.nr_class == 2 && model.solverType != SolverType.MCSVM_CS) nr_w = 1; else nr_w = model.nr_class; for (int i = 0; i < nr_w; i++) dec_values[i] = 0; for (Feature lx : x) { int idx = lx.getIndex(); // the dimension of testing data may exceed that of training if (idx <= n) { for (int i = 0; i < nr_w; i++) { dec_values[i] += w[(idx - 1) * nr_w + i] * lx.getValue(); } } } if (model.nr_class == 2) { if (model.solverType.isSupportVectorRegression()) return dec_values[0]; else return (dec_values[0] > 0) ? model.label[0] : model.label[1]; } else { int dec_max_idx = 0; for (int i = 1; i < model.nr_class; i++) { if (dec_values[i] > dec_values[dec_max_idx]) dec_max_idx = i; } return model.label[dec_max_idx]; } } static void printf(Formatter formatter, String format, Object... args) throws IOException { formatter.format(format, args); IOException ioException = formatter.ioException(); if (ioException != null) throw ioException; } /** * Writes the model to the modelOutput. * It uses {@link java.util.Locale#ENGLISH} for number formatting. * * <p><b>Note: The modelOutput is closed after reading or in case of an exception.</b></p> */ public static void saveModel(Writer modelOutput, Model model) throws IOException { int nr_feature = model.nr_feature; int w_size = nr_feature; if (model.bias >= 0) w_size++; int nr_w = model.nr_class; if (model.nr_class == 2 && model.solverType != SolverType.MCSVM_CS) nr_w = 1; Formatter formatter = new Formatter(modelOutput, DEFAULT_LOCALE); try { printf(formatter, "solver_type %s\n", model.solverType.name()); printf(formatter, "nr_class %d\n", model.nr_class); if (model.label != null) { printf(formatter, "label"); for (int i = 0; i < model.nr_class; i++) { printf(formatter, " %d", model.label[i]); } printf(formatter, "\n"); } printf(formatter, "nr_feature %d\n", nr_feature); printf(formatter, "bias %.16g\n", model.bias); printf(formatter, "w\n"); for (int i = 0; i < w_size; i++) { for (int j = 0; j < nr_w; j++) { double value = model.w[i * nr_w + j]; /** this optimization is the reason for {@link Model#equals(double[], double[])} */ if (value == 0.0) { printf(formatter, "%d ", 0); } else { printf(formatter, "%.16g ", value); } } printf(formatter, "\n"); } formatter.flush(); IOException ioException = formatter.ioException(); if (ioException != null) throw ioException; } finally { formatter.close(); } } /** * Writes the model to the file with ISO-8859-1 charset. * It uses {@link java.util.Locale#ENGLISH} for number formatting. */ public static void saveModel(File modelFile, Model model) throws IOException { BufferedWriter modelOutput = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(modelFile), FILE_CHARSET)); saveModel(modelOutput, model); } private static int GETI(byte[] y, int i) { return y[i] + 1; } /** * A coordinate descent algorithm for * L1-loss and L2-loss SVM dual problems *<pre> * min_\alpha 0.5(\alpha^T (Q + D)\alpha) - e^T \alpha, * s.t. 
0 <= \alpha_i <= upper_bound_i, * * where Qij = yi yj xi^T xj and * D is a diagonal matrix * * In L1-SVM case: * upper_bound_i = Cp if y_i = 1 * upper_bound_i = Cn if y_i = -1 * D_ii = 0 * In L2-SVM case: * upper_bound_i = INF * D_ii = 1/(2*Cp) if y_i = 1 * D_ii = 1/(2*Cn) if y_i = -1 * * Given: * x, y, Cp, Cn * eps is the stopping tolerance * * solution will be put in w * * See Algorithm 3 of Hsieh et al., ICML 2008 *</pre> */ private static void solve_l2r_l1l2_svc(Problem prob, double[] w, double eps, double Cp, double Cn, SolverType solver_type) { int l = prob.l; int w_size = prob.n; int i, s, iter = 0; double C, d, G; double[] QD = new double[l]; int max_iter = 1000; int[] index = new int[l]; double[] alpha = new double[l]; byte[] y = new byte[l]; int active_size = l; // PG: projected gradient, for shrinking and stopping double PG; double PGmax_old = Double.POSITIVE_INFINITY; double PGmin_old = Double.NEGATIVE_INFINITY; double PGmax_new, PGmin_new; // default solver_type: L2R_L2LOSS_SVC_DUAL double diag[] = new double[] {0.5 / Cn, 0, 0.5 / Cp}; double upper_bound[] = new double[] {Double.POSITIVE_INFINITY, 0, Double.POSITIVE_INFINITY}; if (solver_type == SolverType.L2R_L1LOSS_SVC_DUAL) { diag[0] = 0; diag[2] = 0; upper_bound[0] = Cn; upper_bound[2] = Cp; } for (i = 0; i < l; i++) { if (prob.y[i] > 0) { y[i] = +1; } else { y[i] = -1; } } // Initial alpha can be set here. Note that // 0 <= alpha[i] <= upper_bound[GETI(i)] for (i = 0; i < l; i++) alpha[i] = 0; for (i = 0; i < w_size; i++) w[i] = 0; for (i = 0; i < l; i++) { QD[i] = diag[GETI(y, i)]; for (Feature xi : prob.x[i]) { double val = xi.getValue(); QD[i] += val * val; w[xi.getIndex() - 1] += y[i] * alpha[i] * val; } index[i] = i; } while (iter < max_iter) { PGmax_new = Double.NEGATIVE_INFINITY; PGmin_new = Double.POSITIVE_INFINITY; for (i = 0; i < active_size; i++) { int j = i + random.nextInt(active_size - i); swap(index, i, j); } for (s = 0; s < active_size; s++) { i = index[s]; G = 0; byte yi = y[i]; for (Feature xi : prob.x[i]) { G += w[xi.getIndex() - 1] * xi.getValue(); } G = G * yi - 1; C = upper_bound[GETI(y, i)]; G += alpha[i] * diag[GETI(y, i)]; PG = 0; if (alpha[i] == 0) { if (G > PGmax_old) { active_size swap(index, s, active_size); s continue; } else if (G < 0) { PG = G; } } else if (alpha[i] == C) { if (G < PGmin_old) { active_size swap(index, s, active_size); s continue; } else if (G > 0) { PG = G; } } else { PG = G; } PGmax_new = Math.max(PGmax_new, PG); PGmin_new = Math.min(PGmin_new, PG); if (Math.abs(PG) > 1.0e-12) { double alpha_old = alpha[i]; alpha[i] = Math.min(Math.max(alpha[i] - G / QD[i], 0.0), C); d = (alpha[i] - alpha_old) * yi; for (Feature xi : prob.x[i]) { w[xi.getIndex() - 1] += d * xi.getValue(); } } } iter++; if (iter % 10 == 0) info("."); if (PGmax_new - PGmin_new <= eps) { if (active_size == l) break; else { active_size = l; info("*"); PGmax_old = Double.POSITIVE_INFINITY; PGmin_old = Double.NEGATIVE_INFINITY; continue; } } PGmax_old = PGmax_new; PGmin_old = PGmin_new; if (PGmax_old <= 0) PGmax_old = Double.POSITIVE_INFINITY; if (PGmin_old >= 0) PGmin_old = Double.NEGATIVE_INFINITY; } info("%noptimization finished, #iter = %d%n", iter); if (iter >= max_iter) info("%nWARNING: reaching max number of iterations%nUsing -s 2 may be faster (also see FAQ)%n%n"); // calculate objective value double v = 0; int nSV = 0; for (i = 0; i < w_size; i++) v += w[i] * w[i]; for (i = 0; i < l; i++) { v += alpha[i] * (alpha[i] * diag[GETI(y, i)] - 2); if (alpha[i] > 0) ++nSV; } info("Objective value = %g%n", v / 
2); info("nSV = %d%n", nSV); } // To support weights for instances, use GETI(i) (i) private static int GETI_SVR(int i) { return 0; } /** * A coordinate descent algorithm for * L1-loss and L2-loss epsilon-SVR dual problem * * min_\beta 0.5\beta^T (Q + diag(lambda)) \beta - p \sum_{i=1}^l|\beta_i| + \sum_{i=1}^l yi\beta_i, * s.t. -upper_bound_i <= \beta_i <= upper_bound_i, * * where Qij = xi^T xj and * D is a diagonal matrix * * In L1-SVM case: * upper_bound_i = C * lambda_i = 0 * In L2-SVM case: * upper_bound_i = INF * lambda_i = 1/(2*C) * * Given: * x, y, p, C * eps is the stopping tolerance * * solution will be put in w * * See Algorithm 4 of Ho and Lin, 2012 */ private static void solve_l2r_l1l2_svr(Problem prob, double[] w, Parameter param) { int l = prob.l; double C = param.C; double p = param.p; int w_size = prob.n; double eps = param.eps; int i, s, iter = 0; int max_iter = param.getMaxIters(); int active_size = l; int[] index = new int[l]; double d, G, H; double Gmax_old = Double.POSITIVE_INFINITY; double Gmax_new, Gnorm1_new; double Gnorm1_init= -1.0; // Gnorm1_init is initialized at the first iteration double[] beta = new double[l]; double[] QD = new double[l]; double[] y = prob.y; // L2R_L2LOSS_SVR_DUAL double[] lambda = new double[] {0.5 / C}; double[] upper_bound = new double[] {Double.POSITIVE_INFINITY}; if (param.solverType == SolverType.L2R_L1LOSS_SVR_DUAL) { lambda[0] = 0; upper_bound[0] = C; } // Initial beta can be set here. Note that // -upper_bound <= beta[i] <= upper_bound for (i = 0; i < l; i++) beta[i] = 0; for (i = 0; i < w_size; i++) w[i] = 0; for (i = 0; i < l; i++) { QD[i] = 0; for (Feature xi : prob.x[i]) { double val = xi.getValue(); QD[i] += val * val; w[xi.getIndex() - 1] += beta[i] * val; } index[i] = i; } while (iter < max_iter) { Gmax_new = 0; Gnorm1_new = 0; for (i = 0; i < active_size; i++) { int j = i + random.nextInt(active_size - i); swap(index, i, j); } for (s = 0; s < active_size; s++) { i = index[s]; G = -y[i] + lambda[GETI_SVR(i)] * beta[i]; H = QD[i] + lambda[GETI_SVR(i)]; for (Feature xi : prob.x[i]) { int ind = xi.getIndex() - 1; double val = xi.getValue(); G += val * w[ind]; } double Gp = G + p; double Gn = G - p; double violation = 0; if (beta[i] == 0) { if (Gp < 0) violation = -Gp; else if (Gn > 0) violation = Gn; else if (Gp > Gmax_old && Gn < -Gmax_old) { active_size swap(index, s, active_size); s continue; } } else if (beta[i] >= upper_bound[GETI_SVR(i)]) { if (Gp > 0) violation = Gp; else if (Gp < -Gmax_old) { active_size swap(index, s, active_size); s continue; } } else if (beta[i] <= -upper_bound[GETI_SVR(i)]) { if (Gn < 0) violation = -Gn; else if (Gn > Gmax_old) { active_size swap(index, s, active_size); s continue; } } else if (beta[i] > 0) violation = Math.abs(Gp); else violation = Math.abs(Gn); Gmax_new = Math.max(Gmax_new, violation); Gnorm1_new += violation; // obtain Newton direction d if (Gp < H * beta[i]) d = -Gp / H; else if (Gn > H * beta[i]) d = -Gn / H; else d = -beta[i]; if (Math.abs(d) < 1.0e-12) continue; double beta_old = beta[i]; beta[i] = Math.min(Math.max(beta[i] + d, -upper_bound[GETI_SVR(i)]), upper_bound[GETI_SVR(i)]); d = beta[i] - beta_old; if (d != 0) { for (Feature xi : prob.x[i]) { w[xi.getIndex() - 1] += d * xi.getValue(); } } } if (iter == 0) Gnorm1_init = Gnorm1_new; iter++; if (iter % 10 == 0) info("."); if (Gnorm1_new <= eps * Gnorm1_init) { if (active_size == l) break; else { active_size = l; info("*"); Gmax_old = Double.POSITIVE_INFINITY; continue; } } Gmax_old = Gmax_new; } info("%noptimization 
finished, #iter = %d%n", iter); if (iter >= max_iter) info("%nWARNING: reaching max number of iterations%nUsing -s 11 may be faster%n%n"); // calculate objective value double v = 0; int nSV = 0; for (i = 0; i < w_size; i++) v += w[i] * w[i]; v = 0.5 * v; for (i = 0; i < l; i++) { v += p * Math.abs(beta[i]) - y[i] * beta[i] + 0.5 * lambda[GETI_SVR(i)] * beta[i] * beta[i]; if (beta[i] != 0) nSV++; } info("Objective value = %g%n", v); info("nSV = %d%n", nSV); } /** * A coordinate descent algorithm for * the dual of L2-regularized logistic regression problems *<pre> * min_\alpha 0.5(\alpha^T Q \alpha) + \sum \alpha_i log (\alpha_i) + (upper_bound_i - \alpha_i) log (upper_bound_i - \alpha_i) , * s.t. 0 <= \alpha_i <= upper_bound_i, * * where Qij = yi yj xi^T xj and * upper_bound_i = Cp if y_i = 1 * upper_bound_i = Cn if y_i = -1 * * Given: * x, y, Cp, Cn * eps is the stopping tolerance * * solution will be put in w * * See Algorithm 5 of Yu et al., MLJ 2010 *</pre> * * @since 1.7 */ private static void solve_l2r_lr_dual(Problem prob, double w[], double eps, double Cp, double Cn) { int l = prob.l; int w_size = prob.n; int i, s, iter = 0; double xTx[] = new double[l]; int max_iter = 1000; int index[] = new int[l]; double alpha[] = new double[2 * l]; // store alpha and C - alpha byte y[] = new byte[l]; int max_inner_iter = 100; // for inner Newton double innereps = 1e-2; double innereps_min = Math.min(1e-8, eps); double upper_bound[] = new double[] {Cn, 0, Cp}; for (i = 0; i < l; i++) { if (prob.y[i] > 0) { y[i] = +1; } else { y[i] = -1; } } // Initial alpha can be set here. Note that // 0 < alpha[i] < upper_bound[GETI(i)] // alpha[2*i] + alpha[2*i+1] = upper_bound[GETI(i)] for (i = 0; i < l; i++) { alpha[2 * i] = Math.min(0.001 * upper_bound[GETI(y, i)], 1e-8); alpha[2 * i + 1] = upper_bound[GETI(y, i)] - alpha[2 * i]; } for (i = 0; i < w_size; i++) w[i] = 0; for (i = 0; i < l; i++) { xTx[i] = 0; for (Feature xi : prob.x[i]) { double val = xi.getValue(); xTx[i] += val * val; w[xi.getIndex() - 1] += y[i] * alpha[2 * i] * val; } index[i] = i; } while (iter < max_iter) { for (i = 0; i < l; i++) { int j = i + random.nextInt(l - i); swap(index, i, j); } int newton_iter = 0; double Gmax = 0; for (s = 0; s < l; s++) { i = index[s]; byte yi = y[i]; double C = upper_bound[GETI(y, i)]; double ywTx = 0, xisq = xTx[i]; for (Feature xi : prob.x[i]) { ywTx += w[xi.getIndex() - 1] * xi.getValue(); } ywTx *= y[i]; double a = xisq, b = ywTx; // Decide to minimize g_1(z) or g_2(z) int ind1 = 2 * i, ind2 = 2 * i + 1, sign = 1; if (0.5 * a * (alpha[ind2] - alpha[ind1]) + b < 0) { ind1 = 2 * i + 1; ind2 = 2 * i; sign = -1; } // g_t(z) = z*log(z) + (C-z)*log(C-z) + 0.5a(z-alpha_old)^2 + sign*b(z-alpha_old) double alpha_old = alpha[ind1]; double z = alpha_old; if (C - z < 0.5 * C) z = 0.1 * z; double gp = a * (z - alpha_old) + sign * b + Math.log(z / (C - z)); Gmax = Math.max(Gmax, Math.abs(gp)); // Newton method on the sub-problem final double eta = 0.1; // xi in the paper int inner_iter = 0; while (inner_iter <= max_inner_iter) { if (Math.abs(gp) < innereps) break; double gpp = a + C / (C - z) / z; double tmpz = z - gp / gpp; if (tmpz <= 0) z *= eta; else // tmpz in (0, C) z = tmpz; gp = a * (z - alpha_old) + sign * b + Math.log(z / (C - z)); newton_iter++; inner_iter++; } if (inner_iter > 0) // update w { alpha[ind1] = z; alpha[ind2] = C - z; for (Feature xi : prob.x[i]) { w[xi.getIndex() - 1] += sign * (z - alpha_old) * yi * xi.getValue(); } } } iter++; if (iter % 10 == 0) info("."); if (Gmax < eps) break; if 
(newton_iter <= l / 10) { innereps = Math.max(innereps_min, 0.1 * innereps); } } info("%noptimization finished, #iter = %d%n", iter); if (iter >= max_iter) info("%nWARNING: reaching max number of iterations%nUsing -s 0 may be faster (also see FAQ)%n%n"); // calculate objective value double v = 0; for (i = 0; i < w_size; i++) v += w[i] * w[i]; v *= 0.5; for (i = 0; i < l; i++) v += alpha[2 * i] * Math.log(alpha[2 * i]) + alpha[2 * i + 1] * Math.log(alpha[2 * i + 1]) - upper_bound[GETI(y, i)] * Math.log(upper_bound[GETI(y, i)]); info("Objective value = %g%n", v); } /** * A coordinate descent algorithm for * L1-regularized L2-loss support vector classification * *<pre> * min_w \sum |wj| + C \sum max(0, 1-yi w^T xi)^2, * * Given: * x, y, Cp, Cn * eps is the stopping tolerance * * solution will be put in w * * See Yuan et al. (2010) and appendix of LIBLINEAR paper, Fan et al. (2008) *</pre> * * @since 1.5 */ private static void solve_l1r_l2_svc(Problem prob_col, double[] w, double eps, double Cp, double Cn) { int l = prob_col.l; int w_size = prob_col.n; int j, s, iter = 0; int max_iter = 1000; int active_size = w_size; int max_num_linesearch = 20; double sigma = 0.01; double d, G_loss, G, H; double Gmax_old = Double.POSITIVE_INFINITY; double Gmax_new, Gnorm1_new; double Gnorm1_init = -1.0; // Gnorm1_init is initialized at the first iteration double d_old, d_diff; double loss_old = 0; // eclipse moans this variable might not be initialized double loss_new; double appxcond, cond; int[] index = new int[w_size]; byte[] y = new byte[l]; double[] b = new double[l]; // b = 1-ywTx double[] xj_sq = new double[w_size]; double[] C = new double[] {Cn, 0, Cp}; // Initial w can be set here. for (j = 0; j < w_size; j++) w[j] = 0; for (j = 0; j < l; j++) { b[j] = 1; if (prob_col.y[j] > 0) y[j] = 1; else y[j] = -1; } for (j = 0; j < w_size; j++) { index[j] = j; xj_sq[j] = 0; for (Feature xi : prob_col.x[j]) { int ind = xi.getIndex() - 1; xi.setValue(xi.getValue() * y[ind]); // x->value stores yi*xij double val = xi.getValue(); b[ind] -= w[j] * val; xj_sq[j] += C[GETI(y, ind)] * val * val; } } while (iter < max_iter) { Gmax_new = 0; Gnorm1_new = 0; for (j = 0; j < active_size; j++) { int i = j + random.nextInt(active_size - j); swap(index, i, j); } for (s = 0; s < active_size; s++) { j = index[s]; G_loss = 0; H = 0; for (Feature xi : prob_col.x[j]) { int ind = xi.getIndex() - 1; if (b[ind] > 0) { double val = xi.getValue(); double tmp = C[GETI(y, ind)] * val; G_loss -= tmp * b[ind]; H += tmp * val; } } G_loss *= 2; G = G_loss; H *= 2; H = Math.max(H, 1e-12); double Gp = G + 1; double Gn = G - 1; double violation = 0; if (w[j] == 0) { if (Gp < 0) violation = -Gp; else if (Gn > 0) violation = Gn; else if (Gp > Gmax_old / l && Gn < -Gmax_old / l) { active_size swap(index, s, active_size); s continue; } } else if (w[j] > 0) violation = Math.abs(Gp); else violation = Math.abs(Gn); Gmax_new = Math.max(Gmax_new, violation); Gnorm1_new += violation; // obtain Newton direction d if (Gp < H * w[j]) d = -Gp / H; else if (Gn > H * w[j]) d = -Gn / H; else d = -w[j]; if (Math.abs(d) < 1.0e-12) continue; double delta = Math.abs(w[j] + d) - Math.abs(w[j]) + G * d; d_old = 0; int num_linesearch; for (num_linesearch = 0; num_linesearch < max_num_linesearch; num_linesearch++) { d_diff = d_old - d; cond = Math.abs(w[j] + d) - Math.abs(w[j]) - sigma * delta; appxcond = xj_sq[j] * d * d + G_loss * d + cond; if (appxcond <= 0) { for (Feature x : prob_col.x[j]) { b[x.getIndex() - 1] += d_diff * x.getValue(); } break; } if 
(num_linesearch == 0) { loss_old = 0; loss_new = 0; for (Feature x : prob_col.x[j]) { int ind = x.getIndex() - 1; if (b[ind] > 0) { loss_old += C[GETI(y, ind)] * b[ind] * b[ind]; } double b_new = b[ind] + d_diff * x.getValue(); b[ind] = b_new; if (b_new > 0) { loss_new += C[GETI(y, ind)] * b_new * b_new; } } } else { loss_new = 0; for (Feature x : prob_col.x[j]) { int ind = x.getIndex() - 1; double b_new = b[ind] + d_diff * x.getValue(); b[ind] = b_new; if (b_new > 0) { loss_new += C[GETI(y, ind)] * b_new * b_new; } } } cond = cond + loss_new - loss_old; if (cond <= 0) break; else { d_old = d; d *= 0.5; delta *= 0.5; } } w[j] += d; // recompute b[] if line search takes too many steps if (num_linesearch >= max_num_linesearch) { info(" for (int i = 0; i < l; i++) b[i] = 1; for (int i = 0; i < w_size; i++) { if (w[i] == 0) continue; for (Feature x : prob_col.x[i]) { b[x.getIndex() - 1] -= w[i] * x.getValue(); } } } } if (iter == 0) { Gnorm1_init = Gnorm1_new; } iter++; if (iter % 10 == 0) info("."); if (Gmax_new <= eps * Gnorm1_init) { if (active_size == w_size) break; else { active_size = w_size; info("*"); Gmax_old = Double.POSITIVE_INFINITY; continue; } } Gmax_old = Gmax_new; } info("%noptimization finished, #iter = %d%n", iter); if (iter >= max_iter) info("%nWARNING: reaching max number of iterations%n"); // calculate objective value double v = 0; int nnz = 0; for (j = 0; j < w_size; j++) { for (Feature x : prob_col.x[j]) { x.setValue(x.getValue() * prob_col.y[x.getIndex() - 1]); // restore x->value } if (w[j] != 0) { v += Math.abs(w[j]); nnz++; } } for (j = 0; j < l; j++) if (b[j] > 0) v += C[GETI(y, j)] * b[j] * b[j]; info("Objective value = %g%n", v); info("#nonzeros/#features = %d/%d%n", nnz, w_size); } /** * A coordinate descent algorithm for * L1-regularized logistic regression problems * *<pre> * min_w \sum |wj| + C \sum log(1+exp(-yi w^T xi)), * * Given: * x, y, Cp, Cn * eps is the stopping tolerance * * solution will be put in w * * See Yuan et al. (2011) and appendix of LIBLINEAR paper, Fan et al. (2008) *</pre> * * @since 1.5 */ private static void solve_l1r_lr(Problem prob_col, double[] w, double eps, double Cp, double Cn) { int l = prob_col.l; int w_size = prob_col.n; int j, s, newton_iter = 0, iter = 0; int max_newton_iter = 100; int max_iter = 1000; int max_num_linesearch = 20; int active_size; int QP_active_size; double nu = 1e-12; double inner_eps = 1; double sigma = 0.01; double w_norm, w_norm_new; double z, G, H; double Gnorm1_init = -1.0; // Gnorm1_init is initialized at the first iteration double Gmax_old = Double.POSITIVE_INFINITY; double Gmax_new, Gnorm1_new; double QP_Gmax_old = Double.POSITIVE_INFINITY; double QP_Gmax_new, QP_Gnorm1_new; double delta, negsum_xTd, cond; int[] index = new int[w_size]; byte[] y = new byte[l]; double[] Hdiag = new double[w_size]; double[] Grad = new double[w_size]; double[] wpd = new double[w_size]; double[] xjneg_sum = new double[w_size]; double[] xTd = new double[l]; double[] exp_wTx = new double[l]; double[] exp_wTx_new = new double[l]; double[] tau = new double[l]; double[] D = new double[l]; double[] C = {Cn, 0, Cp}; // Initial w can be set here. 
for (j = 0; j < w_size; j++) w[j] = 0; for (j = 0; j < l; j++) { if (prob_col.y[j] > 0) y[j] = 1; else y[j] = -1; exp_wTx[j] = 0; } w_norm = 0; for (j = 0; j < w_size; j++) { w_norm += Math.abs(w[j]); wpd[j] = w[j]; index[j] = j; xjneg_sum[j] = 0; for (Feature x : prob_col.x[j]) { int ind = x.getIndex() - 1; double val = x.getValue(); exp_wTx[ind] += w[j] * val; if (y[ind] == -1) { xjneg_sum[j] += C[GETI(y, ind)] * val; } } } for (j = 0; j < l; j++) { exp_wTx[j] = Math.exp(exp_wTx[j]); double tau_tmp = 1 / (1 + exp_wTx[j]); tau[j] = C[GETI(y, j)] * tau_tmp; D[j] = C[GETI(y, j)] * exp_wTx[j] * tau_tmp * tau_tmp; } while (newton_iter < max_newton_iter) { Gmax_new = 0; Gnorm1_new = 0; active_size = w_size; for (s = 0; s < active_size; s++) { j = index[s]; Hdiag[j] = nu; Grad[j] = 0; double tmp = 0; for (Feature x : prob_col.x[j]) { int ind = x.getIndex() - 1; Hdiag[j] += x.getValue() * x.getValue() * D[ind]; tmp += x.getValue() * tau[ind]; } Grad[j] = -tmp + xjneg_sum[j]; double Gp = Grad[j] + 1; double Gn = Grad[j] - 1; double violation = 0; if (w[j] == 0) { if (Gp < 0) violation = -Gp; else if (Gn > 0) violation = Gn; //outer-level shrinking else if (Gp > Gmax_old / l && Gn < -Gmax_old / l) { active_size swap(index, s, active_size); s continue; } } else if (w[j] > 0) violation = Math.abs(Gp); else violation = Math.abs(Gn); Gmax_new = Math.max(Gmax_new, violation); Gnorm1_new += violation; } if (newton_iter == 0) Gnorm1_init = Gnorm1_new; if (Gnorm1_new <= eps * Gnorm1_init) break; iter = 0; QP_Gmax_old = Double.POSITIVE_INFINITY; QP_active_size = active_size; for (int i = 0; i < l; i++) xTd[i] = 0; // optimize QP over wpd while (iter < max_iter) { QP_Gmax_new = 0; QP_Gnorm1_new = 0; for (j = 0; j < QP_active_size; j++) { int i = random.nextInt(QP_active_size - j); swap(index, i, j); } for (s = 0; s < QP_active_size; s++) { j = index[s]; H = Hdiag[j]; G = Grad[j] + (wpd[j] - w[j]) * nu; for (Feature x : prob_col.x[j]) { int ind = x.getIndex() - 1; G += x.getValue() * D[ind] * xTd[ind]; } double Gp = G + 1; double Gn = G - 1; double violation = 0; if (wpd[j] == 0) { if (Gp < 0) violation = -Gp; else if (Gn > 0) violation = Gn; //inner-level shrinking else if (Gp > QP_Gmax_old / l && Gn < -QP_Gmax_old / l) { QP_active_size swap(index, s, QP_active_size); s continue; } } else if (wpd[j] > 0) violation = Math.abs(Gp); else violation = Math.abs(Gn); QP_Gmax_new = Math.max(QP_Gmax_new, violation); QP_Gnorm1_new += violation; // obtain solution of one-variable problem if (Gp < H * wpd[j]) z = -Gp / H; else if (Gn > H * wpd[j]) z = -Gn / H; else z = -wpd[j]; if (Math.abs(z) < 1.0e-12) continue; z = Math.min(Math.max(z, -10.0), 10.0); wpd[j] += z; for (Feature x : prob_col.x[j]) { int ind = x.getIndex() - 1; xTd[ind] += x.getValue() * z; } } iter++; if (QP_Gnorm1_new <= inner_eps * Gnorm1_init) { //inner stopping if (QP_active_size == active_size) break; //active set reactivation else { QP_active_size = active_size; QP_Gmax_old = Double.POSITIVE_INFINITY; continue; } } QP_Gmax_old = QP_Gmax_new; } if (iter >= max_iter) info("WARNING: reaching max number of inner iterations%n"); delta = 0; w_norm_new = 0; for (j = 0; j < w_size; j++) { delta += Grad[j] * (wpd[j] - w[j]); if (wpd[j] != 0) w_norm_new += Math.abs(wpd[j]); } delta += (w_norm_new - w_norm); negsum_xTd = 0; for (int i = 0; i < l; i++) if (y[i] == -1) negsum_xTd += C[GETI(y, i)] * xTd[i]; int num_linesearch; for (num_linesearch = 0; num_linesearch < max_num_linesearch; num_linesearch++) { cond = w_norm_new - w_norm + negsum_xTd - sigma * 
delta; for (int i = 0; i < l; i++) { double exp_xTd = Math.exp(xTd[i]); exp_wTx_new[i] = exp_wTx[i] * exp_xTd; cond += C[GETI(y, i)] * Math.log((1 + exp_wTx_new[i]) / (exp_xTd + exp_wTx_new[i])); } if (cond <= 0) { w_norm = w_norm_new; for (j = 0; j < w_size; j++) w[j] = wpd[j]; for (int i = 0; i < l; i++) { exp_wTx[i] = exp_wTx_new[i]; double tau_tmp = 1 / (1 + exp_wTx[i]); tau[i] = C[GETI(y, i)] * tau_tmp; D[i] = C[GETI(y, i)] * exp_wTx[i] * tau_tmp * tau_tmp; } break; } else { w_norm_new = 0; for (j = 0; j < w_size; j++) { wpd[j] = (w[j] + wpd[j]) * 0.5; if (wpd[j] != 0) w_norm_new += Math.abs(wpd[j]); } delta *= 0.5; negsum_xTd *= 0.5; for (int i = 0; i < l; i++) xTd[i] *= 0.5; } } // Recompute some info due to too many line search steps if (num_linesearch >= max_num_linesearch) { for (int i = 0; i < l; i++) exp_wTx[i] = 0; for (int i = 0; i < w_size; i++) { if (w[i] == 0) continue; for (Feature x : prob_col.x[i]) { exp_wTx[x.getIndex() - 1] += w[i] * x.getValue(); } } for (int i = 0; i < l; i++) exp_wTx[i] = Math.exp(exp_wTx[i]); } if (iter == 1) inner_eps *= 0.25; newton_iter++; Gmax_old = Gmax_new; info("iter %3d #CD cycles %d%n", newton_iter, iter); } info("=========================%n"); info("optimization finished, #iter = %d%n", newton_iter); if (newton_iter >= max_newton_iter) info("WARNING: reaching max number of iterations%n"); // calculate objective value double v = 0; int nnz = 0; for (j = 0; j < w_size; j++) if (w[j] != 0) { v += Math.abs(w[j]); nnz++; } for (j = 0; j < l; j++) if (y[j] == 1) v += C[GETI(y, j)] * Math.log(1 + 1 / exp_wTx[j]); else v += C[GETI(y, j)] * Math.log(1 + exp_wTx[j]); info("Objective value = %g%n", v); info("#nonzeros/#features = %d/%d%n", nnz, w_size); } // transpose matrix X from row format to column format static Problem transpose(Problem prob) { int l = prob.l; int n = prob.n; int[] col_ptr = new int[n + 1]; Problem prob_col = new Problem(); prob_col.l = l; prob_col.n = n; prob_col.y = new double[l]; prob_col.x = new Feature[n][]; for (int i = 0; i < l; i++) prob_col.y[i] = prob.y[i]; for (int i = 0; i < l; i++) { for (Feature x : prob.x[i]) { col_ptr[x.getIndex()]++; } } for (int i = 0; i < n; i++) { prob_col.x[i] = new Feature[col_ptr[i + 1]]; col_ptr[i] = 0; // reuse the array to count the nr of elements } for (int i = 0; i < l; i++) { for (int j = 0; j < prob.x[i].length; j++) { Feature x = prob.x[i][j]; int index = x.getIndex() - 1; prob_col.x[index][col_ptr[index]] = new FeatureNode(i + 1, x.getValue()); col_ptr[index]++; } } return prob_col; } static void swap(double[] array, int idxA, int idxB) { double temp = array[idxA]; array[idxA] = array[idxB]; array[idxB] = temp; } static void swap(int[] array, int idxA, int idxB) { int temp = array[idxA]; array[idxA] = array[idxB]; array[idxB] = temp; } static void swap(IntArrayPointer array, int idxA, int idxB) { int temp = array.get(idxA); array.set(idxA, array.get(idxB)); array.set(idxB, temp); } public static Model train(Problem prob, Parameter param) { if (prob == null) throw new IllegalArgumentException("problem must not be null"); if (param == null) throw new IllegalArgumentException("parameter must not be null"); if (prob.n == 0) throw new IllegalArgumentException("problem has zero features"); if (prob.l == 0) throw new IllegalArgumentException("problem has zero instances"); for (Feature[] nodes : prob.x) { int indexBefore = 0; for (Feature n : nodes) { if (n.getIndex() <= indexBefore) { throw new IllegalArgumentException("feature nodes must be sorted by index in ascending order"); } 
indexBefore = n.getIndex(); } } int l = prob.l; int n = prob.n; int w_size = prob.n; Model model = new Model(); if (prob.bias >= 0) model.nr_feature = n - 1; else model.nr_feature = n; model.solverType = param.solverType; model.bias = prob.bias; if (param.solverType.isSupportVectorRegression()) { model.w = new double[w_size]; model.nr_class = 2; model.label = null; checkProblemSize(n, model.nr_class); train_one(prob, param, model.w, 0, 0); } else { int[] perm = new int[l]; // group training data of the same class GroupClassesReturn rv = groupClasses(prob, perm); int nr_class = rv.nr_class; int[] label = rv.label; int[] start = rv.start; int[] count = rv.count; checkProblemSize(n, nr_class); model.nr_class = nr_class; model.label = new int[nr_class]; for (int i = 0; i < nr_class; i++) model.label[i] = label[i]; // calculate weighted C double[] weighted_C = new double[nr_class]; for (int i = 0; i < nr_class; i++) weighted_C[i] = param.C; for (int i = 0; i < param.getNumWeights(); i++) { int j; for (j = 0; j < nr_class; j++) if (param.weightLabel[i] == label[j]) break; if (j == nr_class) throw new IllegalArgumentException("class label " + param.weightLabel[i] + " specified in weight is not found"); weighted_C[j] *= param.weight[i]; } // constructing the subproblem Feature[][] x = new Feature[l][]; for (int i = 0; i < l; i++) x[i] = prob.x[perm[i]]; Problem sub_prob = new Problem(); sub_prob.l = l; sub_prob.n = n; sub_prob.x = new Feature[sub_prob.l][]; sub_prob.y = new double[sub_prob.l]; for (int k = 0; k < sub_prob.l; k++) sub_prob.x[k] = x[k]; // multi-class svm by Crammer and Singer if (param.solverType == SolverType.MCSVM_CS) { model.w = new double[n * nr_class]; for (int i = 0; i < nr_class; i++) { for (int j = start[i]; j < start[i] + count[i]; j++) { sub_prob.y[j] = i; } } SolverMCSVM_CS solver = new SolverMCSVM_CS(sub_prob, nr_class, weighted_C, param.eps); solver.solve(model.w); } else { if (nr_class == 2) { model.w = new double[w_size]; int e0 = start[0] + count[0]; int k = 0; for (; k < e0; k++) sub_prob.y[k] = +1; for (; k < sub_prob.l; k++) sub_prob.y[k] = -1; train_one(sub_prob, param, model.w, weighted_C[0], weighted_C[1]); } else { model.w = new double[w_size * nr_class]; double[] w = new double[w_size]; for (int i = 0; i < nr_class; i++) { int si = start[i]; int ei = si + count[i]; int k = 0; for (; k < si; k++) sub_prob.y[k] = -1; for (; k < ei; k++) sub_prob.y[k] = +1; for (; k < sub_prob.l; k++) sub_prob.y[k] = -1; train_one(sub_prob, param, w, weighted_C[i], param.C); for (int j = 0; j < n; j++) model.w[j * nr_class + i] = w[j]; } } } } return model; } /** * verify the size and throw an exception early if the problem is too large */ private static void checkProblemSize(int n, int nr_class) { if (n >= Integer.MAX_VALUE / nr_class || n * nr_class < 0) { throw new IllegalArgumentException("'number of classes' * 'number of instances' is too large: " + nr_class + "*" + n); } } private static void train_one(Problem prob, Parameter param, double[] w, double Cp, double Cn) { double eps = param.eps; int pos = 0; for (int i = 0; i < prob.l; i++) if (prob.y[i] > 0) { pos++; } int neg = prob.l - pos; double primal_solver_tol = eps * Math.max(Math.min(pos, neg), 1) / prob.l; Function fun_obj = null; switch (param.solverType) { case L2R_LR: { double[] C = new double[prob.l]; for (int i = 0; i < prob.l; i++) { if (prob.y[i] > 0) C[i] = Cp; else C[i] = Cn; } fun_obj = new L2R_LrFunction(prob, C); Tron tron_obj = new Tron(fun_obj, primal_solver_tol, param.max_iters); tron_obj.tron(w); 
break; } case L2R_L2LOSS_SVC: { double[] C = new double[prob.l]; for (int i = 0; i < prob.l; i++) { if (prob.y[i] > 0) C[i] = Cp; else C[i] = Cn; } fun_obj = new L2R_L2_SvcFunction(prob, C); Tron tron_obj = new Tron(fun_obj, primal_solver_tol, param.max_iters); tron_obj.tron(w); break; } case L2R_L2LOSS_SVC_DUAL: solve_l2r_l1l2_svc(prob, w, eps, Cp, Cn, SolverType.L2R_L2LOSS_SVC_DUAL); break; case L2R_L1LOSS_SVC_DUAL: solve_l2r_l1l2_svc(prob, w, eps, Cp, Cn, SolverType.L2R_L1LOSS_SVC_DUAL); break; case L1R_L2LOSS_SVC: { Problem prob_col = transpose(prob); solve_l1r_l2_svc(prob_col, w, primal_solver_tol, Cp, Cn); break; } case L1R_LR: { Problem prob_col = transpose(prob); solve_l1r_lr(prob_col, w, primal_solver_tol, Cp, Cn); break; } case L2R_LR_DUAL: solve_l2r_lr_dual(prob, w, eps, Cp, Cn); break; case L2R_L2LOSS_SVR: { double[] C = new double[prob.l]; for (int i = 0; i < prob.l; i++) C[i] = param.C; fun_obj = new L2R_L2_SvrFunction(prob, C, param.p); Tron tron_obj = new Tron(fun_obj, param.eps, param.max_iters); tron_obj.tron(w); break; } case L2R_L1LOSS_SVR_DUAL: case L2R_L2LOSS_SVR_DUAL: solve_l2r_l1l2_svr(prob, w, param); break; default: throw new IllegalStateException("unknown solver type: " + param.solverType); } } public static void disableDebugOutput() { setDebugOutput(null); } public static void enableDebugOutput() { setDebugOutput(System.out); } public static void setDebugOutput(PrintStream debugOutput) { synchronized (OUTPUT_MUTEX) { DEBUG_OUTPUT = debugOutput; } } /** * resets the PRNG * * this is i.a. needed for regression testing (eg. the Weka wrapper) */ public static void resetRandom() { random = new Random(DEFAULT_RANDOM_SEED); } }
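Putting the Linear entry points above together: build a Problem from rows of FeatureNode objects (indices ascending within each row, as train() enforces), train with a Parameter, then predict. This is only a sketch; the Parameter constructor used here (solver, C, eps) follows the usual liblinear-java API but is an assumption, since that class is not part of this file.

import de.bwaldvogel.liblinear.Feature;
import de.bwaldvogel.liblinear.FeatureNode;
import de.bwaldvogel.liblinear.Linear;
import de.bwaldvogel.liblinear.Model;
import de.bwaldvogel.liblinear.Parameter;
import de.bwaldvogel.liblinear.Problem;
import de.bwaldvogel.liblinear.SolverType;

public class TrainPredictSketch {
    public static void main(String[] args) {
        // Two training instances with two features each.
        Problem prob = new Problem();
        prob.l = 2;       // number of instances
        prob.n = 2;       // number of features
        prob.bias = -1;   // no bias term
        prob.x = new Feature[][] {
            { new FeatureNode(1, 1.0), new FeatureNode(2, 0.0) },
            { new FeatureNode(1, 0.0), new FeatureNode(2, 1.0) },
        };
        prob.y = new double[] { +1, -1 };

        Parameter param = new Parameter(SolverType.L2R_LR, 1.0, 0.01); // assumed ctor: solver, C, eps
        Model model = Linear.train(prob, param);

        Feature[] query = { new FeatureNode(1, 1.0) };
        System.out.println("predicted label: " + Linear.predict(model, query));
    }
}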
package org.erhsroboticsclub.robo2013; public class RoboMap { /* CAN ID Numbers */ public static final int SECONDARY_LAUNCH_MOTOR = 1; // second public static final int PRIMARY_LAUNCH_MOTOR = 2; // forward public static final int TOP_LEFT_DRIVE_MOTOR = 3; public static final int BOTTOM_LEFT_DRIVE_MOTOR = 4; public static final int TOP_RIGHT_DRIVE_MOTOR = 5; public static final int BOTTOM_RIGHT_DRIVE_MOTOR = 6; public static final int ELEVATOR_MOTOR = 7; /* Digital Output */ public static final int LOAD_ARM_MOTOR1 = 1; public static final int LOAD_ARM_MOTOR2 = 2; /* Digital Input */ public static final int LIMIT_SWITCH = 2; /* USB Input */ public static final int LEFT_DRIVE_STICK = 1; public static final int RIGHT_DRIVE_STICK = 2; /* Analog Ports */ public static final int LAUNCHER_ANGLE_POT = 1; /* Controls */ public static final int AUTO_AIM_BUTTON = 1; public static final int FIRE_BUTTON = 1; //LEFT public static final int MANUAL_LAUNCHER_UP_BUTTON = 3; public static final int MANUAL_LAUNCHER_DOWN_BUTTON = 4; public static final int MANUAL_SET_SPEED_BUTTON = 5; public static final int TURN_TO_TARGET_BUTTON = 6; public static final int TURN_TO_TARGET_0 = 4; //LEFT public static final int TURN_TO_TARGET_1 = 3; //LEFT public static final int TURN_TO_TARGET_2 = 5; //LEFT public static final int TURN_TO_TARGET_3 = 2; //LEFT }
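/*
 * Illustrative addition (not part of the original source): a hedged sketch of how the
 * RoboMap constants above would be consumed from robot code. The WPILib Joystick class,
 * its Joystick(int) constructor and getRawButton(int) accessor are assumed from the
 * 2013-era API; the class below is hypothetical.
 */
package org.erhsroboticsclub.robo2013;

import edu.wpi.first.wpilibj.Joystick;

public class RoboMapUsageSketch {
    private final Joystick leftStick = new Joystick(RoboMap.LEFT_DRIVE_STICK);
    private final Joystick rightStick = new Joystick(RoboMap.RIGHT_DRIVE_STICK);

    public boolean fireRequested() {
        // FIRE_BUTTON is flagged "//LEFT" above, i.e. it lives on the left drive stick
        return leftStick.getRawButton(RoboMap.FIRE_BUTTON);
    }

    public boolean autoAimRequested() {
        return rightStick.getRawButton(RoboMap.AUTO_AIM_BUTTON);
    }
}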
package mil.nga.giat.mage.sdk.datastore; import android.content.Context; import android.database.sqlite.SQLiteDatabase; import android.util.Log; import com.j256.ormlite.android.apptools.OrmLiteSqliteOpenHelper; import com.j256.ormlite.dao.Dao; import com.j256.ormlite.support.ConnectionSource; import com.j256.ormlite.table.TableUtils; import java.sql.SQLException; import mil.nga.giat.mage.sdk.datastore.layer.Layer; import mil.nga.giat.mage.sdk.datastore.location.Location; import mil.nga.giat.mage.sdk.datastore.location.LocationProperty; import mil.nga.giat.mage.sdk.datastore.observation.Attachment; import mil.nga.giat.mage.sdk.datastore.observation.Observation; import mil.nga.giat.mage.sdk.datastore.observation.ObservationProperty; import mil.nga.giat.mage.sdk.datastore.staticfeature.StaticFeature; import mil.nga.giat.mage.sdk.datastore.staticfeature.StaticFeatureProperty; import mil.nga.giat.mage.sdk.datastore.user.Event; import mil.nga.giat.mage.sdk.datastore.user.Role; import mil.nga.giat.mage.sdk.datastore.user.Team; import mil.nga.giat.mage.sdk.datastore.user.TeamEvent; import mil.nga.giat.mage.sdk.datastore.user.User; import mil.nga.giat.mage.sdk.datastore.user.UserTeam; /** * This is an implementation of OrmLite android database Helper. Go here to get * daos that you may need. Manage your table creation and update strategies here * as well. * * @author travis, wiedemanns * */ public class DaoStore extends OrmLiteSqliteOpenHelper { private static DaoStore helperInstance; private static final String DATABASE_NAME = "mage.db"; private static final String LOG_NAME = DaoStore.class.getName(); // Making this public so we can check if it has been upgraded and log the user out public static final int DATABASE_VERSION = 8; // Observation DAOS private Dao<Observation, Long> observationDao; private Dao<ObservationProperty, Long> observationPropertyDao; private Dao<Attachment, Long> attachmentDao; // User and Location DAOS private Dao<User, Long> userDao; private Dao<Role, Long> roleDao; private Dao<Event, Long> eventDao; private Dao<Team, Long> teamDao; private Dao<UserTeam, Long> userTeamDao; private Dao<TeamEvent, Long> teamEventDao; private Dao<Location, Long> locationDao; private Dao<LocationProperty, Long> locationPropertyDao; // Layer and StaticFeature DAOS private Dao<Layer, Long> layerDao; private Dao<StaticFeature, Long> staticFeatureDao; private Dao<StaticFeatureProperty, Long> staticFeaturePropertyDao; /** * Singleton implementation. * * @param context * @return */ public static DaoStore getInstance(Context context) { if (helperInstance == null) { helperInstance = new DaoStore(context); } return helperInstance; } /** * Constructor that takes an android Context. * * @param context * */ private DaoStore(Context context) { super(context, DATABASE_NAME, null, DATABASE_VERSION); // initialize DAOs try { getObservationDao(); getObservationPropertyDao(); getAttachmentDao(); getUserDao(); getRoleDao(); getEventDao(); getTeamDao(); getUserTeamDao(); getTeamEventDao(); getLocationDao(); getLocationPropertyDao(); getLayerDao(); getStaticFeatureDao(); getStaticFeaturePropertyDao(); } catch (SQLException sqle) { // TODO: handle this... 
sqle.printStackTrace(); } } public boolean isDatabaseEmpty() { long countOfAllRecords = 0l; try { countOfAllRecords += getObservationDao().countOf(); countOfAllRecords += getObservationPropertyDao().countOf(); countOfAllRecords += getAttachmentDao().countOf(); countOfAllRecords += getUserDao().countOf(); countOfAllRecords += getRoleDao().countOf(); countOfAllRecords += getEventDao().countOf(); countOfAllRecords += getTeamDao().countOf(); countOfAllRecords += getUserTeamDao().countOf(); countOfAllRecords += getTeamEventDao().countOf(); countOfAllRecords += getLocationDao().countOf(); countOfAllRecords += getLocationPropertyDao().countOf(); countOfAllRecords += getLayerDao().countOf(); countOfAllRecords += getStaticFeatureDao().countOf(); countOfAllRecords += getStaticFeaturePropertyDao().countOf(); } catch (SQLException sqle) { sqle.printStackTrace(); return false; } return countOfAllRecords == 0; } private void createTables() throws SQLException { TableUtils.createTable(connectionSource, Observation.class); TableUtils.createTable(connectionSource, ObservationProperty.class); TableUtils.createTable(connectionSource, Attachment.class); TableUtils.createTable(connectionSource, User.class); TableUtils.createTable(connectionSource, Role.class); TableUtils.createTable(connectionSource, Event.class); TableUtils.createTable(connectionSource, Team.class); TableUtils.createTable(connectionSource, UserTeam.class); TableUtils.createTable(connectionSource, TeamEvent.class); TableUtils.createTable(connectionSource, Location.class); TableUtils.createTable(connectionSource, LocationProperty.class); TableUtils.createTable(connectionSource, Layer.class); TableUtils.createTable(connectionSource, StaticFeature.class); TableUtils.createTable(connectionSource, StaticFeatureProperty.class); } @Override public void onCreate(SQLiteDatabase sqliteDatabase, ConnectionSource connectionSource) { try { createTables(); } catch (SQLException se) { Log.e(LOG_NAME, "Could not create tables.", se); } } private void dropTables() throws SQLException { TableUtils.dropTable(connectionSource, Observation.class, Boolean.TRUE); TableUtils.dropTable(connectionSource, ObservationProperty.class, Boolean.TRUE); TableUtils.dropTable(connectionSource, Attachment.class, Boolean.TRUE); TableUtils.dropTable(connectionSource, User.class, Boolean.TRUE); TableUtils.dropTable(connectionSource, Role.class, Boolean.TRUE); TableUtils.dropTable(connectionSource, Event.class, Boolean.TRUE); TableUtils.dropTable(connectionSource, Team.class, Boolean.TRUE); TableUtils.dropTable(connectionSource, UserTeam.class, Boolean.TRUE); TableUtils.dropTable(connectionSource, TeamEvent.class, Boolean.TRUE); TableUtils.dropTable(connectionSource, Location.class, Boolean.TRUE); TableUtils.dropTable(connectionSource, LocationProperty.class, Boolean.TRUE); TableUtils.dropTable(connectionSource, Layer.class, Boolean.TRUE); TableUtils.dropTable(connectionSource, StaticFeature.class, Boolean.TRUE); TableUtils.dropTable(connectionSource, StaticFeatureProperty.class, Boolean.TRUE); } @Override public void onUpgrade(SQLiteDatabase database, ConnectionSource connectionSource, int oldVersion, int newVersion) { resetDatabase(); } /** * Drop and create all tables. 
*/ public void resetDatabase() { try { Log.d(LOG_NAME, "Reseting Database."); dropTables(); createTables(); Log.d(LOG_NAME, "Reset Database."); } catch (SQLException se) { Log.e(LOG_NAME, "Could not reset Database.", se); } } @Override public void close() { helperInstance = null; super.close(); } /** * Getter for the ObservationDao. * * @return This instance's ObservationDao * @throws SQLException */ public Dao<Observation, Long> getObservationDao() throws SQLException { if (observationDao == null) { observationDao = getDao(Observation.class); } return observationDao; } /** * Getter for the PropertyDao * * @return This instance's PropertyDao * @throws SQLException */ public Dao<ObservationProperty, Long> getObservationPropertyDao() throws SQLException { if (observationPropertyDao == null) { observationPropertyDao = getDao(ObservationProperty.class); } return observationPropertyDao; } /** * Getter for the AttachmentDao * * @return This instance's AttachmentDao * @throws SQLException */ public Dao<Attachment, Long> getAttachmentDao() throws SQLException { if (attachmentDao == null) { attachmentDao = getDao(Attachment.class); } return attachmentDao; } /** * Getter for the UserDao * * @return This instance's UserDao * @throws SQLException */ public Dao<User, Long> getUserDao() throws SQLException { if (userDao == null) { userDao = getDao(User.class); } return userDao; } /** * Getter for the RoleDao * * @return This instance's RoleDao * @throws SQLException */ public Dao<Role, Long> getRoleDao() throws SQLException { if (roleDao == null) { roleDao = getDao(Role.class); } return roleDao; } /** * Getter for the EventDao * * @return This instance's EventDao * @throws SQLException */ public Dao<Event, Long> getEventDao() throws SQLException { if (eventDao == null) { eventDao = getDao(Event.class); } return eventDao; } /** * Getter for the TeamDao * * @return This instance's TeamDao * @throws SQLException */ public Dao<Team, Long> getTeamDao() throws SQLException { if (teamDao == null) { teamDao = getDao(Team.class); } return teamDao; } /** * Getter for the UserTeamDao * * @return This instance's UserTeamDao * @throws SQLException */ public Dao<UserTeam, Long> getUserTeamDao() throws SQLException { if (userTeamDao == null) { userTeamDao = getDao(UserTeam.class); } return userTeamDao; } /** * Getter for the TeamEventDao * * @return This instance's TeamEventDao * @throws SQLException */ public Dao<TeamEvent, Long> getTeamEventDao() throws SQLException { if (teamEventDao == null) { teamEventDao = getDao(TeamEvent.class); } return teamEventDao; } /** * Getter for the LocationDao * * @return This instance's LocationDao * @throws SQLException */ public Dao<Location, Long> getLocationDao() throws SQLException { if (locationDao == null) { locationDao = getDao(Location.class); } return locationDao; } /** * Getter for the LocationPropertyDao * * @return This instance's LocationPropertyDao * @throws SQLException */ public Dao<LocationProperty, Long> getLocationPropertyDao() throws SQLException { if (locationPropertyDao == null) { locationPropertyDao = getDao(LocationProperty.class); } return locationPropertyDao; } /** * Getter for the LayerDao * * @return This instance's LayerDao * @throws SQLException */ public Dao<Layer, Long> getLayerDao() throws SQLException { if (layerDao == null) { layerDao = getDao(Layer.class); } return layerDao; } /** * Getter for the StaticFeatureDao * * @return This instance's StaticFeatureDao * @throws SQLException */ public Dao<StaticFeature, Long> getStaticFeatureDao() throws 
SQLException { if (staticFeatureDao == null) { staticFeatureDao = getDao(StaticFeature.class); } return staticFeatureDao; } /** * Getter for the StaticFeaturePropertyDao * * @return This instance's StaticFeaturePropertyDao * @throws SQLException */ public Dao<StaticFeatureProperty, Long> getStaticFeaturePropertyDao() throws SQLException { if (staticFeaturePropertyDao == null) { staticFeaturePropertyDao = getDao(StaticFeatureProperty.class); } return staticFeaturePropertyDao; } }
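/*
 * Illustrative addition (not part of the original source): a minimal sketch of using the
 * DaoStore singleton above from application code. The android Context is assumed to be
 * supplied by the caller (e.g. an Activity or Service); the log tag and method name are
 * made up for illustration.
 */
package mil.nga.giat.mage.sdk.datastore;

import android.content.Context;
import android.util.Log;

import java.sql.SQLException;
import java.util.List;

import mil.nga.giat.mage.sdk.datastore.observation.Observation;

public class DaoStoreUsageSketch {
    public static void logObservationCount(Context context) {
        DaoStore store = DaoStore.getInstance(context);
        try {
            // Plain ORMLite query through the lazily created DAO shown above
            List<Observation> observations = store.getObservationDao().queryForAll();
            Log.d("DaoStoreUsageSketch", "observations: " + observations.size());
            if (store.isDatabaseEmpty()) {
                Log.d("DaoStoreUsageSketch", "database is empty, an initial sync may be needed");
            }
        } catch (SQLException sqle) {
            Log.e("DaoStoreUsageSketch", "query failed", sqle);
        }
    }
}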
package uk.ac.ebi.spot.goci.ui; import org.springframework.context.annotation.Configuration; import org.springframework.web.servlet.config.annotation.ViewControllerRegistry; import org.springframework.web.servlet.config.annotation.WebMvcConfigurerAdapter; @Configuration public class MvcConfig extends WebMvcConfigurerAdapter { @Override public void addViewControllers(ViewControllerRegistry registry) { registry.addViewController("/").setViewName("index"); registry.addViewController("/home").setViewName("index"); registry.addViewController("/search").setViewName("search"); registry.addViewController("/ontology").setViewName("ontology"); registry.addViewController("/downloads").setViewName("downloads"); registry.addViewController("/about").setViewName("about"); registry.addViewController("/help").setViewName("help"); registry.addViewController("/methods").setViewName("methods"); // registry.addViewController("/login").setViewName("login"); // registry.addViewController("/studies").setViewName("studies"); } }
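/*
 * Illustrative addition (not part of the original source): each addViewController(...) call
 * above is shorthand for a trivial controller like the one below. The controller class is
 * hypothetical and only restates the "/" -> "index" registration for clarity.
 */
package uk.ac.ebi.spot.goci.ui;

import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;

@Controller
public class IndexControllerSketch {
    @RequestMapping("/")
    public String index() {
        return "index"; // resolved to the same "index" view as the registry entry above
    }
}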
package org.exist.storage; import org.apache.log4j.Logger; import org.exist.dom.DocumentImpl; import org.exist.dom.StoredNode; import org.exist.numbering.NodeId; import java.util.IdentityHashMap; /** * Global notification service for document updates. Other classes * can subscribe to this service to be notified of document modifications, * removals or additions. * * @author wolf * */ public class NotificationService extends IdentityHashMap<UpdateListener, Object> { private static final long serialVersionUID = -3629584664969740903L; private final static Logger LOG = Logger.getLogger(NotificationService.class); public NotificationService() { super(); } /** * Subscribe an {@link UpdateListener} to receive notifications. * * @param listener */ public synchronized void subscribe(UpdateListener listener) { put(listener, new Object()); } /** * Unsubscribe an {@link UpdateListener}. * * @param listener */ public synchronized void unsubscribe(UpdateListener listener) { Object i = remove(listener); if (i == null) throw new RuntimeException(hashCode() + " listener not found: " + listener.hashCode()); listener.unsubscribe(); } /** * Notify all subscribers that a document has been updated/removed or * a new document has been added. * * @param document * @param event */ public synchronized void notifyUpdate(DocumentImpl document, int event) { for (UpdateListener listener : keySet()) { listener.documentUpdated(document, event); } } /** * Notify all subscribers that a node has been moved. Nodes may be moved during a * defragmentation run. */ public synchronized void notifyMove(NodeId oldNodeId, StoredNode newNode) { for (UpdateListener listener : keySet()) { listener.nodeMoved(oldNodeId, newNode); } } public void debug() { LOG.debug("Registered UpdateListeners:"); for (UpdateListener listener : keySet()) { listener.debug(); } } }
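/*
 * Illustrative addition (not part of the original source): a minimal UpdateListener sketch
 * for the NotificationService above. Only the listener methods the service is seen calling
 * in this file (documentUpdated, nodeMoved, unsubscribe, debug) are implemented; if the real
 * UpdateListener interface declares further members, they would need stubs as well. It would
 * be registered via notificationService.subscribe(new LoggingUpdateListener()).
 */
package org.exist.storage;

import org.apache.log4j.Logger;
import org.exist.dom.DocumentImpl;
import org.exist.dom.StoredNode;
import org.exist.numbering.NodeId;

public class LoggingUpdateListener implements UpdateListener {

    private static final Logger LOG = Logger.getLogger(LoggingUpdateListener.class);

    public void documentUpdated(DocumentImpl document, int event) {
        LOG.debug("document updated: " + document + ", event=" + event);
    }

    public void nodeMoved(NodeId oldNodeId, StoredNode newNode) {
        LOG.debug("node moved: " + oldNodeId);
    }

    public void unsubscribe() {
        LOG.debug("unsubscribed");
    }

    public void debug() {
        LOG.debug("LoggingUpdateListener registered");
    }
}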
package de.skuzzle.stringz; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; /** * When using the default field mapping strategy, this annotation can be used to * explicitly specify the resource key to which a variable should be mapped. If this * annotation is not present on a field, its name is used as the key. * * @author Simon Taddiken * @see DefaultFieldMapper */ @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.FIELD) public @interface ResourceKey { /** * Specifies the key which references the value within a * {@link java.util.ResourceBundle} that should be assigned to the annotated field. * @return The resource key to use. */ public String value(); }
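/*
 * Illustrative addition (not part of the original source): a sketch of the annotation's
 * intended use on a message class, restating only what the javadoc above describes. The
 * application package, class name and bundle keys are hypothetical, and the stringz
 * bootstrap that actually fills the fields is not shown.
 */
package com.example.i18n;

import de.skuzzle.stringz.ResourceKey;

public class MSG {

    @ResourceKey("app.title")
    public static String title;      // explicitly mapped to the bundle key "app.title"

    public static String subtitle;   // no annotation: the field name "subtitle" is used as the key
}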
package graphene.web.pages; import graphene.model.idl.G_User; import graphene.model.idl.G_UserDataAccess; import graphene.model.idl.G_VisualType; import graphene.web.annotations.PluginPage; import graphene.web.pages.pub.Login; import org.apache.tapestry5.annotations.Component; import org.apache.tapestry5.annotations.InjectPage; import org.apache.tapestry5.annotations.Property; import org.apache.tapestry5.annotations.SessionState; import org.apache.tapestry5.corelib.components.Form; import org.apache.tapestry5.ioc.Messages; import org.apache.tapestry5.ioc.annotations.Inject; import org.slf4j.Logger; import org.tynamo.security.services.SecurityService; /** * Allows the user to modify password and other settings * * @author djue */ @PluginPage(visualType = G_VisualType.SETTINGS, menuName = "Change Password", icon = "fa fa-lg fa-fw fa-list-alt") public class Settings { @Inject private G_UserDataAccess userDataAccess; @Inject private Messages messages; @InjectPage private Login loginPage; @Property private String password; @Property private String verifyPassword; @Component private Form settingsForm; @SessionState(create = false) private G_User user; private boolean userExists; @Inject private Logger logger; @Inject private SecurityService securityService; public Object onSuccess() { if (userExists) { if (!verifyPassword.equals(password)) { settingsForm.recordError(messages.get("error.verifypassword")); return null; } else { settingsForm.clearErrors(); } boolean success = false; try { success = userDataAccess.setUserPassword(user.getId(), password); } catch (final Exception e) { // if the DAO didn't update successfully, tell them so. logger.error("Unable to update password for user " + user.getUsername() + " Error: " + e.getMessage()); } if (success) { loginPage.setFlashMessage(messages.get("settings.password-changed")); } else { loginPage.setFlashMessage(messages.get("settings.password-not-changed")); } // authenticator.logout(); user = null; securityService.getSubject().logout(); } else { logger.error("A user tried to change a password without being logged in."); loginPage.setFlashMessage(messages.get("settings.password-not-changed")); } // Send the user to the login page. return loginPage; } }
/* * @author <a href="mailto:novotny@gridsphere.org">Jason Novotny</a> * @version $Id: PortletTitleBar.java 5032 2006-08-17 18:15:06Z novotny $ */ package org.gridsphere.layout; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.gridsphere.layout.event.PortletTitleBarEvent; import org.gridsphere.layout.event.PortletTitleBarListener; import org.gridsphere.layout.event.PortletWindowEvent; import org.gridsphere.layout.event.impl.PortletTitleBarEventImpl; import org.gridsphere.layout.event.impl.PortletWindowEventImpl; import org.gridsphere.layout.view.Render; import org.gridsphere.portlet.impl.SportletProperties; import org.gridsphere.portlet.impl.StoredPortletResponseImpl; import org.gridsphere.portlet.service.spi.PortletServiceFactory; import org.gridsphere.portletcontainer.ApplicationPortlet; import org.gridsphere.portletcontainer.GridSphereEvent; import org.gridsphere.portletcontainer.impl.PortletInvoker; import org.gridsphere.services.core.registry.PortletRegistryService; import org.gridsphere.services.core.security.role.PortletRole; import javax.portlet.*; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.PrintWriter; import java.io.Serializable; import java.io.StringWriter; import java.security.Principal; import java.util.*; /** * A <code>PortletTitleBar</code> represents the visual display of the portlet title bar * within a portlet frame and is contained by {@link PortletFrame}. * The title bar contains portlet mode and window state as well as a title. */ public class PortletTitleBar extends BasePortletComponent implements Serializable, Cloneable { private Log log = LogFactory.getLog(PortletTitleBar.class); private String title = "unknown title"; private String portletClass = null; private transient PortletRegistryService portletRegistryService = null; private transient PortletInvoker portletInvoker = null; private transient WindowState windowState = WindowState.NORMAL; private transient PortletMode portletMode = PortletMode.VIEW; private transient PortletMode previousMode = PortletMode.VIEW; private transient List<javax.portlet.WindowState> allowedWindowStates = new ArrayList<javax.portlet.WindowState>(); private transient String errorMessage = ""; private transient boolean hasError = false; private transient boolean isActive = false; private transient List<PortletTitleBar.PortletModeLink> modeLinks = null; private transient List<PortletTitleBar.PortletStateLink> windowLinks = null; private transient Render titleView = null; // display modes in title bar at all? private transient boolean displayModes = true; // display states in title bar at all? private transient boolean displayStates = true; /** * Link is an abstract representation of a hyperlink with an href, image and * alt tags. 
*/ abstract class Link { protected String href = ""; protected String imageSrc = ""; protected String altTag = ""; protected String symbol = ""; protected String cursor = ""; /** * Returns the image source attribute in the link * * @return the image source attribute in the link */ public String getImageSrc() { return imageSrc; } public String getSymbol() { //WAP 2.0 Extention return symbol; } /** * Returns the CSS cursor style to use * * @return the cursor */ public String getCursor() { return cursor; } /** * Sets the CSS cursor style to use * * @param cursor the cursor */ public void setCursor(String cursor) { this.cursor = cursor; } /** * Sets the href attribute in the link * * @param href the href attribute in the link */ public void setHref(String href) { this.href = href; } /** * Returns the href attribute in the link * * @return the href attribute in the link */ public String getHref() { return href; } /** * Returns the alt tag attribute in the link * * @return the alt tag attribute in the link */ public String getAltTag() { return altTag; } /** * Returns a string containing the image src, href and alt tag attributes * Used primarily for debugging purposes */ public String toString() { StringBuffer sb = new StringBuffer("\n"); sb.append("image src: ").append(imageSrc).append("\n"); sb.append("href: ").append(href).append("\n"); sb.append("alt tag: ").append(altTag).append("\n"); return sb.toString(); } } /** * PortletModeLink is a concrete instance of a Link used for creating * portlet mode hyperlinks */ public class PortletModeLink extends Link { public static final String configImage = "images/window_configure.gif"; public static final String configSymbol = "c";//WAP 2.0 Extention public static final String editImage = "images/window_edit.gif"; public static final String editSymbol = "/";//WAP 2.0 Extention public static final String helpImage = "images/window_help.gif"; public static final String helpSymbol = "?";//WAP 2.0 Extention public static final String viewImage = "images/window_view.gif"; public static final String viewSymbol = "V";//WAP 2.0Extention /** * Constructs an instance of PortletModeLink with the supplied portlet mode * * @param mode the portlet mode * @param locale the locale * @throws PortletModeException if the mode is not supported */ public PortletModeLink(PortletMode mode, Locale locale) throws PortletModeException { if (mode == null) return; ResourceBundle bundle = ResourceBundle.getBundle("gridsphere.resources.Portlet", locale); String key = mode.toString().toUpperCase(); altTag = bundle.getString(key); // Set the image src if (mode.equals(new PortletMode("CONFIG"))) { imageSrc = configImage; symbol = configSymbol;//WAP 2.0 } else if (mode.equals(PortletMode.EDIT)) { imageSrc = editImage; symbol = editSymbol;//WAP 2.0 } else if (mode.equals(PortletMode.HELP)) { imageSrc = helpImage; symbol = helpSymbol;//WAP 2.0 cursor = "help"; } else if (mode.equals(PortletMode.VIEW)) { imageSrc = viewImage; symbol = viewSymbol;//WAP 2.0 } else { throw new PortletModeException("Unsupported portlet mode: ", mode); } } } /** * PortletStateLink is a concrete instance of a Link used for creating * portlet window state hyperlinks */ public class PortletStateLink extends Link { public static final String closeImage = "images/window_close.gif"; public static final String minimizeImage = "images/window_minimize.gif"; public static final String maximizeImage = "images/window_maximize.gif"; public static final String normalImage = "images/window_normal.gif"; public static final 
String floatImage = "images/window_float.gif"; public static final String closeSymbol = "X"; //WAP 2.0 public static final String minimizeSymbol = "_"; //WAP 2.0 public static final String maximizeSymbol = "="; //WAP 2.0 public static final String normalSymbol = "-"; //WAP 2.0 public static final String floatSymbol = "^"; //WAP 2.0 /** * Constructs an instance of PortletStateLink with the supplied window state * * @param state the window state * @param locale the client locale * @throws WindowStateException if the state is unsupported */ public PortletStateLink(WindowState state, Locale locale) throws WindowStateException { if (state == null) return; // Set the image src if (state.equals(WindowState.MINIMIZED)) { imageSrc = minimizeImage; symbol = minimizeSymbol; } else if (state.equals(WindowState.MAXIMIZED)) { imageSrc = maximizeImage; symbol = maximizeSymbol; } else if (state.equals(WindowState.NORMAL)) { imageSrc = normalImage; symbol = normalSymbol; } else if (state.equals(new WindowState("closed"))) { imageSrc = closeImage; symbol = closeSymbol; } else if (state.equals(new WindowState("floating"))) { imageSrc = floatImage; symbol = floatSymbol; } else { throw new WindowStateException("Unsupported window state window mode: ", state); } ResourceBundle bundle = ResourceBundle.getBundle("gridsphere.resources.Portlet", locale); String key = state.toString().toUpperCase(); altTag = bundle.getString(key); } } /** * Constructs an instance of PortletTitleBar */ public PortletTitleBar() { } /** * Sets the portlet class used to render the title bar * * @param portletClass the concrete portlet class */ public void setPortletClass(String portletClass) { this.portletClass = portletClass; } /** * Returns the portlet class used in rendering the title bar * * @return the concrete portlet class */ public String getPortletClass() { return portletClass; } public boolean isActive() { return isActive; } public void setActive(boolean isActive) { this.isActive = isActive; } /** * Returns the title of the portlet title bar * * @return the portlet title bar */ public String getTitle() { return title; } /** * Sets the title of the portlet title bar * * @param title the portlet title bar */ public void setTitle(String title) { this.title = title; } /** * Sets the window state of this title bar * * @param state the portlet window state expressed as a string */ public void setWindowState(WindowState state) { if (state != null) this.windowState = state; } /** * Returns the window state of this title bar * * @return the portlet window state expressed as a string */ public WindowState getWindowState() { return windowState; } /** * Sets the window state of this title bar * * @param state the portlet window state expressed as a string */ public void setWindowStateAsString(String state) { if (state != null) { try { this.windowState = new WindowState(state); } catch (IllegalArgumentException e) { // do nothing } } } /** * Returns the window state of this title bar * * @return the portlet window state expressed as a string */ public String getWindowStateAsString() { return windowState.toString(); } /** * Sets the portlet mode of this title bar * * @param mode the portlet mode expressed as a string */ public void setPortletMode(PortletMode mode) { if (mode != null) this.portletMode = mode; } /** * Returns the portlet mode of this title bar * * @return the portlet mode expressed as a string */ public PortletMode getPortletMode() { return portletMode; } /** * Sets the portlet mode of this title bar * * @param mode the 
portlet mode expressed as a string */ public void setPreviousMode(PortletMode mode) { if (mode != null) this.previousMode = mode; } /** * Returns the portlet mode of this title bar * * @return the portlet mode expressed as a string */ public PortletMode getPreviousMode() { return previousMode; } /** * Sets the portlet mode of this title bar * * @param mode the portlet mode expressed as a string */ public void setPortletModeAsString(String mode) { if (mode == null) return; try { this.portletMode = new PortletMode(mode); } catch (IllegalArgumentException e) { // do nothing } } /** * Returns the portlet mode of this title bar * * @return the portlet mode expressed as a string */ public String getPortletModeAsString() { return portletMode.toString(); } /** * Indicates an error ocurred suring the processing of this title bar * * @return <code>true</code> if an error occured during rendering, * <code>false</code> otherwise */ public boolean hasRenderError() { return hasError; } /** * Returns any errors associated with the functioning of this title bar * * @return any title bar errors that occured */ public String getErrorMessage() { return errorMessage; } /** * Initializes the portlet title bar. Since the components are isolated * after Castor unmarshalls from XML, the ordering is determined by a * passed in List containing the previous portlet components in the tree. * * @param list a list of component identifiers * @return a list of updated component identifiers * @see ComponentIdentifier */ public List<ComponentIdentifier> init(PortletRequest req, List<ComponentIdentifier> list) { list = super.init(req, list); titleView = (Render) getRenderClass(req, "TitleBar"); portletInvoker = new PortletInvoker(); ComponentIdentifier compId = new ComponentIdentifier(); compId.setPortletComponent(this); compId.setPortletClass(portletClass); compId.setComponentID(list.size()); compId.setComponentLabel(label); compId.setClassName(this.getClass().getName()); list.add(compId); portletRegistryService = (PortletRegistryService) PortletServiceFactory.createPortletService(PortletRegistryService.class, true); String appID = portletRegistryService.getApplicationPortletID(portletClass); ApplicationPortlet appPortlet = portletRegistryService.getApplicationPortlet(appID); if (appPortlet != null) { allowedWindowStates = appPortlet.getAllowedWindowStates(); allowedWindowStates = sort(allowedWindowStates); if (canModify) { if (!allowedWindowStates.contains(new WindowState("CLOSED"))) { allowedWindowStates.add(new WindowState("CLOSED")); } } } displayModes = req.getAttribute(SportletProperties.DISPLAY_MODES).equals(Boolean.FALSE); displayStates = req.getAttribute(SportletProperties.DISPLAY_STATES).equals(Boolean.FALSE); return list; } /** * Simple sorting algoritm that sorts in increasing order a <code>List</code> * containing objects that implement <code>Comparator</code> * * @param list a <code>List</code> to be sorted * @return the sorted list */ private List<javax.portlet.WindowState> sort(List<javax.portlet.WindowState> list) { List<javax.portlet.WindowState> tmp = new ArrayList<javax.portlet.WindowState>(); if (list.contains(WindowState.MINIMIZED)) { tmp.add(WindowState.MINIMIZED); } if (list.contains(WindowState.NORMAL)) { tmp.add(WindowState.NORMAL); } if (list.contains(WindowState.MAXIMIZED)) { tmp.add(WindowState.MAXIMIZED); } if (list.contains(new WindowState("CLOSED"))) { tmp.add(new WindowState("CLOSED")); } if (list.contains(new WindowState("FLOATING"))) { tmp.add(new WindowState("FLOATING")); } return tmp; } 
/** * Creates the portlet window state hyperlinks displayed in the title bar * * @param event the gridsphere event * @return a list of window state hyperlinks */ public List<PortletStateLink> createWindowLinks(GridSphereEvent event) { super.doRender(event); PortletURL portletURL; RenderResponse res = event.getRenderResponse(); if (allowedWindowStates.isEmpty()) return null; if (!displayStates) return null; //String[] windowStates = new String[allowedWindowStates.size()]; List<javax.portlet.WindowState> windowStates = new ArrayList<javax.portlet.WindowState>(); for (WindowState state : allowedWindowStates) { windowStates.add(state); // remove current state from list if (state.equals(windowState) && (!windowState.equals(new WindowState("closed")))) { windowStates.remove(state); } } // get rid of floating if window state is minimized if (windowState.equals(WindowState.MINIMIZED)) { windowStates.remove(new WindowState("floating")); } // Localize the window state names RenderRequest req = event.getRenderRequest(); Locale locale = req.getLocale(); // create a URI for each of the window states PortletStateLink stateLink; List<PortletStateLink> stateLinks = new ArrayList<PortletStateLink>(); for (WindowState state : windowStates) { portletURL = res.createActionURL(); try { stateLink = new PortletStateLink(state, locale); portletURL.setWindowState(state); stateLink.setHref(portletURL.toString()); if (state.equals(new WindowState("floating"))) { stateLink.setHref(portletURL.toString() + "\" onclick=\"return GridSphere_popup(this, 'notes')\""); } stateLinks.add(stateLink); } catch (WindowStateException e) { log.error("a window state exception occurred! " + state); } } return stateLinks; } /** * Creates the portlet mode hyperlinks displayed in the title bar * * @param event the gridsphere event * @return a list of portlet mode hyperlinks */ public List<PortletTitleBar.PortletModeLink> createModeLinks(GridSphereEvent event) { super.doRender(event); RenderResponse res = event.getRenderResponse(); RenderRequest req = event.getRenderRequest(); if (!displayStates) return null; // make modes from supported modes Set<String> supportedModes = (Set<String>) req.getAttribute(SportletProperties.ALLOWED_MODES); if (supportedModes == null) return null; // Unless user is admin they should not see configure mode boolean hasConfigurePermission = req.isUserInRole(PortletRole.ADMIN.getName()); List<String> smodes = new ArrayList<String>(); for (String mode : supportedModes) { if (mode.equalsIgnoreCase("config")) { if (hasConfigurePermission) { smodes.add(mode); } } else { smodes.add(mode); } // remove current mode from list smodes.remove(portletMode.toString()); } // Localize the portlet mode names Locale locale = req.getLocale(); List<PortletModeLink> portletLinks = new ArrayList<PortletModeLink>(); for (String mode : smodes) { // create a URI for each of the portlet modes PortletModeLink modeLink; PortletURL portletURL = res.createActionURL(); try { PortletMode pmode = new PortletMode(mode); modeLink = new PortletModeLink(pmode, locale); portletURL.setPortletMode(pmode); modeLink.setHref(portletURL.toString()); portletLinks.add(modeLink); } catch (PortletModeException e) { log.error("Unable to get mode for : " + mode.toString()); } } return portletLinks; } /** * Performs an action on this portlet title bar component * * @param event a gridsphere event */ public void actionPerformed(GridSphereEvent event) { super.actionPerformed(event); isActive = true; HttpServletRequest req = event.getHttpServletRequest(); 
ActionResponse res = event.getActionResponse(); req.setAttribute(SportletProperties.PORTLETID, portletClass); // Render title bar Set supportedModes = null; String appID = portletRegistryService.getApplicationPortletID(portletClass); ApplicationPortlet appPortlet = portletRegistryService.getApplicationPortlet(appID); if (appPortlet != null) { supportedModes = appPortlet.getSupportedModes(event.getClient().getMimeType()); } req.setAttribute(SportletProperties.ALLOWED_MODES, supportedModes); // pop last event off stack event.getLastRenderEvent(); PortletTitleBarEvent titleBarEvent = new PortletTitleBarEventImpl(this, event, COMPONENT_ID); Principal principal = event.getActionRequest().getUserPrincipal(); if (principal != null) { if (titleBarEvent.hasAction()) { if (titleBarEvent.hasWindowStateAction()) { // don't set window state if it is floating if (!titleBarEvent.getState().equals(new WindowState("floating"))) windowState = titleBarEvent.getState(); //System.err.println("setting window state= " + windowState); PortletWindowEvent winEvent = null; // if receive a window state that is not supported do nothing if (!allowedWindowStates.contains(windowState)) return; if (windowState.equals(WindowState.MAXIMIZED)) { winEvent = new PortletWindowEventImpl(req, PortletWindowEvent.WINDOW_MAXIMIZED); } else if (windowState.equals(WindowState.MINIMIZED)) { winEvent = new PortletWindowEventImpl(req, PortletWindowEvent.WINDOW_MINIMIZED); } else if (windowState.equals(WindowState.NORMAL)) { winEvent = new PortletWindowEventImpl(req, PortletWindowEvent.WINDOW_RESTORED); } else if (windowState.equals(new WindowState("CLOSED"))) { winEvent = new PortletWindowEventImpl(req, PortletWindowEvent.WINDOW_CLOSED); } if (winEvent != null) { try { portletInvoker.windowEvent((String) req.getAttribute(SportletProperties.PORTLETID), winEvent, req, (HttpServletResponse) res); } catch (Exception e) { hasError = true; errorMessage += "Failed to invoke window event method of portlet: " + portletClass; } } } if (titleBarEvent.hasPortletModeAction()) { /* if (titleBarEvent.getMode().equals(Portlet.Mode.CONFIGURE)) { @TODO fix me boolean hasrole = aclService.hasRequiredRole(req, portletClass, true); if (!hasrole) return; }*/ previousMode = portletMode; portletMode = titleBarEvent.getMode(); //System.err.println("mode = " + portletMode); //System.err.println("prev mode = " + previousMode); } } } req.setAttribute(SportletProperties.PORTLET_WINDOW, windowState); try { res.setPortletMode(portletMode); } catch (PortletModeException e) { log.error("Unable to set mode to " + portletMode); } req.setAttribute(SportletProperties.PREVIOUS_MODE, previousMode); for (PortletComponent comp : listeners) { event.addNewRenderEvent(titleBarEvent); comp.actionPerformed(event); } } /** * Fires a title bar event notification * * @param event a portlet title bar event */ protected void fireTitleBarEvent(PortletTitleBarEvent event) { for (PortletComponent titleBarListener : listeners) { ((PortletTitleBarListener) titleBarListener).handleTitleBarEvent(event); } } public List<PortletTitleBar.PortletModeLink> getModeLinks() { return modeLinks; } public List<PortletTitleBar.PortletStateLink> getWindowLinks() { return windowLinks; } public void doRender(GridSphereEvent event) { super.doRender(event); hasError = false; // title bar: configure, edit, help, title, min, max RenderRequest req = event.getRenderRequest(); RenderResponse res = event.getRenderResponse(); Set supportedModes = null; String appID = 
portletRegistryService.getApplicationPortletID(portletClass); ApplicationPortlet appPortlet = portletRegistryService.getApplicationPortlet(appID); if (appPortlet != null) { supportedModes = appPortlet.getSupportedModes(event.getClient().getMimeType()); } req.setAttribute(SportletProperties.ALLOWED_MODES, supportedModes); PortalContext portalContext = appPortlet.getPortalContext(); req.setAttribute(SportletProperties.PORTAL_CONTEXT, portalContext); // get the appropriate title for this client Locale locale = req.getLocale(); Principal principal = req.getUserPrincipal(); if (principal != null) { if (portletClass != null) { modeLinks = createModeLinks(event); windowLinks = createWindowLinks(event); } } //System.err.println("in title bar render portletclass=" + portletClass + ": setting prev mode= " + previousMode + " cur mode= " + portletMode); req.setAttribute(SportletProperties.PORTLET_MODE, portletMode); req.setAttribute(SportletProperties.PREVIOUS_MODE, previousMode); req.setAttribute(SportletProperties.PORTLET_WINDOW, windowState); StringBuffer preTitle = titleView.doStart(event, this); req.setAttribute(SportletProperties.RENDER_OUTPUT + COMPONENT_ID + ".pre", preTitle.toString()); StringBuffer postTitle = titleView.doEnd(event, this); req.setAttribute(SportletProperties.RENDER_OUTPUT + COMPONENT_ID + ".post", postTitle.toString()); StringWriter storedWriter = new StringWriter(); PrintWriter writer = new PrintWriter(storedWriter); PortletResponse wrappedResponse = new StoredPortletResponseImpl((HttpServletRequest) req, (HttpServletResponse) res, writer); try { //System.err.println("invoking doTitle:" + title); portletInvoker.doTitle((String) req.getAttribute(SportletProperties.PORTLETID), (HttpServletRequest) req, (HttpServletResponse) wrappedResponse); //out.println(" (" + portletMode.toString() + ") "); title = storedWriter.toString(); } catch (Exception e) { ResourceBundle bundle = ResourceBundle.getBundle("gridsphere.resources.Portlet", locale); title = bundle.getString("PORTLET_UNAVAILABLE"); hasError = true; errorMessage = portletClass + " " + title + "!\n"; //"PortletException:" + e.getMessage(); log.error(portletClass + " is currently unavailable:", e); } } public String getPreBufferedTitle(PortletRequest req) { String preTitle = (String) req.getAttribute(SportletProperties.RENDER_OUTPUT + COMPONENT_ID + ".pre"); req.removeAttribute(SportletProperties.RENDER_OUTPUT + COMPONENT_ID + ".pre"); return preTitle; } public String getPostBufferedTitle(PortletRequest req) { String postTitle = (String) req.getAttribute(SportletProperties.RENDER_OUTPUT + COMPONENT_ID + ".post"); req.removeAttribute(SportletProperties.RENDER_OUTPUT + COMPONENT_ID + ".post"); return postTitle; } public Object clone() throws CloneNotSupportedException { PortletTitleBar t = (PortletTitleBar) super.clone(); t.title = this.title; t.portletClass = this.portletClass; t.portletMode = new PortletMode(this.portletMode.toString()); t.windowState = new WindowState(this.windowState.toString()); t.previousMode = this.previousMode; return t; } public String toString() { StringBuffer sb = new StringBuffer(); sb.append(super.toString()); return sb.toString(); } }
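/*
 * Illustrative addition (not part of the original source): a small sketch showing how a
 * PortletTitleBar is configured through the setters defined above, e.g. when a layout is
 * assembled programmatically. The portlet class name is made up; as their implementations
 * show, the string-based setters silently ignore invalid mode/state names.
 */
package org.gridsphere.layout.examples;

import org.gridsphere.layout.PortletTitleBar;

public class PortletTitleBarSketch {
    public static PortletTitleBar newTitleBar() {
        PortletTitleBar titleBar = new PortletTitleBar();
        titleBar.setPortletClass("org.example.portlets.HelloPortlet"); // hypothetical portlet class
        titleBar.setTitle("Hello Portlet");
        titleBar.setWindowStateAsString("maximized"); // wraps javax.portlet.WindowState
        titleBar.setPortletModeAsString("edit");      // wraps javax.portlet.PortletMode
        return titleBar;
    }
}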
package ca.uhn.fhir.to; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirVersionEnum; import ca.uhn.fhir.context.RuntimeResourceDefinition; import ca.uhn.fhir.model.api.ExtensionDt; import ca.uhn.fhir.model.api.IResource; import ca.uhn.fhir.model.dstu2.resource.Conformance; import ca.uhn.fhir.model.primitive.DecimalDt; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.EncodingEnum; import ca.uhn.fhir.rest.client.api.IClientInterceptor; import ca.uhn.fhir.rest.client.api.IHttpRequest; import ca.uhn.fhir.rest.client.api.IHttpResponse; import ca.uhn.fhir.rest.client.impl.GenericClient; import ca.uhn.fhir.to.model.HomeRequest; import ca.uhn.fhir.util.ExtensionConstants; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringEscapeUtils; import org.apache.commons.lang3.StringUtils; import org.apache.http.Header; import org.apache.http.entity.ContentType; import org.apache.http.message.BasicHeader; import org.hl7.fhir.dstu3.model.CapabilityStatement; import org.hl7.fhir.dstu3.model.CapabilityStatement.CapabilityStatementRestComponent; import org.hl7.fhir.dstu3.model.CapabilityStatement.CapabilityStatementRestResourceComponent; import org.hl7.fhir.dstu3.model.DecimalType; import org.hl7.fhir.dstu3.model.Extension; import org.hl7.fhir.instance.model.api.IAnyResource; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IDomainResource; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.ui.ModelMap; import org.thymeleaf.TemplateEngine; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.net.URLDecoder; import java.util.*; import static org.apache.commons.lang3.StringUtils.defaultString; public class BaseController { static final String PARAM_RESOURCE = "resource"; static final String RESOURCE_COUNT_EXT_URL = "http://hl7api.sourceforge.net/hapi-fhir/res/extdefs.html#resourceCount"; private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(BaseController.class); @Autowired protected TesterConfig myConfig; private Map<FhirVersionEnum, FhirContext> myContexts = new HashMap<FhirVersionEnum, FhirContext>(); private List<String> myFilterHeaders; @Autowired private TemplateEngine myTemplateEngine; public BaseController() { super(); } protected IBaseResource addCommonParams(HttpServletRequest theServletRequest, final HomeRequest theRequest, final ModelMap theModel) { if (myConfig.getDebugTemplatesMode()) { myTemplateEngine.getCacheManager().clearAllCaches(); } final String serverId = theRequest.getServerIdWithDefault(myConfig); final String serverBase = theRequest.getServerBase(theServletRequest, myConfig); final String serverName = theRequest.getServerName(myConfig); final String apiKey = theRequest.getApiKey(theServletRequest, myConfig); theModel.put("serverId", serverId); theModel.put("base", serverBase); theModel.put("baseName", serverName); theModel.put("apiKey", apiKey); theModel.put("resourceName", defaultString(theRequest.getResource())); theModel.put("encoding", theRequest.getEncoding()); theModel.put("pretty", theRequest.getPretty()); theModel.put("_summary", theRequest.get_summary()); theModel.put("serverEntries", myConfig.getIdToServerName()); return loadAndAddConf(theServletRequest, theRequest, theModel); } private Header[] applyHeaderFilters(Header[] theAllHeaders) { if (myFilterHeaders == null || 
myFilterHeaders.isEmpty()) { return theAllHeaders; } ArrayList<Header> retVal = new ArrayList<Header>(); for (Header next : theAllHeaders) { if (!myFilterHeaders.contains(next.getName().toLowerCase())) { retVal.add(next); } } return retVal.toArray(new Header[retVal.size()]); } private Header[] applyHeaderFilters(Map<String, List<String>> theAllHeaders) { ArrayList<Header> retVal = new ArrayList<Header>(); for (String nextKey : theAllHeaders.keySet()) { for (String nextValue : theAllHeaders.get(nextKey)) { if (myFilterHeaders == null || !myFilterHeaders.contains(nextKey.toLowerCase())) { retVal.add(new BasicHeader(nextKey, nextValue)); } } } return retVal.toArray(new Header[retVal.size()]); } private String format(String theResultBody, EncodingEnum theEncodingEnum) { String str = StringEscapeUtils.escapeHtml4(theResultBody); if (str == null || theEncodingEnum == null) { return str; } StringBuilder b = new StringBuilder(); if (theEncodingEnum == EncodingEnum.JSON) { boolean inValue = false; boolean inQuote = false; for (int i = 0; i < str.length(); i++) { char prevChar = (i > 0) ? str.charAt(i - 1) : ' '; char nextChar = str.charAt(i); char nextChar2 = (i + 1) < str.length() ? str.charAt(i + 1) : ' '; char nextChar3 = (i + 2) < str.length() ? str.charAt(i + 2) : ' '; char nextChar4 = (i + 3) < str.length() ? str.charAt(i + 3) : ' '; char nextChar5 = (i + 4) < str.length() ? str.charAt(i + 4) : ' '; char nextChar6 = (i + 5) < str.length() ? str.charAt(i + 5) : ' '; if (inQuote) { b.append(nextChar); if (prevChar != '\\' && nextChar == '&' && nextChar2 == 'q' && nextChar3 == 'u' && nextChar4 == 'o' && nextChar5 == 't' && nextChar6 == ';') { b.append("quot;</span>"); i += 5; inQuote = false; } else if (nextChar == '\\' && nextChar2 == '"') { b.append("quot;</span>"); i += 5; inQuote = false; } } else { if (nextChar == ':') { inValue = true; b.append(nextChar); } else if (nextChar == '[' || nextChar == '{') { b.append("<span class='hlControl'>"); b.append(nextChar); b.append("</span>"); inValue = false; } else if (nextChar == '{' || nextChar == '}' || nextChar == ',') { b.append("<span class='hlControl'>"); b.append(nextChar); b.append("</span>"); inValue = false; } else if (nextChar == '&' && nextChar2 == 'q' && nextChar3 == 'u' && nextChar4 == 'o' && nextChar5 == 't' && nextChar6 == ';') { if (inValue) { b.append("<span class='hlQuot'>&quot;"); } else { b.append("<span class='hlTagName'>&quot;"); } inQuote = true; i += 5; } else if (nextChar == ':') { b.append("<span class='hlControl'>"); b.append(nextChar); b.append("</span>"); inValue = true; } else { b.append(nextChar); } } } } else { boolean inQuote = false; boolean inTag = false; for (int i = 0; i < str.length(); i++) { char nextChar = str.charAt(i); char nextChar2 = (i + 1) < str.length() ? str.charAt(i + 1) : ' '; char nextChar3 = (i + 2) < str.length() ? str.charAt(i + 2) : ' '; char nextChar4 = (i + 3) < str.length() ? str.charAt(i + 3) : ' '; char nextChar5 = (i + 4) < str.length() ? str.charAt(i + 4) : ' '; char nextChar6 = (i + 5) < str.length() ? 
str.charAt(i + 5) : ' '; if (inQuote) { b.append(nextChar); if (nextChar == '&' && nextChar2 == 'q' && nextChar3 == 'u' && nextChar4 == 'o' && nextChar5 == 't' && nextChar6 == ';') { b.append("quot;</span>"); i += 5; inQuote = false; } } else if (inTag) { if (nextChar == '&' && nextChar2 == 'g' && nextChar3 == 't' && nextChar4 == ';') { b.append("</span><span class='hlControl'>&gt;</span>"); inTag = false; i += 3; } else if (nextChar == ' ') { b.append("</span><span class='hlAttr'>"); b.append(nextChar); } else if (nextChar == '&' && nextChar2 == 'q' && nextChar3 == 'u' && nextChar4 == 'o' && nextChar5 == 't' && nextChar6 == ';') { b.append("<span class='hlQuot'>&quot;"); inQuote = true; i += 5; } else { b.append(nextChar); } } else { if (nextChar == '&' && nextChar2 == 'l' && nextChar3 == 't' && nextChar4 == ';') { b.append("<span class='hlControl'>&lt;</span><span class='hlTagName'>"); inTag = true; i += 3; } else { b.append(nextChar); } } } } return b.toString(); } private String formatUrl(String theUrlBase, String theResultBody) { String str = theResultBody; if (str == null) { return str; } try { str = URLDecoder.decode(str, "UTF-8"); } catch (UnsupportedEncodingException e) { ourLog.error("Should not happen", e); } StringBuilder b = new StringBuilder(); b.append("<span class='hlUrlBase'>"); boolean inParams = false; for (int i = 0; i < str.length(); i++) { char nextChar = str.charAt(i); // char nextChar2 = i < str.length()-2 ? str.charAt(i+1):' '; // char nextChar3 = i < str.length()-2 ? str.charAt(i+2):' '; if (!inParams) { if (nextChar == '?') { inParams = true; b.append("</span><wbr /><span class='hlControl'>?</span><span class='hlTagName'>"); } else { if (i == theUrlBase.length()) { b.append("</span><wbr /><span class='hlText'>"); } b.append(nextChar); } } else { if (nextChar == '&') { b.append("</span><wbr /><span class='hlControl'>&amp;</span><span class='hlTagName'>"); } else if (nextChar == '=') { b.append("</span><span class='hlControl'>=</span><span class='hlAttr'>"); // }else if (nextChar=='%' && Character.isLetterOrDigit(nextChar2)&& Character.isLetterOrDigit(nextChar3)) { // URLDecoder.decode(s, enc) } else { b.append(nextChar); } } } if (inParams) { b.append("</span>"); } return b.toString(); } protected FhirContext getContext(HomeRequest theRequest) { FhirVersionEnum version = theRequest.getFhirVersion(myConfig); FhirContext retVal = myContexts.get(version); if (retVal == null) { retVal = newContext(version); myContexts.put(version, retVal); } return retVal; } protected RuntimeResourceDefinition getResourceType(HomeRequest theRequest, HttpServletRequest theReq) throws ServletException { String resourceName = StringUtils.defaultString(theReq.getParameter(PARAM_RESOURCE)); RuntimeResourceDefinition def = getContext(theRequest).getResourceDefinition(resourceName); if (def == null) { throw new ServletException("Invalid resourceName: " + resourceName); } return def; } protected ResultType handleClientException(GenericClient theClient, Exception e, ModelMap theModel) { ResultType returnsResource; returnsResource = ResultType.NONE; ourLog.warn("Failed to invoke server", e); if (e != null) { theModel.put("errorMsg", toDisplayError("Error: " + e.getMessage(), e)); } return returnsResource; } private IBaseResource loadAndAddConf(HttpServletRequest theServletRequest, final HomeRequest theRequest, final ModelMap theModel) { switch (theRequest.getFhirVersion(myConfig)) { case DSTU2: return loadAndAddConfDstu2(theServletRequest, theRequest, theModel); case DSTU3: return 
loadAndAddConfDstu3(theServletRequest, theRequest, theModel); case R4: return loadAndAddConfR4(theServletRequest, theRequest, theModel); case DSTU2_1: case DSTU2_HL7ORG: break; } throw new IllegalStateException("Unknown version: " + theRequest.getFhirVersion(myConfig)); } private IResource loadAndAddConfDstu2(HttpServletRequest theServletRequest, final HomeRequest theRequest, final ModelMap theModel) { CaptureInterceptor interceptor = new CaptureInterceptor(); GenericClient client = theRequest.newClient(theServletRequest, getContext(theRequest), myConfig, interceptor); ca.uhn.fhir.model.dstu2.resource.Conformance conformance; try { conformance = (ca.uhn.fhir.model.dstu2.resource.Conformance) client.fetchConformance().ofType(Conformance.class).execute(); } catch (Exception e) { ourLog.warn("Failed to load conformance statement, error was: {}", e.toString()); theModel.put("errorMsg", toDisplayError("Failed to load conformance statement, error was: " + e.toString(), e)); conformance = new ca.uhn.fhir.model.dstu2.resource.Conformance(); } theModel.put("jsonEncodedConf", getContext(theRequest).newJsonParser().encodeResourceToString(conformance)); Map<String, Number> resourceCounts = new HashMap<String, Number>(); long total = 0; for (ca.uhn.fhir.model.dstu2.resource.Conformance.Rest nextRest : conformance.getRest()) { for (ca.uhn.fhir.model.dstu2.resource.Conformance.RestResource nextResource : nextRest.getResource()) { List<ExtensionDt> exts = nextResource.getUndeclaredExtensionsByUrl(RESOURCE_COUNT_EXT_URL); if (exts != null && exts.size() > 0) { Number nextCount = ((DecimalDt) (exts.get(0).getValue())).getValueAsNumber(); resourceCounts.put(nextResource.getTypeElement().getValue(), nextCount); total += nextCount.longValue(); } } } theModel.put("resourceCounts", resourceCounts); if (total > 0) { for (ca.uhn.fhir.model.dstu2.resource.Conformance.Rest nextRest : conformance.getRest()) { Collections.sort(nextRest.getResource(), new Comparator<ca.uhn.fhir.model.dstu2.resource.Conformance.RestResource>() { @Override public int compare(ca.uhn.fhir.model.dstu2.resource.Conformance.RestResource theO1, ca.uhn.fhir.model.dstu2.resource.Conformance.RestResource theO2) { DecimalDt count1 = new DecimalDt(); List<ExtensionDt> count1exts = theO1.getUndeclaredExtensionsByUrl(RESOURCE_COUNT_EXT_URL); if (count1exts != null && count1exts.size() > 0) { count1 = (DecimalDt) count1exts.get(0).getValue(); } DecimalDt count2 = new DecimalDt(); List<ExtensionDt> count2exts = theO2.getUndeclaredExtensionsByUrl(RESOURCE_COUNT_EXT_URL); if (count2exts != null && count2exts.size() > 0) { count2 = (DecimalDt) count2exts.get(0).getValue(); } int retVal = count2.compareTo(count1); if (retVal == 0) { retVal = theO1.getTypeElement().getValue().compareTo(theO2.getTypeElement().getValue()); } return retVal; } }); } } theModel.put("conf", conformance); theModel.put("requiredParamExtension", ExtensionConstants.PARAM_IS_REQUIRED); return conformance; } private IBaseResource loadAndAddConfDstu3(HttpServletRequest theServletRequest, final HomeRequest theRequest, final ModelMap theModel) { CaptureInterceptor interceptor = new CaptureInterceptor(); GenericClient client = theRequest.newClient(theServletRequest, getContext(theRequest), myConfig, interceptor); org.hl7.fhir.dstu3.model.CapabilityStatement capabilityStatement = new CapabilityStatement(); try { capabilityStatement = client.fetchConformance().ofType(org.hl7.fhir.dstu3.model.CapabilityStatement.class).execute(); } catch (Exception ex) { ourLog.warn("Failed to load 
conformance statement, error was: {}", ex.toString()); theModel.put("errorMsg", toDisplayError("Failed to load conformance statement, error was: " + ex.toString(), ex)); } theModel.put("jsonEncodedConf", getContext(theRequest).newJsonParser().encodeResourceToString(capabilityStatement)); Map<String, Number> resourceCounts = new HashMap<String, Number>(); long total = 0; for (CapabilityStatementRestComponent nextRest : capabilityStatement.getRest()) { for (CapabilityStatementRestResourceComponent nextResource : nextRest.getResource()) { List<Extension> exts = nextResource.getExtensionsByUrl(RESOURCE_COUNT_EXT_URL); if (exts != null && exts.size() > 0) { Number nextCount = ((DecimalType) (exts.get(0).getValue())).getValueAsNumber(); resourceCounts.put(nextResource.getTypeElement().getValue(), nextCount); total += nextCount.longValue(); } } } theModel.put("resourceCounts", resourceCounts); if (total > 0) { for (CapabilityStatementRestComponent nextRest : capabilityStatement.getRest()) { Collections.sort(nextRest.getResource(), new Comparator<CapabilityStatementRestResourceComponent>() { @Override public int compare(CapabilityStatementRestResourceComponent theO1, CapabilityStatementRestResourceComponent theO2) { DecimalType count1 = new DecimalType(); List<Extension> count1exts = theO1.getExtensionsByUrl(RESOURCE_COUNT_EXT_URL); if (count1exts != null && count1exts.size() > 0) { count1 = (DecimalType) count1exts.get(0).getValue(); } DecimalType count2 = new DecimalType(); List<Extension> count2exts = theO2.getExtensionsByUrl(RESOURCE_COUNT_EXT_URL); if (count2exts != null && count2exts.size() > 0) { count2 = (DecimalType) count2exts.get(0).getValue(); } int retVal = count2.compareTo(count1); if (retVal == 0) { retVal = theO1.getTypeElement().getValue().compareTo(theO2.getTypeElement().getValue()); } return retVal; } }); } } theModel.put("requiredParamExtension", ExtensionConstants.PARAM_IS_REQUIRED); theModel.put("conf", capabilityStatement); return capabilityStatement; } private IBaseResource loadAndAddConfR4(HttpServletRequest theServletRequest, final HomeRequest theRequest, final ModelMap theModel) { CaptureInterceptor interceptor = new CaptureInterceptor(); GenericClient client = theRequest.newClient(theServletRequest, getContext(theRequest), myConfig, interceptor); org.hl7.fhir.r4.model.CapabilityStatement capabilityStatement = new org.hl7.fhir.r4.model.CapabilityStatement(); try { capabilityStatement = client.fetchConformance().ofType(org.hl7.fhir.r4.model.CapabilityStatement.class).execute(); } catch (Exception ex) { ourLog.warn("Failed to load conformance statement, error was: {}", ex.toString()); theModel.put("errorMsg", toDisplayError("Failed to load conformance statement, error was: " + ex.toString(), ex)); } theModel.put("jsonEncodedConf", getContext(theRequest).newJsonParser().encodeResourceToString(capabilityStatement)); Map<String, Number> resourceCounts = new HashMap<String, Number>(); long total = 0; for (org.hl7.fhir.r4.model.CapabilityStatement.CapabilityStatementRestComponent nextRest : capabilityStatement.getRest()) { for (org.hl7.fhir.r4.model.CapabilityStatement.CapabilityStatementRestResourceComponent nextResource : nextRest.getResource()) { List<org.hl7.fhir.r4.model.Extension> exts = nextResource.getExtensionsByUrl(RESOURCE_COUNT_EXT_URL); if (exts != null && exts.size() > 0) { Number nextCount = ((org.hl7.fhir.r4.model.DecimalType) (exts.get(0).getValue())).getValueAsNumber(); resourceCounts.put(nextResource.getTypeElement().getValue(), nextCount); total += 
nextCount.longValue(); } } } theModel.put("resourceCounts", resourceCounts); if (total > 0) { for (org.hl7.fhir.r4.model.CapabilityStatement.CapabilityStatementRestComponent nextRest : capabilityStatement.getRest()) { Collections.sort(nextRest.getResource(), new Comparator<org.hl7.fhir.r4.model.CapabilityStatement.CapabilityStatementRestResourceComponent>() { @Override public int compare(org.hl7.fhir.r4.model.CapabilityStatement.CapabilityStatementRestResourceComponent theO1, org.hl7.fhir.r4.model.CapabilityStatement.CapabilityStatementRestResourceComponent theO2) { org.hl7.fhir.r4.model.DecimalType count1 = new org.hl7.fhir.r4.model.DecimalType(); List<org.hl7.fhir.r4.model.Extension> count1exts = theO1.getExtensionsByUrl(RESOURCE_COUNT_EXT_URL); if (count1exts != null && count1exts.size() > 0) { count1 = (org.hl7.fhir.r4.model.DecimalType) count1exts.get(0).getValue(); } org.hl7.fhir.r4.model.DecimalType count2 = new org.hl7.fhir.r4.model.DecimalType(); List<org.hl7.fhir.r4.model.Extension> count2exts = theO2.getExtensionsByUrl(RESOURCE_COUNT_EXT_URL); if (count2exts != null && count2exts.size() > 0) { count2 = (org.hl7.fhir.r4.model.DecimalType) count2exts.get(0).getValue(); } int retVal = count2.compareTo(count1); if (retVal == 0) { retVal = theO1.getTypeElement().getValue().compareTo(theO2.getTypeElement().getValue()); } return retVal; } }); } } theModel.put("requiredParamExtension", ExtensionConstants.PARAM_IS_REQUIRED); theModel.put("conf", capabilityStatement); return capabilityStatement; } protected String logPrefix(ModelMap theModel) { return "[server=" + theModel.get("serverId") + "] - "; } protected FhirContext newContext(FhirVersionEnum version) { FhirContext retVal; retVal = new FhirContext(version); return retVal; } private String parseNarrative(HomeRequest theRequest, EncodingEnum theCtEnum, String theResultBody) { try { IBaseResource par = theCtEnum.newParser(getContext(theRequest)).parseResource(theResultBody); String retVal; if (par instanceof IResource) { IResource resource = (IResource) par; retVal = resource.getText().getDiv().getValueAsString(); } else if (par instanceof IDomainResource) { retVal = ((IDomainResource) par).getText().getDivAsString(); } else { retVal = null; } return StringUtils.defaultString(retVal); } catch (Exception e) { ourLog.error("Failed to parse resource", e); return ""; } } protected String preProcessMessageBody(String theBody) { if (theBody == null) { return ""; } String retVal = theBody.trim(); StringBuilder b = new StringBuilder(); for (int i = 0; i < retVal.length(); i++) { char nextChar = retVal.charAt(i); int nextCharI = nextChar; if (nextCharI == 65533) { b.append(' '); continue; } if (nextCharI == 160) { b.append(' '); continue; } if (nextCharI == 194) { b.append(' '); continue; } b.append(nextChar); } retVal = b.toString(); return retVal; } protected void processAndAddLastClientInvocation(GenericClient theClient, ResultType theResultType, ModelMap theModelMap, long theLatency, String outcomeDescription, CaptureInterceptor theInterceptor, HomeRequest theRequest) { try { // ApacheHttpRequest lastRequest = theInterceptor.getLastRequest(); // HttpResponse lastResponse = theInterceptor.getLastResponse(); // String requestBody = null; // String requestUrl = lastRequest != null ? lastRequest.getApacheRequest().getURI().toASCIIString() : null; // String action = lastRequest != null ? lastRequest.getApacheRequest().getMethod() : null; // String resultStatus = lastResponse != null ? 
lastResponse.getStatusLine().toString() : null; // String resultBody = StringUtils.defaultString(theInterceptor.getLastResponseBody()); // if (lastRequest instanceof HttpEntityEnclosingRequest) { // HttpEntity entity = ((HttpEntityEnclosingRequest) lastRequest).getEntity(); // if (entity.isRepeatable()) { // requestBody = IOUtils.toString(entity.getContent()); // ContentType ct = lastResponse != null ? ContentType.get(lastResponse.getEntity()) : null; // String mimeType = ct != null ? ct.getMimeType() : null; IHttpRequest lastRequest = theInterceptor.getLastRequest(); IHttpResponse lastResponse = theInterceptor.getLastResponse(); String requestBody = null; String requestUrl = null; String action = null; String resultStatus = null; String resultBody = null; String mimeType = null; ContentType ct = null; if (lastRequest != null) { requestBody = lastRequest.getRequestBodyFromStream(); requestUrl = lastRequest.getUri(); action = lastRequest.getHttpVerbName(); } if (lastResponse != null) { resultStatus = "HTTP " + lastResponse.getStatus() + " " + lastResponse.getStatusInfo(); lastResponse.bufferEntity(); resultBody = IOUtils.toString(lastResponse.readEntity(), Constants.CHARSET_UTF8); List<String> ctStrings = lastResponse.getHeaders(Constants.HEADER_CONTENT_TYPE); if (ctStrings != null && ctStrings.isEmpty() == false) { ct = ContentType.parse(ctStrings.get(0)); mimeType = ct.getMimeType(); } } EncodingEnum ctEnum = EncodingEnum.forContentType(mimeType); String narrativeString = ""; StringBuilder resultDescription = new StringBuilder(); IBaseResource riBundle = null; FhirContext context = getContext(theRequest); if (ctEnum == null) { resultDescription.append("Non-FHIR response"); } else { switch (ctEnum) { case JSON: if (theResultType == ResultType.RESOURCE) { narrativeString = parseNarrative(theRequest, ctEnum, resultBody); resultDescription.append("JSON resource"); } else if (theResultType == ResultType.BUNDLE) { resultDescription.append("JSON bundle"); riBundle = context.newJsonParser().parseResource(resultBody); } break; case XML: default: if (theResultType == ResultType.RESOURCE) { narrativeString = parseNarrative(theRequest, ctEnum, resultBody); resultDescription.append("XML resource"); } else if (theResultType == ResultType.BUNDLE) { resultDescription.append("XML bundle"); riBundle = context.newXmlParser().parseResource(resultBody); } break; } } resultDescription.append(" (").append(defaultString(resultBody).length() + " bytes)"); Header[] requestHeaders = lastRequest != null ? applyHeaderFilters(lastRequest.getAllHeaders()) : new Header[0]; Header[] responseHeaders = lastResponse != null ? 
applyHeaderFilters(lastResponse.getAllHeaders()) : new Header[0]; theModelMap.put("outcomeDescription", outcomeDescription); theModelMap.put("resultDescription", resultDescription.toString()); theModelMap.put("action", action); theModelMap.put("ri", riBundle instanceof IAnyResource); theModelMap.put("riBundle", riBundle); theModelMap.put("resultStatus", resultStatus); theModelMap.put("requestUrl", requestUrl); theModelMap.put("requestUrlText", formatUrl(theClient.getUrlBase(), requestUrl)); String requestBodyText = format(requestBody, ctEnum); theModelMap.put("requestBody", requestBodyText); String resultBodyText = format(resultBody, ctEnum); theModelMap.put("resultBody", resultBodyText); theModelMap.put("resultBodyIsLong", resultBodyText.length() > 1000); theModelMap.put("requestHeaders", requestHeaders); theModelMap.put("responseHeaders", responseHeaders); theModelMap.put("narrative", narrativeString); theModelMap.put("latencyMs", theLatency); } catch (Exception e) { ourLog.error("Failure during processing", e); theModelMap.put("errorMsg", toDisplayError("Error during processing: " + e.getMessage(), e)); } } /** * A hook to be overridden by subclasses. The overriding method can modify the error message * based on its content and/or the related exception. * * @param theErrorMsg The original error message to be displayed to the user. * @param theException The exception that occurred. May be null. * @return The modified error message to be displayed to the user. */ protected String toDisplayError(String theErrorMsg, Exception theException) { return theErrorMsg; } protected enum ResultType { BUNDLE, NONE, RESOURCE, TAGLIST } public static class CaptureInterceptor implements IClientInterceptor { private IHttpRequest myLastRequest; private IHttpResponse myLastResponse; // private String myResponseBody; public IHttpRequest getLastRequest() { return myLastRequest; } public IHttpResponse getLastResponse() { return myLastResponse; } // public String getLastResponseBody() { // return myResponseBody; @Override public void interceptRequest(IHttpRequest theRequest) { assert myLastRequest == null; myLastRequest = theRequest; } @Override public void interceptResponse(IHttpResponse theResponse) throws IOException { assert myLastResponse == null; myLastResponse = theResponse; // myLastResponse = ((ApacheHttpResponse) theResponse).getResponse(); // HttpEntity respEntity = myLastResponse.getEntity(); // if (respEntity != null) { // final byte[] bytes; // try { // bytes = IOUtils.toByteArray(respEntity.getContent()); // throw new InternalErrorException(e); // myResponseBody = new String(bytes, "UTF-8"); // myLastResponse.setEntity(new MyEntityWrapper(respEntity, bytes)); } // private static class MyEntityWrapper extends HttpEntityWrapper { // private byte[] myBytes; // public MyEntityWrapper(HttpEntity theWrappedEntity, byte[] theBytes) { // super(theWrappedEntity); // myBytes = theBytes; // @Override // public InputStream getContent() throws IOException { // return new ByteArrayInputStream(myBytes); // @Override // public void writeTo(OutputStream theOutstream) throws IOException { // theOutstream.write(myBytes); } }
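For reference, the descending sort applied in the conformance loaders above reduces to a comparator that orders entries by their resource count, largest first, and falls back to the type name on ties. A minimal, self-contained sketch of that ordering using a hypothetical ResourceCount holder instead of the FHIR model classes:

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

// Hypothetical stand-in for a REST resource entry carrying the resource-count extension value.
class ResourceCount {
    final String type;
    final long count;
    ResourceCount(String type, long count) { this.type = type; this.count = count; }
}

public class ResourceCountSortSketch {
    public static void main(String[] args) {
        List<ResourceCount> resources = new ArrayList<>();
        resources.add(new ResourceCount("Patient", 120));
        resources.add(new ResourceCount("Observation", 450));
        resources.add(new ResourceCount("Encounter", 120));

        // Highest count first; ties broken alphabetically by type name,
        // mirroring the compare() implementations in the loaders above.
        resources.sort(Comparator.comparingLong((ResourceCount r) -> r.count).reversed()
                .thenComparing(r -> r.type));

        resources.forEach(r -> System.out.println(r.type + " = " + r.count));
        // Observation = 450, Encounter = 120, Patient = 120
    }
}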
package edu.chl.proton.model; import java.io.IOException; import java.util.ArrayList; import java.util.List; public class Workspace implements IFileHandler, IDocumentHandler { private List<Document> tabs = new ArrayList<>(); private Document currentDocument; private Folder currentDirectory; private DocumentFactory factory = new DocumentFactory(); public Workspace() { //setCurrentDirectory(new Folder("Root")); } public void setCurrentDocument(Document doc) { currentDocument = doc; } public Document getCurrentDocument() { return currentDocument; } public void saveCurrentDocument() throws IOException { currentDocument.save(); } public void setCurrentDirectory(Folder folder) { currentDirectory = folder; } public String getCurrentDirectory() { return "./"; //currentDirectory.getPath(); } public void createDocument(DocumentType type) { factory.createDocument(type); } @Override public void openDocument(String filePath) { factory.getDocument(filePath); } @Override public void removeCurrentDocument() { } public void removeDocument(Document doc) { if (tabs.contains(doc)) { tabs.remove(doc); } } @Override public void setDirectory(String folderPath) { } @Override public String getDirectory() { return null; } public void setDirectory(Folder folder) { currentDirectory = folder; } public Folder getDirectory(Folder folder) { return currentDirectory; } @Override public void setText(List<String> text) { } @Override public List<String> getText() { return new ArrayList<>(); } @Override public void insertPart(String part) { currentDocument.insertPart(part); } }
package main.java.elegit; import com.jcraft.jsch.JSch; import com.jcraft.jsch.JSchException; import com.jcraft.jsch.Session; import de.jensd.fx.glyphs.GlyphsDude; import de.jensd.fx.glyphs.fontawesome.FontAwesomeIcon; import javafx.application.Platform; import javafx.beans.property.BooleanProperty; import javafx.beans.property.SimpleBooleanProperty; import javafx.collections.FXCollections; import javafx.collections.ObservableList; import javafx.event.EventHandler; import javafx.geometry.Insets; import javafx.scene.Node; import javafx.scene.control.*; import javafx.scene.layout.GridPane; import javafx.scene.text.Text; import javafx.util.Pair; import main.java.elegit.exceptions.CancelledAuthorizationException; import main.java.elegit.exceptions.NoRepoSelectedException; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.eclipse.jgit.api.LsRemoteCommand; import org.eclipse.jgit.api.TransportConfigCallback; import org.eclipse.jgit.api.Git; import org.eclipse.jgit.api.errors.GitAPIException; import org.eclipse.jgit.api.errors.InvalidRemoteException; import org.eclipse.jgit.api.errors.TransportException; import org.eclipse.jgit.transport.*; import org.eclipse.jgit.util.FS; import java.io.File; import java.io.IOException; import java.nio.file.Path; import java.nio.file.Paths; import java.util.Optional; /** * * An implementation of the abstract RepoHelperBuilder that builds * a ClonedRepoHelper by presenting dialogs to get the necessary * parameters. * */ public class ClonedRepoHelperBuilder extends RepoHelperBuilder { private static String prevRemoteURL, prevDestinationPath, prevRepoName; static final Logger logger = LogManager.getLogger(); public ClonedRepoHelperBuilder(SessionModel sessionModel) { super(sessionModel); } /** * Builds (with a grid) and shows dialogs that prompt the user for * information needed to construct a ClonedRepoHelper. * * @return the new ClonedRepoHelper. * @throws Exception when constructing the new ClonedRepoHelper */ @Override public RepoHelper getRepoHelperFromDialogs() throws GitAPIException, IOException, NoRepoSelectedException, CancelledAuthorizationException{ logger.info("Load remote repo dialog started"); Dialog<Pair<String, String>> dialog = createCloneDialog(); ButtonType cloneButtonType = setUpDialogButtons(dialog); // Create the Remote URL and destination path labels and fields. 
GridPane grid = new GridPane(); grid.setHgap(10); grid.setVgap(10); grid.setPadding(new Insets(10, 10, 10, 10)); Text instructionsText = new Text("Select an enclosing folder for the repository folder\n" + "to be created in."); // Set protocol ObservableList<String> protocolChoices = FXCollections.observableArrayList( "HTTP", "SSH private key" ); final ComboBox protocolChoiceList = new ComboBox(protocolChoices); protocolChoiceList.setValue("HTTP"); // Set URL TextField remoteURLField = new TextField(); remoteURLField.setPromptText("Remote URL"); if(prevRemoteURL != null) remoteURLField.setText(prevRemoteURL); TextField enclosingFolderField = new TextField(); enclosingFolderField.setEditable(false); // for now, it will just show the folder you selected if(prevDestinationPath != null) enclosingFolderField.setText(prevDestinationPath); Text enclosingDirectoryPathText = new Text(); Button chooseDirectoryButton = new Button(); Text folderIcon = GlyphsDude.createIcon(FontAwesomeIcon.FOLDER_OPEN); chooseDirectoryButton.setGraphic(folderIcon); chooseDirectoryButton.setOnAction(t -> { File cloneRepoDirectory = this.getDirectoryPathFromChooser("Choose clone destination folder", null); enclosingFolderField.setText(cloneRepoDirectory.toString()); enclosingDirectoryPathText.setText(cloneRepoDirectory.toString() + File.separator); }); TextField repoNameField = new TextField(); repoNameField.setPromptText("Repository name..."); if(prevRepoName != null) repoNameField.setText(prevRepoName); int instructionsRow = 0; int protocolRow = instructionsRow + 1; int remoteURLRow = protocolRow + 1; int enclosingFolderRow = remoteURLRow + 1; int repositoryNameRow = enclosingFolderRow + 1; grid.add(instructionsText, 0, instructionsRow, 2, 1); grid.add(new Label("Protocol:"), 0, protocolRow); grid.add(protocolChoiceList, 1, protocolRow); grid.add(new Label("Remote URL:"), 0, remoteURLRow); grid.add(remoteURLField, 1, remoteURLRow); grid.add(new Label("Enclosing folder:"), 0, enclosingFolderRow); grid.add(enclosingFolderField, 1, enclosingFolderRow); grid.add(chooseDirectoryButton, 2, enclosingFolderRow); grid.add(new Label("Repository name:"), 0, repositoryNameRow); grid.add(repoNameField, 1, repositoryNameRow); // Enable/Disable login button depending on whether a username was entered. Node cloneButton = dialog.getDialogPane().lookupButton(cloneButtonType); cloneButton.setDisable(true); // Do some validation: // On completion of every field, check that the other fields // are also filled in and with valid characters. Then enable login. BooleanProperty invalidRepoNameProperty = new SimpleBooleanProperty(repoNameField.getText().trim().contains("/") || repoNameField.getText().trim().contains(".")); cloneButton.disableProperty().bind(enclosingFolderField.textProperty().isEmpty() .or(repoNameField.textProperty().isEmpty()) .or(remoteURLField.textProperty().isEmpty()) .or(invalidRepoNameProperty)); repoNameField.textProperty().addListener((observable, oldValue, newValue) -> { invalidRepoNameProperty.set(newValue.trim().contains("/") || newValue.trim().contains(".")); }); dialog.getDialogPane().setContent(grid); // Request focus on the remote URL field by default. Platform.runLater(remoteURLField::requestFocus); // Convert the result to a destination-remote pair when the clone button is clicked. 
dialog.setResultConverter(dialogButton -> {
    if (dialogButton == cloneButtonType) {
        // Store these values for callback after a login (if user isn't logged in):
        prevRemoteURL = remoteURLField.getText().trim();
        prevDestinationPath = enclosingFolderField.getText().trim();
        prevRepoName = repoNameField.getText().trim();
        return new Pair<>(enclosingFolderField.getText().trim() + File.separator + repoNameField.getText().trim(),
                remoteURLField.getText().trim());
    }
    return null;
});

Optional<Pair<String, String>> result = dialog.showAndWait();

if (result.isPresent()) {
    // Unpack the destination-remote Pair created above:
    Path destinationPath = Paths.get(result.get().getKey());
    String remoteURL = result.get().getValue();
    try {
        //LsRemoteCommand lsRemoteCommand = new LsRemoteCommand(this.sessionModel.getCurrentRepo());
        //lsRemoteCommand.setRemote(remoteURL);
        LsRemoteCommand lsRemoteCommand = Git.lsRemoteRepository().setRemote(remoteURL);
        if (remoteURL.substring(0, 6).equals("ssh://")) {
            SshSessionFactory sshSessionFactory = new JschConfigSessionFactory() {
                @Override
                protected void configure(OpenSshConfig.Host host, Session session) {
                    // do nothing
                }

                @Override
                protected JSch createDefaultJSch(FS fs) throws JSchException {
                    JSch defaultJSch = super.createDefaultJSch(fs);
                    defaultJSch.addIdentity("/Users/dmusican/.ssh/mathcs", "my password");
                    return defaultJSch;
                }
            };
            lsRemoteCommand.setTransportConfigCallback(new TransportConfigCallback() {
                @Override
                public void configure(Transport transport) {
                    SshTransport sshTransport = (SshTransport) transport;
                    sshTransport.setSshSessionFactory(sshSessionFactory);
                }
            });
        }
        lsRemoteCommand.call();
    } catch (TransportException e) {
        // If the URL doesn't have a repo, a Transport Exception is thrown when this command is called.
        // We want the SessionController to report an InvalidRemoteException, though, because
        // that's the issue.
        logger.error("Invalid remote exception thrown");
        throw new InvalidRemoteException("Caught invalid repository when building a ClonedRepoHelper.");
    }
    // Without the above try/catch block, the next line would run and throw the desired InvalidRemoteException,
    // but it would create a destination folder for the repo before stopping. By catching the error above,
    // we prevent unnecessary folder creation.
    RepoHelper repoHelper = new ClonedRepoHelper(destinationPath, remoteURL, this.sessionModel.getDefaultUsername());
    return repoHelper;
} else {
    logger.info("Cloned repo helper dialog canceled");
    // This happens when the user pressed cancel.
    throw new NoRepoSelectedException();
}
}

private Dialog<Pair<String, String>> createCloneDialog() {
    Dialog<Pair<String, String>> dialog = new Dialog<>();
    dialog.setTitle("Clone");
    dialog.setHeaderText("Clone a remote repository");
    return dialog;
}

private ButtonType setUpDialogButtons(Dialog<Pair<String, String>> dialog) {
    // Set the button types.
    ButtonType cloneButtonType = new ButtonType("Clone", ButtonBar.ButtonData.OK_DONE);
    dialog.getDialogPane().getButtonTypes().addAll(cloneButtonType, ButtonType.CANCEL);
    dialog.setOnCloseRequest(new EventHandler<DialogEvent>() {
        @Override
        public void handle(DialogEvent event) {
            logger.info("Closed clone from remote dialog");
        }
    });
    return cloneButtonType;
}

public String getPrevDestinationPath() { return prevDestinationPath; }

public String getPrevRepoName() { return prevRepoName; }
}
package com.example;

import com.example.Contact;

import java.sql.*;
import java.util.ArrayList;

public class ContactQuery {
    private int page;
    private int perPage;
    private ArrayList<Contact> collection;
    private Connection conn;

    public ContactQuery(int page, int perPage) {
        this.page = page <= 0 ? 1 : page;
        this.perPage = perPage;
        this.conn = new DBConn().get();
        this.collection = new ArrayList<Contact>();
    }

    public ArrayList<Contact> all() {
        PreparedStatement ps = null;
        ResultSet rs = null;
        try {
            ps = prepareStatement();
            rs = ps.executeQuery();
            while (rs.next()) {
                Contact contact = new Contact(
                        rs.getString("id") == null ? null : rs.getInt("id"), // I don't know how to do it better :(
                        rs.getString("email"),
                        rs.getString("first_name"),
                        rs.getString("last_name"),
                        rs.getString("middle_name"),
                        rs.getString("date_of_birth"),
                        rs.getString("sex") == null ? null : rs.getInt("sex") // the same
                );
                collection.add(contact);
            }
        } catch (SQLException e) {
            System.err.println(e.getMessage());
        } finally {
            close(ps);
            close(rs);
        }
        return collection;
    }

    private PreparedStatement prepareStatement() throws SQLException {
        if (conn == null) {
            return null;
        }
        PreparedStatement ps = conn.prepareStatement(
                "select id," +
                " email," +
                " first_name," +
                " last_name," +
                " middle_name," +
                " date_of_birth," +
                " sex" +
                " from users" +
                " where deleted_at is null" +
                " order by id" +
                " limit ?" +
                " offset ?");
        ps.setInt(1, limit());
        ps.setInt(2, offset());
        return ps;
    }

    private int limit() {
        return perPage;
    }

    private int offset() {
        return perPage * (page - 1);
    }

    private void close(AutoCloseable c) {
        try {
            c.close();
        } catch (Exception e) {
            System.err.println(e.getMessage());
        }
    }
}
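The paging arithmetic in ContactQuery maps a 1-based page number to a SQL LIMIT/OFFSET pair: the limit is always perPage, and the offset is perPage * (page - 1). A tiny self-contained sketch of that arithmetic (the class name is made up; no database is involved):

// Minimal sketch of the LIMIT/OFFSET arithmetic used by ContactQuery above.
public class PaginationSketch {
    static int limit(int perPage) {
        return perPage;
    }

    static int offset(int page, int perPage) {
        // A non-positive page is normalised to 1, matching the constructor guard in ContactQuery.
        int normalisedPage = page <= 0 ? 1 : page;
        return perPage * (normalisedPage - 1);
    }

    public static void main(String[] args) {
        // page 1, 25 per page -> LIMIT 25 OFFSET 0
        System.out.println(limit(25) + " / " + offset(1, 25));
        // page 3, 25 per page -> LIMIT 25 OFFSET 50
        System.out.println(limit(25) + " / " + offset(3, 25));
    }
}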
package eme.generator; import java.util.List; import java.util.Map; import org.eclipse.emf.ecore.EAttribute; import org.eclipse.emf.ecore.EClass; import org.eclipse.emf.ecore.EClassifier; import org.eclipse.emf.ecore.EEnum; import org.eclipse.emf.ecore.EObject; import org.eclipse.emf.ecore.EOperation; import org.eclipse.emf.ecore.EParameter; import org.eclipse.emf.ecore.EReference; import org.eclipse.emf.ecore.EStructuralFeature; import org.eclipse.emf.ecore.EcoreFactory; import org.eclipse.emf.ecore.EcorePackage; import eme.model.ExtractedMethod; import eme.model.ExtractedType; import eme.model.datatypes.ExtractedDataType; import eme.model.datatypes.ExtractedField; import eme.model.datatypes.ExtractedParameter; /** * Generator class for Ecore members ({@link EOperation}s and {@link EStructuralFeature}s). * @author Timur Saglam */ public class EMemberGenerator { private final Map<String, EClassifier> eClassifierMap; private final EcoreFactory ecoreFactory; private final SelectionHelper selector; private final EDataTypeGenerator typeGenerator; /** * Basic constructor. * @param typeGenerator is the {@link EDataTypeGenerator} instance. * @param selector is the {@link SelectionHelper} instance. * @param eClassifierMap is the map of already generated {@link EClassifier}s. */ public EMemberGenerator(EDataTypeGenerator typeGenerator, SelectionHelper selector, Map<String, EClassifier> eClassifierMap) { this.typeGenerator = typeGenerator; this.selector = selector; this.eClassifierMap = eClassifierMap; ecoreFactory = EcoreFactory.eINSTANCE; } /** * Adds all the fields of an {@link ExtractedType} to a specific {@link EClass}. * @param type is the {@link ExtractedType} * @param eClass is the {@link EClass}. */ public void addFields(ExtractedType type, EClass eClass) { for (ExtractedField field : type.getFields()) { // for every field if (selector.allowsGenerating(field)) { // if it is selected addField(field, eClass); // add to EClass by creating an Ecore representation } } } /** * Adds the operations of an {@link ExtractedType} to an {@link EClass}. * @param type is the {@link ExtractedType}. * @param eClass is the {@link EClass}. */ public void addOperations(ExtractedType type, EClass eClass) { EOperation operation; for (ExtractedMethod method : type.getMethods()) { // for every method if (selector.allowsGenerating(method)) { // if should be generated. operation = ecoreFactory.createEOperation(); // create object operation.setName(method.getName()); // set name eClass.getEOperations().add(operation); typeGenerator.addTypeParameters(operation, method); TypeParameterSource source = new TypeParameterSource(operation); // source of type parameters addReturnType(operation, method.getReturnType(), source); // add return type addExceptions(operation, method, source); // add throws declarations addParameters(method, operation.getEParameters(), source); // add parameters } } } /** * Adds a root container {@link EReference} to an root container {@link EClass}. The root container * {@link EReference} is a one-to-many reference to {@link EObject}. * @param rootContainer is the root container {@link EClass}. 
*/ public void addRootContainerReference(EClass rootContainer) { EReference reference = ecoreFactory.createEReference(); reference.setName("containedElements"); reference.setUpperBound(-1); // one to many relation reference.setEType(EcorePackage.eINSTANCE.getEObject()); rootContainer.getEStructuralFeatures().add(reference); } /** * Adds the declared exceptions of an {@link ExtractedMethod} to an {@link EOperation}. */ private void addExceptions(EOperation operation, ExtractedMethod method, TypeParameterSource source) { for (ExtractedDataType exception : method.getThrowsDeclarations()) { typeGenerator.addException(operation, exception, source); } } /** * Adds a field to a {@link EClass} by creating a {@link EStructuralFeature} as Ecore representation, which is * either a {@link EReference} or an {@link EAttribute}. List types are represented by an {@link EStructuralFeature} * with an undefined upper bound property, which represents an one-to-many reference. */ private void addField(ExtractedField field, EClass eClass) { ExtractedDataType dataType = field; if (field.isListType() && selector.allowsMultiplicities()) { // only if one-to-many multiplicities are enabled dataType = field.getGenericArguments().get(0); // get type of generic argument: List<String> => String } EStructuralFeature representation = getRepresentation(dataType); addStructuralFeature(representation, dataType, field, eClass); // build reference } /** * Adds the parameters of an {@link ExtractedMethod} to a specific List of {@link EParameter}s. */ private void addParameters(ExtractedMethod method, List<EParameter> list, TypeParameterSource source) { EParameter eParameter; for (ExtractedParameter parameter : method.getParameters()) { // for every parameter eParameter = ecoreFactory.createEParameter(); eParameter.setName(parameter.getIdentifier()); // set identifier typeGenerator.addDataType(eParameter, parameter, source); // add type type to EParameter list.add(eParameter); } } /** * Adds the return type of an {@link ExtractedMethod} to an {@link EOperation}. */ private void addReturnType(EOperation operation, ExtractedDataType returnType, TypeParameterSource source) { if (returnType != null) { // if return type is not void typeGenerator.addDataType(operation, returnType, source); // add type to return type } } /** * Builds a structural feature from an extracted attribute and adds it to an EClass. A structural feature can be an * EAttribute or an EReference. If it is a reference, containment has to be set manually. */ private void addStructuralFeature(EStructuralFeature feature, ExtractedDataType dataType, ExtractedField field, EClass eClass) { feature.setName(field.getIdentifier()); // set name feature.setChangeable(!(field.isFinal() && selector.allowsUnchangeable())); // make unchangeable if final if (!dataType.equals(field)) { // if is list type feature.setUpperBound(-1); // no upper bound } typeGenerator.addDataType(feature, dataType, new TypeParameterSource(eClass)); // add type to attribute eClass.getEStructuralFeatures().add(feature); // add feature to EClass } /** * Factory method for the Ecore representations of any {@link ExtractedDataType}. */ private EStructuralFeature getRepresentation(ExtractedDataType dataType) { if (isEClass(dataType)) { // if type is EClass: return ecoreFactory.createEReference(); } else { // if it is EDataType: return ecoreFactory.createEAttribute(); } } /** * Checks whether a specific type name is an already created EClass. 
*/ private boolean isEClass(ExtractedDataType dataType) { String typeName = dataType.getFullType(); return eClassifierMap.containsKey(typeName) && !(eClassifierMap.get(typeName) instanceof EEnum); } }
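As the addField/addStructuralFeature methods above suggest, a Java field with a list type ends up as an EStructuralFeature whose upper bound is -1 (one-to-many), typed with the generic argument rather than the list itself. A rough, self-contained sketch of that Ecore construction, assuming only the standard EMF Ecore API (the Person/nicknames names are invented):

import org.eclipse.emf.ecore.EAttribute;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.EcoreFactory;
import org.eclipse.emf.ecore.EcorePackage;

public class FieldToFeatureSketch {
    public static void main(String[] args) {
        EcoreFactory factory = EcoreFactory.eINSTANCE;

        // The owning EClass, standing in for an extracted Java type.
        EClass person = factory.createEClass();
        person.setName("Person");

        // A field "List<String> nicknames" becomes an EAttribute typed as EString
        // with an unbounded upper bound, i.e. a one-to-many feature.
        EAttribute nicknames = factory.createEAttribute();
        nicknames.setName("nicknames");
        nicknames.setEType(EcorePackage.eINSTANCE.getEString());
        nicknames.setUpperBound(-1); // -1 marks "many", as in addStructuralFeature above

        // A final field would additionally be marked unchangeable:
        // nicknames.setChangeable(false);

        person.getEStructuralFeatures().add(nicknames);
        System.out.println(person.getName() + "." + nicknames.getName()
                + " upperBound=" + nicknames.getUpperBound());
    }
}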
package joliex.util;

import java.util.regex.Matcher;
import java.util.regex.Pattern;

import jolie.net.CommMessage;
import jolie.runtime.JavaService;
import jolie.runtime.Value;
import jolie.runtime.ValueVector;

public class StringUtils extends JavaService {
    public CommMessage length( CommMessage request ) {
        return CommMessage.createResponse( request, Value.create( request.value().strValue().length() ) );
    }

    public CommMessage replaceAll( CommMessage message ) {
        String regex = message.value().getChildren( "regex" ).first().strValue();
        String replacement = message.value().getChildren( "replacement" ).first().strValue();
        return CommMessage.createResponse( message, Value.create( message.value().strValue().replaceAll( regex, replacement ) ) );
    }

    public CommMessage join( CommMessage request ) {
        ValueVector vec = request.value().getChildren( "piece" );
        int size = vec.size() - 1;
        StringBuilder builder = new StringBuilder();
        if ( size >= 0 ) {
            String delimiter = request.value().getFirstChild( "delimiter" ).strValue();
            int i;
            for( i = 0; i < size; i++ ) {
                builder.append( vec.get( i ).strValue() ).append( delimiter );
            }
            builder.append( vec.get( i ).strValue() );
        }
        return CommMessage.createResponse( request, Value.create( builder.toString() ) );
    }

    public CommMessage trim( CommMessage message ) {
        return CommMessage.createResponse( message, Value.create( message.value().strValue().trim() ) );
    }

    public CommMessage split( CommMessage message ) {
        String str = message.value().strValue();
        int limit = 0;
        Value lValue = message.value().getFirstChild( "limit" );
        if ( lValue.isDefined() ) {
            limit = lValue.intValue();
        }
        String[] ss = str.split( message.value().getFirstChild( "regex" ).strValue(), limit );
        Value value = Value.create();
        for( int i = 0; i < ss.length; i++ ) {
            value.getNewChild( "result" ).add( Value.create( ss[ i ] ) );
        }
        return CommMessage.createResponse( message, value );
    }

    public CommMessage splitByLength( CommMessage request ) {
        String str = request.value().strValue();
        int length = request.value().getFirstChild( "length" ).intValue();
        Value responseValue = Value.create();
        ValueVector result = responseValue.getChildren( "result" );
        int stringLength = str.length();
        boolean keepRun = true;
        int offset = 0;
        while( keepRun ) {
            if ( offset + length >= stringLength ) {
                keepRun = false;
                length = stringLength - offset;
            }
            result.add( Value.create( str.substring( offset, offset += length ) ) );
        }
        return CommMessage.createResponse( request, responseValue );
    }

    public CommMessage match( CommMessage message ) {
        Pattern p = Pattern.compile( message.value().getFirstChild( "regex" ).strValue() );
        Matcher m = p.matcher( message.value().strValue() );
        Value response = Value.create();
        if ( m.matches() ) {
            response.setValue( 1 );
            ValueVector groups = response.getChildren( "group" );
            groups.add( Value.create( m.group( 0 ) ) );
            for( int i = 0; i < m.groupCount(); i++ ) {
                groups.add( Value.create( m.group( i + 1 ) ) );
            }
        } else {
            response.setValue( 0 );
        }
        return CommMessage.createResponse( message, response );
    }

    public CommMessage leftPad( CommMessage request ) {
        String orig = request.value().strValue();
        int length = request.value().getFirstChild( "length" ).intValue();
        if ( orig.length() >= length ) {
            return CommMessage.createResponse( request, Value.create( orig ) );
        }
        char padChar = request.value().getFirstChild( "char" ).strValue().charAt( 0 );
        StringBuilder builder = new StringBuilder();
        int padLength = length - orig.length(); // number of pad characters still needed
        for( int i = 0; i < padLength; i++ ) {
            builder.append( padChar );
        }
        builder.append( orig );
        return CommMessage.createResponse( request, Value.create( builder.toString() ) );
    }
}
package org.jgroups.protocols; import org.jgroups.Address; import org.jgroups.Event; import org.jgroups.PhysicalAddress; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; /** * Loopback transport shared by all channels within the same VM. Property for testing is that no messages are lost. Allows * us to test various protocols (with ProtocolTester) at maximum speed. * @author Bela Ban */ public class SHARED_LOOPBACK extends TP { private PhysicalAddress physical_addr=null; /** Map of cluster names and address-protocol mappings. Used for routing messages to all or single members */ private static final ConcurrentMap<String,Map<Address,SHARED_LOOPBACK>> routing_table=new ConcurrentHashMap<String,Map<Address,SHARED_LOOPBACK>>(); public boolean supportsMulticasting() { return false; } public String toString() { return "SHARED_LOOPBACK(local address: " + local_addr + ')'; } public void sendMulticast(byte[] data, int offset, int length) throws Exception { Map<Address,SHARED_LOOPBACK> dests=routing_table.get(channel_name); if(dests == null) { if(log.isWarnEnabled()) log.warn("no destination found for " + channel_name); return; } for(Map.Entry<Address,SHARED_LOOPBACK> entry: dests.entrySet()) { Address dest=entry.getKey(); SHARED_LOOPBACK target=entry.getValue(); try { target.receive(local_addr, data, offset, length); } catch(Throwable t) { log.error("failed sending message to " + dest, t); } } } public void sendUnicast(PhysicalAddress dest, byte[] data, int offset, int length) throws Exception { Map<Address,SHARED_LOOPBACK> dests=routing_table.get(channel_name); if(dests == null) { if(log.isWarnEnabled()) log.warn("no destination found for " + channel_name); return; } SHARED_LOOPBACK target=dests.get(dest); if(target == null) { if(log.isWarnEnabled()) log.warn("destination address " + dest + " not found"); return; } target.receive(local_addr, data, offset, length); } protected void sendToSingleMember(Address dest, byte[] buf, int offset, int length) throws Exception { Map<Address,SHARED_LOOPBACK> dests=routing_table.get(channel_name); if(dests == null) { if(log.isWarnEnabled()) log.warn("no destination found for " + channel_name); return; } SHARED_LOOPBACK target=dests.get(dest); if(target == null) { if(log.isWarnEnabled()) log.warn("destination address " + dest + " not found"); return; } target.receive(local_addr, buf, offset, length); } public String getInfo() { return toString(); } protected PhysicalAddress getPhysicalAddress() { return physical_addr; } public Object down(Event evt) { Object retval=super.down(evt); switch(evt.getType()) { case Event.CONNECT: case Event.CONNECT_WITH_STATE_TRANSFER: case Event.CONNECT_USE_FLUSH: case Event.CONNECT_WITH_STATE_TRANSFER_USE_FLUSH: register(channel_name, local_addr, this); break; case Event.SET_LOCAL_ADDRESS: local_addr=(Address)evt.getArg(); break; } return retval; } public void stop() { super.stop(); // unregister(channel_name, local_addr); } protected static void register(String channel_name, Address local_addr, SHARED_LOOPBACK shared_loopback) { Map<Address,SHARED_LOOPBACK> map=routing_table.get(channel_name); if(map == null) { map=new ConcurrentHashMap<Address,SHARED_LOOPBACK>(); Map<Address,SHARED_LOOPBACK> tmp=routing_table.putIfAbsent(channel_name,map); if(tmp != null) map=tmp; } map.put(local_addr, shared_loopback); } protected static void unregister(String channel_name, Address local_addr) { Map<Address,SHARED_LOOPBACK> map=routing_table.get(channel_name); if(map != null) { 
map.remove(local_addr); if(map.isEmpty()) { routing_table.remove(channel_name); } } } }
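The static register() helper in SHARED_LOOPBACK relies on the ConcurrentMap.putIfAbsent idiom to create the per-cluster member map exactly once under concurrency. A small stand-alone sketch of the same idiom with plain strings (the RoutingTableSketch name and String payloads are illustrative only):

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

public class RoutingTableSketch {
    // cluster name -> (member address -> transport), mirroring routing_table above
    private static final ConcurrentMap<String, Map<String, String>> ROUTES =
            new ConcurrentHashMap<>();

    static void register(String cluster, String member, String transport) {
        Map<String, String> map = ROUTES.get(cluster);
        if (map == null) {
            map = new ConcurrentHashMap<>();
            // Another thread may have installed a map in the meantime;
            // putIfAbsent tells us which instance actually won the race.
            Map<String, String> existing = ROUTES.putIfAbsent(cluster, map);
            if (existing != null) {
                map = existing;
            }
        }
        map.put(member, transport);
    }

    public static void main(String[] args) {
        register("demo-cluster", "A", "loopback-A");
        register("demo-cluster", "B", "loopback-B");
        System.out.println(ROUTES);
    }
}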
package org.lilycms.indexer.engine; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.tika.metadata.Metadata; import org.apache.tika.parser.AutoDetectParser; import org.apache.tika.parser.ParseContext; import org.apache.tika.parser.Parser; import org.apache.tika.sax.BodyContentHandler; import org.lilycms.indexer.model.indexerconf.DerefValue; import org.lilycms.indexer.model.indexerconf.DerefValue.Follow; import org.lilycms.indexer.model.indexerconf.DerefValue.FieldFollow; import org.lilycms.indexer.model.indexerconf.DerefValue.VariantFollow; import org.lilycms.indexer.model.indexerconf.DerefValue.MasterFollow; import org.lilycms.indexer.model.indexerconf.FieldValue; import org.lilycms.indexer.model.indexerconf.IndexerConf; import org.lilycms.indexer.model.indexerconf.Value; import org.lilycms.indexer.model.indexerconf.Formatter; import org.lilycms.repository.api.*; import org.lilycms.util.io.Closer; import org.lilycms.util.repo.VersionTag; import java.io.InputStream; import java.util.*; /** * Evaluates an index field value (a {@link Value}) to a value. */ public class ValueEvaluator { private Log log = LogFactory.getLog(getClass()); private IndexerConf conf; public ValueEvaluator(IndexerConf conf) { this.conf = conf; } /** * Evaluates a value for a given record & vtag. * * @return null if there is no value */ public List<String> eval(Value valueDef, IdRecord record, Repository repository, String vtag) { Object value = evalValue(valueDef, record, repository, vtag); if (value == null) return null; if (valueDef.extractContent()) { return extractContent(valueDef, value, record, repository); } ValueType valueType = valueDef.getValueType(); Formatter formatter = valueDef.getFormatter() != null ? conf.getFormatters().getFormatter(valueDef.getFormatter()) : conf.getFormatters().getFormatter(valueType); return formatter.format(value, valueType); } private List<String> extractContent(Value valueDef, Object value, IdRecord record, Repository repository) { // At this point we can be sure the value will be a blob, this is validated during // the construction of the indexer conf. ValueType valueType = valueDef.getValueType(); List<Blob> blobs = new ArrayList<Blob>(); collectBlobs(value, valueType, blobs); if (blobs.size() == 0) return null; List<String> result = new ArrayList<String>(blobs.size()); Parser parser = new AutoDetectParser(); // TODO add some debug (or even info) logging to indicate what we are working on. for (Blob blob : blobs) { InputStream is = null; try { is = repository.getInputStream(blob); // TODO make write limit configurable BodyContentHandler ch = new BodyContentHandler(); Metadata metadata = new Metadata(); metadata.add(Metadata.CONTENT_TYPE, blob.getMimetype()); if (blob.getName() != null) metadata.add(Metadata.RESOURCE_NAME_KEY, blob.getName()); ParseContext parseContext = new ParseContext(); parser.parse(is, ch, metadata, parseContext); String text = ch.toString(); if (text.length() > 0) result.add(text); } catch (Throwable t) { log.error("Error extracting blob content. Field: " + valueDef.getTargetFieldType().getName() + ", record: " + record.getId(), t); } finally { Closer.close(is); } } return result.isEmpty() ? 
null : result; } private void collectBlobs(Object value, ValueType valueType, List<Blob> blobs) { if (valueType.isMultiValue()) { List values = (List)value; for (Object item : values) collectBlobsHierarchical(item, valueType, blobs); } else { collectBlobsHierarchical(value, valueType, blobs); } } private void collectBlobsHierarchical(Object value, ValueType valueType, List<Blob> blobs) { if (valueType.isHierarchical()) { HierarchyPath hierarchyPath = (HierarchyPath)value; for (Object item : hierarchyPath.getElements()) blobs.add((Blob)item); } else { blobs.add((Blob)value); } } private Object evalValue(Value value, IdRecord record, Repository repository, String vtag) { if (value instanceof FieldValue) { return evalFieldValue((FieldValue)value, record, repository, vtag); } else if (value instanceof DerefValue) { return evalDerefValue((DerefValue)value, record, repository, vtag); } else { throw new RuntimeException("Unexpected type of value: " + value.getClass().getName()); } } private Object evalFieldValue(FieldValue value, IdRecord record, Repository repository, String vtag) { try { return record.getField(value.getFieldType().getId()); } catch (FieldNotFoundException e) { // TODO return null; } } private Object evalDerefValue(DerefValue deref, IdRecord record, Repository repository, String vtag) { FieldType fieldType = deref.getTargetFieldType(); if (vtag.equals(VersionTag.VERSIONLESS_TAG) && fieldType.getScope() != Scope.NON_VERSIONED) { // From a versionless record, it is impossible to deref a versioned field. return null; } List<IdRecord> records = new ArrayList<IdRecord>(); records.add(record); for (Follow follow : deref.getFollows()) { List<IdRecord> linkedRecords = new ArrayList<IdRecord>(); for (IdRecord item : records) { List<IdRecord> evalResult = evalFollow(deref, follow, item, repository, vtag); if (evalResult != null) { linkedRecords.addAll(evalResult); } } records = linkedRecords; } if (records.isEmpty()) return null; List<Object> result = new ArrayList<Object>(); for (IdRecord item : records) { if (item.hasField(fieldType.getId())) { Object value = item.getField(fieldType.getId()); if (value != null) { if (deref.getTargetField().getValueType().isMultiValue()) { result.addAll((List)value); } else { result.add(value); } } } } if (result.isEmpty()) return null; if (!deref.getValueType().isMultiValue()) return result.get(0); return result; } private List<IdRecord> evalFollow(DerefValue deref, Follow follow, IdRecord record, Repository repository, String vtag) { if (follow instanceof FieldFollow) { return evalFieldFollow(deref, (FieldFollow)follow, record, repository, vtag); } else if (follow instanceof VariantFollow) { return evalVariantFollow((VariantFollow)follow, record, repository, vtag); } else if (follow instanceof MasterFollow) { return evalMasterFollow((MasterFollow)follow, record, repository, vtag); } else { throw new RuntimeException("Unexpected type of follow: " + follow.getClass().getName()); } } private List<IdRecord> evalFieldFollow(DerefValue deref, FieldFollow follow, IdRecord record, Repository repository, String vtag) { FieldType fieldType = follow.getFieldType(); if (!record.hasField(fieldType.getId())) { return null; } if (vtag.equals(VersionTag.VERSIONLESS_TAG) && fieldType.getScope() != Scope.NON_VERSIONED) { // From a versionless record, it is impossible to deref a versioned field. // This explicit check could be removed if in case of the versionless vtag we only read // the non-versioned fields of the record. 
However, it is not possible to do this right // now with the repository API. return null; } Object value = record.getField(fieldType.getId()); if (value instanceof Link) { RecordId recordId = ((Link)value).resolve(record, repository.getIdGenerator()); IdRecord linkedRecord = resolveRecordId(recordId, vtag, repository); return linkedRecord == null ? null : Collections.singletonList(linkedRecord); } else if (value instanceof List && ((List)value).size() > 0 && ((List)value).get(0) instanceof Link) { List list = (List)value; List<IdRecord> result = new ArrayList<IdRecord>(list.size()); for (Object link : list) { RecordId recordId = ((Link)link).resolve(record, repository.getIdGenerator()); IdRecord linkedRecord = resolveRecordId(recordId, vtag, repository); if (linkedRecord != null) { result.add(linkedRecord); } } return list.isEmpty() ? null : result; } return null; } private IdRecord resolveRecordId(RecordId recordId, String vtag, Repository repository) { try { // TODO we could limit this to only load the field necessary for the next follow return VersionTag.getIdRecord(recordId, vtag, repository); } catch (Exception e) { return null; } } private List<IdRecord> evalVariantFollow(VariantFollow follow, IdRecord record, Repository repository, String vtag) { RecordId recordId = record.getId(); Map<String, String> varProps = new HashMap<String, String>(recordId.getVariantProperties()); for (String dimension : follow.getDimensions()) { if (!varProps.containsKey(dimension)) { return null; } varProps.remove(dimension); } RecordId resolvedRecordId = repository.getIdGenerator().newRecordId(recordId.getMaster(), varProps); try { IdRecord lessDimensionedRecord = VersionTag.getIdRecord(resolvedRecordId, vtag, repository); return lessDimensionedRecord == null ? null : Collections.singletonList(lessDimensionedRecord); } catch (Exception e) { return null; } } private List<IdRecord> evalMasterFollow(MasterFollow follow, IdRecord record, Repository repository, String vtag) { if (record.getId().isMaster()) return null; RecordId masterId = record.getId().getMaster(); try { IdRecord master = VersionTag.getIdRecord(masterId, vtag, repository); return master == null ? null : Collections.singletonList(master); } catch (Exception e) { return null; } } }
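The extractContent path in ValueEvaluator hands each blob stream to Tika's AutoDetectParser with a BodyContentHandler to get plain text back. A reduced sketch of that call sequence, reading from an in-memory stream instead of the repository (the sample text and class name are invented; the Metadata constants mirror the ones used above):

import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

import org.apache.tika.metadata.Metadata;
import org.apache.tika.parser.AutoDetectParser;
import org.apache.tika.parser.ParseContext;
import org.apache.tika.parser.Parser;
import org.apache.tika.sax.BodyContentHandler;

public class BlobTextExtractionSketch {
    public static void main(String[] args) throws Exception {
        byte[] blobBytes = "Hello from a blob".getBytes(StandardCharsets.UTF_8);

        Parser parser = new AutoDetectParser();
        BodyContentHandler handler = new BodyContentHandler(); // default write limit applies
        Metadata metadata = new Metadata();
        metadata.add(Metadata.CONTENT_TYPE, "text/plain"); // hint, as done with blob.getMimetype() above
        ParseContext parseContext = new ParseContext();

        try (InputStream is = new ByteArrayInputStream(blobBytes)) {
            parser.parse(is, handler, metadata, parseContext);
        }

        String text = handler.toString();
        if (text.length() > 0) {
            System.out.println(text.trim());
        }
    }
}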
package org.jpos.iso; import java.io.IOException; import java.io.PrintStream; import java.io.ObjectInput; import java.io.ObjectOutput; import java.io.Externalizable; import java.util.Map; import java.util.Iterator; import org.jpos.util.FSDMsg; public class FSDISOMsg extends ISOMsg { FSDMsg fsd; public FSDISOMsg () { super(); } public FSDISOMsg (FSDMsg fsd) { super(); this.fsd = fsd; } public String getMTI() { return getString(0); } public byte[] pack() throws ISOException { try { return fsd.packToBytes(); } catch (Exception e) { throw new ISOException (e); } } public int unpack(byte[] b) throws ISOException { try { fsd.unpack (b); return b.length; } catch (Exception e) { throw new ISOException (e); } } public FSDMsg getFSDMsg() { return fsd; } public String getString (int fldno) { String s = fsd.get (Integer.toString(fldno)); return s; } public boolean hasField (int fldno) { return getString(fldno) != null; } public void dump (PrintStream p, String indent) { if (fsd != null) fsd.dump (p, indent); } public void writeExternal (ObjectOutput out) throws IOException { out.writeByte (0); // reserved for future expansion (version id) out.writeUTF (fsd.getBasePath()); out.writeUTF (fsd.getBaseSchema()); out.writeObject (fsd.getMap()); } public void readExternal (ObjectInput in) throws IOException, ClassNotFoundException { in.readByte(); // ignore version for now String basePath = in.readUTF(); String baseSchema = in.readUTF(); fsd = new FSDMsg (basePath, baseSchema); Map map = (Map) in.readObject(); Iterator iter = map.entrySet().iterator(); while (iter.hasNext()) { Map.Entry entry = (Map.Entry) iter.next(); fsd.set ((String) entry.getKey(), (String) entry.getValue()); } } private static final long serialVersionUID = 1L; }
package com.ziclix.python.sql; import java.sql.Connection; import java.sql.SQLException; import java.util.Collections; import java.util.WeakHashMap; import java.util.Set; import org.python.core.ClassDictInit; import org.python.core.ContextManager; import org.python.core.Py; import org.python.core.PyBuiltinMethodSet; import org.python.core.PyException; import org.python.core.PyInteger; import org.python.core.PyList; import org.python.core.PyObject; import org.python.core.PyString; import org.python.core.PyUnicode; import org.python.core.ThreadState; import org.python.core.Traverseproc; import org.python.core.Visitproc; import com.ziclix.python.sql.util.PyArgParser; /** * A connection to the database. * * @author brian zimmer */ public class PyConnection extends PyObject implements ClassDictInit, ContextManager, Traverseproc { /** True if closed. */ protected boolean closed; /** Whether transactions are supported. */ protected boolean supportsTransactions; /** Whether multiple ResultSets are supported. */ protected boolean supportsMultipleResultSets; /** The underlying java.sql.Connection. */ protected Connection connection; /** Underlying cursors. */ private Set<PyCursor> cursors; /** Underlying statements. */ private Set<PyStatement> statements; /** Field __members__ */ protected static PyList __members__; /** Field __methods__ */ protected static PyList __methods__; static { PyObject[] m = new PyObject[5]; m[0] = new PyString("close"); m[1] = new PyString("commit"); m[2] = new PyString("cursor"); m[3] = new PyString("rollback"); m[4] = new PyString("nativesql"); __methods__ = new PyList(m); m = new PyObject[10]; m[0] = new PyString("autocommit"); m[1] = new PyString("dbname"); m[2] = new PyString("dbversion"); m[3] = new PyString("drivername"); m[4] = new PyString("driverversion"); m[5] = new PyString("url"); m[6] = new PyString("__connection__"); m[7] = new PyString("__cursors__"); m[8] = new PyString("__statements__"); m[9] = new PyString("closed"); __members__ = new PyList(m); } /** * Create a PyConnection with the open connection. * * @param connection * @throws SQLException */ public PyConnection(Connection connection) throws SQLException { this.closed = false; cursors = Collections.newSetFromMap(new WeakHashMap<PyCursor, Boolean>()); cursors = Collections.synchronizedSet(cursors); this.connection = connection; statements = Collections.newSetFromMap(new WeakHashMap<PyStatement, Boolean>()); statements = Collections.synchronizedSet(statements); this.supportsTransactions = this.connection.getMetaData().supportsTransactions(); this.supportsMultipleResultSets = this.connection.getMetaData().supportsMultipleResultSets(); if (this.supportsTransactions) { this.connection.setAutoCommit(false); } } /** * Produces a string representation of the object. * * @return string representation of the object. 
*/ @Override public String toString() { try { return String.format("<PyConnection object at %s user='%s', url='%s'>", Py.idstr(this), connection.getMetaData().getUserName(), connection.getMetaData().getURL()); } catch (SQLException e) { return String.format("<PyConnection object at %s", Py.idstr(this)); } } /** * Method classDictInit * * @param dict */ static public void classDictInit(PyObject dict) { dict.__setitem__("autocommit", new PyInteger(0)); dict.__setitem__("close", new ConnectionFunc("close", 0, 0, 0, zxJDBC.getString("close"))); dict.__setitem__("commit", new ConnectionFunc("commit", 1, 0, 0, zxJDBC.getString("commit"))); dict.__setitem__("cursor", new ConnectionFunc("cursor", 2, 0, 4, zxJDBC.getString("cursor"))); dict.__setitem__("rollback", new ConnectionFunc("rollback", 3, 0, 0, zxJDBC.getString("rollback"))); dict.__setitem__("nativesql", new ConnectionFunc("nativesql", 4, 1, 1, zxJDBC.getString("nativesql"))); dict.__setitem__("__enter__", new ConnectionFunc("__enter__", 5, 0, 0, "__enter__")); dict.__setitem__("__exit__", new ConnectionFunc("__exit__", 6, 3, 3, "__exit__")); // hide from python dict.__setitem__("initModule", null); dict.__setitem__("toString", null); dict.__setitem__("setConnection", null); dict.__setitem__("getPyClass", null); dict.__setitem__("connection", null); dict.__setitem__("classDictInit", null); dict.__setitem__("cursors", null); } /** * Sets the attribute. * * @param name * @param value */ @Override public void __setattr__(String name, PyObject value) { if ("autocommit".equals(name)) { try { if (this.supportsTransactions) { this.connection.setAutoCommit(value.__nonzero__()); } } catch (SQLException e) { throw zxJDBC.makeException(zxJDBC.DatabaseError, e); } return; } super.__setattr__(name, value); } /** * Finds the attribute. * * @param name the name of the attribute of interest * @return the value for the attribute of the specified name */ @Override public PyObject __findattr_ex__(String name) { if ("autocommit".equals(name)) { try { return connection.getAutoCommit() ? 
Py.One : Py.Zero; } catch (SQLException e) { throw zxJDBC.makeException(zxJDBC.DatabaseError, e); } } else if ("dbname".equals(name)) { try { return Py.newString(this.connection.getMetaData().getDatabaseProductName()); } catch (SQLException e) { throw zxJDBC.makeException(zxJDBC.DatabaseError, e); } } else if ("dbversion".equals(name)) { try { return Py.newString(this.connection.getMetaData().getDatabaseProductVersion()); } catch (SQLException e) { throw zxJDBC.makeException(zxJDBC.DatabaseError, e); } } else if ("drivername".equals(name)) { try { return Py.newString(this.connection.getMetaData().getDriverName()); } catch (SQLException e) { throw zxJDBC.makeException(zxJDBC.DatabaseError, e); } } else if ("driverversion".equals(name)) { try { return Py.newString(this.connection.getMetaData().getDriverVersion()); } catch (SQLException e) { throw zxJDBC.makeException(zxJDBC.DatabaseError, e); } } else if ("url".equals(name)) { try { return Py.newString(this.connection.getMetaData().getURL()); } catch (SQLException e) { throw zxJDBC.makeException(zxJDBC.DatabaseError, e); } } else if ("__connection__".equals(name)) { return Py.java2py(this.connection); } else if ("__cursors__".equals(name)) { return Py.java2py(Collections.unmodifiableSet(this.cursors)); } else if ("__statements__".equals(name)) { return Py.java2py(Collections.unmodifiableSet(this.statements)); } else if ("__methods__".equals(name)) { return __methods__; } else if ("__members__".equals(name)) { return __members__; } else if ("closed".equals(name)) { return Py.newBoolean(closed); } return super.__findattr_ex__(name); } /** * Close the connection now (rather than whenever __del__ is called). The connection * will be unusable from this point forward; an Error (or subclass) exception will be * raised if any operation is attempted with the connection. The same applies to all * cursor objects trying to use the connection. */ public void close() { if (closed) { throw zxJDBC.makeException(zxJDBC.ProgrammingError, "connection is closed"); } // mark ourselves closed now so that any callbacks we get from closing down // cursors and statements to not try and modify our internal sets this.closed = true; synchronized (this.cursors) { for (PyCursor cursor: cursors) { cursor.close(); } this.cursors.clear(); } synchronized (this.statements) { for (PyStatement statement : statements) { statement.close(); } this.statements.clear(); } try { this.connection.close(); } catch (SQLException e) { throw zxJDBC.makeException(e); } } /** * Commit any pending transaction to the database. Note that if the database supports * an auto-commit feature, this must be initially off. An interface method may be * provided to turn it back on. * <p/> * Database modules that do not support transactions should implement this method with * void functionality. */ public void commit() { if (closed) { throw zxJDBC.makeException(zxJDBC.ProgrammingError, "connection is closed"); } if (!this.supportsTransactions) { return; } try { this.connection.commit(); } catch (SQLException e) { throw zxJDBC.makeException(e); } } /** * <i>This method is optional since not all databases provide transaction support.</i> * <p/> * In case a database does provide transactions this method causes the database to * roll back to the start of any pending transaction. Closing a connection without * committing the changes first will cause an implicit rollback to be performed. 
*/ public void rollback() { if (closed) { throw zxJDBC.makeException(zxJDBC.ProgrammingError, "connection is closed"); } if (!this.supportsTransactions) { return; } try { this.connection.rollback(); } catch (SQLException e) { throw zxJDBC.makeException(e); } } /** * Converts the given SQL statement into the system's native SQL grammar. A driver may * convert the JDBC sql grammar into its system's native SQL grammar prior to sending * it; this method returns the native form of the statement that the driver would have * sent. * * @param nativeSQL * @return the native form of this statement */ public PyObject nativesql(PyObject nativeSQL) { if (closed) { throw zxJDBC.makeException(zxJDBC.ProgrammingError, "connection is closed"); } if (nativeSQL == Py.None) { return Py.None; } try { if (nativeSQL instanceof PyUnicode) { return Py.newUnicode(this.connection.nativeSQL(nativeSQL.toString())); } return Py.newString(this.connection.nativeSQL(nativeSQL.__str__().toString())); } catch (SQLException e) { throw zxJDBC.makeException(e); } } /** * Return a new Cursor Object using the connection. If the database does not provide a * direct cursor concept, the module will have to emulate cursors using other means to * the extent needed by this specification. * * @return a new cursor using this connection */ public PyCursor cursor() { return cursor(false); } /** * Return a new Cursor Object using the connection. If the database does not provide a * direct cursor concept, the module will have to emulate cursors using other means to * the extent needed by this specification. * * @param dynamicFetch if true, dynamically iterate the result * @return a new cursor using this connection */ public PyCursor cursor(boolean dynamicFetch) { return this.cursor(dynamicFetch, Py.None, Py.None); } /** * Return a new Cursor Object using the connection. If the database does not provide a * direct cursor concept, the module will have to emulate cursors using other means to * the extent needed by this specification. * * @param dynamicFetch if true, dynamically iterate the result * @param rsType the type of the underlying ResultSet * @param rsConcur the concurrency of the underlying ResultSet * @return a new cursor using this connection */ public PyCursor cursor(boolean dynamicFetch, PyObject rsType, PyObject rsConcur) { if (closed) { throw zxJDBC.makeException(zxJDBC.ProgrammingError, "connection is closed"); } PyCursor cursor = new PyExtendedCursor(this, dynamicFetch, rsType, rsConcur); this.cursors.add(cursor); return cursor; } /** * Remove an open PyCursor. 
* * @param cursor */ void remove(PyCursor cursor) { if (closed) { return; } this.cursors.remove(cursor); } /** * Method register * * @param statement statement */ void add(PyStatement statement) { if (closed) { return; } this.statements.add(statement); } /** * Method contains * * @param statement statement * @return boolean */ boolean contains(PyStatement statement) { if (closed) { return false; } return this.statements.contains(statement); } public PyObject __enter__(ThreadState ts) { return this; } public PyObject __enter__() { return this; } public boolean __exit__(ThreadState ts, PyException exception) { if (exception == null) { commit(); } else { rollback(); } return false; } public boolean __exit__(PyObject type, PyObject value, PyObject traceback) { if (type == null || type == Py.None) { commit(); } else { rollback(); } return false; } /* Traverseproc implementation */ @Override public int traverse(Visitproc visit, Object arg) { int retVal; for (PyObject ob: cursors) { if (ob != null) { retVal = visit.visit(ob, arg); if (retVal != 0) { return retVal; } } } for (PyObject ob: statements) { if (ob != null) { retVal = visit.visit(ob, arg); if (retVal != 0) { return retVal; } } } return 0; } @Override public boolean refersDirectlyTo(PyObject ob) { if (ob == null) { return false; } if (cursors != null && cursors.contains(ob)) { return true; } else if (statements != null && statements.contains(ob)) { return true; } else { return false; } } } class ConnectionFunc extends PyBuiltinMethodSet { ConnectionFunc(String name, int index, int minargs, int maxargs, String doc) { super(name, index, minargs, maxargs, doc, PyConnection.class); } @Override public PyObject __call__() { PyConnection c = (PyConnection) __self__; switch (index) { case 0: c.close(); return Py.None; case 1: c.commit(); return Py.None; case 2: return c.cursor(); case 3: c.rollback(); return Py.None; case 5: return c.__enter__(); default: throw info.unexpectedCall(0, false); } } @Override public PyObject __call__(PyObject arg) { PyConnection c = (PyConnection) __self__; switch (index) { case 2: return c.cursor(arg.__nonzero__()); case 4: return c.nativesql(arg); default: throw info.unexpectedCall(1, false); } } @Override public PyObject __call__(PyObject arg1, PyObject arg2, PyObject arg3) { PyConnection c = (PyConnection) __self__; switch (index) { case 2: return c.cursor(arg1.__nonzero__(), arg2, arg3); case 6: return Py.newBoolean(c.__exit__(arg1, arg2, arg3)); default: throw info.unexpectedCall(3, false); } } @Override public PyObject __call__(PyObject[] args, String[] keywords) { PyConnection c = (PyConnection) __self__; PyArgParser parser = new PyArgParser(args, keywords); switch (index) { case 2: PyObject dynamic = parser.kw("dynamic", Py.None); PyObject rstype = parser.kw("rstype", Py.None); PyObject rsconcur = parser.kw("rsconcur", Py.None); dynamic = (parser.numArg() >= 1) ? parser.arg(0) : dynamic; rstype = (parser.numArg() >= 2) ? parser.arg(1) : rstype; rsconcur = (parser.numArg() >= 3) ? parser.arg(2) : rsconcur; return c.cursor(dynamic.__nonzero__(), rstype, rsconcur); default: throw info.unexpectedCall(args.length, true); } } }
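/*
 * Illustrative sketch (not part of the zxJDBC sources above): shows how the PyConnection
 * lifecycle defined above is typically driven from Java code. Obtaining the PyConnection
 * instance is assumed (its constructor is not shown in this excerpt), and the package and
 * class names of this sketch are hypothetical; everything else only uses methods visible
 * above: cursor(), commit(), rollback() and close(). The try/catch mirrors the __exit__
 * contract: commit on success, rollback on failure, and close() afterwards so that all
 * registered cursors and statements are released.
 */
package com.ziclix.python.sql.examples; // hypothetical package for this sketch

import com.ziclix.python.sql.PyConnection; // assumed location of the classes shown above
import com.ziclix.python.sql.PyCursor;

public final class ConnectionLifecycleSketch {

    private ConnectionLifecycleSketch() {
    }

    /** Runs some work in a transaction against an already opened connection. */
    public static void runInTransaction(PyConnection con) {
        try {
            PyCursor cur = con.cursor();      // registers the cursor with the connection
            // ... execute statements through the cursor here ...
            con.commit();                     // like __exit__ without an exception
        } catch (RuntimeException e) {
            con.rollback();                   // like __exit__ with an exception
            throw e;                          // __exit__ returns false, so the error still propagates
        } finally {
            con.close();                      // also closes all registered cursors and statements
        }
    }
}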
package failchat.goodgame; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import failchat.core.*; import org.apache.commons.io.IOUtils; import org.java_websocket.handshake.ServerHandshake; import java.io.IOException; import java.net.HttpURLConnection; import java.net.URL; import java.util.ArrayList; import java.util.List; import java.util.Queue; import java.util.logging.Logger; import java.util.regex.Matcher; import java.util.regex.Pattern; public class GGChatClient implements ChatClient { private static final Logger logger = Logger.getLogger(GGChatClient.class.getName()); private static final String GG_WS_URL = "ws://chat.goodgame.ru:8081/chat/websocket"; private static final String GG_STREAM_API_URL = "http://goodgame.ru/api/getchannelstatus?fmt=json&id="; private static final Pattern EXTRACT_CHANNEL_ID_REGEX = Pattern.compile("\"stream_id\":\"(\\d*)\""); private static final String NEW_MESSAGE_SEQUENCE = "\"type\":\"message\""; private WSClient wsClient; private ChatClientStatus status; private MessageManager messageManager = MessageManager.getInstance(); private Queue<Message> messageQueue = messageManager.getMessagesQueue(); private List<MessageHandler<GGMessage>> messageHandlers; private String channelName; private int channelId; private ObjectMapper objectMapper; public GGChatClient(String channelName) { this.channelName = channelName; objectMapper = new ObjectMapper(); messageHandlers = new ArrayList<>(); //noinspection unchecked messageHandlers.add(MessageObjectCleaner.getInstance()); messageHandlers.add(new UrlCleaner()); messageHandlers.add(new GGSmileHandler()); messageHandlers.add(new GGHighlightHandler(channelName)); status = ChatClientStatus.READY; } @Override public void goOnline() { if (status != ChatClientStatus.READY) { return; } channelId = getChannelIdByName(channelName); wsClient = new GGWSClient(); wsClient.connect(); } @Override public void goOffline() { status = ChatClientStatus.SHUTDOWN; if (wsClient != null) { wsClient.close(); } } @Override public ChatClientStatus getStatus() { return status; } private int getChannelIdByName(String name) { try { URL url = new URL(GG_STREAM_API_URL + name); HttpURLConnection connection = (HttpURLConnection) url.openConnection(); if (connection.getResponseCode() != 200) { return -1; } String response = IOUtils.toString(connection.getInputStream()); Matcher m = EXTRACT_CHANNEL_ID_REGEX.matcher(response); if (m.find()) { return Integer.parseInt(m.group(1)); } } catch (IOException e) { e.printStackTrace(); status = ChatClientStatus.ERROR; } return -1; } private class GGWSClient extends WSClient { GGWSClient() { super(GG_WS_URL); } @Override public void onOpen(ServerHandshake serverHandshake) { try { String connectToChannelMes = objectMapper.writeValueAsString(new JoinWSMessage((channelId))); wsClient.send(connectToChannelMes); status = ChatClientStatus.WORKING; logger.info("Connected to goodgame"); messageManager.sendInfoMessage(new InfoMessage(Source.GOODGAME, "connected")); } catch (JsonProcessingException e) { e.printStackTrace(); } } @Override public void onMessage(String s) { if (s.contains(NEW_MESSAGE_SEQUENCE)) { try { GoodgameWSMessage ggwsm = objectMapper.readValue(s, GoodgameWSMessage.class); GGMessage message = ggwsm.getMessage(); //handling messages for (MessageHandler<GGMessage> messageHandler : messageHandlers) { 
messageHandler.handleMessage(message); } messageQueue.add(message); synchronized (messageQueue) { messageQueue.notify(); } } catch (IOException e) { e.printStackTrace(); } } } @Override public void onClose(int i, String s, boolean b) { logger.info("Goodgame disconnected"); } @Override public void onReconnect() { logger.info("Goodgame disconnected, trying to reconnect ..."); messageManager.sendInfoMessage(new InfoMessage(Source.GOODGAME, "disconnected")); } } private static class JoinWSMessage { private String type = "join"; private JoinWSData data; public String getType() { return type; } public JoinWSData getData() { return data; } JoinWSMessage(int channelId) { data = new JoinWSData(channelId); } } private static class JoinWSData { private String channelId; private boolean hidden = false; JoinWSData(int channelId) { this.channelId = Integer.toString(channelId); } @JsonProperty("channel_id") public String getChannelId() { return channelId; } public boolean isHidden() { return hidden; } } @JsonIgnoreProperties(ignoreUnknown = true) private static class GoodgameWSMessage { protected String type; protected GGMessage message; public GGMessage getMessage() { return message; } @JsonProperty("data") public void setMessage(GGMessage message) { this.message = message; } public String getType() { return type; } public void setType(String type) { this.type = type; } } }
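/*
 * Illustrative sketch (not part of failchat): mirrors the private JoinWSMessage / JoinWSData
 * classes above as top-level classes so the JSON "join" frame sent in onOpen() can be seen in
 * isolation. The package and class names of this sketch are hypothetical; the field names, the
 * @JsonProperty("channel_id") mapping and the use of Jackson's ObjectMapper are taken from the
 * code above.
 */
package failchat.goodgame.examples; // hypothetical package for this sketch

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;

public class JoinFrameSketch {

    public static class JoinData {
        private final String channelId;
        private final boolean hidden = false;

        JoinData(int channelId) { this.channelId = Integer.toString(channelId); }

        @JsonProperty("channel_id")
        public String getChannelId() { return channelId; }

        public boolean isHidden() { return hidden; }
    }

    public static class JoinMessage {
        private final String type = "join";
        private final JoinData data;

        JoinMessage(int channelId) { this.data = new JoinData(channelId); }

        public String getType() { return type; }

        public JoinData getData() { return data; }
    }

    public static void main(String[] args) throws JsonProcessingException {
        // Prints something like: {"type":"join","data":{"channel_id":"12345","hidden":false}}
        // (property order may vary with Jackson's configuration)
        System.out.println(new ObjectMapper().writeValueAsString(new JoinMessage(12345)));
    }
}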
package org.mozilla.mozstumbler; import android.net.wifi.ScanResult; final class SSIDBlockList { private static final String[] PREFIX_LIST = { // Mobile devices "AndroidAP", "AndroidHotspot", "Android Hotspot", "barnacle", // Android tether app "Galaxy Note", "Galaxy S", "Galaxy Tab", "HTC ", "iPhone", "LG-MS770", "LG-MS870", "LG VS910 4G", "LG Vortex", "MIFI", "MiFi", "myLGNet", "myTouch 4G Hotspot", "NOKIA Lumia", "PhoneAP", "SCH-I", "Sprint MiFi", "Verizon ", "Verizon-", "VirginMobile MiFi", "VodafoneMobileWiFi-", "FirefoxHotspot", // Transportation Wi-Fi "ac_transit_wifi_bus", "AmtrakConnect", "Amtrak_", "amtrak_", "GBUS", "GBusWifi", "gogoinflight", // Gogo in-flight WiFi "SF Shuttle Wireless", "ShuttleWiFi", "Southwest WiFi", // Southwest Airlines in-flight WiFi "SST-PR-1", // Sears Home Service van hotspot?! "wifi_rail", // BART "egged.co.il", // Egged transportation services (Israel) "gb-tours.com", // GB Tours transportation services (Israel) "ISRAEL-RAILWAYS", "Omni-WiFi", // Omnibus transportation services (Israel) "Telekom_ICE", // Deutsche Bahn on-train WiFi }; private static final String[] SUFFIX_LIST = { // Mobile devices "iPhone", "iphone", "MIFI", "MIFI", "MiFi", "Mifi", "mifi", "mi-fi", "MyWi", "Phone", "Portable Hotspot", "Tether", "tether", // Google's SSID opt-out "_nomap", }; private SSIDBlockList() { } static boolean contains(ScanResult scanResult) { String SSID = scanResult.SSID; if (SSID == null) { return true; // no SSID? } for (String prefix : PREFIX_LIST) { if (SSID.startsWith(prefix)) { return true; // blocked! } } for (String suffix : SUFFIX_LIST) { if (SSID.endsWith(suffix)) { return true; // blocked! } } return false; } }
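/*
 * Illustrative sketch (not part of MozStumbler): shows how the package-private
 * SSIDBlockList.contains(ScanResult) check above would typically be applied to a Wi-Fi scan
 * before reporting. The helper class below is hypothetical and is placed in the same package
 * only so it can reach the package-private method.
 */
package org.mozilla.mozstumbler;

import android.net.wifi.ScanResult;

import java.util.ArrayList;
import java.util.List;

final class ScanResultFilterSketch {

    private ScanResultFilterSketch() {
    }

    /** Returns only the scan results whose SSIDs are not on the block list. */
    static List<ScanResult> keepReportable(List<ScanResult> scanResults) {
        List<ScanResult> reportable = new ArrayList<ScanResult>();
        for (ScanResult result : scanResults) {
            if (!SSIDBlockList.contains(result)) {
                reportable.add(result);
            }
        }
        return reportable;
    }
}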
package frc.team4215.stronghold;

import edu.wpi.first.wpilibj.Joystick;
import edu.wpi.first.wpilibj.Victor;
import jaci.openrio.toast.lib.registry.Registrar;

/**
 * <dl>
 * <dt>Properties:</dt>
 * <dd><strong>Private:</strong></dd>
 * <dd>{@link Joystick} <i>GameCube</i></dd>
 * <dd>{@link Victor} <i>Intake</i></dd>
 * <dt>Methods:</dt>
 * <dd><strong>Constructors:</strong></dd>
 * <dd>{@link Intake#Intake()}</dd>
 * <dd><strong>Other Methods:</strong></dd>
 * <dd>{@link Intake#Run()}</dd>
 * </dl>
 *
 * @author James Yu
 */
public class Intake {

    /**
     * The Joystick used to control arms and the intake.
     */
    private Joystick gameCube;

    /**
     * The motor controller (Victor) for controlling the intake.
     */
    private Victor intake;

    static double coeff = .5;

    /**
     * Default constructor.
     */
    public Intake() {
        this.gameCube = new Joystick(Const.JoyStick.Num.GameCube);
        this.intake = Registrar.victor(Const.Motor.Num.Intake);
    }

    /**
     * Polls the GameCube controller and drives the intake motor:
     * button A runs it backward, button B runs it forward, otherwise it is stopped.
     */
    public void Run() {
        if (gameCube.getRawButton(Const.JoyStick.Button.GameCube_A)) {
            intake.set(Const.Motor.Run.Backward);
        } else if (gameCube.getRawButton(Const.JoyStick.Button.GameCube_B)) {
            intake.set(Const.Motor.Run.Forward);
        } else {
            intake.set(Const.Motor.Run.Stop);
        }
    }

    public double get() {
        return intake.get();
    }

    public void set(double setValue) {
        intake.set(setValue);
    }
}
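/*
 * Illustrative sketch (not part of team 4215's code): shows the intended call pattern for the
 * Intake class above. The class name and the teleop hook below are hypothetical; only the
 * Intake constructor and Run() come from the code above. On each teleop iteration, Run() reads
 * the GameCube buttons and drives the intake motor forward, backward, or stops it.
 */
package frc.team4215.stronghold;

public class IntakeUsageSketch {

    private final Intake intake = new Intake();

    /** Called periodically while the robot is in teleoperated mode (hypothetical hook). */
    public void teleopPeriodic() {
        intake.Run();
    }
}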
package org.opencms.jsp; import org.opencms.file.CmsFile; import org.opencms.file.CmsFolder; import org.opencms.file.CmsObject; import org.opencms.file.CmsProperty; import org.opencms.file.CmsPropertyDefinition; import org.opencms.file.CmsRequestContext; import org.opencms.file.CmsResource; import org.opencms.file.CmsResourceFilter; import org.opencms.file.types.CmsResourceTypeXmlContent; import org.opencms.file.types.CmsResourceTypeXmlPage; import org.opencms.i18n.CmsLocaleGroup; import org.opencms.jsp.util.CmsJspCategoryAccessBean; import org.opencms.jsp.util.CmsJspContentAccessBean; import org.opencms.jsp.util.CmsJspImageBean; import org.opencms.jsp.util.CmsJspValueTransformers.CmsLocalePropertyLoaderTransformer; import org.opencms.loader.CmsLoaderException; import org.opencms.main.CmsException; import org.opencms.main.CmsLog; import org.opencms.main.OpenCms; import org.opencms.relations.CmsRelation; import org.opencms.relations.CmsRelationFilter; import org.opencms.security.CmsSecurityException; import org.opencms.util.CmsCollectionsGenericWrapper; import org.opencms.util.CmsUUID; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.stream.Collectors; import org.apache.commons.logging.Log; import com.google.common.collect.Maps; /** * Wrapper subclass of CmsResource with some convenience methods.<p> */ public class CmsJspResourceWrapper extends CmsResource { /** Logger instance for this class. */ @SuppressWarnings("unused") private static final Log LOG = CmsLog.getLog(CmsJspResourceWrapper.class); /** Serial version id. */ private static final long serialVersionUID = 1L; /** Parameter value used to select outgoing relations. */ public static final boolean RELATIONS_OUT = true; /** Parameter value used to select incoming relations. */ public static final boolean RELATIONS_IN = false; /** All resources that are sources of incoming relations. */ public List<CmsJspResourceWrapper> m_incomingRelations; /** All resources that are targets of outgoing relations. */ public List<CmsJspResourceWrapper> m_outgoingRelations; /** All parent folder of this resource in the current site as a list. */ public List<CmsJspResourceWrapper> m_parentFolders; /** The category access bean for this resource. */ private CmsJspCategoryAccessBean m_categories; /** The CMS context. */ private CmsObject m_cms; /** The resource / file content as a String. */ private String m_content; /** The file object for this resource. */ private CmsFile m_file; /** Image bean instance created from this resource. */ private CmsJspImageBean m_imageBean; /** Stores if this resource is an XML content or not. */ private Boolean m_isXml; /** The set of locale variants. */ private Map<String, CmsJspResourceWrapper> m_localeResources; /** The main locale. */ private Locale m_mainLocale; /** The navigation builder for this resource. */ private CmsJspNavBuilder m_navBuilder; /** The navigation info element for this resource. */ private CmsJspNavElement m_navigation; /** The default file of this resource, assumed that this resource is a folder. */ private CmsJspResourceWrapper m_navigationDefaultFile; /** The navigation info elements in this resource, assuming that this resource is a folder. */ private List<CmsJspNavElement> m_navigationForFolder; /** The parent folder of this resource in the current site. */ private CmsJspResourceWrapper m_parentFolder; /** Properties of this resource. 
*/ private Map<String, String> m_properties; /** Locale properties of this resource. */ private Map<String, Map<String, String>> m_propertiesLocale; /** Locale properties of this resource with search. */ private Map<String, Map<String, String>> m_propertiesLocaleSearch; /** Properties of this resource with search. */ private Map<String, String> m_propertiesSearch; /** The calculated site path of the resource. */ private String m_sitePath; /** The type name of the resource. */ private String m_typeName; /** The XML content access bean. */ private CmsJspContentAccessBean m_xml; /** * Creates a new instance.<p> * * @param cms the current CMS context * @param res the resource to wrap */ private CmsJspResourceWrapper(CmsObject cms, CmsResource res) { super( res.getStructureId(), res.getResourceId(), res.getRootPath(), res.getTypeId(), res.isFolder(), res.getFlags(), res.getProjectLastModified(), res.getState(), res.getDateCreated(), res.getUserCreated(), res.getDateLastModified(), res.getUserLastModified(), res.getDateReleased(), res.getDateExpired(), res.getSiblingCount(), res.getLength(), res.getDateContent(), res.getVersion()); m_cms = cms; m_file = null; m_content = ""; } /** * Factory method to create a new {@link CmsJspResourceWrapper} instance from a {@link CmsResource}.<p> * * In case the parameter resource already is a wrapped resource AND the OpenCms request context is * the same as the provided context, the parameter object is returned.<p> * * @param cms the current CMS context * @param res the resource to wrap * * @return a new instance of a {@link CmsJspResourceWrapper} */ public static CmsJspResourceWrapper wrap(CmsObject cms, CmsResource res) { CmsJspResourceWrapper result = null; if ((cms != null) && (res != null)) { if (res instanceof CmsJspResourceWrapper) { CmsJspResourceWrapper wrapper = (CmsJspResourceWrapper)res; if (cms.getRequestContext().getSiteRoot().equals(wrapper.getRequestContext().getSiteRoot())) { result = wrapper; } else { result = new CmsJspResourceWrapper(cms, res); } } else { result = new CmsJspResourceWrapper(cms, res); } } return result; } /** * Two resources are considered equal in case their structure id is equal.<p> * * @see CmsResource#equals(java.lang.Object) */ @Override public boolean equals(Object obj) { if (obj == null) { return false; } if (obj == this) { return true; } if (obj instanceof CmsResource) { return ((CmsResource)obj).getStructureId().equals(getStructureId()); } return false; } /** * Returns the categories assigned to this resource.<p> * * @return the categories assigned to this resource */ public CmsJspCategoryAccessBean getCategories() { if (m_categories == null) { m_categories = new CmsJspCategoryAccessBean(m_cms, this); } return m_categories; } /** * Returns the OpenCms user context this resource was initialized with.<p> * * @return the OpenCms user context this resource was initialized with */ public CmsObject getCmsObject() { return m_cms; } /** * Returns the content of the file as a String.<p> * * @return the content of the file as a String */ public String getContent() { if ((m_content.length() == 0) && (getFile() != null)) { m_content = new String(getFile().getContents()); } return m_content; } /** * Returns this resources name extension (if present).<p> * * The extension will always be lower case.<p> * * @return the extension or <code>null</code> if not available * * @see CmsResource#getExtension(String) * @see org.opencms.jsp.util.CmsJspVfsAccessBean#getResourceExtension(Object) */ public String getExtension() { return 
getExtension(getRootPath()); } /** * Returns the full file object for this resource.<p> * * @return the full file object for this resource */ public CmsFile getFile() { if ((m_file == null) && !isFolder()) { try { m_file = m_cms.readFile(this); } catch (CmsException e) { // this should not happen since we are updating from a resource object } } return m_file; } /** * Returns the folder of this resource.<p> * * In case this resource already is a {@link CmsFolder}, it is returned without modification. * In case it is a {@link CmsFile}, the parent folder of the file is returned.<p> * * @return the folder of this resource * * @see #getSitePathFolder() */ public CmsJspResourceWrapper getFolder() { CmsJspResourceWrapper result; if (isFolder()) { result = this; } else { result = readResource(getSitePathFolder()); } return result; } /** * Gets a list of resource wrappers for resources with relations pointing to this resource. * * @return the list of resource wrappers */ public List<CmsJspResourceWrapper> getIncomingRelations() { if (m_incomingRelations == null) { m_incomingRelations = getRelatedResources(RELATIONS_IN); } return m_incomingRelations; } /** * Gets a list of resource wrappers for resources with relations pointing to this resource, for a specific type. * * @param typeName name of the type to filter * @return the list of resource wrappers */ public List<CmsJspResourceWrapper> getIncomingRelations(String typeName) { return getIncomingRelations().stream().filter(res -> res.getTypeName().equals(typeName)).collect( Collectors.toList()); } /** * Returns <code>true</code> in case this resource is an image in the VFS.<p> * * @return <code>true</code> in case this resource is an image in the VFS */ public boolean getIsImage() { return getToImage().isImage(); } /** * Returns <code>true</code> in case this resource is an XML content.<p> * * @return <code>true</code> in case this resource is an XML content */ public boolean getIsXml() { if (m_isXml == null) { m_isXml = Boolean.valueOf( CmsResourceTypeXmlPage.isXmlPage(this) || CmsResourceTypeXmlContent.isXmlContent(this)); } return m_isXml.booleanValue(); } /** * Returns a substituted link to this resource.<p> * * @return the link */ public String getLink() { return OpenCms.getLinkManager().substituteLinkForUnknownTarget( m_cms, m_cms.getRequestContext().getSitePath(this)); } /** * Returns a map of the locale group for the current resource, with locale strings as keys.<p> * * @return a map with locale strings as keys and resource wrappers for the corresponding locale variants */ public Map<String, CmsJspResourceWrapper> getLocaleResource() { if (m_localeResources != null) { return m_localeResources; } try { CmsLocaleGroup localeGroup = m_cms.getLocaleGroupService().readLocaleGroup(this); Map<Locale, CmsResource> resourcesByLocale = localeGroup.getResourcesByLocale(); Map<String, CmsJspResourceWrapper> result = Maps.newHashMap(); for (Map.Entry<Locale, CmsResource> entry : resourcesByLocale.entrySet()) { result.put(entry.getKey().toString(), CmsJspResourceWrapper.wrap(m_cms, entry.getValue())); } m_localeResources = result; return result; } catch (CmsException e) { return new HashMap<String, CmsJspResourceWrapper>(); } } /** * Returns the main locale for this resource.<p> * * @return the main locale for this resource */ public Locale getMainLocale() { if (m_mainLocale != null) { return m_mainLocale; } try { CmsLocaleGroup localeGroup = m_cms.getLocaleGroupService().readLocaleGroup(this); m_mainLocale = localeGroup.getMainLocale(); return 
m_mainLocale;
        } catch (CmsException e) {
            return null;
        }
    }

    /**
     * Returns the mime type for this resource.<p>
     *
     * In case no valid mime type can be determined from the file extension, <code>text/plain</code> is returned.<p>
     *
     * @return the mime type for this resource
     */
    public String getMimeType() {

        return OpenCms.getResourceManager().getMimeType(getRootPath(), null, "text/plain");
    }

    /**
     * Returns the navigation builder for this resource.<p>
     *
     * This will be initialized with this resource as default URI.<p>
     *
     * @return the navigation builder for this resource
     */
    public CmsJspNavBuilder getNavBuilder() {

        if (m_navBuilder == null) {
            m_navBuilder = new CmsJspNavBuilder();
            m_navBuilder.init(m_cms, null, getSitePath());
        }
        return m_navBuilder;
    }

    /**
     * Returns the navigation info element for this resource.<p>
     *
     * @return the navigation info element for this resource
     */
    public CmsJspNavElement getNavigation() {

        if (m_navigation == null) {
            m_navigation = getNavBuilder().getNavigationForResource();
        }
        return m_navigation;
    }

    /**
     * Returns the default resource for this resource.<p>
     *
     * If this resource is a file, then this file is returned.<p>
     *
     * Otherwise, in case this resource is a folder:<br>
     * <ol>
     * <li>the {@link CmsPropertyDefinition#PROPERTY_DEFAULT_FILE} is checked, and
     * <li>if still no file could be found, the configured default files in the
     * <code>opencms-vfs.xml</code> configuration are iterated until a match is
     * found, and
     * <li>if still no file could be found, <code>null</code> is returned
     * </ol>
     *
     * @return the default file for the given folder
     *
     * @see CmsObject#readDefaultFile(CmsResource, CmsResourceFilter)
     */
    public CmsJspResourceWrapper getNavigationDefaultFile() {

        if (m_navigationDefaultFile == null) {
            if (isFolder()) {
                try {
                    m_navigationDefaultFile = wrap(m_cms, m_cms.readDefaultFile(this, CmsResourceFilter.DEFAULT));
                } catch (CmsSecurityException e) {
                    if (LOG.isDebugEnabled()) {
                        LOG.debug(e.getMessage(), e);
                    }
                }
            } else {
                // a file is its own navigation default file
                m_navigationDefaultFile = this;
            }
        }
        return m_navigationDefaultFile;
    }

    /**
     * Returns the navigation info elements in this resource, assuming that this resource is a folder.<p>
     *
     * @return the navigation info elements in this resource, assuming that this resource is a folder
     */
    public List<CmsJspNavElement> getNavigationForFolder() {

        if (m_navigationForFolder == null) {
            m_navigationForFolder = getNavBuilder().getNavigationForFolder();
        }
        return m_navigationForFolder;
    }

    /**
     * Gets a list of resources with relations pointing to them from this resource, as resource wrappers.
     *
     * @return the list of resource wrappers
     */
    public List<CmsJspResourceWrapper> getOutgoingRelations() {

        if (m_outgoingRelations == null) {
            m_outgoingRelations = getRelatedResources(RELATIONS_OUT);
        }
        return m_outgoingRelations;
    }

    /**
     * Gets a list of resources with relations pointing to them from this resource, as resource wrappers.
     *
     * Only gets resources with the given type.
     *
     * @param typeName the name of the type to filter
     * @return the list of resource wrappers
     */
    public List<CmsJspResourceWrapper> getOutgoingRelations(String typeName) {

        return getOutgoingRelations().stream().filter(res -> res.getTypeName().equals(typeName)).collect(
            Collectors.toList());
    }

    /**
     * Returns the parent folder of this resource in the current site.<p>
     *
     * The parent folder of a file is the folder of the file.
     * The parent folder of a folder is the parent folder of the folder.
* The parent folder of the root folder is <code>null</code>.<p> * * @return the parent folder of this resource in the current site * * @see #getSitePathParentFolder() * @see CmsResource#getParentFolder(String) * @see org.opencms.jsp.util.CmsJspVfsAccessBean#getParentFolder(Object) */ public CmsJspResourceWrapper getParentFolder() { if (m_parentFolder == null) { String parentFolder = getSitePathParentFolder(); if (parentFolder != null) { m_parentFolder = readResource(getSitePathParentFolder()); } } return m_parentFolder; } /** * Returns all parent folder of this resource in the current site as a list.<p> * * First resource in the list will be the direct parent folder of this resource, * the last element will be the site root folder.<p> * * @return all parent folder of this resource in the current site as a list */ public List<CmsJspResourceWrapper> getParentFolders() { if (m_parentFolders == null) { m_parentFolders = new ArrayList<CmsJspResourceWrapper>(); CmsJspResourceWrapper parentFolder = getParentFolder(); while (parentFolder != null) { m_parentFolders.add(parentFolder); parentFolder = parentFolder.getParentFolder(); } } return m_parentFolders; } /** * Returns the direct properties of this resource in a map.<p> * * This is without "search", so it will not include inherited properties from the parent folders.<p> * * @return the direct properties of this resource in a map */ public Map<String, String> getProperty() { if (m_properties == null) { try { List<CmsProperty> properties = m_cms.readPropertyObjects(this, false); m_properties = CmsProperty.toMap(properties); } catch (CmsException e) { if (LOG.isDebugEnabled()) { LOG.debug(e.getMessage(), e); } } } return m_properties; } /** * Returns the direct properties of this resource in a map for a given locale.<p> * * This is without "search", so it will not include inherited properties from the parent folders.<p> * * @return the direct properties of this resource in a map for a given locale */ public Map<String, Map<String, String>> getPropertyLocale() { if (m_propertiesLocale == null) { m_propertiesLocale = CmsCollectionsGenericWrapper.createLazyMap( new CmsLocalePropertyLoaderTransformer(getCmsObject(), this, false)); // result may still be null return (m_propertiesLocale == null) ? Collections.EMPTY_MAP : m_propertiesLocale; } return m_propertiesLocale; } /** * Returns the searched properties of this resource in a map for a given locale.<p> * * This is with "search", so it will include inherited properties from the parent folders.<p> * * @return the direct properties of this resource in a map for a given locale */ public Map<String, Map<String, String>> getPropertyLocaleSearch() { if (m_propertiesLocaleSearch == null) { m_propertiesLocaleSearch = CmsCollectionsGenericWrapper.createLazyMap( new CmsLocalePropertyLoaderTransformer(getCmsObject(), this, true)); // result may still be null return (m_propertiesLocaleSearch == null) ? 
Collections.EMPTY_MAP : m_propertiesLocaleSearch; } return m_propertiesLocaleSearch; } /** * Returns the searched properties of this resource in a map.<p> * * This is with "search", so it will include inherited properties from the parent folders.<p> * * @return the direct properties of this resource in a map */ public Map<String, String> getPropertySearch() { if (m_propertiesSearch == null) { try { List<CmsProperty> properties = m_cms.readPropertyObjects(this, true); m_propertiesSearch = CmsProperty.toMap(properties); } catch (CmsException e) { if (LOG.isDebugEnabled()) { LOG.debug(e.getMessage(), e); } } } return m_propertiesSearch; } /** * Returns the OpenCms user request context this resource was initialized with.<p> * * @return the OpenCms user request context this resource was initialized with */ public CmsRequestContext getRequestContext() { return m_cms.getRequestContext(); } /** * Returns this resources name extension (if present).<p> * * The extension will always be lower case.<p> * * @return the extension or <code>null</code> if not available * * @see CmsResource#getExtension(String) * @see org.opencms.jsp.util.CmsJspVfsAccessBean#getResourceExtension(Object) */ public String getResourceExtension() { return getExtension(); } /** * Returns the name of this resource without the path information.<p> * * The resource name of a file is the name of the file. * The resource name of a folder is the folder name with trailing "/". * The resource name of the root folder is <code>/</code>.<p> * * @return the name of this resource without the path information * * @see CmsResource#getName() * @see org.opencms.jsp.util.CmsJspVfsAccessBean#getResourceName(Object) */ public String getResourceName() { return getName(); } /** * Returns the folder name of this resource from the root site.<p> * * In case this resource already is a {@link CmsFolder}, the folder path is returned without modification. * In case it is a {@link CmsFile}, the parent folder name of the file is returned.<p> * * @return the folder name of this resource from the root site */ public String getRootPathFolder() { String result; if (isFile()) { result = getRootPathParentFolder(); } else { result = getRootPath(); } return result; } /** * Returns the directory level of a resource from the root site.<p> * * The root folder "/" has level 0, * a folder "/foo/" would have level 1, * a folder "/foo/bar/" level 2 etc.<p> * * @return the directory level of a resource from the root site * * @see CmsResource#getPathLevel(String) */ public int getRootPathLevel() { return getPathLevel(getRootPath()); } /** * Returns the parent folder of this resource from the root site.<p> * * @return the parent folder of this resource from the root site * * @see CmsResource#getParentFolder(String) */ public String getRootPathParentFolder() { return getParentFolder(getRootPath()); } /** * Returns the current site path to this resource.<p> * * @return the current site path to this resource * * @see org.opencms.file.CmsRequestContext#getSitePath(CmsResource) */ public String getSitePath() { if (m_sitePath == null) { m_sitePath = m_cms.getRequestContext().getSitePath(this); } return m_sitePath; } /** * Returns the folder name of this resource in the current site.<p> * * In case this resource already is a {@link CmsFolder}, the folder path is returned without modification. 
* In case it is a {@link CmsFile}, the parent folder name of the file is returned.<p> * * @return the folder name of this resource in the current site */ public String getSitePathFolder() { String result; if (isFile()) { result = getSitePathParentFolder(); } else { result = getSitePath(); } return result; } /** * Returns the directory level of a resource in the current site.<p> * * The root folder "/" has level 0, * a folder "/foo/" would have level 1, * a folder "/foo/bar/" level 2 etc.<p> * * @return the directory level of a resource in the current site * * @see CmsResource#getPathLevel(String) * @see org.opencms.jsp.util.CmsJspVfsAccessBean#getPathLevel(Object) */ public int getSitePathLevel() { return getPathLevel(getSitePath()); } /** * Returns the parent folder of this resource in the current site.<p> * * The parent folder of a file is the folder of the file. * The parent folder of a folder is the parent folder of the folder. * The parent folder of the root folder is <code>null</code>.<p> * * @return the parent folder of this resource in the current site * * @see CmsResource#getParentFolder(String) * @see org.opencms.jsp.util.CmsJspVfsAccessBean#getParentFolder(Object) */ public String getSitePathParentFolder() { return getParentFolder(getSitePath()); } /** * Returns a scaled image bean from the wrapped value.<p> * * In case the value does not point to an image resource, <code>null</code> is returned. * * @return the scaled image bean */ public CmsJspImageBean getToImage() { if (m_imageBean == null) { m_imageBean = new CmsJspImageBean(getCmsObject(), this, null); } return m_imageBean; } /** * Returns an XML content access bean created for this resource.<p> * * In case this resource is not an XML content, <code>null</code> is returned.<p> * * @return an XML content access bean created for this resource * * @see #getIsXml() */ public CmsJspContentAccessBean getToXml() { if ((m_xml == null) && getIsXml()) { m_xml = new CmsJspContentAccessBean(m_cms, this); } return m_xml; } /** * Returns the resource type name.<p> * * @return the resource type name */ public String getTypeName() { if (m_typeName == null) { try { m_typeName = OpenCms.getResourceManager().getResourceType(getTypeId()).getTypeName(); } catch (CmsLoaderException e) { // this should never happen, and anyway it is logged in the resource manage already } } return m_typeName; } /** * Returns an XML content access bean created for this resource.<p> * * In case this resource is not an XML content, <code>null</code> is returned.<p> * * @return an XML content access bean created for this resource * * @see #getToXml() * @see #getIsXml() */ public CmsJspContentAccessBean getXml() { return getToXml(); } /** * @see CmsResource#hashCode() * @see java.lang.Object#hashCode() */ @Override public int hashCode() { if (getStructureId() != null) { return getStructureId().hashCode(); } return CmsUUID.getNullUUID().hashCode(); } /** * Returns <code>true</code> in case this resource is child resource of the provided resource which is assumed to be a folder.<p> * * @param resource the resource to check * * @return <code>true</code> in case this resource is child resource of the provided resource which is assumed to be a folder */ public boolean isChildResourceOf(CmsResource resource) { return (resource != null) && resource.isFolder() && !(getStructureId().equals(resource.getStructureId())) && ((getRootPath().indexOf(resource.getRootPath()) == 0)); } /** * Returns <code>true</code> in case this resource is child resource of the provided resource 
path which is assumed to be a folder in the current site.<p> * * No check is performed to see if the provided site path resource actually exists.<p> * * @param sitePath the resource to check * * @return <code>true</code> in case this resource is child resource of the provided resource path which is assumed to be a folder in the current site */ public boolean isChildResourceOf(String sitePath) { return (sitePath != null) && ((getSitePath().indexOf(sitePath) == 0)) && (sitePath.length() < getSitePath().length()); } /** * Returns <code>true</code> in case this resource is a parent folder of the provided resource.<p> * * @param resource the resource to check * * @return <code>true</code> in case this resource is a parent folder of the provided resource */ public boolean isParentFolderOf(CmsResource resource) { return (resource != null) && isFolder() && !(getStructureId().equals(resource.getStructureId())) && ((resource.getRootPath().indexOf(getRootPath()) == 0)); } /** * Returns <code>true</code> in case this resource is a parent folder of the provided resource path in the current site.<p> * * No check is performed to see if the provided site path resource actually exists.<p> * * @param sitePath the path to check * * @return <code>true</code> in case this resource is a parent folder of the provided resource path in the current site */ public boolean isParentFolderOf(String sitePath) { return (sitePath != null) && isFolder() && ((sitePath.indexOf(getSitePath()) == 0)) && (sitePath.length() > getSitePath().length()); } /** * Helper method for getting the related resources for this resource, with a given resource filter. * * @param out - true for outgoing relations, false for incoming relations * @return the list of related resources */ private List<CmsJspResourceWrapper> getRelatedResources(boolean out) { CmsObject cms = getCmsObject(); List<CmsJspResourceWrapper> result = new ArrayList<>(); try { CmsRelationFilter filter = out ? CmsRelationFilter.relationsFromStructureId(getStructureId()) : CmsRelationFilter.relationsToStructureId(getStructureId()); List<CmsRelation> relations = cms.readRelations(filter); for (CmsRelation rel : relations) { try { CmsResource other = out ? rel.getTarget(cms, CmsResourceFilter.DEFAULT) : rel.getSource(cms, CmsResourceFilter.DEFAULT); result.add(wrap(cms, other)); } catch (CmsException e) { LOG.warn(e.getLocalizedMessage(), e); } } } catch (Exception e) { LOG.error(e.getLocalizedMessage(), e); } return result; } /** * Reads a resource, suppressing possible exceptions.<p> * * @param sitePath the site path of the resource to read. * * @return the resource of <code>null</code> on case an exception occurred while reading */ private CmsJspResourceWrapper readResource(String sitePath) { CmsJspResourceWrapper result = null; try { result = new CmsJspResourceWrapper(m_cms, m_cms.readResource(sitePath)); } catch (CmsException e) { if (LOG.isDebugEnabled()) { LOG.debug(e.getMessage(), e); } } return result; } }
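/*
 * Illustrative sketch (not part of OpenCms): a minimal example of how the wrapper above is
 * typically obtained and queried from Java code backing a JSP. The class, method and parameters
 * below are hypothetical, as are the "Title" property name and the "image" type filter used for
 * output; the calls on the wrapper itself (wrap(), getProperty(), getLink(), getParentFolders(),
 * getOutgoingRelations(String)) are the ones defined above.
 */
package org.opencms.jsp.examples; // hypothetical package for this sketch

import java.util.Map;

import org.opencms.file.CmsObject;
import org.opencms.file.CmsResource;
import org.opencms.jsp.CmsJspResourceWrapper;
import org.opencms.main.CmsException;

public final class ResourceWrapperSketch {

    private ResourceWrapperSketch() {
    }

    /** Reads a resource and prints a few of the convenience values exposed by the wrapper. */
    public static void describe(CmsObject cms, String sitePath) throws CmsException {
        CmsResource raw = cms.readResource(sitePath);
        CmsJspResourceWrapper res = CmsJspResourceWrapper.wrap(cms, raw);

        // Direct (non-searched) properties of the resource as a simple map; "Title" is an example key.
        Map<String, String> properties = res.getProperty();
        System.out.println("Title property: " + properties.get("Title"));

        // Link to the resource and its position in the folder tree.
        System.out.println("Link: " + res.getLink());
        System.out.println("Parent folders up to the site root: " + res.getParentFolders().size());

        // Outgoing relations, filtered by resource type name ("image" is an example).
        System.out.println("Outgoing image relations: " + res.getOutgoingRelations("image").size());
    }
}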
package hudson.plugins.antexec; import hudson.*; import hudson.model.*; import hudson.tasks.Ant; import hudson.tasks.BuildStepDescriptor; import hudson.tasks.Builder; import hudson.tasks._ant.AntConsoleAnnotator; import hudson.util.ArgumentListBuilder; import hudson.util.FormValidation; import hudson.util.VariableResolver; import org.jenkinsci.plugins.tokenmacro.MacroEvaluationException; import org.jenkinsci.plugins.tokenmacro.TokenMacro; import org.kohsuke.stapler.DataBoundConstructor; import org.kohsuke.stapler.QueryParameter; import org.xml.sax.InputSource; import org.xml.sax.SAXException; import org.xml.sax.XMLReader; import org.xml.sax.helpers.XMLReaderFactory; import java.io.ByteArrayInputStream; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; /** * Invokes the Apache Ant script entered on the hudson build configuration. * <p/> * * @author Milos Svasek */ @SuppressWarnings("ALL") public class AntExec extends Builder { private static final String myName = "antexec"; protected static final String buildXml = myName + "_build.xml"; private final String scriptSource; private final String extendedScriptSource; private final String scriptName; private final String properties; private final String antOpts; private final Boolean keepBuildfile; private final Boolean verbose; private final Boolean emacs; private final Boolean noAntcontrib; private final String antName; // Fields in config.groovy must match the parameter names in the "DataBoundConstructor" @SuppressWarnings("ALL") @DataBoundConstructor public AntExec(String scriptSource, String extendedScriptSource, String scriptName, String properties, String antName, String antOpts, Boolean keepBuildfile, Boolean verbose, Boolean emacs, Boolean noAntcontrib) { this.scriptSource = scriptSource; this.extendedScriptSource = extendedScriptSource; this.scriptName = scriptName; this.properties = properties; this.keepBuildfile = keepBuildfile; this.antName = antName; this.antOpts = antOpts; this.verbose = verbose; this.emacs = emacs; this.noAntcontrib = noAntcontrib; } /** * Returns content of text area with script source from job configuration screen * * @return String scriptSource */ public String getScriptSource() { return scriptSource; } /** * Returns content of text area with script source from job configuration screen * * @return String extendedScriptSource */ public String getExtendedScriptSource() { return extendedScriptSource; } /** * Returns content of text area with script name from job configuration screen * * @return String scriptName */ public String getScriptName() { return scriptName; } /** * Returns content of text field with properties from job configuration screen * * @return String properties */ public String getProperties() { return properties; } /** * Returns content of text field with java/ant options from job configuration screen. 
* It will be used for ANT_OPTS environment variable * * @return String antOpts */ public String getAntOpts() { return antOpts; } /** * Returns checkbox boolean from job configuration screen * * @return Boolean keepBuildfile */ public Boolean getKeepBuildfile() { return keepBuildfile; } /** * Returns checkbox boolean from job configuration screen * * @return Boolean verbose */ public Boolean getVerbose() { return verbose; } /** * Returns checkbox boolean from job configuration screen * * @return Boolean emacs */ public Boolean getEmacs() { return emacs; } /** * Returns checkbox boolean from job configuration screen * * @return Boolean noAntcontrib */ public Boolean getNoAntcontrib() { return noAntcontrib; } /** * @return Ant to invoke, or null to invoke the default one. */ Ant.AntInstallation getAnt() { for (Ant.AntInstallation i : getDescriptor().getInstallations()) { if (antName != null && antName.equals(i.getName())) return i; } return null; } @Override public boolean perform(AbstractBuild build, Launcher launcher, BuildListener listener) throws IOException, InterruptedException { ArgumentListBuilder args = new ArgumentListBuilder(); String scriptSourceResolved = scriptSource; String extendedScriptSourceResolved = extendedScriptSource; try { //Resolve all the envirionment variables and properties before creating the build.xml scriptSourceResolved = TokenMacro.expandAll(build, listener, scriptSource); extendedScriptSourceResolved = TokenMacro.expandAll(build, listener, extendedScriptSource); } catch (MacroEvaluationException ex) { Logger.getLogger(AntExec.class.getName()).log(Level.SEVERE, null, ex); } EnvVars env = build.getEnvironment(listener); env.overrideAll(build.getBuildVariables()); Ant.AntInstallation ai = getAnt(); if (ai == null) { args.add(launcher.isUnix() ? 
"ant" : "ant.bat"); } else { ai = ai.forNode(Computer.currentComputer().getNode(), listener); ai = ai.forEnvironment(env); String exe = ai.getExecutable(launcher); if (exe == null) { return false; } args.add(exe); } //Create Ant build.xml file FilePath buildFile = makeBuildFile(scriptName, scriptSourceResolved, extendedScriptSourceResolved, build); //Make archive copy of build file to job directory //buildFile.copyTo(new FilePath(new File(build.getRootDir(), buildXml))); //Added build file to the command line args.add("-file", buildFile.getName()); @SuppressWarnings("unchecked") VariableResolver<String> vr = build.getBuildVariableResolver(); @SuppressWarnings("unchecked") Set<String> sensitiveVars = build.getSensitiveBuildVariables(); //noinspection unchecked //Resolve the properties passed args.addKeyValuePairsFromPropertyString("-D", properties, vr, sensitiveVars); if (ai != null) env.put("ANT_HOME", ai.getHome()); if (antOpts != null && antOpts.length() > 0 && !antOpts.equals("")) { env.put("ANT_OPTS", env.expand(antOpts)); } //Get and prepare ant-contrib.jar if (noAntcontrib == null || !noAntcontrib) { //TODO: Replace this with better methot if (verbose != null && verbose) listener.getLogger().println(Messages.AntExec_UseAntContribTasks()); FilePath antContribJarOnMaster = new FilePath(Hudson.getInstance().getRootPath(), "plugins/antexec/META-INF/lib/ant-contrib.jar"); FilePath antLibDir = new FilePath(build.getWorkspace(), "antlib"); FilePath antContribJar = new FilePath(antLibDir, "ant-contrib.jar"); antContribJar.copyFrom(antContribJarOnMaster.toURI().toURL()); args.add("-lib", antLibDir.getName()); } else { if (verbose != null && verbose) listener.getLogger().println(Messages.AntExec_UseAntCoreTasksOnly()); } //Add Ant option: -verbose if (verbose != null && verbose) args.add("-verbose"); //Add Ant option: -emacs if (emacs != null && emacs) args.add("-emacs"); //Fixing command line for windows if (!launcher.isUnix()) { args = args.toWindowsCommand(); // For some reason, ant on windows rejects empty parameters but unix does not. // Add quotes for any empty parameter values: List<String> newArgs = new ArrayList<String>(args.toList()); newArgs.set(newArgs.size() - 1, newArgs.get(newArgs.size() - 1).replaceAll("(?<= )(-D[^\" ]+)= ", "$1=\"\" ")); args = new ArgumentListBuilder(newArgs.toArray(new String[newArgs.size()])); } //Content of scriptSourceResolved and properties (only if verbose is true if (verbose != null && verbose) { listener.getLogger().println(); listener.getLogger().println(Messages.AntExec_DebugScriptSourceFieldBegin()); listener.getLogger().println(scriptSourceResolved); listener.getLogger().println(Messages.AntExec_DebugScriptSourceFieldEnd()); listener.getLogger().println(); listener.getLogger().println(Messages.AntExec_DebugPropertiesFieldBegin()); listener.getLogger().println(properties); listener.getLogger().println(Messages.AntExec_DebugPropertiesFieldEnd()); listener.getLogger().println(); } long startTime = System.currentTimeMillis(); try { AntConsoleAnnotator aca = new AntConsoleAnnotator(listener.getLogger(), build.getCharset()); int r; try { r = launcher.launch().cmds(args).envs(env).stdout(aca).pwd(buildFile.getParent()).join(); } finally { aca.forceEol(); //After the ant script has been executed, we delete the build.xml. 
//The plugin is a way to run an Ant Script from a small source code, we shoudn't keep the build.xml if (keepBuildfile == null || !keepBuildfile) { boolean deleteResponse = buildFile.delete(); if (!deleteResponse) listener.getLogger().println("The temporary Ant Build Script coudn't be deleted"); } } return r == 0; } catch (IOException e) { Util.displayIOException(e, listener); String errorMessage = hudson.tasks.Messages.Ant_ExecFailed(); if (ai == null && (System.currentTimeMillis() - startTime) < 1000) { if (getDescriptor().getInstallations() == null) // looks like the user didn't configure any Ant installation errorMessage += hudson.tasks.Messages.Ant_GlobalConfigNeeded(); else // There are Ant installations configured but the project didn't pick it errorMessage += hudson.tasks.Messages.Ant_ProjectConfigNeeded(); } e.printStackTrace(listener.fatalError(errorMessage)); return false; } } @Override public DescriptorImpl getDescriptor() { return (DescriptorImpl) super.getDescriptor(); } @SuppressWarnings("UnusedDeclaration") @Extension public static final class DescriptorImpl extends BuildStepDescriptor<Builder> { @CopyOnWrite private volatile Ant.AntInstallation[] installations = new Ant.AntInstallation[0]; @SuppressWarnings("UnusedDeclaration") public DescriptorImpl() { super(AntExec.class); load(); } // for compatibility reasons, the persistence is done by Ant.DescriptorImpl public Ant.AntInstallation[] getInstallations() { return Hudson.getInstance().getDescriptorByType(Ant.DescriptorImpl.class).getInstallations(); } //Check if entered script source is wellformed xml document public FormValidation doCheckScriptSource(@QueryParameter String value) throws IOException { String xmlContent = makeBuildFileXml("", value, "test_script"); try { XMLReader reader = XMLReaderFactory.createXMLReader(); reader.parse(new InputSource(new ByteArrayInputStream(xmlContent.getBytes()))); return FormValidation.ok(); } catch (SAXException sax) { return FormValidation.error("ERROR: " + sax.getLocalizedMessage()); } } //Check if entered extended script source is wellformed xml document private FormValidation doCheckExtendedScriptSource(@QueryParameter String value) throws IOException { String xmlContent = makeBuildFileXml(value, "", "test_script"); try { XMLReader reader = XMLReaderFactory.createXMLReader(); reader.parse(new InputSource(new ByteArrayInputStream(xmlContent.getBytes()))); return FormValidation.ok(); } catch (SAXException sax) { return FormValidation.error("ERROR: " + sax.getLocalizedMessage()); } } public boolean isApplicable(Class<? 
extends AbstractProject> aClass) { // indicates that this builder can be used with all kinds of project types return true; } public String getDisplayName() { return Messages.AntExec_DisplayName(); } } static String makeBuildFileXml(String scriptSource, String extendedScriptSource, String scriptName) { StringBuilder sb = new StringBuilder(); String myScripName = buildXml; if (scriptName != null && scriptName.length() > 0 && !scriptName.equals("")) { myScripName = scriptName; } sb.append("<?xml version=\"1.0\" encoding=\"utf-8\"?>\n"); sb.append("<project default=\"" + myScripName + "\" xmlns:antcontrib=\"antlib:net.sf.antcontrib\" basedir=\".\">\n\n"); sb.append("<target name=\"" + myScripName + "\">\n"); sb.append("<!-- Default target entered in the first textarea - begin -->\n"); sb.append(scriptSource); sb.append("\n<!-- Default target entered in the first textarea - end -->\n"); sb.append("</target>\n"); if (extendedScriptSource != null && extendedScriptSource.length() > 0 && !extendedScriptSource.equals("")) { sb.append("<!-- Extended script source entered in the second textarea - begin -->\n"); sb.append(extendedScriptSource); sb.append("\n<!-- Extended script source entered in the second textarea - end -->\n"); } sb.append("</project>\n"); return sb.toString(); } static FilePath makeBuildFile(String scriptName, String targetSource, String extendedScriptSource, AbstractBuild build) throws IOException, InterruptedException { String myScripName = buildXml; if (scriptName != null && scriptName.length() > 0 && !scriptName.equals("")) { myScripName = scriptName; } FilePath buildFile = new FilePath(build.getWorkspace(), myScripName); buildFile.write(makeBuildFileXml(targetSource, extendedScriptSource, scriptName), null); return buildFile; } }
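/*
 * Illustrative sketch (not part of the plugin): prints the wrapper build file that
 * makeBuildFileXml(...) above generates for a one-line script, which makes the generated
 * <project>/<target> structure easier to see. The sketch class is hypothetical and lives in the
 * same package only so it can reach the package-private static helper; the script content and
 * target name are examples.
 */
package hudson.plugins.antexec;

public class BuildFileXmlSketch {

    public static void main(String[] args) {
        String script = "<echo message=\"Hello from AntExec\"/>";
        // With a non-empty script name, that name becomes both the default target and the target name.
        String xml = AntExec.makeBuildFileXml(script, "", "hello_target");
        System.out.println(xml);
        // Expected shape (roughly):
        //   <?xml version="1.0" encoding="utf-8"?>
        //   <project default="hello_target" xmlns:antcontrib="antlib:net.sf.antcontrib" basedir=".">
        //   <target name="hello_target">
        //   ... the script source ...
        //   </target>
        //   </project>
    }
}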
package org.owasp.esapi.interfaces; import java.io.InputStream; import java.text.DateFormat; import java.util.Date; import java.util.List; import java.util.Set; import org.owasp.esapi.errors.IntrusionException; import org.owasp.esapi.errors.ValidationException; public interface IValidator { boolean isValidInput(String context, String input, String type, int maxLength, boolean allowNull) throws IntrusionException; String getValidInput(String context, String input, String type, int maxLength, boolean allowNull) throws ValidationException, IntrusionException; /** * Returns true if input is a valid date according to the specified date format. */ boolean isValidDate(String context, String input, DateFormat format, boolean allowNull) throws IntrusionException; /** * Returns a valid date as a Date. Invalid input will generate a descriptive ValidationException, and input that is clearly an attack * will generate a descriptive IntrusionException. */ Date getValidDate(String context, String input, DateFormat format, boolean allowNull) throws ValidationException, IntrusionException; /** * Returns true if input is "safe" HTML. Implementors should reference the OWASP AntiSamy project for ideas * on how to do HTML validation in a whitelist way, as this is an extremely difficult problem. */ boolean isValidSafeHTML(String context, String input, int maxLength, boolean allowNull) throws IntrusionException; /** * Returns canonicalized and validated "safe" HTML. Implementors should reference the OWASP AntiSamy project for ideas * on how to do HTML validation in a whitelist way, as this is an extremely difficult problem. */ String getValidSafeHTML(String context, String input, int maxLength, boolean allowNull) throws ValidationException; /** * Returns true if input is a valid credit card. Maxlength is mandated by valid credit card type. */ boolean isValidCreditCard(String context, String input, boolean allowNull) throws IntrusionException; /** * Returns a canonicalized and validated credit card number as a String. Invalid input * will generate a descriptive ValidationException, and input that is clearly an attack * will generate a descriptive IntrusionException. */ String getValidCreditCard(String context, String input, boolean allowNull) throws ValidationException, IntrusionException; /** * Returns true if input is a valid directory path. */ boolean isValidDirectoryPath(String context, String input, boolean allowNull) throws IntrusionException; /** * Returns a canonicalized and validated directory path as a String. Invalid input * will generate a descriptive ValidationException, and input that is clearly an attack * will generate a descriptive IntrusionException. */ String getValidDirectoryPath(String context, String input, boolean allowNull) throws ValidationException, IntrusionException; /** * Returns true if input is a valid file name. */ boolean isValidFileName(String context, String input, boolean allowNull) throws IntrusionException; /** * Returns a canonicalized and validated file name as a String. Invalid input * will generate a descriptive ValidationException, and input that is clearly an attack * will generate a descriptive IntrusionException. */ String getValidFileName(String context, String input, boolean allowNull) throws ValidationException, IntrusionException; /** * Returns true if input is a valid number. */ boolean isValidNumber(String context, String input, long minValue, long maxValue, boolean allowNull) throws IntrusionException; /** * Returns a validated number as a double. 
Invalid input * will generate a descriptive ValidationException, and input that is clearly an attack * will generate a descriptive IntrusionException. */ Double getValidNumber(String context, String input, long minValue, long maxValue, boolean allowNull) throws ValidationException, IntrusionException; /** * Returns true if input is a valid integer. */ boolean isValidInteger(String context, String input, int minValue, int maxValue, boolean allowNull) throws IntrusionException; /** * Returns a validated integer as an int. Invalid input * will generate a descriptive ValidationException, and input that is clearly an attack * will generate a descriptive IntrusionException. */ Integer getValidInteger(String context, String input, int minValue, int maxValue, boolean allowNull) throws ValidationException, IntrusionException; /** * Returns true if input is a valid double. */ boolean isValidDouble(String context, String input, double minValue, double maxValue, boolean allowNull) throws IntrusionException; /** * Returns a validated real number as a double. Invalid input * will generate a descriptive ValidationException, and input that is clearly an attack * will generate a descriptive IntrusionException. */ Double getValidDouble(String context, String input, double minValue, double maxValue, boolean allowNull) throws ValidationException, IntrusionException; /** * Returns true if input is valid file content. */ boolean isValidFileContent(String context, byte[] input, int maxBytes, boolean allowNull) throws IntrusionException; /** * Returns validated file content as a byte array. Invalid input * will generate a descriptive ValidationException, and input that is clearly an attack * will generate a descriptive IntrusionException. */ byte[] getValidFileContent(String context, byte[] input, int maxBytes, boolean allowNull) throws ValidationException, IntrusionException; /** * Returns true if a file upload has a valid name, path, and content. */ boolean isValidFileUpload(String context, String filepath, String filename, byte[] content, int maxBytes, boolean allowNull) throws IntrusionException; /** * Validates the filepath, filename, and content of a file. Invalid input * will generate a descriptive ValidationException, and input that is clearly an attack * will generate a descriptive IntrusionException. */ void assertValidFileUpload(String context, String filepath, String filename, byte[] content, int maxBytes, boolean allowNull) throws ValidationException, IntrusionException; /** * Validate the current HTTP request by comparing parameters, headers, and cookies to a predefined whitelist of allowed * characters. See the SecurityConfiguration class for the methods to retrieve the whitelists. */ boolean isValidHTTPRequest() throws IntrusionException; /** * Validates the current HTTP request by comparing parameters, headers, and cookies to a predefined whitelist of allowed * characters. Invalid input will generate a descriptive ValidationException, and input that is clearly an attack * will generate a descriptive IntrusionException. */ void assertIsValidHTTPRequest() throws ValidationException, IntrusionException; /** * Returns true if input is a valid list item. */ boolean isValidListItem(String context, String input, List list) throws IntrusionException; /** * Returns the list item that exactly matches the canonicalized input. Invalid or non-matching input * will generate a descriptive ValidationException, and input that is clearly an attack * will generate a descriptive IntrusionException. 
*/ String getValidListItem(String context, String input, List list) throws ValidationException, IntrusionException; /** * Returns true if the parameters in the current request contain all required parameters and only optional ones in addition. */ boolean isValidHTTPRequestParameterSet(String context, Set required, Set optional) throws IntrusionException; /** * Validates that the parameters in the current request contain all required parameters and only optional ones in * addition. Invalid input will generate a descriptive ValidationException, and input that is clearly an attack * will generate a descriptive IntrusionException. */ void assertIsValidHTTPRequestParameterSet(String context, Set required, Set optional) throws ValidationException, IntrusionException; /** * Returns true if input is valid printable ASCII characters. */ boolean isValidPrintable(String context, byte[] input, int maxLength, boolean allowNull) throws IntrusionException; /** * Returns canonicalized and validated printable characters as a byte array. Invalid input will generate a descriptive ValidationException, and input that is clearly an attack * will generate a descriptive IntrusionException. */ byte[] getValidPrintable(String context, byte[] input, int maxLength, boolean allowNull) throws ValidationException; /** * Returns true if input is valid printable ASCII characters (32-126). */ boolean isValidPrintable(String context, String input, int maxLength, boolean allowNull) throws IntrusionException; /** * Returns canonicalized and validated printable characters as a String. Invalid input will generate a descriptive ValidationException, and input that is clearly an attack * will generate a descriptive IntrusionException. */ String getValidPrintable(String context, String input, int maxLength, boolean allowNull) throws ValidationException; /** * Returns true if input is a valid redirect location. */ boolean isValidRedirectLocation(String context, String input, boolean allowNull) throws IntrusionException; /** * Returns a canonicalized and validated redirect location as a String. Invalid input will generate a descriptive ValidationException, and input that is clearly an attack * will generate a descriptive IntrusionException. */ String getValidRedirectLocation(String context, String input, boolean allowNull) throws ValidationException; /** * Reads from an input stream until end-of-line or a maximum number of * characters. This method protects against the inherent denial of service * attack in reading until the end of a line. If an attacker doesn't ever * send a newline character, then a normal input stream reader will read * until all memory is exhausted and the platform throws an OutOfMemoryError * and probably terminates. */ String safeReadLine(InputStream inputStream, int maxLength) throws ValidationException; }
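The interface above is easiest to understand from the caller's side. The sketch below shows one way application code might use an IValidator implementation to canonicalize and validate an untrusted request parameter. How the validator instance is obtained, the context label "accountId", the validation type name "SafeString" (which would have to be defined in the ESAPI security configuration), and the length limit are all illustrative assumptions, not part of the interface itself.

import org.owasp.esapi.errors.IntrusionException;
import org.owasp.esapi.errors.ValidationException;
import org.owasp.esapi.interfaces.IValidator;

public class AccountIdValidationSketch {

    private final IValidator validator; // implementation supplied by the application

    public AccountIdValidationSketch(IValidator validator) {
        this.validator = validator;
    }

    /** Returns the canonicalized account id, or throws if the input is invalid or malicious. */
    public String validateAccountId(String untrustedInput) throws ValidationException, IntrusionException {
        // getValidInput() canonicalizes the value, checks it against the hypothetical
        // "SafeString" rule and the length limit, and throws ValidationException for
        // invalid input or IntrusionException for input that is clearly an attack.
        return validator.getValidInput("accountId", untrustedInput, "SafeString", 32, false);
    }

    /** Boolean form, for callers that prefer a check to an exception. */
    public boolean isAccountIdValid(String untrustedInput) throws IntrusionException {
        return validator.isValidInput("accountId", untrustedInput, "SafeString", 32, false);
    }
}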
package io.github.classgraph; import java.io.File; import java.lang.annotation.Inherited; import java.lang.reflect.Modifier; import java.net.MalformedURLException; import java.net.URL; import java.util.AbstractMap.SimpleEntry; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.LinkedHashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import io.github.classgraph.json.Id; import io.github.classgraph.utils.JarUtils; import io.github.classgraph.utils.LogNode; import io.github.classgraph.utils.Parser.ParseException; import io.github.classgraph.utils.URLPathEncoder; /** Holds metadata about a class encountered during a scan. */ public class ClassInfo extends ScanResultObject implements Comparable<ClassInfo> { /** Name of the class. */ private @Id String name; /** Class modifier flags, e.g. Modifier.PUBLIC */ private int modifiers; /** True if the classfile indicated this is an interface (or an annotation, which is an interface). */ private boolean isInterface; /** True if the classfile indicated this is an annotation. */ private boolean isAnnotation; /** * This annotation has the {@link Inherited} meta-annotation, which means that any class that this annotation is * applied to also implicitly causes the annotation to annotate all subclasses too. */ boolean isInherited; /** The class type signature string. */ private String typeSignatureStr; /** The class type signature, parsed. */ private transient ClassTypeSignature typeSignature; /** The fully-qualified defining method name, for anonymous inner classes. */ private String fullyQualifiedDefiningMethodName; /** * If true, this class is only being referenced by another class' classfile as a superclass / implemented * interface / annotation, but this class is not itself a whitelisted (non-blacklisted) class, or in a * whitelisted (non-blacklisted) package. * * If false, this classfile was matched during scanning (i.e. its classfile contents read), i.e. this class is a * whitelisted (and non-blacklisted) class in a whitelisted (and non-blacklisted) package. */ private boolean isExternalClass; /** * The classpath element file (classpath root dir or jar) that this class was found within, or null if this * class was found in a module. */ transient File classpathElementFile; /** * The package root within a jarfile (e.g. "BOOT-INF/classes"), or the empty string if this is not a jarfile, or * the package root is the classpath element path (as opposed to within a subdirectory of the classpath * element). */ private transient String jarfilePackageRoot = ""; /** * The classpath element module that this class was found within, or null if this class was found within a * directory or jar. */ private transient ModuleRef moduleRef; /** The classpath element URL (classpath root dir or jar) that this class was found within. */ private transient URL classpathElementURL; /** The classloaders to try to load this class with before calling a MatchProcessor. */ transient ClassLoader[] classLoaders; /** Info on class annotations, including optional annotation param values. */ AnnotationInfoList annotationInfo; /** Info on fields. */ FieldInfoList fieldInfo; /** Reverse mapping from field name to FieldInfo. */ private transient Map<String, FieldInfo> fieldNameToFieldInfo; /** Info on fields. */ MethodInfoList methodInfo; /** For annotations, the default values of parameters. 
*/ List<AnnotationParameterValue> annotationDefaultParamValues; /** The set of classes related to this one. */ private final Map<RelType, Set<ClassInfo>> relatedClasses = new HashMap<>(); /** Default constructor for deserialization. */ ClassInfo() { } private ClassInfo(final String name, final int classModifiers, final boolean isExternalClass) { this(); this.name = name; if (name.endsWith(";")) { // Spot check to make sure class names were parsed from descriptors throw new RuntimeException("Bad class name"); } this.modifiers = classModifiers; this.isExternalClass = isExternalClass; } /** How classes are related. */ private static enum RelType { // Classes: /** * Superclasses of this class, if this is a regular class. * * <p> * (Should consist of only one entry, or null if superclass is java.lang.Object or unknown). */ SUPERCLASSES, /** Subclasses of this class, if this is a regular class. */ SUBCLASSES, /** Indicates that an inner class is contained within this one. */ CONTAINS_INNER_CLASS, /** Indicates that an outer class contains this one. (Should only have zero or one entries.) */ CONTAINED_WITHIN_OUTER_CLASS, // Interfaces: /** * Interfaces that this class implements, if this is a regular class, or superinterfaces, if this is an * interface. * * <p> * (May also include annotations, since annotations are interfaces, so you can implement an annotation.) */ IMPLEMENTED_INTERFACES, /** * Classes that implement this interface (including sub-interfaces), if this is an interface. */ CLASSES_IMPLEMENTING, // Class annotations: /** * Annotations on this class, if this is a regular class, or meta-annotations on this annotation, if this is * an annotation. */ CLASS_ANNOTATIONS, /** Classes annotated with this annotation, if this is an annotation. */ CLASSES_WITH_ANNOTATION, // Method annotations: /** Annotations on one or more methods of this class. */ METHOD_ANNOTATIONS, /** * Classes that have one or more methods annotated with this annotation, if this is an annotation. */ CLASSES_WITH_METHOD_ANNOTATION, // Field annotations: /** Annotations on one or more fields of this class. */ FIELD_ANNOTATIONS, /** * Classes that have one or more fields annotated with this annotation, if this is an annotation. */ CLASSES_WITH_FIELD_ANNOTATION, } /** * Add a class with a given relationship type. Return whether the collection changed as a result of the call. */ private boolean addRelatedClass(final RelType relType, final ClassInfo classInfo) { Set<ClassInfo> classInfoSet = relatedClasses.get(relType); if (classInfoSet == null) { relatedClasses.put(relType, classInfoSet = new LinkedHashSet<>(4)); } return classInfoSet.add(classInfo); } private static final int ANNOTATION_CLASS_MODIFIER = 0x2000; /** * Get a ClassInfo object, or create it if it doesn't exist. N.B. not threadsafe, so ClassInfo objects should * only ever be constructed by a single thread. */ private static ClassInfo getOrCreateClassInfo(final String className, final int classModifiers, final Map<String, ClassInfo> classNameToClassInfo) { ClassInfo classInfo = classNameToClassInfo.get(className); if (classInfo == null) { classNameToClassInfo.put(className, classInfo = new ClassInfo(className, classModifiers, /* isExternalClass = */ true)); } classInfo.modifiers |= classModifiers; if ((classModifiers & ANNOTATION_CLASS_MODIFIER) != 0) { classInfo.isAnnotation = true; } if ((classModifiers & Modifier.INTERFACE) != 0) { classInfo.isInterface = true; } return classInfo; } /** Add a superclass to this class. 
*/ void addSuperclass(final String superclassName, final Map<String, ClassInfo> classNameToClassInfo) { if (superclassName != null && !superclassName.equals("java.lang.Object")) { final ClassInfo superclassClassInfo = getOrCreateClassInfo(superclassName, /* classModifiers = */ 0, classNameToClassInfo); this.addRelatedClass(RelType.SUPERCLASSES, superclassClassInfo); superclassClassInfo.addRelatedClass(RelType.SUBCLASSES, this); } } /** Add an implemented interface to this class. */ void addImplementedInterface(final String interfaceName, final Map<String, ClassInfo> classNameToClassInfo) { final ClassInfo interfaceClassInfo = getOrCreateClassInfo(interfaceName, /* classModifiers = */ Modifier.INTERFACE, classNameToClassInfo); interfaceClassInfo.isInterface = true; interfaceClassInfo.modifiers |= Modifier.INTERFACE; this.addRelatedClass(RelType.IMPLEMENTED_INTERFACES, interfaceClassInfo); interfaceClassInfo.addRelatedClass(RelType.CLASSES_IMPLEMENTING, this); } /** Add class containment info */ static void addClassContainment(final List<SimpleEntry<String, String>> classContainmentEntries, final Map<String, ClassInfo> classNameToClassInfo) { for (final SimpleEntry<String, String> ent : classContainmentEntries) { final String innerClassName = ent.getKey(); final ClassInfo innerClassInfo = ClassInfo.getOrCreateClassInfo(innerClassName, /* classModifiers = */ 0, classNameToClassInfo); final String outerClassName = ent.getValue(); final ClassInfo outerClassInfo = ClassInfo.getOrCreateClassInfo(outerClassName, /* classModifiers = */ 0, classNameToClassInfo); innerClassInfo.addRelatedClass(RelType.CONTAINED_WITHIN_OUTER_CLASS, outerClassInfo); outerClassInfo.addRelatedClass(RelType.CONTAINS_INNER_CLASS, innerClassInfo); } } /** Add containing method name, for anonymous inner classes */ void addFullyQualifiedDefiningMethodName(final String fullyQualifiedDefiningMethodName) { this.fullyQualifiedDefiningMethodName = fullyQualifiedDefiningMethodName; } /** Add an annotation to this class. */ void addClassAnnotation(final AnnotationInfo classAnnotationInfo, final Map<String, ClassInfo> classNameToClassInfo) { final ClassInfo annotationClassInfo = getOrCreateClassInfo(classAnnotationInfo.getName(), ANNOTATION_CLASS_MODIFIER, classNameToClassInfo); if (this.annotationInfo == null) { this.annotationInfo = new AnnotationInfoList(2); } this.annotationInfo.add(classAnnotationInfo); this.addRelatedClass(RelType.CLASS_ANNOTATIONS, annotationClassInfo); annotationClassInfo.addRelatedClass(RelType.CLASSES_WITH_ANNOTATION, this); // Record use of @Inherited meta-annotation if (classAnnotationInfo.getName().equals(Inherited.class.getName())) { isInherited = true; } } /** Add field info. */ void addFieldInfo(final FieldInfoList fieldInfoList, final Map<String, ClassInfo> classNameToClassInfo) { for (final FieldInfo fieldInfo : fieldInfoList) { final AnnotationInfoList fieldAnnotationInfoList = fieldInfo.annotationInfo; if (fieldAnnotationInfoList != null) { for (final AnnotationInfo fieldAnnotationInfo : fieldAnnotationInfoList) { final ClassInfo annotationClassInfo = getOrCreateClassInfo(fieldAnnotationInfo.getName(), ANNOTATION_CLASS_MODIFIER, classNameToClassInfo); // Mark this class as having a field with this annotation this.addRelatedClass(RelType.FIELD_ANNOTATIONS, annotationClassInfo); annotationClassInfo.addRelatedClass(RelType.CLASSES_WITH_FIELD_ANNOTATION, this); } } } if (this.fieldInfo == null) { this.fieldInfo = fieldInfoList; } else { this.fieldInfo.addAll(fieldInfoList); } } /** Add method info. 
*/ void addMethodInfo(final MethodInfoList methodInfoList, final Map<String, ClassInfo> classNameToClassInfo) { for (final MethodInfo methodInfo : methodInfoList) { final AnnotationInfoList methodAnnotationInfoList = methodInfo.annotationInfo; if (methodAnnotationInfoList != null) { for (final AnnotationInfo methodAnnotationInfo : methodAnnotationInfoList) { final ClassInfo annotationClassInfo = getOrCreateClassInfo(methodAnnotationInfo.getName(), ANNOTATION_CLASS_MODIFIER, classNameToClassInfo); // Mark this class as having a method with this annotation this.addRelatedClass(RelType.METHOD_ANNOTATIONS, annotationClassInfo); annotationClassInfo.addRelatedClass(RelType.CLASSES_WITH_METHOD_ANNOTATION, this); } } // // Currently it is not possible to find methods by annotation parameter annotation // final AnnotationInfo[][] methodParamAnnotationInfoList = methodInfo.parameterAnnotationInfo; // if (methodParamAnnotationInfoList != null) { // for (int i = 0; i < methodParamAnnotationInfoList.length; i++) { // final AnnotationInfo[] paramAnnotationInfoArr = methodParamAnnotationInfoList[i]; // if (paramAnnotationInfoArr != null) { // for (int j = 0; j < paramAnnotationInfoArr.length; j++) { // final AnnotationInfo methodParamAnnotationInfo = paramAnnotationInfoArr[j]; // final ClassInfo annotationClassInfo = getOrCreateClassInfo( // methodParamAnnotationInfo.getName(), ANNOTATION_CLASS_MODIFIER, // classNameToClassInfo); // // Index parameter annotations here } if (this.methodInfo == null) { this.methodInfo = methodInfoList; } else { this.methodInfo.addAll(methodInfoList); } } /** Add the class type signature, including type params */ void addTypeSignature(final String typeSignatureStr) { if (this.typeSignatureStr == null) { this.typeSignatureStr = typeSignatureStr; } else { if (typeSignatureStr != null && !this.typeSignatureStr.equals(typeSignatureStr)) { throw new RuntimeException("Trying to merge two classes with different type signatures for class " + name + ": " + this.typeSignatureStr + " ; " + typeSignatureStr); } } } /** * Add annotation default values. (Only called in the case of annotation class definitions, when the annotation * has default parameter values.) */ void addAnnotationParamDefaultValues(final List<AnnotationParameterValue> paramNamesAndValues) { if (this.annotationDefaultParamValues == null) { this.annotationDefaultParamValues = paramNamesAndValues; } else { this.annotationDefaultParamValues.addAll(paramNamesAndValues); } } /** * Add a class that has just been scanned (as opposed to just referenced by a scanned class). Not threadsafe, * should be run in single threaded context. */ static ClassInfo addScannedClass(final String className, final int classModifiers, final boolean isInterface, final boolean isAnnotation, final Map<String, ClassInfo> classNameToClassInfo, final ClasspathElement classpathElement, final ScanSpec scanSpec, final LogNode log) { boolean classEncounteredMultipleTimes = false; ClassInfo classInfo = classNameToClassInfo.get(className); if (classInfo == null) { // This is the first time this class has been seen, add it classNameToClassInfo.put(className, classInfo = new ClassInfo(className, classModifiers, /* isExternalClass = */ false)); } else { if (!classInfo.isExternalClass) { classEncounteredMultipleTimes = true; } } // Remember which classpath element (zipfile / classpath root directory / module) the class was found in final ModuleRef modRef = classpathElement.getClasspathElementModuleRef(); final File file = modRef != null ? 
null : classpathElement.getClasspathElementFile(log); if ((classInfo.moduleRef != null && modRef != null && !classInfo.moduleRef.equals(modRef)) || (classInfo.classpathElementFile != null && file != null && !classInfo.classpathElementFile.equals(file))) { classEncounteredMultipleTimes = true; } if (classEncounteredMultipleTimes) { // The same class was encountered more than once in a single jarfile -- should not happen. However, // actually there is no restriction for paths within a zipfile to be unique (!!), and in fact // zipfiles in the wild do contain the same classfiles multiple times with the same exact path, // e.g.: xmlbeans-2.6.0.jar!org/apache/xmlbeans/xml/stream/Location.class if (log != null) { log.log("Class " + className + " is defined in multiple different classpath elements or modules -- " + "ClassInfo#getClasspathElementFile() and/or ClassInfo#getClasspathElementModuleRef " + "will only return the first of these; attempting to merge info from all copies of " + "the classfile"); } } if (classInfo.classpathElementFile == null) { // If class was found in more than one classpath element, keep the first classpath element reference classInfo.classpathElementFile = file; // Save jarfile package root, if any classInfo.jarfilePackageRoot = classpathElement.getJarfilePackageRoot(); } if (classInfo.moduleRef == null) { // If class was found in more than one module, keep the first module reference classInfo.moduleRef = modRef; } // Remember which classloader(s) the class was found in, for classloading final ClassLoader[] classLoaders = classpathElement.getClassLoaders(); if (classInfo.classLoaders == null) { classInfo.classLoaders = classLoaders; } else if (classLoaders != null && !Arrays.equals(classInfo.classLoaders, classLoaders)) { // Merge together ClassLoader list (concatenate and dedup) final LinkedHashSet<ClassLoader> allClassLoaders = new LinkedHashSet<>( Arrays.asList(classInfo.classLoaders)); for (final ClassLoader classLoader : classLoaders) { allClassLoaders.add(classLoader); } final List<ClassLoader> classLoaderOrder = new ArrayList<>(allClassLoaders); classInfo.classLoaders = classLoaderOrder.toArray(new ClassLoader[0]); } // Mark the classfile as scanned classInfo.isExternalClass = false; // Merge modifiers classInfo.modifiers |= classModifiers; classInfo.isInterface |= isInterface; classInfo.isAnnotation |= isAnnotation; return classInfo; } /** The class type to return. */ private static enum ClassType { /** Get all class types. */ ALL, /** A standard class (not an interface or annotation). */ STANDARD_CLASS, /** * An interface (this is named "implemented interface" rather than just "interface" to distinguish it from * an annotation.) */ IMPLEMENTED_INTERFACE, /** An annotation. */ ANNOTATION, /** An interface or annotation (used since you can actually implement an annotation). */ INTERFACE_OR_ANNOTATION, } /** * Filter classes according to scan spec and class type. * * @param strictWhitelist * If true, exclude class if it is external, blacklisted, or a system class. */ private static Set<ClassInfo> filterClassInfo(final Collection<ClassInfo> classes, final ScanSpec scanSpec, final boolean strictWhitelist, final ClassType...
classTypes) { if (classes == null) { return null; } boolean includeAllTypes = classTypes.length == 0; boolean includeStandardClasses = false; boolean includeImplementedInterfaces = false; boolean includeAnnotations = false; for (final ClassType classType : classTypes) { switch (classType) { case ALL: includeAllTypes = true; break; case STANDARD_CLASS: includeStandardClasses = true; break; case IMPLEMENTED_INTERFACE: includeImplementedInterfaces = true; break; case ANNOTATION: includeAnnotations = true; break; case INTERFACE_OR_ANNOTATION: includeImplementedInterfaces = includeAnnotations = true; break; default: throw new RuntimeException("Unknown ClassType: " + classType); } } if (includeStandardClasses && includeImplementedInterfaces && includeAnnotations) { includeAllTypes = true; } final Set<ClassInfo> classInfoSetFiltered = new LinkedHashSet<>(classes.size()); for (final ClassInfo classInfo : classes) { // Check class type against requested type(s) if (includeAllTypes || includeStandardClasses && classInfo.isStandardClass() || includeImplementedInterfaces && classInfo.isImplementedInterface() || includeAnnotations && classInfo.isAnnotation()) { if ( // Always check blacklist !scanSpec.classIsBlacklisted(classInfo.name) // If not blacklisted, and strictWhitelist is false, add class && (!strictWhitelist || ( // Don't include external classes unless enableExternalClasses is true (!classInfo.isExternalClass || scanSpec.enableExternalClasses) // If this is a system class, ignore blacklist unless the blanket blacklisting of // all system jars or modules has been disabled, and this system class was specifically // blacklisted by name && (!scanSpec.blacklistSystemJarsOrModules || !JarUtils.isInSystemPackageOrModule(classInfo.name))))) { // Class passed strict whitelist criteria classInfoSetFiltered.add(classInfo); } } } return classInfoSetFiltered; } /** * A set of classes that indirectly reachable through a directed path, for a given relationship type, and a set * of classes that is directly related (only one relationship step away). */ static class ReachableAndDirectlyRelatedClasses { final Set<ClassInfo> reachableClasses; final Set<ClassInfo> directlyRelatedClasses; private ReachableAndDirectlyRelatedClasses(final Set<ClassInfo> reachableClasses, final Set<ClassInfo> directlyRelatedClasses) { this.reachableClasses = reachableClasses; this.directlyRelatedClasses = directlyRelatedClasses; } } private static final ReachableAndDirectlyRelatedClasses NO_REACHABLE_CLASSES = new ReachableAndDirectlyRelatedClasses(Collections.<ClassInfo> emptySet(), Collections.<ClassInfo> emptySet()); /** * Get the classes related to this one (the transitive closure) for the given relationship type, and those * directly related. */ private ReachableAndDirectlyRelatedClasses filterClassInfo(final RelType relType, final boolean strictWhitelist, final ClassType... 
classTypes) { final Set<ClassInfo> directlyRelatedClasses = this.relatedClasses.get(relType); if (directlyRelatedClasses == null) { return NO_REACHABLE_CLASSES; } final Set<ClassInfo> reachableClasses = new LinkedHashSet<>(directlyRelatedClasses); if (relType == RelType.METHOD_ANNOTATIONS || relType == RelType.FIELD_ANNOTATIONS) { // For method and field annotations, need to change the RelType when finding meta-annotations for (final ClassInfo annotation : directlyRelatedClasses) { reachableClasses.addAll( annotation.filterClassInfo(RelType.CLASS_ANNOTATIONS, strictWhitelist).reachableClasses); } } else if (relType == RelType.CLASSES_WITH_METHOD_ANNOTATION || relType == RelType.CLASSES_WITH_FIELD_ANNOTATION) { // If looking for meta-annotated methods or fields, need to find all meta-annotated annotations, then // look for the methods or fields that they annotate for (final ClassInfo subAnnotation : this.filterClassInfo(RelType.CLASSES_WITH_ANNOTATION, strictWhitelist, ClassType.ANNOTATION).reachableClasses) { final Set<ClassInfo> annotatedClasses = subAnnotation.relatedClasses.get(relType); if (annotatedClasses != null) { reachableClasses.addAll(annotatedClasses); } } } else { // For other relationship types, the reachable type stays the same over the transitive closure. Find the // transitive closure, breaking cycles where necessary. final LinkedList<ClassInfo> queue = new LinkedList<>(); queue.addAll(directlyRelatedClasses); while (!queue.isEmpty()) { final ClassInfo head = queue.removeFirst(); final Set<ClassInfo> headRelatedClasses = head.relatedClasses.get(relType); if (headRelatedClasses != null) { for (final ClassInfo directlyReachableFromHead : headRelatedClasses) { // Don't get in cycle if (reachableClasses.add(directlyReachableFromHead)) { queue.add(directlyReachableFromHead); } } } } } if (reachableClasses.isEmpty()) { return NO_REACHABLE_CLASSES; } // Special case -- don't inherit java.lang.annotation.* meta-annotations as related meta-annotations // (but still return them as direct meta-annotations on annotation classes). Set<ClassInfo> javaLangAnnotationRelatedClasses = null; for (final ClassInfo classInfo : reachableClasses) { if (classInfo.getName().startsWith("java.lang.annotation.")) { if (javaLangAnnotationRelatedClasses == null) { javaLangAnnotationRelatedClasses = new LinkedHashSet<>(); } javaLangAnnotationRelatedClasses.add(classInfo); } } if (javaLangAnnotationRelatedClasses != null) { // Remove all java.lang.annotation annotations that are not directly related to this class Set<ClassInfo> javaLangAnnotationDirectlyRelatedClasses = null; for (final ClassInfo classInfo : directlyRelatedClasses) { if (classInfo.getName().startsWith("java.lang.annotation.")) { if (javaLangAnnotationDirectlyRelatedClasses == null) { javaLangAnnotationDirectlyRelatedClasses = new LinkedHashSet<>(); } javaLangAnnotationDirectlyRelatedClasses.add(classInfo); } } if (javaLangAnnotationDirectlyRelatedClasses != null) { javaLangAnnotationRelatedClasses.removeAll(javaLangAnnotationDirectlyRelatedClasses); } reachableClasses.removeAll(javaLangAnnotationRelatedClasses); } return new ReachableAndDirectlyRelatedClasses( filterClassInfo(reachableClasses, scanResult.scanSpec, strictWhitelist), filterClassInfo(directlyRelatedClasses, scanResult.scanSpec, strictWhitelist)); } /** * Get all classes found during the scan. * * @return A list of all classes found during the scan, or the empty list if none. 
*/ static ClassInfoList getAllClasses(final Collection<ClassInfo> classes, final ScanSpec scanSpec, final ScanResult scanResult) { return new ClassInfoList( ClassInfo.filterClassInfo(classes, scanSpec, /* strictWhitelist = */ true, ClassType.ALL), /* sortByName = */ true); } /** * Get all standard classes found during the scan. * * @return A list of all standard classes found during the scan, or the empty list if none. */ static ClassInfoList getAllStandardClasses(final Collection<ClassInfo> classes, final ScanSpec scanSpec, final ScanResult scanResult) { return new ClassInfoList(ClassInfo.filterClassInfo(classes, scanSpec, /* strictWhitelist = */ true, ClassType.STANDARD_CLASS), /* sortByName = */ true); } /** * Get all implemented interface (non-annotation interface) classes found during the scan. * * @return A list of all annotation classes found during the scan, or the empty list if none. */ static ClassInfoList getAllImplementedInterfaceClasses(final Collection<ClassInfo> classes, final ScanSpec scanSpec, final ScanResult scanResult) { return new ClassInfoList(ClassInfo.filterClassInfo(classes, scanSpec, /* strictWhitelist = */ true, ClassType.IMPLEMENTED_INTERFACE), /* sortByName = */ true); } /** * Get all annotation classes found during the scan. See also {@link #getAllInterfaceOrAnnotationClasses()}. * * @return A list of all annotation classes found during the scan, or the empty list if none. */ static ClassInfoList getAllAnnotationClasses(final Collection<ClassInfo> classes, final ScanSpec scanSpec, final ScanResult scanResult) { return new ClassInfoList( ClassInfo.filterClassInfo(classes, scanSpec, /* strictWhitelist = */ true, ClassType.ANNOTATION), /* sortByName = */ true); } /** * Get all interface or annotation classes found during the scan. (Annotations are technically interfaces, and * they can be implemented.) * * @return A list of all whitelisted interfaces found during the scan, or the empty list if none. */ static ClassInfoList getAllInterfacesOrAnnotationClasses(final Collection<ClassInfo> classes, final ScanSpec scanSpec, final ScanResult scanResult) { return new ClassInfoList(ClassInfo.filterClassInfo(classes, scanSpec, /* strictWhitelist = */ true, ClassType.INTERFACE_OR_ANNOTATION), /* sortByName = */ true); } // Predicates /** @return The name of the class. */ public String getName() { return name; } /** * @return true if this class is an external class, i.e. was referenced by a whitelisted class as a superclass, * interface, or annotation, but is not itself a whitelisted class. */ public boolean isExternalClass() { return isExternalClass; } /** * @return The class modifier bits, e.g. {@link Modifier#PUBLIC}. */ public int getModifiers() { return modifiers; } /** * @return The field modifiers as a string, e.g. "public static final". For the modifier bits, call * {@link #getModifiers()}. */ public String getModifiersStr() { final StringBuilder buf = new StringBuilder(); ClassTypeSignature.modifiersToString(modifiers, buf); return buf.toString(); } /** * @return true if this class is a public class. */ public boolean isPublic() { return (modifiers & Modifier.PUBLIC) != 0; } /** * @return true if this class is an abstract class. */ public boolean isAbstract() { return (modifiers & 0x400) != 0; } /** * @return true if this class is a synthetic class. */ public boolean isSynthetic() { return (modifiers & 0x1000) != 0; } /** * @return true if this class is a final class. 
*/ public boolean isFinal() { return (modifiers & Modifier.FINAL) != 0; } /** * @return true if this class is static. */ public boolean isStatic() { return Modifier.isStatic(modifiers); } /** * @return true if this class is an annotation class. */ public boolean isAnnotation() { return isAnnotation; } /** * @return true if this class is an interface and is not an annotation (annotations are interfaces, and can be * implemented). */ public boolean isInterface() { return isInterface && !isAnnotation; } /** * @return true if this class is an interface or an annotation (annotations are interfaces, and can be * implemented). */ public boolean isInterfaceOrAnnotation() { return isInterface; } /** * @return true if this class is an {@link Enum}. */ public boolean isEnum() { return (modifiers & 0x4000) != 0; } /** * @return true if this class is a standard class (i.e. is not an annotation or interface). */ public boolean isStandardClass() { return !(isAnnotation || isInterface); } /** * @param superclassName * The name of a superclass. * @return true if this class extends the named superclass. */ public boolean extendsSuperclass(final String superclassName) { return getSuperclasses().containsName(superclassName); } /** * @return true if this is an inner class (call {@link #isAnonymousInnerClass()} to test if this is an anonymous * inner class). If true, the containing class can be determined by calling {@link #getOuterClasses()}. */ public boolean isInnerClass() { return !getOuterClasses().isEmpty(); } /** * @return true if this class contains inner classes. If true, the inner classes can be determined by calling * {@link #getInnerClasses()}. */ public boolean isOuterClass() { return !getInnerClasses().isEmpty(); } /** * @return true if this is an anonymous inner class. If true, the name of the containing method can be obtained * by calling {@link #getFullyQualifiedDefiningMethodName()}. */ public boolean isAnonymousInnerClass() { return fullyQualifiedDefiningMethodName != null; } /** * Return whether this class is an implemented interface (meaning a standard, non-annotation interface, or an * annotation that has also been implemented as an interface by some class). * * <p> * Annotations are interfaces, but you can also implement an annotation, so to we return whether an interface * (even an annotation) is implemented by a class or extended by a subinterface, or (failing that) if it is not * an interface but not an annotation. * * @return true if this class is an implemented interface. */ public boolean isImplementedInterface() { return relatedClasses.get(RelType.CLASSES_IMPLEMENTING) != null || (isInterface && !isAnnotation); } /** * @param interfaceName * The name of an interface. * @return true if this class implements the named interface. */ public boolean implementsInterface(final String interfaceName) { return getInterfaces().containsName(interfaceName); } /** * @param annotationName * The name of an annotation. * @return true if this class has the named annotation. */ public boolean hasAnnotation(final String annotationName) { return getAnnotations().containsName(annotationName); } /** * @param fieldName * The name of a field. * @return true if this class has the named field. */ public boolean hasField(final String fieldName) { return getFieldInfo().containsName(fieldName); } /** * @param fieldAnnotationName * The name of a field annotation. * @return true if this class has a field with the named annotation. 
*/ public boolean hasFieldAnnotation(final String fieldAnnotationName) { for (final FieldInfo fieldInfo : getFieldInfo()) { if (fieldInfo.getAnnotationInfo().containsName(fieldAnnotationName)) { return true; } } return false; } /** * @param methodName * The name of a method. * @return true if this class has a method of the requested name. */ public boolean hasMethod(final String methodName) { return getMethodInfo().containsName(methodName); } /** * @param methodAnnotationName * The name of a mehtod annotation. * @return true if this class has a method with the named annotation. */ public boolean hasMethodAnnotation(final String methodAnnotationName) { for (final MethodInfo methodInfo : getMethodInfo()) { if (methodInfo.getAnnotationInfo().containsName(methodAnnotationName)) { return true; } } return false; } // Standard classes /** * Get the subclasses of this class, sorted in order of name. Call {@link ClassInfoList#directOnly()} to get * direct subclasses. * * @return the list of subclasses of this class, or the empty list if none. */ public ClassInfoList getSubclasses() { if (getName().equals("java.lang.Object")) { // Make an exception for querying all subclasses of java.lang.Object return scanResult.getAllClasses(); } else { return new ClassInfoList(this.filterClassInfo(RelType.SUBCLASSES, /* strictWhitelist = */ true), /* sortByName = */ true); } } /** * Get all superclasses of this class, in ascending order in the class hierarchy. Does not include * superinterfaces, if this is an interface (use {@link #getInterfaces()} to get superinterfaces of an * interface.} * * @return the list of all superclasses of this class, or the empty list if none. */ public ClassInfoList getSuperclasses() { return new ClassInfoList(this.filterClassInfo(RelType.SUPERCLASSES, /* strictWhitelist = */ false), /* sortByName = */ false); } /** * Get the single direct superclass of this class, or null if none. Does not return the superinterfaces, if this * is an interface (use {@link #getInterfaces()} to get superinterfaces of an interface.} * * @return the superclass of this class, or null if none. */ public ClassInfo getSuperclass() { final Set<ClassInfo> superClasses = relatedClasses.get(RelType.SUPERCLASSES); if (superClasses == null || superClasses.isEmpty()) { return null; } else if (superClasses.size() > 2) { throw new IllegalArgumentException("More than one superclass: " + superClasses); } else { final ClassInfo superclass = superClasses.iterator().next(); if (superclass.getName().equals("java.lang.Object")) { return null; } else { return superclass; } } } /** * @return A list of the containing outer classes, if this is an inner class, otherwise the empty list. Note * that all containing outer classes are returned, not just the innermost of the containing outer * classes. */ public ClassInfoList getOuterClasses() { return new ClassInfoList( this.filterClassInfo(RelType.CONTAINED_WITHIN_OUTER_CLASS, /* strictWhitelist = */ false), /* sortByName = */ false); } /** * @return A list of the inner classes contained within this class, or the empty list if none. */ public ClassInfoList getInnerClasses() { return new ClassInfoList(this.filterClassInfo(RelType.CONTAINS_INNER_CLASS, /* strictWhitelist = */ false), /* sortByName = */ true); } /** * @return The fully-qualified method name (i.e. fully qualified classname, followed by dot, followed by method * name) for the defining method, if this is an anonymous inner class, or null if not. 
*/ public String getFullyQualifiedDefiningMethodName() { return fullyQualifiedDefiningMethodName; } // Interfaces /** * @return The list of interfaces implemented by this class or by one of its superclasses, if this is a standard * class, or the superinterfaces extended by this interface, if this is an interface. Returns the empty * list if none. */ public ClassInfoList getInterfaces() { // Classes also implement the interfaces of their superclasses final ReachableAndDirectlyRelatedClasses implementedInterfaces = this .filterClassInfo(RelType.IMPLEMENTED_INTERFACES, /* strictWhitelist = */ false); final Set<ClassInfo> allInterfaces = new LinkedHashSet<>(implementedInterfaces.reachableClasses); for (final ClassInfo superclass : this.filterClassInfo(RelType.SUPERCLASSES, /* strictWhitelist = */ false).reachableClasses) { final Set<ClassInfo> superclassImplementedInterfaces = superclass.filterClassInfo( RelType.IMPLEMENTED_INTERFACES, /* strictWhitelist = */ false).reachableClasses; allInterfaces.addAll(superclassImplementedInterfaces); } return new ClassInfoList(allInterfaces, implementedInterfaces.directlyRelatedClasses, /* sortByName = */ true); } /** * @return the list of the classes (and their subclasses) that implement this interface, if this is an * interface, otherwise returns the empty list. */ public ClassInfoList getClassesImplementing() { if (!isInterface) { throw new IllegalArgumentException("Class is not an interface: " + getName()); } // Subclasses of implementing classes also implement the interface final ReachableAndDirectlyRelatedClasses implementingClasses = this .filterClassInfo(RelType.CLASSES_IMPLEMENTING, /* strictWhitelist = */ true); final Set<ClassInfo> allImplementingClasses = new LinkedHashSet<>(implementingClasses.reachableClasses); for (final ClassInfo implementingClass : implementingClasses.reachableClasses) { final Set<ClassInfo> implementingSubclasses = implementingClass.filterClassInfo(RelType.SUBCLASSES, /* strictWhitelist = */ true).reachableClasses; allImplementingClasses.addAll(implementingSubclasses); } return new ClassInfoList(allImplementingClasses, implementingClasses.directlyRelatedClasses, /* sortByName = */ true); } // Annotations /** * Get the annotations and meta-annotations on this class. (Call {@link #getAnnotationInfo()} instead, if you * need the parameter values of annotations, rather than just the annotation classes.) * * <p> * Also handles the {@link Inherited} meta-annotation, which causes an annotation to annotate a class and all of * its subclasses. * * @return the list of annotations and meta-annotations on this class. 
*/ public ClassInfoList getAnnotations() { if (!scanResult.scanSpec.enableAnnotationInfo) { throw new IllegalArgumentException("Please call ClassGraph#enableAnnotationInfo() before #scan()"); } // Get all annotations on this class final ReachableAndDirectlyRelatedClasses annotationClasses = this.filterClassInfo(RelType.CLASS_ANNOTATIONS, /* strictWhitelist = */ false); // Check for any @Inherited annotations on superclasses Set<ClassInfo> inheritedSuperclassAnnotations = null; for (final ClassInfo superclass : getSuperclasses()) { for (final ClassInfo superclassAnnotationClass : superclass.filterClassInfo(RelType.CLASS_ANNOTATIONS, /* strictWhitelist = */ false).reachableClasses) { final Set<ClassInfo> superclassAnnotations = superclassAnnotationClass.relatedClasses .get(RelType.CLASS_ANNOTATIONS); if (superclassAnnotations != null) { // Check if the superclass annotation has the @Inherited meta-annotation, // which causes an annotation to annotate a class and all of its subclasses. if (superclassAnnotationClass.isInherited) { // superclassAnnotationClass is an inherited annotation if (inheritedSuperclassAnnotations == null) { inheritedSuperclassAnnotations = new LinkedHashSet<>(); } inheritedSuperclassAnnotations.add(superclassAnnotationClass); } } } } if (inheritedSuperclassAnnotations == null) { // No inherited superclass annotations return new ClassInfoList(annotationClasses, /* sortByName = */ true); } else { // Merge inherited superclass annotations and annotations on this class inheritedSuperclassAnnotations.addAll(annotationClasses.reachableClasses); return new ClassInfoList(inheritedSuperclassAnnotations, annotationClasses.directlyRelatedClasses, /* sortByName = */ true); } } /** * Get a list of direct annotations on this class, along with any annotation parameter values, as a list of * {@link AnnotationInfo} objects, or the empty list if none. * * <p> * Also handles the {@link Inherited} meta-annotation, which causes an annotation to annotate a class and all of * its subclasses. * * @return A list of {@link AnnotationInfo} objects for the annotations on this class, or the empty list if * none. */ public AnnotationInfoList getAnnotationInfo() { if (!scanResult.scanSpec.enableAnnotationInfo) { throw new IllegalArgumentException("Please call ClassGraph#enableAnnotationInfo() before #scan()"); } // Check for any @Inherited annotations on superclasses AnnotationInfoList inheritedSuperclassAnnotations = null; for (final ClassInfo superclass : getSuperclasses()) { for (final AnnotationInfo superclassAnnotationInfo : superclass.getAnnotationInfo()) { if (superclassAnnotationInfo.isInherited()) { // superclassAnnotationInfo is an inherited annotation if (inheritedSuperclassAnnotations == null) { inheritedSuperclassAnnotations = new AnnotationInfoList(); } inheritedSuperclassAnnotations.add(superclassAnnotationInfo); } } } if (inheritedSuperclassAnnotations == null) { // No inherited superclass annotations return annotationInfo == null ? AnnotationInfoList.EMPTY_LIST : annotationInfo; } else { // Merge inherited superclass annotations and annotations on this class if (annotationInfo != null) { inheritedSuperclassAnnotations.addAll(annotationInfo); } Collections.sort(inheritedSuperclassAnnotations); return inheritedSuperclassAnnotations; } } /** * @return A list of {@link AnnotationParameterValue} objects for each of the default parameter values for this * annotation, if this is an annotation class with default parameter values, otherwise the empty list.
*/ public List<AnnotationParameterValue> getAnnotationDefaultParameterValues() { if (!scanResult.scanSpec.enableAnnotationInfo) { throw new IllegalArgumentException("Please call ClassGraph#enableAnnotationInfo() before #scan()"); } if (!isAnnotation) { throw new IllegalArgumentException("Class is not an annotation: " + getName()); } return annotationDefaultParamValues == null ? Collections.<AnnotationParameterValue> emptyList() : annotationDefaultParamValues; } /** * @return A list of standard classes and non-annotation interfaces that are annotated by this class, if this is * an annotation class, or the empty list if none. Also handles the {@link Inherited} meta-annotation, * which causes an annotation on a class to be inherited by all of its subclasses. */ public ClassInfoList getClassesWithAnnotation() { if (!scanResult.scanSpec.enableAnnotationInfo) { throw new IllegalArgumentException("Please call ClassGraph#enableAnnotationInfo() before #scan()"); } if (!isAnnotation) { throw new IllegalArgumentException("Class is not an annotation: " + getName()); } // Get classes that have this annotation final ReachableAndDirectlyRelatedClasses classesWithAnnotation = this .filterClassInfo(RelType.CLASSES_WITH_ANNOTATION, /* strictWhitelist = */ true); if (isInherited) { // If this is an inherited annotation, add into the result all subclasses of the annotated classes. final Set<ClassInfo> classesWithAnnotationAndTheirSubclasses = new LinkedHashSet<>( classesWithAnnotation.reachableClasses); for (final ClassInfo classWithAnnotation : classesWithAnnotation.reachableClasses) { classesWithAnnotationAndTheirSubclasses.addAll(classWithAnnotation.getSubclasses()); } return new ClassInfoList(classesWithAnnotationAndTheirSubclasses, classesWithAnnotation.directlyRelatedClasses, /* sortByName = */ true); } else { // If not inherited, only return the annotated classes return new ClassInfoList(classesWithAnnotation, /* sortByName = */ true); } } /** * @return The list of classes that are directly (i.e. are not meta-annotated) annotated with the requested * annotation, or the empty list if none. 
*/ ClassInfoList getClassesWithAnnotationDirectOnly() { return new ClassInfoList( this.filterClassInfo(RelType.CLASSES_WITH_ANNOTATION, /* strictWhitelist = */ true), /* sortByName = */ true); } // Methods public MethodInfoList getMethodInfo() { if (!scanResult.scanSpec.enableMethodInfo) { throw new IllegalArgumentException("Please call ClassGraph#enableMethodInfo() before #scan()"); } if (methodInfo == null) { return MethodInfoList.EMPTY_LIST; } else { final MethodInfoList nonConstructorMethods = new MethodInfoList(); for (final MethodInfo mi : methodInfo) { final String methodName = mi.getName(); if (!methodName.equals("<init>") && !methodName.equals("<clinit>")) { nonConstructorMethods.add(mi); } } return nonConstructorMethods; } } public MethodInfoList getConstructorInfo() { if (!scanResult.scanSpec.enableMethodInfo) { throw new IllegalArgumentException("Please call ClassGraph#enableMethodInfo() before #scan()"); } if (methodInfo == null) { return MethodInfoList.EMPTY_LIST; } else { final MethodInfoList nonConstructorMethods = new MethodInfoList(); for (final MethodInfo mi : methodInfo) { final String methodName = mi.getName(); if (methodName.equals("<init>")) { nonConstructorMethods.add(mi); } } return nonConstructorMethods; } } public MethodInfoList getMethodAndConstructorInfo() { if (!scanResult.scanSpec.enableMethodInfo) { throw new IllegalArgumentException("Please call ClassGraph#enableMethodInfo() before #scan()"); } return methodInfo == null ? MethodInfoList.EMPTY_LIST : methodInfo; } public MethodInfoList getMethodInfo(final String methodName) { if (!scanResult.scanSpec.enableMethodInfo) { throw new IllegalArgumentException("Please call ClassGraph#enableMethodInfo() before #scan()"); } if (methodInfo == null) { return MethodInfoList.EMPTY_LIST; } boolean hasMethodWithName = false; for (final MethodInfo f : methodInfo) { if (f.getName().equals(methodName)) { hasMethodWithName = true; break; } } if (!hasMethodWithName) { return MethodInfoList.EMPTY_LIST; } final MethodInfoList methodInfoList = new MethodInfoList(); for (final MethodInfo f : methodInfo) { if (f.getName().equals(methodName)) { methodInfoList.add(f); } } return methodInfoList; } /** * @return A list of method annotations or meta-annotations declared by the class, as a list of * {@link ClassInfo} objects, or the empty list if none. N.B. these annotations do not contain specific * annotation parameters -- call {@link MethodInfo#getAnnotationInfo()} to get details on specific * method annotation instances. 
*/ public ClassInfoList getMethodAnnotations() { if (!scanResult.scanSpec.enableMethodInfo || !scanResult.scanSpec.enableAnnotationInfo) { throw new IllegalArgumentException( "Please call ClassGraph#enableMethodInfo() and " + "#enableAnnotationInfo() before #scan()"); } final ReachableAndDirectlyRelatedClasses methodAnnotations = this .filterClassInfo(RelType.METHOD_ANNOTATIONS, /* strictWhitelist = */ false, ClassType.ANNOTATION); final Set<ClassInfo> methodAnnotationsAndMetaAnnotations = new LinkedHashSet<>( methodAnnotations.reachableClasses); for (final ClassInfo methodAnnotation : methodAnnotations.reachableClasses) { methodAnnotationsAndMetaAnnotations.addAll(methodAnnotation.filterClassInfo(RelType.CLASS_ANNOTATIONS, /* strictWhitelist = */ false).reachableClasses); } return new ClassInfoList(methodAnnotationsAndMetaAnnotations, methodAnnotations.directlyRelatedClasses, /* sortByName = */ true); } /** * @return A list of classes that have a method with this annotation or meta-annotation, or the empty list if * none. */ public ClassInfoList getClassesWithMethodAnnotation() { if (!scanResult.scanSpec.enableMethodInfo || !scanResult.scanSpec.enableAnnotationInfo) { throw new IllegalArgumentException( "Please call ClassGraph#enableMethodInfo() and " + "#enableAnnotationInfo() before #scan()"); } final ReachableAndDirectlyRelatedClasses classesWithDirectlyAnnotatedMethods = this .filterClassInfo(RelType.CLASSES_WITH_METHOD_ANNOTATION, /* strictWhitelist = */ true); final ReachableAndDirectlyRelatedClasses annotationsWithThisMetaAnnotation = this.filterClassInfo( RelType.CLASSES_WITH_ANNOTATION, /* strictWhitelist = */ false, ClassType.ANNOTATION); if (annotationsWithThisMetaAnnotation.reachableClasses.isEmpty()) { // This annotation does not meta-annotate another annotation that annotates a method return new ClassInfoList(classesWithDirectlyAnnotatedMethods, /* sortByName = */ true); } else { // Take the union of all classes with methods directly annotated by this annotation, // and classes with methods meta-annotated by this annotation final Set<ClassInfo> allClassesWithAnnotatedOrMetaAnnotatedMethods = new LinkedHashSet<>( classesWithDirectlyAnnotatedMethods.reachableClasses); for (final ClassInfo metaAnnotatedAnnotation : annotationsWithThisMetaAnnotation.reachableClasses) { allClassesWithAnnotatedOrMetaAnnotatedMethods .addAll(metaAnnotatedAnnotation.filterClassInfo(RelType.CLASSES_WITH_METHOD_ANNOTATION, /* strictWhitelist = */ true).reachableClasses); } return new ClassInfoList(allClassesWithAnnotatedOrMetaAnnotatedMethods, classesWithDirectlyAnnotatedMethods.directlyRelatedClasses, /* sortByName = */ true); } } /** * @return A list of classes that have methods that are directly annotated (i.e. are not meta-annotated) with * the requested method annotation, or the empty list if none. */ ClassInfoList getClassesWithMethodAnnotationDirectOnly() { return new ClassInfoList( this.filterClassInfo(RelType.CLASSES_WITH_METHOD_ANNOTATION, /* strictWhitelist = */ true), /* sortByName = */ true); } // Fields public FieldInfoList getFieldInfo() { if (!scanResult.scanSpec.enableFieldInfo) { throw new IllegalArgumentException("Please call ClassGraph#enableFieldInfo() before #scan()"); } return fieldInfo == null ? 
FieldInfoList.EMPTY_LIST : fieldInfo; } public FieldInfo getFieldInfo(final String fieldName) { if (!scanResult.scanSpec.enableFieldInfo) { throw new IllegalArgumentException("Please call ClassGraph#enableFieldInfo() before #scan()"); } if (fieldInfo == null) { return null; } if (fieldNameToFieldInfo == null) { // Lazily build reverse mapping cache fieldNameToFieldInfo = new HashMap<>(); for (final FieldInfo f : fieldInfo) { fieldNameToFieldInfo.put(f.getName(), f); } } return fieldNameToFieldInfo.get(fieldName); } /** * @return A list of annotations on fields declared by the class, or the empty list if none. N.B. these * annotations do not contain specific annotation parameters -- call * {@link FieldInfo#getAnnotationInfo()} to get details on specific field annotation instances. */ public ClassInfoList getFieldAnnotations() { if (!scanResult.scanSpec.enableFieldInfo || !scanResult.scanSpec.enableAnnotationInfo) { throw new IllegalArgumentException("Please call ClassGraph#enableFieldInfo() and " + "ClassGraph#enableAnnotationInfo() before #scan()"); } final ReachableAndDirectlyRelatedClasses fieldAnnotations = this.filterClassInfo(RelType.FIELD_ANNOTATIONS, /* strictWhitelist = */ false, ClassType.ANNOTATION); final Set<ClassInfo> fieldAnnotationsAndMetaAnnotations = new LinkedHashSet<>( fieldAnnotations.reachableClasses); for (final ClassInfo fieldAnnotation : fieldAnnotations.reachableClasses) { fieldAnnotationsAndMetaAnnotations.addAll(fieldAnnotation.filterClassInfo(RelType.CLASS_ANNOTATIONS, /* strictWhitelist = */ false).reachableClasses); } return new ClassInfoList(fieldAnnotationsAndMetaAnnotations, fieldAnnotations.directlyRelatedClasses, /* sortByName = */ true); } /** * @return A list of classes that have a field with this annotation or meta-annotation, or the empty list if * none. */ public ClassInfoList getClassesWithFieldAnnotation() { if (!scanResult.scanSpec.enableFieldInfo || !scanResult.scanSpec.enableAnnotationInfo) { throw new IllegalArgumentException("Please call ClassGraph#enableFieldInfo() and " + "ClassGraph#enableAnnotationInfo() before #scan()"); } final ReachableAndDirectlyRelatedClasses classesWithDirectlyAnnotatedFields = this .filterClassInfo(RelType.CLASSES_WITH_FIELD_ANNOTATION, /* strictWhitelist = */ true); final ReachableAndDirectlyRelatedClasses annotationsWithThisMetaAnnotation = this.filterClassInfo( RelType.CLASSES_WITH_ANNOTATION, /* strictWhitelist = */ false, ClassType.ANNOTATION); if (annotationsWithThisMetaAnnotation.reachableClasses.isEmpty()) { // This annotation does not meta-annotate another annotation that annotates a field return new ClassInfoList(classesWithDirectlyAnnotatedFields, /* sortByName = */ true); } else { // Take the union of all classes with fields directly annotated by this annotation, // and classes with fields meta-annotated by this annotation final Set<ClassInfo> allClassesWithAnnotatedOrMetaAnnotatedFields = new LinkedHashSet<>( classesWithDirectlyAnnotatedFields.reachableClasses); for (final ClassInfo metaAnnotatedAnnotation : annotationsWithThisMetaAnnotation.reachableClasses) { allClassesWithAnnotatedOrMetaAnnotatedFields .addAll(metaAnnotatedAnnotation.filterClassInfo(RelType.CLASSES_WITH_FIELD_ANNOTATION, /* strictWhitelist = */ true).reachableClasses); } return new ClassInfoList(allClassesWithAnnotatedOrMetaAnnotatedFields, classesWithDirectlyAnnotatedFields.directlyRelatedClasses, /* sortByName = */ true); } } /** * @return A list of classes that declare fields that are directly annotated (i.e. 
are not meta-annotated) with * the requested method annotation, or the empty list if none. */ ClassInfoList getClassesWithFieldAnnotationDirectOnly() { return new ClassInfoList( this.filterClassInfo(RelType.CLASSES_WITH_FIELD_ANNOTATION, /* strictWhitelist = */ true), /* sortByName = */ true); } /** @return The class type signature, if available, otherwise returns null. */ public ClassTypeSignature getTypeSignature() { if (typeSignatureStr == null) { return null; } if (typeSignature == null) { try { typeSignature = ClassTypeSignature.parse(typeSignatureStr, this); typeSignature.setScanResult(scanResult); } catch (final ParseException e) { throw new IllegalArgumentException(e); } } return typeSignature; } /** * @return The URL of the classpath element that this class was found within. */ public URL getClasspathElementURL() { if (classpathElementURL == null) { try { if (moduleRef != null) { // Classpath elt is a module classpathElementURL = moduleRef.getLocation().toURL(); } else if (classpathElementFile.isFile() && !jarfilePackageRoot.isEmpty()) { // Classpath elt is a jarfile with a non-empty package root classpathElementURL = new URL("jar:" + classpathElementFile.toURI().toURL().toString() + "!" + URLPathEncoder.encodePath(jarfilePackageRoot)); } else { // Classpath elt is a directory, or a jarfile with an empty package root classpathElementURL = classpathElementFile.toURI().toURL(); } } catch (final MalformedURLException e) { // Shouldn't happen throw new IllegalArgumentException(e); } } return classpathElementURL; } /** * @return The {@link File} for the classpath element package root dir or jar that this class was found within, * or null if this class was found in a module. (See also {@link #getModuleRef}.) */ public File getClasspathElementFile() { return classpathElementFile; } /** * @return The module in the module path that this class was found within, as a {@link ModuleRef}, or null if * this class was found in a directory or jar in the classpath. (See also * {@link #getClasspathElementFile()}.) 
*/ public ModuleRef getModuleRef() { return moduleRef; } @Override public <T> Class<T> loadClass(final Class<T> superclassOrInterfaceType, final boolean ignoreExceptions) { return super.loadClass(superclassOrInterfaceType, ignoreExceptions); } @Override public <T> Class<T> loadClass(final Class<T> superclassOrInterfaceType) { return super.loadClass(superclassOrInterfaceType, /* ignoreExceptions = */ false); } @Override public Class<?> loadClass(final boolean ignoreExceptions) { return super.loadClass(ignoreExceptions); } @Override public Class<?> loadClass() { return super.loadClass(/* ignoreExceptions = */ false); } @Override protected String getClassName() { return name; } @Override protected ClassInfo getClassInfo() { return this; } @Override void setScanResult(final ScanResult scanResult) { super.setScanResult(scanResult); if (this.typeSignature != null) { this.typeSignature.setScanResult(scanResult); } if (annotationInfo != null) { for (final AnnotationInfo ai : annotationInfo) { ai.setScanResult(scanResult); } } if (fieldInfo != null) { for (final FieldInfo fi : fieldInfo) { fi.setScanResult(scanResult); } } if (methodInfo != null) { for (final MethodInfo mi : methodInfo) { mi.setScanResult(scanResult); } } if (annotationDefaultParamValues != null) { for (final AnnotationParameterValue apv : annotationDefaultParamValues) { apv.setScanResult(scanResult); } } } /** * Get the names of any classes referenced in this class' type descriptor, or the type descriptors of fields, * methods or annotations. */ @Override protected void getClassNamesFromTypeDescriptors(final Set<String> classNames) { final Set<String> referencedClassNames = new LinkedHashSet<>(); if (methodInfo != null) { for (final MethodInfo mi : methodInfo) { mi.getClassNamesFromTypeDescriptors(classNames); } } if (fieldInfo != null) { for (final FieldInfo fi : fieldInfo) { fi.getClassNamesFromTypeDescriptors(classNames); } } if (annotationInfo != null) { for (final AnnotationInfo ai : annotationInfo) { ai.getClassNamesFromTypeDescriptors(referencedClassNames); } } if (annotationDefaultParamValues != null) { for (final AnnotationParameterValue paramValue : annotationDefaultParamValues) { paramValue.getClassNamesFromTypeDescriptors(referencedClassNames); } } final ClassTypeSignature classSig = getTypeSignature(); if (classSig != null) { classSig.getClassNamesFromTypeDescriptors(referencedClassNames); } } /** Compare based on class name. */ @Override public int compareTo(final ClassInfo o) { return this.name.compareTo(o.name); } /** Use class name for equals(). */ @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (this.getClass() != obj.getClass()) { return false; } final ClassInfo other = (ClassInfo) obj; return name.equals(other.name); } /** Use hash code of class name. */ @Override public int hashCode() { return name != null ? name.hashCode() : 33; } private String toString(final boolean typeNameOnly) { final ClassTypeSignature typeSig = getTypeSignature(); if (typeSig != null) { // Generic classes return typeSig.toString(name, typeNameOnly, modifiers, isAnnotation, isInterface); } else { // Non-generic classes final StringBuilder buf = new StringBuilder(); if (typeNameOnly) { buf.append(name); } else { ClassTypeSignature.modifiersToString(modifiers, buf); if (buf.length() > 0) { buf.append(' '); } buf.append(isAnnotation ? "@interface " : isInterface ? "interface " : (modifiers & 0x4000) != 0 ? 
"enum " : "class "); buf.append(name); final ClassInfo superclass = getSuperclass(); if (superclass != null && !superclass.getName().equals("java.lang.Object")) { buf.append(" extends " + superclass.toString(/* typeNameOnly = */ true)); } final Set<ClassInfo> interfaces = this.filterClassInfo(RelType.IMPLEMENTED_INTERFACES, /* strictWhitelist = */ false).directlyRelatedClasses; if (!interfaces.isEmpty()) { buf.append(isInterface ? " extends " : " implements "); boolean first = true; for (final ClassInfo iface : interfaces) { if (first) { first = false; } else { buf.append(", "); } buf.append(iface.toString(/* typeNameOnly = */ true)); } } } return buf.toString(); } } @Override public String toString() { return toString(false); } }
package io.metacake.core.common; /** * This class is meant to be used as the basis for extensible enumerations. * <p> * All symbols are 'unique', in that they can only be compared through * referential equality. * For example: * public class MovementAction extends Symbol { * public static final MovementAction GO_UP = new MovementAction(); * public static final MovementAction GO_DOWN = new MovementAction(); * public static final MovementAction GO_LEFT = new MovementAction(); * public static final MovementAction GO_RIGHT = new MovementAction(); * } * * In this example, none of these fields can ever be equal to any other field. * In addition, the example type that extends Symbol could later be extended to add other * types to the enumeration. * * @author florence * @author rpless */ public class Symbol { static final String PREFIX = "Symbol:"; public static Symbol genSym() { return Symbol.genSym(""); } public static Symbol genSym(String name) { return new Symbol(name); } private String name; public Symbol() { this(""); } public Symbol(String name) { this.name = name; } @Override public final boolean equals(Object that) { return this == that; } @Override public final int hashCode() { return super.hashCode(); } @Override public final String toString() { if(name.isEmpty()) { return super.toString(); } else { return PREFIX + name; } } }
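/*
 * Sketch of the extensible-enumeration pattern described in the Symbol Javadoc above,
 * with the field types that the original example omitted. MovementAction is a
 * hypothetical subclass used purely for illustration.
 */
class MovementAction extends Symbol {
    public static final MovementAction GO_UP = new MovementAction();
    public static final MovementAction GO_DOWN = new MovementAction();
    public static final MovementAction GO_LEFT = new MovementAction();
    public static final MovementAction GO_RIGHT = new MovementAction();
}
// Symbols compare only by reference:
//   MovementAction.GO_UP.equals(MovementAction.GO_UP)   -> true
//   MovementAction.GO_UP.equals(MovementAction.GO_DOWN) -> false
//   Symbol.genSym("jump").toString()                    -> "Symbol:jump"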
package org.seqcode.projects.galaxyexo; import java.io.File; import java.io.FileNotFoundException; import java.io.PrintWriter; import java.io.UnsupportedEncodingException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.seqcode.deepseq.experiments.ControlledExperiment; import org.seqcode.deepseq.experiments.ExperimentCondition; import org.seqcode.deepseq.experiments.ExperimentManager; import org.seqcode.deepseq.experiments.ExptConfig; import org.seqcode.deepseq.experiments.Sample; import org.seqcode.genome.Genome; import org.seqcode.genome.GenomeConfig; import org.seqcode.genome.location.Region; import org.seqcode.gseutils.ArgParser; /** * Utility to output a signal fraction using a normalization factor between signal and control experiments. * It includes an option to output binned counts of signal and control experiments. * * Input: * - Genome * - Signal experiment * - Control experiment * Output: * - A text file listing condition, signal hits, control hits, scaling factor, and a signal fraction. * * @author naomi yamada */ public class ChIPQC { protected GenomeConfig gconfig; protected ExptConfig econfig; protected ExperimentManager manager; protected Genome genome; public ChIPQC(GenomeConfig gcon, ExptConfig econ, ExperimentManager man){ gconfig = gcon; econfig = econ; manager = man; genome = econfig.getGenome(); } public void printQCMetrics(){ double ncis, signalHits, controlHits; double IPstrength=0; for(ExperimentCondition exptCond: manager.getConditions()){ for(ControlledExperiment rep : exptCond.getReplicates()){ if (!rep.hasControl()){ System.err.println("Please provide a control experiment"); System.exit(1); } ncis = rep.getControlScaling(); signalHits = rep.getSignal().getHitCount(); controlHits = rep.getControl().getHitCount(); IPstrength = 1-(ncis/(signalHits/controlHits)); if (IPstrength<0) IPstrength=0; } double pooledncis = exptCond.getPooledSampleControlScaling(); double pooledsignal = exptCond.getTotalSignalCount(); double pooledcontrl = exptCond.getTotalControlCount(); double pooledIPstrength =1-(pooledncis/(pooledsignal/pooledcontrl)); if (pooledIPstrength<0) pooledIPstrength=0; System.out.println("Condition:"+exptCond.getName()+"\tSignal:"+pooledsignal+"\tControl:"+pooledcontrl+"\tScalingFactor:"+pooledncis+"\tIPstrength: "+pooledIPstrength); } manager.close(); } public void printPairedBinCounts(int scalingWindowSize) throws FileNotFoundException, UnsupportedEncodingException{ Map<Sample, List<Float>> sampleWindowCounts = new HashMap<Sample, List<Float>>(); List<Sample> allSamples = new ArrayList<Sample>(); List<Sample> signalSamples = new ArrayList<Sample>(); List<Sample> controlSamples = new ArrayList<Sample>(); for(ExperimentCondition exptCond: manager.getConditions()){ for(ControlledExperiment rep : exptCond.getReplicates()){ signalSamples.add(rep.getSignal()); controlSamples.add(rep.getControl()); } allSamples.addAll(signalSamples); allSamples.addAll(controlSamples); int listSize=0; for(Sample samp : allSamples){ List<Float> currSampCounts = new ArrayList<Float>(); for(String chrom:genome.getChromList()) { int chrlen = genome.getChromLength(chrom); for (int start = 1; start < chrlen - scalingWindowSize; start += scalingWindowSize) { Region r = new Region(genome, chrom, start, start + scalingWindowSize); currSampCounts.add(samp.countHits(r)); } } sampleWindowCounts.put(samp, currSampCounts); listSize = currSampCounts.size(); } } for(ExperimentCondition exptCond: manager.getConditions()){ 
for(ControlledExperiment rep : exptCond.getReplicates()){ PrintWriter writer = new PrintWriter(rep.getName()+rep.getCondName()+".counts.txt","UTF-8"); writer.println("#signalBinCounts : controlBinCounts"+"\t"+"#"+rep.getCondName()); List<Float> signalSampCounts = new ArrayList<Float>(); List<Float> controlSampCounts = new ArrayList<Float>(); signalSampCounts = sampleWindowCounts.get(rep.getSignal()); controlSampCounts = sampleWindowCounts.get(rep.getControl()); for (int i = 0; i < signalSampCounts.size(); i ++){ if (signalSampCounts.get(i)+controlSampCounts.get(i)>0) writer.println(signalSampCounts.get(i)+":"+controlSampCounts.get(i)); } writer.close(); } } manager.close(); } public void printGenomeBins(int scalingWindowSize) throws FileNotFoundException{ File outFile = new File(System.getProperty("user.dir")+File.separator+"genome_windows.bed"); PrintWriter writer = new PrintWriter(outFile); for(String chrom:genome.getChromList()) { int chrlen = genome.getChromLength(chrom); for (int start = 1; start < chrlen - scalingWindowSize; start += scalingWindowSize) { writer.write("chr"+chrom.toString()+"\t"+start+"\t"+(start+scalingWindowSize)+"\n"); } } writer.close(); } public static void main(String[] args) throws FileNotFoundException, UnsupportedEncodingException { ArgParser ap = new ArgParser(args); if((!ap.hasKey("species") && !ap.hasKey("geninfo"))) { System.err.println("Usage:\n" + "ChIPQC\n" + "\t--species <organism;genome> OR\n" + "\t--geninfo <genome info file> AND --seq <path to seqs>\n" + "\t--expt <signal experiment> \n" + "\t--expt <control experiment> \n" + "\t--format <BAM/IDX/BED/etc> \n" + "\nOPTIONS:\n" + "\t--scalewin <window size for scaling procedure (default=10000)>\n" + "\t--binCounts [flag to print bin counts] \n" + "\t--plotscaling [flag to plot diagnostic information for the chosen scaling method]\n" + "\t--printBins [flag to print genomic bin coordinates in bed file]\n" + ""); System.exit(0); } GenomeConfig gconf = new GenomeConfig(args); ExptConfig econf = new ExptConfig(gconf.getGenome(), args); econf.setPerBaseReadFiltering(false); ExperimentManager manager = new ExperimentManager(econf); ChIPQC exoQC = new ChIPQC(gconf, econf, manager); exoQC.printQCMetrics(); if (ap.hasKey("binCounts")) exoQC.printPairedBinCounts(econf.getScalingSlidingWindow()); if (ap.hasKey("printBins")) exoQC.printGenomeBins(econf.getScalingSlidingWindow()); manager.close(); } }
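/*
 * Worked sketch of the IP-strength calculation used in ChIPQC.printQCMetrics() above,
 * with made-up numbers (the counts and scaling factor are purely illustrative, not
 * taken from any real experiment).
 */
class IpStrengthExample {
    public static void main(String[] args) {
        double ncis = 0.8;           // signal-vs-control scaling factor
        double signalHits = 2.0e7;   // total signal hit count
        double controlHits = 1.6e7;  // total control hit count
        // 1 - 0.8 / (2.0e7 / 1.6e7) = 1 - 0.8 / 1.25 ~= 0.36
        double ipStrength = 1 - (ncis / (signalHits / controlHits));
        if (ipStrength < 0) {
            ipStrength = 0;          // clamp at zero, as printQCMetrics() does
        }
        System.out.println("IPstrength: " + ipStrength);
    }
}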
package com.m4gik; import static com.m4gik.HavalAttributes.BLOCK_SIZE; import static com.m4gik.HavalAttributes.HAVAL_128_BIT; import static com.m4gik.HavalAttributes.HAVAL_160_BIT; import static com.m4gik.HavalAttributes.HAVAL_192_BIT; import static com.m4gik.HavalAttributes.HAVAL_224_BIT; import static com.m4gik.HavalAttributes.HAVAL_256_BIT; import static com.m4gik.HavalAttributes.HAVAL_3_ROUND; import static com.m4gik.HavalAttributes.HAVAL_4_ROUND; import static com.m4gik.HavalAttributes.HAVAL_5_ROUND; import static com.m4gik.HavalAttributes.HAVAL_NAME; import static com.m4gik.HavalAttributes.HAVAL_VERSION; import static com.m4gik.HavalAttributes.WORD_PROCESING_ORDER_1; import static com.m4gik.HavalAttributes.WORD_PROCESING_ORDER_2; import java.util.Arrays; import java.util.Collections; import java.util.List; import javax.crypto.IllegalBlockSizeException; import com.m4gik.util.Util; public class Haval extends BaseHash { /** * Creates the {@link Haval} hash value for given input bytes with two * argument using {@link HavalAttributes.#HAVAL_128_BIT} as the value for * the output size (i.e. <code>128</code> bits, and {@link * HavalAttributes.#HAVAL_3_ROUND} for the value of number of rounds * * @param input * the value from which obtain the hash. * @return hash value for {@link Haval} algorithm. */ public static String hash(byte[] input) { return hash(input, HAVAL_128_BIT, HAVAL_3_ROUND); } public static String hash(byte[] input, int size, int rounds) { return Util.toString(new Haval(input, size, rounds).digest()) .toLowerCase(); } /** 128-bit interim result. */ private int h0, h1, h2, h3, h4, h5, h6, h7; /** * Fields keep amount of rounds. Default value is 3 rounds. */ private int rounds = HAVAL_3_ROUND; /** * Calls the constructor with two argument using {@link * HavalAttributes.#HAVAL_128_BIT} as the value for the output size (i.e. * <code>128</code> bits, and {@link HavalAttributes.#HAVAL_3_ROUND} for the * value of number of rounds. */ public Haval() { this(HAVAL_128_BIT, HAVAL_3_ROUND); } public Haval(byte[] input, int size, int rounds) { super(HAVAL_NAME, size, BLOCK_SIZE); checkHavalInput(input); checkHavalOutputSize(size); checkHavalRounds(rounds); this.setRounds(rounds); this.update(input); } /** * Private constructor for cloning purposes. * * @param originalInstance * the instance to clone. 
*/ public Haval(Haval originalInstance) { this(originalInstance.hashSize(), originalInstance.getRounds()); this.h0 = originalInstance.h0; this.h1 = originalInstance.h1; this.h2 = originalInstance.h2; this.h3 = originalInstance.h3; this.h4 = originalInstance.h4; this.h5 = originalInstance.h5; this.h6 = originalInstance.h6; this.h7 = originalInstance.h7; this.count = originalInstance.count; this.buffer = originalInstance.buffer.clone(); } public Haval(int size, int rounds) { super(HAVAL_NAME, size, BLOCK_SIZE); checkHavalOutputSize(size); checkHavalRounds(rounds); this.setRounds(rounds); } private void checkHavalInput(byte[] input) throws IllegalArgumentException { if (input == null) { throw new IllegalArgumentException("Input cannot be null"); } } private void checkHavalOutputSize(int size) throws IllegalArgumentException { if (size != HAVAL_128_BIT && size != HAVAL_160_BIT && size != HAVAL_192_BIT && size != HAVAL_224_BIT && size != HAVAL_256_BIT) { throw new IllegalArgumentException("Invalid HAVAL output size"); } } private void checkHavalRounds(int rounds) throws IllegalArgumentException { if (rounds != HAVAL_3_ROUND && rounds != HAVAL_4_ROUND && rounds != HAVAL_5_ROUND) { throw new IllegalArgumentException("Invalid HAVAL number of rounds"); } } /** * This method checks proper size of padding with checking last 10 special * bytes. * * @param padBuffer * the padded message result. * @param padding * the value for pad data. * @return */ private byte[] checkPadBufferSize(byte[] padBuffer, int padding) { for (int i = 1; i > padding; i++) { if (padBuffer[i] != 0x00) { try { throw new IllegalBlockSizeException( "Padding is not filled correctly"); } catch (IllegalBlockSizeException e) { e.printStackTrace(); } } } return padBuffer; } /** * Returns a clone copy of this instance. This method overrides an existing * method. * * @see com.m4gik.BaseHash#clone() */ @Override public Object clone() { return new Haval(this); } private int f1(int x6, int x5, int x4, int x3, int x2, int x1, int x0) { return x1 & (x0 ^ x4) ^ x2 & x5 ^ x3 & x6 ^ x0; } private int f2(int x6, int x5, int x4, int x3, int x2, int x1, int x0) { return x2 & (x1 & ~x3 ^ x4 & x5 ^ x6 ^ x0) ^ x4 & (x1 ^ x5) ^ x3 & x5 ^ x0; } private int f3(int x6, int x5, int x4, int x3, int x2, int x1, int x0) { return x3 & (x1 & x2 ^ x6 ^ x0) ^ x1 & x4 ^ x2 & x5 ^ x0; } private int f4(int x6, int x5, int x4, int x3, int x2, int x1, int x0) { return x4 & (x5 & ~x2 ^ x3 & ~x6 ^ x1 ^ x6 ^ x0) ^ x3 & (x1 & x2 ^ x5 ^ x6) ^ x2 & x6 ^ x0; } private int f5(int x6, int x5, int x4, int x3, int x2, int x1, int x0) { return x0 & (x1 & x2 & x3 ^ ~x5) ^ x1 & x4 ^ x2 & x5 ^ x3 & x6; } /** * Permutations phi_{i,j}, i=3,4,5, j=1,...,i. * * rounds = 3: 6 5 4 3 2 1 0 (replaced by) phi_{3,1}: 1 0 3 5 6 2 4 * * rounds = 4: 6 5 4 3 2 1 0 (replaced by) phi_{4,1}: 2 6 1 4 5 3 0 * * rounds = 5: 6 5 4 3 2 1 0 (replaced by) phi_{5,1}: 3 4 1 0 5 2 6 * * @param collectionH * the data for interim result. * @param w * the extra value to add. * @return The value for first permutation. 
*/ private Integer ff1(List<Integer> collectionH, int w) { Integer t = 0; if (getRounds() == 3) { t = f1(collectionH.get(1), collectionH.get(0), collectionH.get(3), collectionH.get(5), collectionH.get(6), collectionH.get(2), collectionH.get(4)); } else if (getRounds() == 4) { t = f1(collectionH.get(2), collectionH.get(6), collectionH.get(1), collectionH.get(4), collectionH.get(5), collectionH.get(3), collectionH.get(0)); } else { t = f1(collectionH.get(3), collectionH.get(4), collectionH.get(1), collectionH.get(0), collectionH.get(5), collectionH.get(2), collectionH.get(6)); } return (t >>> 7 | t << 25) + (collectionH.get(7) >>> 11 | collectionH.get(7) << 21) + w; } /** * Permutations phi_{i,j}, i=3,4,5, j=1,...,i. * * rounds = 3: 6 5 4 3 2 1 0 (replaced by) phi_{3,2}: 4 2 1 0 5 3 6 * * rounds = 4: 6 5 4 3 2 1 0 (replaced by) phi_{4,2}: 3 5 2 0 1 6 4 * * rounds = 5: 6 5 4 3 2 1 0 (replaced by) phi_{5,2}: 6 2 1 0 3 4 5 * * @param collectionH * the data for interim result. * @param w * the extra value to add. * @param c * the constant word to add. * @return The value for second permutation. */ private Integer ff2(List<Integer> collectionH, int w, Integer c) { Integer t = 0; if (getRounds() == 3) { t = f2(collectionH.get(4), collectionH.get(2), collectionH.get(1), collectionH.get(0), collectionH.get(5), collectionH.get(3), collectionH.get(6)); } else if (getRounds() == 4) { t = f2(collectionH.get(3), collectionH.get(5), collectionH.get(2), collectionH.get(0), collectionH.get(1), collectionH.get(6), collectionH.get(4)); } else { t = f2(collectionH.get(6), collectionH.get(2), collectionH.get(1), collectionH.get(0), collectionH.get(3), collectionH.get(4), collectionH.get(5)); } return (t >>> 7 | t << 25) + (collectionH.get(7) >>> 11 | collectionH.get(7) << 21) + w + c; } private void fifthPass(int[] xTable, List<Integer> collectionH, List<Integer> constants) { // TODO Auto-generated method stub } /** * This method performs the first pass of the HAVAL transformation. * * @param xTable * the table with information for this algorithm. * @param collectionH * the data for interim result. */ private void firstPass(int[] xTable, List<Integer> collectionH) { int index = 0; setProperConfiguration(collectionH); for (int i = 0; i < 4; i++) { for (int j = collectionH.size() - 1; j >= 0; j--) { collectionH.set( j, ff1(rotate(collectionH, 1), xTable[WORD_PROCESING_ORDER_1[index++]])); } } } private void fourthPass(int[] xTable, List<Integer> collectionH, List<Integer> constants) { // TODO Auto-generated method stub } @Override protected byte[] getResult() { // TODO Auto-generated method stub return "null".getBytes(); } /** * This method gets the number of rounds set for the {@link Haval} algorithm. * * @return the rounds */ public int getRounds() { return rounds; } /** * Returns the byte array to use as padding before completing a hash * operation. This method overrides an existing method. HAVAL also uses a * 10-bit field DGSTLENG to specify the required number of bits in a digest. * In addition HAVAL uses a 3-bit field PASS to specify the number of passes * each message block is processed, and another 3-bit field VERSION to * indicate the version number of HAVAL. The number of bits in a digest can * be 128, 160, 192, 224 and 256, while the number of passes can be 3, 4 and * 5. The current version number of HAVAL is 1. HAVAL pads a message by * appending a single bit 1 next to the most significant bit of the message, * followed by zero or more bit 0s until the length of the (new) message is * 944 modulo 1024.
Then, HAVAL appends to the message the 3-bit field * VERSION, followed by the 3-bit field PASS, the 10-bit field DGSTLENG and * the 64-bit field MSGLENG. * * * @return the bytes to pad the remaining bytes in the buffer before * completing a hash operation. * * @see com.m4gik.BaseHash#padBuffer() */ @Override protected byte[] padBuffer() { // Pad out to 118 mod 128. Other 10 bytes have special use. int n = (int) (count % BLOCK_SIZE); int padding = (n < 118) ? (118 - n) : (246 - n); byte[] result = new byte[padding + 10]; result[0] = (byte) 0x01; // Save the version number (LSB 3), the number of rounds (3 bits in the // middle), the fingerprint length (MSB 2 bits and next byte) and the // number of bits in the unpadded message. int bl = hashSize() * 8; int sigByte = (bl & 0x03) << 6; sigByte |= (getRounds() & 0x07) << 3; sigByte |= HAVAL_VERSION & 0x07; result[padding++] = (byte) sigByte; result[padding++] = (byte) (bl >>> 2); // Save number of bits, casting the long to an array of 8 bytes long bits = count << 3; int j = 0; for (int i = padding; i < result.length; i++, j++) { result[i] = (byte) (bits >>> (j * 8)); } return checkPadBufferSize(result, padding); } /** * Resets the instance for future re-use. This method overrides an existing * method. * * @see com.m4gik.BaseHash#resetContext() */ @Override protected void resetContext() { h0 = 0x243F6A88; h1 = 0x85A308D3; h2 = 0x13198A2E; h3 = 0x03707344; h4 = 0xA4093822; h5 = 0x299F31D0; h6 = 0x082EFA98; h7 = 0xEC4E6C89; } /** * Rotates the elements in the specified list by the specified distance. * After calling this method, the element at index i will be the element * previously at index (i - distance) mod list.size(), for all values of i * between 0 and list.size()-1, inclusive. (This method has no effect on the * size of the list.) * * @param <T> * * @param collection * the list to rotate. * @param index * the distance to rotate. */ private <T> List<T> rotate(List<T> collection, int index) { Collections.rotate(collection, index); return collection; } /** * This method performs the second pass of the HAVAL transformation. * * @param xTable * the table with information for this algorithm. * @param collectionH * the data for interim result. * * @param constants * the constant words used in this pass. */ private void secondPass(int[] xTable, List<Integer> collectionH, List<Integer> constants) { int index = 0; int iterator = 0; for (int i = 0; i < 4; i++) { for (int j = collectionH.size() - 1; j >= 0; j--) { collectionH.set( j, ff2(rotate(collectionH, 1), xTable[WORD_PROCESING_ORDER_2[index++]], constants.get(iterator++))); } } } /** * This method sets collection in proper order. * * @param collectionH * the collection to configure. */ private void setProperConfiguration(List<Integer> collectionH) { rotate(collectionH, 6); } /** * This method sets the number of rounds for the {@link Haval} algorithm. * * @param rounds * the rounds to set */ public void setRounds(int rounds) { this.rounds = rounds; } private void thirdPass(int[] xTable, List<Integer> collectionH, List<Integer> constants) { // TODO Auto-generated method stub } /** * The updating algorithm H processes a block in 3, 4 or 5 passes, which is * specified by the 3-bit field PASS in the last block. This method * overrides an existing method. The first 8 constant words correspond to * the first 256 bits of the fraction part of phi.
The 32 constant words * used in Pass 2 correspond to the next 1024 bits of the fraction part of * phi, which is followed by the 32 constant words used by Pass 3, the 32 * constant words used by Pass 4 and the 32 constant words used by Pass 5. * The 136 constant words are listed in the following in hexadecimal form. * They appear in the following order: * * 243F6A88 85A308D3 13198A2E 03707344 A4093822 299F31D0 082EFA98 EC4E6C89 * 452821E6 38D01377 BE5466CF 34E90C6C C0AC29B7 C97C50DD 3F84D5B5 B5470917 * 9216D5D9 8979FB1B D1310BA6 98DFB5AC 2FFD72DB D01ADFB7 B8E1AFED 6A267E96 * BA7C9045 F12C7F99 24A19947 B3916CF7 0801F2E2 858EFC16 636920D8 71574E69 * A458FEA3 F4933D7E 0D95748F 728EB658 718BCD58 82154AEE 7B54A41D C25A59B5 * 9C30D539 2AF26013 C5D1B023 286085F0 CA417918 B8DB38EF 8E79DCB0 603A180E * 6C9E0E8B B01E8A3E D71577C1 BD314B27 78AF2FDA 55605C60 E65525F3 AA55AB94 * 57489862 63E81440 55CA396A 2AAB10B6 B4CC5C34 1141E8CE A15486AF 7C72E993 * B3EE1411 636FBC2A 2BA9C55D 741831F6 CE5C3E16 9B87931E AFD6BA33 6C24CF5C * 7A325381 28958677 3B8F4898 6B4BB9AF C4BFE81B 66282193 61D809CC FB21A991 * 487CAC60 5DEC8032 EF845D5D E98575B1 DC262302 EB651B88 23893E81 D396ACC5 * 0F6D6FF3 83F44239 2E0B4482 A4842004 69C8F04A 9E1F9B5E 21C66842 F6E96C9A * 670C9C61 ABD388F0 6A51A0D2 D8542F68 960FA728 AB5133A3 6EEF0B6C 137A3BE4 * BA3BF050 7EFB2A98 A1F1651D 39AF0176 66CA593E 82430E88 8CEE8619 456F9FB4 * 7D84A5C3 3B8B5EBE E06F75D8 85C12073 401A449F 56C16AA6 4ED3AA62 363F7706 * 1BFEDF72 429B023D 37D0D724 D00A1248 DB0FEAD3 49F1C09B 075372C9 80991B7B * 25D479D8 F6E8DEF7 E3FE501A B6794C3B 976CE0BD 04C006BA C1A94FB6 409F60C4 * * * @see com.m4gik.BaseHash#transform(byte[], int) */ @Override protected void transform(byte[] in, int offset) { List<Integer> collectionH = Arrays.asList(h0, h1, h2, h3, h4, h5, h6, h7); List<Integer> constants = Arrays.asList(0x452821E6, 0x38D01377, 0xBE5466CF, 0x34E90C6C, 0xC0AC29B7, 0xC97C50DD, 0x3F84D5B5, 0xB5470917, 0x9216D5D9, 0x8979FB1B, 0xD1310BA6, 0x98DFB5AC, 0x2FFD72DB, 0xD01ADFB7, 0xB8E1AFED, 0x6A267E96, 0xBA7C9045, 0xF12C7F99, 0x24A19947, 0xB3916CF7, 0x0801F2E2, 0x858EFC16, 0x636920D8, 0x71574E69, 0xA458FEA3, 0xF4933D7E, 0x0D95748F, 0x728EB658, 0x718BCD58, 0x82154AEE, 0x7B54A41D, 0xC25A59B5, 0x9C30D539, 0x2AF26013, 0xC5D1B023, 0x286085F0, 0xCA417918, 0xB8DB38EF, 0x8E79DCB0, 0x603A180E, 0x6C9E0E8B, 0xB01E8A3E, 0xD71577C1, 0xBD314B27, 0x78AF2FDA, 0x55605C60, 0xE65525F3, 0xAA55AB94, 0x57489862, 0x63E81440, 0x55CA396A, 0x2AAB10B6, 0xB4CC5C34, 0x1141E8CE, 0xA15486AF, 0x7C72E993, 0xB3EE1411, 0x636FBC2A, 0x2BA9C55D, 0x741831F6, 0xCE5C3E16, 0x9B87931E, 0xAFD6BA33, 0x6C24CF5C, 0x7A325381, 0x28958677, 0x3B8F4898, 0x6B4BB9AF, 0xC4BFE81B, 0x66282193, 0x61D809CC, 0xFB21A991, 0x487CAC60, 0x5DEC8032, 0xEF845D5D, 0xE98575B1, 0xDC262302, 0xEB651B88, 0x23893E81, 0xD396ACC5, 0x0F6D6FF3, 0x83F44239, 0x2E0B4482, 0xA4842004, 0x69C8F04A, 0x9E1F9B5E, 0x21C66842, 0xF6E96C9A, 0x670C9C61, 0xABD388F0, 0x6A51A0D2, 0xD8542F68, 0x960FA728, 0xAB5133A3, 0x6EEF0B6C, 0x137A3BE4, 0xBA3BF050, 0x7EFB2A98, 0xA1F1651D, 0x39AF0176, 0x66CA593E, 0x82430E88, 0x8CEE8619, 0x456F9FB4, 0x7D84A5C3, 0x3B8B5EBE, 0xE06F75D8, 0x85C12073, 0x401A449F, 0x56C16AA6, 0x4ED3AA62, 0x363F7706, 0x1BFEDF72, 0x429B023D, 0x37D0D724, 0xD00A1248, 0xDB0FEAD3, 0x49F1C09B, 0x075372C9, 0x80991B7B, 0x25D479D8, 0xF6E8DEF7, 0xE3FE501A, 0xB6794C3B, 0x976CE0BD, 0x04C006BA, 0xC1A94FB6, 0x409F60C4); int[] XTable = new int[32]; for (int i = 0; i < 32; i++) { XTable[i] = (in[offset++] & 0xFF) | (in[offset++] & 0xFF) << 8 | (in[offset++] & 0xFF) << 16 | (in[offset++] & 
0xFF) << 24; } firstPass(XTable, collectionH); secondPass(XTable, collectionH, constants); thirdPass(XTable, collectionH, constants); if (getRounds() >= 4) { fourthPass(XTable, collectionH, constants); if (getRounds() == 5) { fifthPass(XTable, collectionH, constants); } } h7 += collectionH.get(7); h6 += collectionH.get(6); h5 += collectionH.get(5); h4 += collectionH.get(4); h3 += collectionH.get(3); h2 += collectionH.get(2); h1 += collectionH.get(1); h0 += collectionH.get(0); } }
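/*
 * Usage sketch for the static convenience API defined at the top of Haval. Note that
 * getResult() and several pass methods above are still TODO stubs, so the digests
 * produced by this class as written are not complete HAVAL values.
 */
class HavalExample {
    public static void main(String[] args) {
        // Defaults: 128-bit output, 3 rounds
        String h128 = Haval.hash("hello world".getBytes());
        // Explicit output size and round count
        String h256 = Haval.hash("hello world".getBytes(),
                HavalAttributes.HAVAL_256_BIT, HavalAttributes.HAVAL_5_ROUND);
        System.out.println(h128);
        System.out.println(h256);
    }
}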
package it.av.youeat.web.util; import it.av.youeat.web.components.TransparentWebMarkupContainer; import org.apache.wicket.AttributeModifier; import org.apache.wicket.Page; import org.apache.wicket.model.AbstractReadOnlyModel; import org.apache.wicket.model.IModel; /** * Utility that adds lang and xml:lang attributes, derived from the page locale, to the page's html tag. * * @author <a href='mailto:a.vincelli@gmail.com'>Alessandro Vincelli</a> * */ public final class HtmlUtil { private HtmlUtil() { } public static final void fixInitialHtml(final Page page) { TransparentWebMarkupContainer html = new TransparentWebMarkupContainer("html"); page.add(html); IModel<String> localeModel = new AbstractReadOnlyModel<String>() { @Override public String getObject() { return page.getLocale().getLanguage(); } }; html.add(new AttributeModifier("lang", true, localeModel)); html.add(new AttributeModifier("xml:lang", true, localeModel)); } }
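/*
 * Usage sketch (BasePage is a hypothetical page class): call fixInitialHtml() once from
 * a page constructor so the html tag in the markup (expected to carry wicket:id="html",
 * matching the container id used above) receives lang / xml:lang attributes for the
 * page locale.
 */
class BasePage extends org.apache.wicket.markup.html.WebPage {
    public BasePage() {
        HtmlUtil.fixInitialHtml(this);
    }
}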
package org.javarosa.core.model; import java.util.TimeZone; /** * Constants shared throughout classes in the containing package. */ public class Constants { /** Empty string representation */ public static final String EMPTY_STRING = ""; /** Index for no selection */ public static final int NO_SELECTION = -1; /** ID not set to a value */ public static final int NULL_ID = -1; /** ID not set numeric value */ public static final String NULL_STRING_ID = "-1"; /** Operator not set numeric value */ public static final int OPERATOR_NULL = 0; /** Operator Equal */ public static final int OPERATOR_EQUAL = 1; /** Operator Not Equal */ public static final int OPERATOR_NOT_EQUAL = 2; /** Operator Greater */ public static final int OPERATOR_GREATER = 3; /** Operator Greater or Equal */ public static final int OPERATOR_GREATER_EQUAL = 4; /** Operator Less */ public static final int OPERATOR_LESS = 5; /** Operator Less or Equal */ public static final int OPERATOR_LESS_EQUAL = 6; /** No rule action specified */ public static final int ACTION_NONE = 0; /** Rule action to hide questions */ public static final int ACTION_HIDE = 1; /** Rule action to show questions */ public static final int ACTION_SHOW = 2; /** Rule action to disable questions */ public static final int ACTION_DISABLE = 3; /** Rule action to enable questions */ public static final int ACTION_ENABLE = 4; /** Rule action to make a question mandatory */ public static final int ACTION_MAKE_MANDATORY = 5; /** Rule action to make a question optional */ public static final int ACTION_MAKE_OPTIONAL = 6; /** Connection type not specified */ public static final int CONNECTION_NONE = 0; /** Infrared connection */ public static final int CONNECTION_INFRARED = 1; /** Bluetooth connection */ public static final int CONNECTION_BLUETOOTH = 2; /** Data cable connection. Can be USB or Serial */ public static final int CONNECTION_CABLE = 3; /** Over The Air or HTTP Connection */ public static final int CONNECTION_OTA = 4; public static final String NULLS_NOT_ALLOWED = "Nulls not allowed. Use empty string"; /** The maximum number of characters for text input. */ public static final int MAX_NUM_CHARS = 500; /** The default study id for those that don't deal with studies, they just have forms. */ public static final int DEFAULT_STUDY_ID = 1; /** The default time zone. */ public static final TimeZone DEFAULT_TIME_ZONE = TimeZone.getTimeZone("GMT"); public static final int DATATYPE_UNSUPPORTED = -1; public static final int DATATYPE_NULL = 0; /* for nodes that have no data, or data type otherwise unknown */ public static final int DATATYPE_TEXT = 1; /** Text question type. */ public static final int DATATYPE_INTEGER = 2; /** Numeric question type. These are numbers without decimal points */ public static final int DATATYPE_DECIMAL = 3; /** Decimal question type. These are numbers with decimals */ public static final int DATATYPE_DATE = 4; /** Date question type. This has only the date component without time. */ public static final int DATATYPE_TIME = 5; /** Time question type. This has only the time element without date */ public static final int DATATYPE_DATE_TIME = 6; /** Date and Time question type. This has both the date and time components */ public static final int DATATYPE_CHOICE = 7; /** This is a question with a list of options where not more than one option can be selected at a time. */ public static final int DATATYPE_CHOICE_LIST = 8; /** This is a question with a list of options where more than one option can be selected at a time.
*/ public static final int DATATYPE_BOOLEAN = 9; /** Question with true and false answers. */ public static final int DATATYPE_GEOPOINT = 10; /** Question with location answer. */ public static final int DATATYPE_BARCODE = 11; /** Question with barcode string answer. */ public static final int CONTROL_UNTYPED = -1; public static final int CONTROL_INPUT = 1; public static final int CONTROL_SELECT_ONE = 2; public static final int CONTROL_SELECT_MULTI = 3; public static final int CONTROL_TEXTAREA = 4; public static final int CONTROL_SECRET = 5; public static final int CONTROL_RANGE = 6; public static final int CONTROL_UPLOAD = 7; public static final int CONTROL_SUBMIT = 8; public static final int CONTROL_TRIGGER = 9; public static final int CONTROL_IMAGE_CHOOSE = 10; public static final int CONTROL_LABEL = 11; public static final int CONTROL_AUDIO_CAPTURE = 12; /** constants for xform tags */ public static final String XFTAG_UPLOAD = "upload"; }
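/*
 * Sketch of how the operator constants above are typically consumed when evaluating a
 * skip-logic rule against a numeric answer. The evaluate() helper is hypothetical and
 * not part of this package; it only uses constants defined in Constants.
 */
class OperatorExample {
    static boolean evaluate(int operator, int answer, int value) {
        switch (operator) {
            case Constants.OPERATOR_EQUAL:         return answer == value;
            case Constants.OPERATOR_NOT_EQUAL:     return answer != value;
            case Constants.OPERATOR_GREATER:       return answer > value;
            case Constants.OPERATOR_GREATER_EQUAL: return answer >= value;
            case Constants.OPERATOR_LESS:          return answer < value;
            case Constants.OPERATOR_LESS_EQUAL:    return answer <= value;
            default:                               return false; // OPERATOR_NULL or unknown
        }
    }
}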
package jp.azw.kancolleague.kcapi; import java.util.Map; import com.google.gson.JsonObject; public class Root { private String token; private String verNo; private long time; protected Root() { } protected Root(JsonObject json, Map<String, String[]> requestParams) { init(json, requestParams); } protected void init(JsonObject json, Map<String, String[]> parameters) { token = parameters.getOrDefault("api_token", new String[]{""})[0]; verNo = parameters.getOrDefault("api_verno", new String[]{""})[0]; } public String getToken() { return token; } public String getVerNo() { return verNo; } /** * Time of the HTTP request. * * @return the time associated with the HTTP request */ public long getTime() { return time; } public void setTime(long time) { this.time = time; } }
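/*
 * Sketch: Root's constructors are protected, so concrete API responses would subclass it.
 * ApiExample is a hypothetical subclass name; the parameter map mirrors the String[]-valued
 * map returned by ServletRequest#getParameterMap().
 */
class ApiExample extends Root {
    ApiExample(JsonObject json, Map<String, String[]> params) {
        super(json, params);
    }
}
// Map<String, String[]> params = new java.util.HashMap<>();
// params.put("api_token", new String[] { "abc123" });
// params.put("api_verno", new String[] { "1" });
// new ApiExample(json, params).getToken();  // -> "abc123"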
package mil.dds.anet.database; import java.util.HashMap; import java.util.List; import java.util.Map; import org.skife.jdbi.v2.Handle; import mil.dds.anet.beans.AdminSetting; import mil.dds.anet.database.mappers.AdminSettingMapper; public class AdminDao { public static enum AdminSettingKeys { SECURITY_BANNER_TEXT, SECURITY_BANNER_COLOR, DEFAULT_APPROVAL_ORGANIZATION } private Handle dbHandle; private Map<String,String> cachedSettings; public AdminDao(Handle db) { this.dbHandle = db; cachedSettings = new HashMap<String,String>(); List<AdminSetting> settings = getAllSettings(); for (AdminSetting s : settings){ cachedSettings.put(s.getKey(), s.getValue()); } } public String getSetting(AdminSettingKeys key) { return cachedSettings.get(key.toString()); } public List<AdminSetting> getAllSettings() { return dbHandle.createQuery("SELECT * FROM adminSettings") .map(new AdminSettingMapper()) .list(); } public int saveSetting(AdminSetting setting) { cachedSettings.put(setting.getKey(), setting.getValue()); return dbHandle.createStatement("UPDATE adminSettings SET value = :value WHERE [key] = :key") .bind("key", setting.getKey()) .bind("value", setting.getValue()) .execute(); } }
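/*
 * Usage sketch for AdminDao. Obtaining the JDBI Handle is assumed (e.g. from an
 * org.skife.jdbi.v2.DBI), and the no-arg constructor and key/value setters on the
 * AdminSetting bean are assumed to exist. Settings are cached once at construction
 * time, so getSetting() never re-queries the database, while saveSetting() writes
 * through to both the cache and the adminSettings table.
 */
class AdminDaoExample {
    static void demo(org.skife.jdbi.v2.Handle handle) {
        AdminDao dao = new AdminDao(handle);
        String bannerText = dao.getSetting(AdminDao.AdminSettingKeys.SECURITY_BANNER_TEXT);
        System.out.println("Banner: " + bannerText);

        AdminSetting setting = new AdminSetting();                                   // assumed constructor
        setting.setKey(AdminDao.AdminSettingKeys.SECURITY_BANNER_COLOR.toString());  // assumed setter
        setting.setValue("green");                                                   // assumed setter
        dao.saveSetting(setting);
    }
}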
package edu.ucsf.lava.crms.importer.controller; import static edu.ucsf.lava.core.importer.model.ImportDefinition.CSV_FORMAT; import static edu.ucsf.lava.core.importer.model.ImportDefinition.DEFAULT_DATE_FORMAT; import static edu.ucsf.lava.core.importer.model.ImportDefinition.DEFAULT_TIME_FORMAT; import static edu.ucsf.lava.core.importer.model.ImportDefinition.TAB_FORMAT; import static edu.ucsf.lava.crms.importer.model.CrmsImportDefinition.MUST_EXIST; import static edu.ucsf.lava.crms.importer.model.CrmsImportDefinition.MUST_NOT_EXIST; import java.io.ByteArrayInputStream; import java.io.InputStream; import java.io.InputStreamReader; import java.lang.reflect.InvocationTargetException; import java.sql.Time; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.Date; import java.util.HashMap; import java.util.Map; import javax.servlet.http.HttpServletRequest; import org.apache.commons.beanutils.BeanUtils; import org.apache.commons.lang.exception.ExceptionUtils; import org.springframework.dao.IncorrectResultSizeDataAccessException; import org.springframework.util.StringUtils; import org.springframework.validation.BindingResult; import org.springframework.webflow.context.servlet.ServletExternalContext; import org.springframework.webflow.core.collection.AttributeMap; import org.springframework.webflow.core.collection.LocalAttributeMap; import org.springframework.webflow.definition.StateDefinition; import org.springframework.webflow.execution.Event; import org.springframework.webflow.execution.RequestContext; import au.com.bytecode.opencsv.CSVReader; import edu.ucsf.lava.core.controller.ComponentCommand; import edu.ucsf.lava.core.controller.LavaComponentFormAction; import edu.ucsf.lava.core.dao.LavaDaoFilter; import edu.ucsf.lava.core.file.model.ImportFile; import edu.ucsf.lava.core.importer.controller.ImportHandler; import edu.ucsf.lava.core.importer.model.ImportDefinition; import edu.ucsf.lava.core.importer.model.ImportLog; import edu.ucsf.lava.core.importer.model.ImportSetup; import edu.ucsf.lava.core.manager.Managers; import edu.ucsf.lava.core.model.EntityBase; import edu.ucsf.lava.core.model.LavaEntity; import edu.ucsf.lava.core.session.CoreSessionUtils; import edu.ucsf.lava.core.type.LavaDateUtils; import edu.ucsf.lava.crms.assessment.InstrumentManager; import edu.ucsf.lava.crms.assessment.model.Instrument; import edu.ucsf.lava.crms.auth.CrmsAuthUtils; import edu.ucsf.lava.crms.enrollment.EnrollmentManager; import edu.ucsf.lava.crms.enrollment.model.EnrollmentStatus; import edu.ucsf.lava.crms.importer.model.CrmsImportDefinition; import edu.ucsf.lava.crms.importer.model.CrmsImportLog; import edu.ucsf.lava.crms.importer.model.CrmsImportSetup; import edu.ucsf.lava.crms.manager.CrmsManagerUtils; import edu.ucsf.lava.crms.people.model.Caregiver; import edu.ucsf.lava.crms.people.model.ContactInfo; import edu.ucsf.lava.crms.people.model.Patient; import edu.ucsf.lava.crms.project.ProjectManager; import edu.ucsf.lava.crms.scheduling.VisitManager; import edu.ucsf.lava.crms.scheduling.model.Visit; import edu.ucsf.lava.crms.session.CrmsSessionUtils; /** * CrmsImportHandler * * Handles the crms specific part of importing a data file. 
* * @author ctoohey * */ public class CrmsImportHandler extends ImportHandler { protected InstrumentManager instrumentManager; protected EnrollmentManager enrollmentManager; protected VisitManager visitManager; protected ProjectManager projectManager; public CrmsImportHandler() { super(); // the defaultObjectName should ideally be the same as the target part of the action which // uses this handler, i.e. lava.core.importer.import.import so target='import', because // the flow constructs event transitions using the target part of the action (at least for // customizing actions) while the decorator uses the defaultObjectName on eventButton that // will construct the event to be submitted which should match the transition setHandledEntity("import", CrmsImportSetup.class); setDefaultObjectBaseClass(ImportSetup.class); } public void updateManagers(Managers managers){ super.updateManagers(managers); this.enrollmentManager = CrmsManagerUtils.getEnrollmentManager(managers); this.instrumentManager = CrmsManagerUtils.getInstrumentManager(managers); this.projectManager = CrmsManagerUtils.getProjectManager(managers); this.visitManager = CrmsManagerUtils.getVisitManager(managers); } /** * The idea here is that if this is a crms application, then we always want * to use the CrmsImportHandler instead of the core ImportHandler. If scopes * need to extend Import further, then they should subclass and customize this * handler/action. */ @Override public Event preSetupFlowDirector(RequestContext context) throws Exception { return new Event(this,CONTINUE_FLOW_EVENT_ID); } // set on importSetup. assume it is ok that importSetup is not a persistent object public Map getBackingObjects(RequestContext context, Map components) { Map backingObjects = super.getBackingObjects(context, components); CrmsImportSetup importSetup = (CrmsImportSetup) components.get(this.getDefaultObjectName()); // replace the importLog for crms ImportLog baseImportLog = (ImportLog) backingObjects.get("importLog"); CrmsImportLog importLog = new CrmsImportLog(baseImportLog); backingObjects.put("importLog", importLog); return backingObjects; } protected Event doImport(RequestContext context, Object command, BindingResult errors) throws Exception { CrmsImportSetup importSetup = (CrmsImportSetup) ((ComponentCommand)command).getComponents().get(this.getDefaultObjectName()); CrmsImportLog importLog = (CrmsImportLog) ((ComponentCommand)command).getComponents().get("importLog"); Event returnEvent = new Event(this,this.SUCCESS_FLOW_EVENT_ID); Event handlingEvent = null; // the CrmsImportSetup command object is used as a parameter object to pass parameters to methods which would // otherwise require many arguments // additionally it facilitates using properties from its ImportSetup superclass in this handler // these include the columns array (mappingCols) and properties arrays (mappingEntities, mappingProps) // that ImportHandler creates when reading the definition mapping file if ((returnEvent = super.doImport(context, command, errors)).getId().equals(SUCCESS_FLOW_EVENT_ID)) { CrmsImportDefinition importDefinition = (CrmsImportDefinition) importSetup.getImportDefinition(); importLog.setProjName(importDefinition.getProjName()); importLog.setNotes(importSetup.getNotes()); // read data file // NOTE: remember to review jfesenko data load script ImportFile dataFile = importLog.getDataFile(); int lineNum = 0; InputStream dataFileContent = new ByteArrayInputStream(dataFile.getContent()); // open data file contents with a CSVReader for parsing CSV values, 
accounting for things like // quoted strings that contain comments, etc. CSVReader reader = null; if (importSetup.getImportDefinition().getDataFileFormat().equals(CSV_FORMAT)) { reader = new CSVReader(new InputStreamReader(dataFileContent)); } else if (importSetup.getImportDefinition().getDataFileFormat().equals(TAB_FORMAT)) { reader = new CSVReader(new InputStreamReader(dataFileContent), '\t'); } // nextLine[] is an array of values from the line String [] nextLine; // opencsv readNext parses the record into a String array while ((nextLine = reader.readNext()) != null) { lineNum++; // number of lines < MAX_LINES //if (++lineNum > MAX_LINES) { // break; // skip over the data file column headers line (it has already been read into the importSetup // dataCols by the superclass) //TODO: for startLine prob just change to if (lineNum < startLine) if (lineNum == 1) { continue; } importSetup.reset(); // reset created/existed flags to false, entities that are retrieved or created to null // note that indices of data array items in data file match up with indices of column and // property array items in import definition mapping file importSetup.setDataValues(nextLine); // skip over blank lines. check first couple cols if (!StringUtils.hasText(importSetup.getDataValues()[0]) && !StringUtils.hasText(importSetup.getDataValues()[1]) && !StringUtils.hasText(importSetup.getDataValues()[2])) { continue; } importLog.incTotalRecords(); // includes records that cannot be exported due to some error // allow subclasses to custom generate revisedProjName (e.g. append unit/site to projName), which // is used everywhere a projName is needed in the import // this needs to be called for each record because site could differ for each record generateRevisedProjName(importDefinition, importSetup); // find existing Patient. possibly create new Patient if ((handlingEvent = patientExistsHandling(context, errors, importDefinition, importSetup, importLog, lineNum)).getId().equals(ERROR_FLOW_EVENT_ID)) { importLog.incErrors(); continue; } // if no errors, continue processing import record. importSetup patientCreated indicates whether a // new Patient record was created or an existing Patient record was found (and given no errors, this // means that all import definition flags were successfully met such that the record can be imported // with either a new or existing Patient) // (this goes for EnrollmentStatus, Visit and instrument as well) // if Patient MUST_EXIST then importing assessment data, so do not deal with creating ContactInfo if (!importDefinition.getPatientExistRule().equals(MUST_EXIST)) { if ((handlingEvent = contactInfoExistsHandling(context, errors, importDefinition, importSetup, importLog, lineNum)).getId().equals(ERROR_FLOW_EVENT_ID)) { importLog.incErrors(); continue; } } // because caregiverExistsHandling may be reused for multiple Caregiver instances if data file has multiple Caregivers, it does not // directly set entities on importSetup like other existsHandling methods; instead it passes flags and instantiated entities back // via the returned Event, which has attributes, and the values of these attributes are then use to set the Caregiver. // note that Caregiver and Caregiver ContactInfo are tightly bound such that both are handled together since there // is an assumption that Caregiver ContactInfo would never be imported without importing Caregiver data (i.e. would // never import new ContactInfo record for an existing Caregiver). 
so if a Caregiver is created a Caregiver ContactInfo // might also be created (if there is data in the import data file) if ((handlingEvent = caregiverExistsHandling(context, errors, importDefinition, importSetup, importLog, importSetup.getIndexCaregiverFirstName(), importSetup.getIndexCaregiverLastName(), importSetup.getIndexCaregiverContactInfoAddress(), importSetup.getIndexCaregiverContactInfoCity(), importSetup.getIndexCaregiverContactInfoState(), importSetup.getIndexCaregiverContactInfoZip(), importSetup.getIndexCaregiverContactInfoPhone1(), importSetup.getIndexCaregiverContactInfoEmail(), lineNum)).getId().equals(ERROR_FLOW_EVENT_ID)) { importLog.incErrors(); continue; } importSetup.setCaregiverCreated((Boolean) handlingEvent.getAttributes().get("caregiverCreated")); importSetup.setCaregiverExisted((Boolean) handlingEvent.getAttributes().get("caregiverExisted")); if (importSetup.isCaregiverCreated() || importSetup.isCaregiverExisted()) { importSetup.setCaregiver((Caregiver) handlingEvent.getAttributes().get("caregiver")); importSetup.setCaregiverContactInfoCreated((Boolean) handlingEvent.getAttributes().get("caregiverContactInfoCreated")); if (importSetup.isCaregiverContactInfoCreated()) { importSetup.setCaregiverContactInfo((ContactInfo) handlingEvent.getAttributes().get("caregiverContactInfo")); } } // support importing two caregivers (e.g. Mother and Father for child patients) if ((handlingEvent = caregiverExistsHandling(context, errors, importDefinition, importSetup, importLog, importSetup.getIndexCaregiver2FirstName(), importSetup.getIndexCaregiver2LastName(), importSetup.getIndexCaregiver2ContactInfoAddress(), importSetup.getIndexCaregiver2ContactInfoCity(), importSetup.getIndexCaregiver2ContactInfoState(), importSetup.getIndexCaregiver2ContactInfoZip(), importSetup.getIndexCaregiver2ContactInfoPhone1(), importSetup.getIndexCaregiver2ContactInfoEmail(), lineNum)).getId().equals(ERROR_FLOW_EVENT_ID)) { importLog.incErrors(); continue; } importSetup.setCaregiver2Created((Boolean) handlingEvent.getAttributes().get("caregiverCreated")); importSetup.setCaregiver2Existed((Boolean) handlingEvent.getAttributes().get("caregiverExisted")); if (importSetup.isCaregiver2Created() || importSetup.isCaregiver2Existed()) { importSetup.setCaregiver2((Caregiver) handlingEvent.getAttributes().get("caregiver")); importSetup.setCaregiver2ContactInfoCreated((Boolean) handlingEvent.getAttributes().get("caregiverContactInfoCreated")); if (importSetup.isCaregiver2ContactInfoCreated()) { importSetup.setCaregiver2ContactInfo((ContactInfo) handlingEvent.getAttributes().get("caregiverContactInfo")); } } // determine if Patient is Enrolled in Project. possibly create new EnrollmentStatus if ((handlingEvent = enrollmentStatusExistsHandling(context, errors, importDefinition, importSetup, importLog, lineNum)).getId().equals(ERROR_FLOW_EVENT_ID)) { importLog.incErrors(); continue; } Event instrHandlingEvent = null; if (!importDefinition.getPatientOnlyImport()) { // find matching Visit. possibly create new Visit if ((handlingEvent = visitExistsHandling(context, errors, importDefinition, importSetup, importLog, lineNum)).getId().equals(ERROR_FLOW_EVENT_ID)) { importLog.incErrors(); continue; } // find matching instrument. possibly create new instrument. 
type of instrument specified in the // importDefinition if ((instrHandlingEvent = instrumentExistsHandling(context, errors, importDefinition, importSetup, importLog, lineNum)).getId().equals(ERROR_FLOW_EVENT_ID)) { // it is simply enough to check for the existence of the "alreadyExists" attribute, i.e. do not need to check its value if (instrHandlingEvent.getAttributes() != null && instrHandlingEvent.getAttributes().get("alreadyExists") != null) { importLog.incAlreadyExist(); } else { importLog.incErrors(); } continue; } } //RIGHT HERE // create a link from the importLog to the importDefinition so user can quickly see what definition was used // to support this, definition needs to be a subflow of log // X-do pedi attachments (consents) when working in the following with LavaFile stuff // download definition mapping file // download data file // X-change mapping file format to 3 rows: row 2 is entity type, row 3 is property name (if // both are blank then defaults to 1st instrument and prop name == column name (row 1)) // pedi new patient history import (data file with all columns, not cut off at 256 cols) // X-should only create caregiver and caregiver contactInfo records if data exists // X-log totals do not reflect caregivers and contactInfo records (but they could. not bothering with // caregiverContactInfo totals since that is tightly bound with caregiver) // X-SPDC History Form 2 metadata populated, e.g. marco_lab...history_timestamp (which will be used for versioning) // X-SPDC History Form 2 only showing when run server in non-debug mode - KNOWN ISSUE // confirm that data is being loaded correctly (incl. caregiver livesWithPatient, ContactInfo is // for caregiver) // need separate definitions for old and current versions because var names from old // need to map to current, e.g. field5 old maps to field6 current, whereas for current // field5 maps to field5 // get rid of import section, default to imports section (make sure regular import fails // on SPDC history import // X-open csv // OT: Add Patient skip logic on Community Dx should disable following field unless "6 - other" // pertinent TODOs in code, config, Hibernate mapping, jsp, etc. // X-make mapping definition name longer (50?) // import definition UI cleanup (for now move Project near top, ahead of selection of Import // skip logic: // if Only Import Patients then disable Visit and Instrument fields // if Patient Must Not Exist rule is selected then other exist rules should be disabled and // set to Must Not Exist // make import definition bigger // MappingData File since the project refresh removes mapping file selection) // (why does Browse button have _ in it?) // help text: (maybe) if mapping file changes, have to re-upload // importLogContent / crmsImportLogContent format log summary results in a table // importLog/crmsImportLog needs to get rid of Edit button // crmsAllImportLogs needs a Filter // X-add creation of entities as importLog CREATED messages // ?? create preview mode, at least for development, that does not do anything to db // X-call calculate on save (or is it done automatically?) // REDCap form exports have 2 digit dates. 4 digit dates always better. make sure 2 digit dates before // 2000 are imported correctly // truncation solution: add import def flag: Truncate to fit field length, then retry, create warning? 
// or abort this record, create error, continue w next record // with an importDefinition flag determine how to handle when data length exceeds metadata length // other majors: // BASC import // FileMaker patient import // FileMaker Sensory Profile Child import // REDCap Sensory Profile Child import // for REDCap assessment imports, test patient firstName against nickname if match against firstName failed // Rankin TODOs: // migrate to MAC LAVA // implement startDataRow (defaults to 2 for all imports done prior to implementation) // match existing Visit on Visit Type if user sets flag to do so. even if not, Visit Type could still be // used when creating new Visits (default is false) // (columns and metadata to support already added to db) // match existing Visit on user specified time window, in days, around the visitDate in data file. set // to 0 for an exact date match (need info text with this) (0 is the default) // (columns and metadata to support already added to db) // expand to work with multiple instruments (crmsImportDefinition will have inputs for up to 10 // instruments, and will have to rework instrumentExistsHandling to go thru each specified instrument, // and use of instrType,instrVer for generateLocation for data files will just have to use that of // the first instrument chosen) // (columns and metadata to support already added to db, i.e. 2 thru 10 instrType/instrVer) // 2.0 expand to work with files in folders for special not-exactly-import use cases: // a) for instruments that load individual patient files, e.g. e-prime instruments // b) for PDFs that should be attached to an existing instrument // 2.0: validation, i.e. read property metadata to obtain type, list of valid values // 3.0 import detail data files, e.g. Freesurfer 5.1 data if ((handlingEvent = otherExistsHandling(context, errors, importDefinition, importSetup, importLog, lineNum)).getId().equals(ERROR_FLOW_EVENT_ID)) { importLog.incErrors(); continue; } // iterate thru the values of the current import record, setting each value on the property of an entity, as // determined by the importDefinition mapping file if ((handlingEvent = setPropertyHandling(context, errors, importDefinition, importSetup, importLog, lineNum)).getId().equals(ERROR_FLOW_EVENT_ID)) { importLog.incErrors(); continue; } // if definition has flag set that this is a caregiver instrument, set the caregiver on the instrument if (importDefinition.getInstrCaregiver() != null && importDefinition.getInstrCaregiver().equals(1)) { if ((handlingEvent = setInstrumentCaregiver(context, errors, importDefinition, importSetup, importLog, lineNum)).getId().equals(ERROR_FLOW_EVENT_ID)) { importLog.incErrors(); continue; } } // at this point all values of the import record have been successfully set on entity properties //TODO: when enable updating existing instrument data: // not calling save on the entity should solve not persisting new records that should be skipped // but what if existing entities are modified and then record is to be skipped? // Hibernate would implicitly save changes so would have to explicitly rollback. 
// however, use cases don't support modifying existing Patient/ES/Visit, only an existing Instrument // so review how CRUD editing cancel is done and try calling refresh on the modified object // (could fool around with CRUD editing and take out refresh on cancel just to see if changes // are persisted without explicit call to save) if ((handlingEvent = saveImportRecord(importDefinition, importSetup, importLog, lineNum)).getId().equals(ERROR_FLOW_EVENT_ID)) { importLog.incErrors(); continue; } // update counts // applies to entire import record // it is simply enough to check for the existence of the "update" attribute, i.e. do not need to check its value if (!importDefinition.getPatientOnlyImport()) { if (instrHandlingEvent.getAttributes() != null && instrHandlingEvent.getAttributes().get("update") != null) { importLog.incUpdated(); } else { importLog.incImported(); } } else { importLog.incImported(); } // these counts apply to specific entities within an import record updateEntityCounts(importSetup, importLog); } } // at this point, returnEvent success means the success of the overall import. individual records // may have had errors, which are logged as importLog messages and the total error count is incremented // returnEvent error means that the import failed as a whole and error msg is put in the command // object errors to be displayed if (returnEvent.getId().equals(SUCCESS_FLOW_EVENT_ID)) { importLog.save(); } return returnEvent; } protected Event validateDataFile(BindingResult errors, ImportDefinition importDefinition, ImportSetup importSetup) throws Exception { CrmsImportSetup crmsImportSetup = (CrmsImportSetup) importSetup; if (super.validateDataFile(errors, importDefinition, importSetup).getId().equals(ERROR_FLOW_EVENT_ID)) { return new Event(this, ERROR_FLOW_EVENT_ID); } // caregiver instruments have special handling to set their caregiver ID property, which involves putting an "instrumentCaregiverId" // column in the mapping file even though there is no such column in the data file (the data does presumably have caregiver first and // last names from which an existing Caregiver is matched or a new Caregiver is created) //// crmsImportSetup.setIndexInstrCaregiverId(ArrayUtils.indexOf(importSetup.getMappingCols(), "instrumentCaregiverId")); // validate the mapping file data columns against the import file data columns as they should be //// if ((crmsImportSetup.getIndexInstrCaregiverId() == -1 ? importSetup.getMappingCols().length : importSetup.getMappingCols().length-1) != importSetup.getDataCols().length) { //TODO: go back to using the base class which does this /** if (importSetup.getMappingCols().length != importSetup.getDataCols().length) { LavaComponentFormAction.createCommandError(errors, "Cannot import. Mismatch in number of columns in mapping file vs data file"); return new Event(this,ERROR_FLOW_EVENT_ID); } **/ //TODO: go back to using the base class which does this /** //// // the "instrumentCaregiverId" column, if used, must be put at the end of the mapping file, for this validation to pass for (int i=0; i < importSetup.getMappingCols().length; i++) { //// if (importSetup.getMappingCols()[i].equals("instrumentCaregiverId")) { //// break; //// } if (!importSetup.getMappingCols()[i].equals(importSetup.getDataCols()[i])) { LavaComponentFormAction.createCommandError(errors, "Cannot import. 
Mapping file column name " + importSetup.getMappingCols()[i] + " does not exactly match column header in data file"); return new Event(this,ERROR_FLOW_EVENT_ID); } } **/ // set indices here as this only needs to be done once for the entire data file // ** the import definition mapping file second row must have entity string and third row must have // property string that match exactly the entity and property name strings below // look up the indices of fields in the import definition mapping file properties row that are required // to search for existing entities and/or populate new entities, and record the indices to be used in // processing each import record // required fields for creating new Patient/EnrollmentStatus/Visit/instrument which could have the same // uniform value across all records imported from a data file may be specified as part of the import // definition rather then being supplied in the data file. but the data file takes precedent so first // check the data file and set the index if the field has a value in the data file. // note that the entity and property are on separate lines of the mapping file and thus in separate arrays, // so need to check the two arrays in conjunction with each other (could have just had a single property // row with entity.property but if there are multiple instruments in the data file with many properties, // easier to edit the mapping file with instrument names across the entity column headers instead of // editing entity.property format for each property) setDataFilePropertyIndex(importSetup, "indexPatientPIDN", "patient", "PIDN"); setDataFilePropertyIndex(importSetup, "indexPatientFirstName", "patient", "firstName"); setDataFilePropertyIndex(importSetup, "indexPatientLastName", "patient", "lastName"); setDataFilePropertyIndex(importSetup, "indexPatientBirthDate", "patient", "birthDate"); setDataFilePropertyIndex(importSetup, "indexPatientGender", "patient", "gender"); setDataFilePropertyIndex(importSetup, "indexContactInfoAddress", "contactInfo", "address"); setDataFilePropertyIndex(importSetup, "indexContactInfoCity", "contactInfo", "city"); setDataFilePropertyIndex(importSetup, "indexContactInfoState", "contactInfo", "state"); setDataFilePropertyIndex(importSetup, "indexContactInfoZip", "contactInfo", "zip"); setDataFilePropertyIndex(importSetup, "indexContactInfoPhone1", "contactInfo", "phone1"); setDataFilePropertyIndex(importSetup, "indexContactInfoEmail", "contactInfo", "email"); setDataFilePropertyIndex(importSetup, "indexCaregiverFirstName", "caregiver", "firstName"); setDataFilePropertyIndex(importSetup, "indexCaregiverLastName", "caregiver", "lastName"); setDataFilePropertyIndex(importSetup, "indexCaregiverContactInfoAddress", "caregiverContactInfo", "address"); setDataFilePropertyIndex(importSetup, "indexCaregiverContactInfoCity", "caregiverContactInfo", "city"); setDataFilePropertyIndex(importSetup, "indexCaregiverContactInfoState", "caregiverContactInfo", "state"); setDataFilePropertyIndex(importSetup, "indexCaregiverContactInfoZip", "caregiverContactInfo", "zip"); setDataFilePropertyIndex(importSetup, "indexCaregiverContactInfoPhone1", "caregiverContactInfo", "phone1"); setDataFilePropertyIndex(importSetup, "indexCaregiverContactInfoEmail", "caregiverContactInfo", "email"); setDataFilePropertyIndex(importSetup, "indexCaregiver2FirstName", "caregiver2", "firstName"); setDataFilePropertyIndex(importSetup, "indexCaregiver2LastName", "caregiver2", "lastName"); setDataFilePropertyIndex(importSetup, 
"indexCaregiver2ContactInfoAddress", "caregiver2ContactInfo", "address"); setDataFilePropertyIndex(importSetup, "indexCaregiver2ContactInfoCity", "caregiver2ContactInfo", "city"); setDataFilePropertyIndex(importSetup, "indexCaregiver2ContactInfoState", "caregiver2ContactInfo", "state"); setDataFilePropertyIndex(importSetup, "indexCaregiver2ContactInfoZip", "caregiver2ContactInfo", "zip"); setDataFilePropertyIndex(importSetup, "indexCaregiver2ContactInfoPhone1", "caregiver2ContactInfo", "phone1"); setDataFilePropertyIndex(importSetup, "indexCaregiver2ContactInfoEmail", "caregiver2ContactInfo", "email"); setDataFilePropertyIndex(importSetup, "indexEsStatusDate", "enrollmentStatus", "date"); setDataFilePropertyIndex(importSetup, "indexEsStatus", "enrollmentStatus", "status"); setDataFilePropertyIndex(importSetup, "indexVisitDate", "visit", "visitDate"); setDataFilePropertyIndex(importSetup, "indexVisitTime", "visit", "visitTime"); setDataFilePropertyIndex(importSetup, "indexVisitType", "visit", "visitType"); setDataFilePropertyIndex(importSetup, "indexVisitWith", "visit", "visitWith"); setDataFilePropertyIndex(importSetup, "indexVisitLoc", "visit", "visitLoc"); setDataFilePropertyIndex(importSetup, "indexVisitStatus", "visit", "visitStatus"); setDataFilePropertyIndex(importSetup, "indexInstrDcDate", "instrument", "dcDate"); setDataFilePropertyIndex(importSetup, "indexInstrDcStatus", "instrument", "dcStatus"); setOtherIndices((CrmsImportDefinition)importDefinition, crmsImportSetup); //TODO: move these checks to the CrmsImportDefinitionHandler // error on entire import if either no PIDN or no FirstName/LastName if (crmsImportSetup.getIndexPatientPIDN() == -1 && (crmsImportSetup.getIndexPatientFirstName() == -1 || crmsImportSetup.getIndexPatientLastName() == -1)) { LavaComponentFormAction.createCommandError(errors, "Insufficient Patient properties (must have PIDN or FirstName Lastname) in Import Definition mapping file"); return new Event(this, ERROR_FLOW_EVENT_ID); } // error on entire import if no visitDate else if (!((CrmsImportDefinition)importDefinition).getPatientOnlyImport() && crmsImportSetup.getIndexVisitDate() == -1) { LavaComponentFormAction.createCommandError(errors, "Import Definition mapping file must have 'visit.visitDate' property to link import record to a date"); return new Event(this, ERROR_FLOW_EVENT_ID); } return new Event(this, SUCCESS_FLOW_EVENT_ID); } /** * Subclasses should override to generate custom projName * * @return */ protected void generateRevisedProjName(CrmsImportDefinition importDefinition, CrmsImportSetup importSetup) { importSetup.setRevisedProjName(importDefinition.getProjName()); } /** * Subclasses override this to set indices for custom imports. * * @param importDefinition * @param importSetup * @throws Exception */ protected void setOtherIndices(CrmsImportDefinition importDefinition, CrmsImportSetup importSetup) throws Exception { // do nothing } /** * patientExistsHandling * * Determine whether patient exists or not and act accordingly based on the importDefinition settings. * * The approach to logging is to log the error when it occurs within the method but have the * caller increment the error count if an error Event is returned (in which case processing of * the current record will abort and will go to the next import record). 
* * @param context * @param errors * @param importDefinition * @param importSetup * @param lineNum * @return SUCCESS Event if no import errors with current record; ERROR EVENT if errors */ protected Event patientExistsHandling(RequestContext context, BindingResult errors, CrmsImportDefinition importDefinition, CrmsImportSetup importSetup, CrmsImportLog importLog, int lineNum) { HttpServletRequest request = ((ServletExternalContext)context.getExternalContext()).getRequest(); LavaDaoFilter filter = EntityBase.newFilterInstance(); SimpleDateFormat formatter; String dateOrTimeAsString; Date birthDate = null; // search for existing patient Patient p = null; filter.clearDaoParams(); if (importSetup.getIndexPatientPIDN() != -1) { String pidnAsString = importSetup.getDataValues()[importSetup.getIndexPatientPIDN()]; Long pidn = null; try { pidn = Long.valueOf(pidnAsString); } catch (NumberFormatException ex) { importLog.addErrorMessage(lineNum, "PIDN Is not a number="+ pidnAsString); return new Event(this, ERROR_FLOW_EVENT_ID); // to abort processing this import record } filter.addIdDaoEqualityParam(pidn); p = (Patient) Patient.MANAGER.getById(pidn); } else { // birthDate is optional for search as it is often not part of data files if (importSetup.getIndexPatientBirthDate() != -1) { dateOrTimeAsString = importSetup.getDataValues()[importSetup.getIndexPatientBirthDate()]; formatter = new SimpleDateFormat(importDefinition.getDateFormat() != null ? importDefinition.getDateFormat() : DEFAULT_DATE_FORMAT); formatter.setLenient(true); // to avoid exceptions; we check later to see if leniency was applied try { birthDate = formatter.parse(dateOrTimeAsString); } catch (ParseException e) { // likely will not be called with leniency applied importLog.addErrorMessage(lineNum, "Patient.birthDate is an invalid Date format, Date:" + dateOrTimeAsString); return new Event(this, ERROR_FLOW_EVENT_ID); // to abort processing this import record } // because if date format is yyyy for year part, the parser will allow any date into the future, even 5 digit dates, so // have to do range checking to catch bad date errors java.util.Calendar birthDateCalendar = java.util.Calendar.getInstance(); birthDateCalendar.setTime(birthDate); int birthDateYear = birthDateCalendar.get(java.util.Calendar.YEAR); java.util.Calendar nowCalendar = java.util.Calendar.getInstance(); int nowYear = nowCalendar.get(java.util.Calendar.YEAR); if (birthDateYear < (nowYear - 100) || birthDateYear > nowYear) { importLog.addErrorMessage(lineNum, "Patient DOB has an invalid Year. DOB:" + dateOrTimeAsString); return new Event(this, ERROR_FLOW_EVENT_ID); // to abort processing this import record } filter.addDaoParam(filter.daoEqualityParam("birthDate", birthDate)); } // have already validated that firstName and lastName are present in the mapping definition file if PIDN is not setPatientNameMatchFilter(filter, importSetup); try { p = (Patient) Patient.MANAGER.getOne(filter); } // this should never happen. 
if re-running import of a data file, should just be one catch (IncorrectResultSizeDataAccessException ex) { importLog.addErrorMessage(lineNum, "Duplicate Patient records for patient firstName:" + importSetup.getDataValues()[importSetup.getIndexPatientFirstName()] + " lastName:" + importSetup.getDataValues()[importSetup.getIndexPatientLastName()]); return new Event(this, ERROR_FLOW_EVENT_ID); // to abort processing this import record } } if (p == null) { if (importDefinition.getPatientExistRule().equals(MUST_EXIST)) { if (importSetup.getIndexPatientPIDN() != -1) { importLog.addErrorMessage(lineNum, "Patient does not exist violating MUST_EXIST flag. PIDN:" + importSetup.getDataValues()[importSetup.getIndexPatientPIDN()]); } else { importLog.addErrorMessage(lineNum, "Patient does not exist violating MUST_EXIST flag." + " First Name:" + importSetup.getDataValues()[importSetup.getIndexPatientFirstName()] + " Last Name:" + importSetup.getDataValues()[importSetup.getIndexPatientLastName()]); } return new Event(this, ERROR_FLOW_EVENT_ID); // to abort processing this import record } else { // for either MUST_NOT_EXIST or MAY_OR_MAY_NOT_EXIST instantiate the Patient if (importSetup.getIndexPatientFirstName() == -1 || !StringUtils.hasText(importSetup.getDataValues()[importSetup.getIndexPatientFirstName()])) { importLog.addErrorMessage(lineNum, "Cannot create Patient. First Name field (patient.firstName) is missing or has no value"); return new Event(this, ERROR_FLOW_EVENT_ID); // to abort processing this import record } if (importSetup.getIndexPatientLastName() == -1 || !StringUtils.hasText(importSetup.getDataValues()[importSetup.getIndexPatientLastName()])) { importLog.addErrorMessage(lineNum, "Cannot create Patient. Last Name field (patient.lastName) is missing or has no value"); return new Event(this, ERROR_FLOW_EVENT_ID); // to abort processing this import record } if (importSetup.getIndexPatientBirthDate() == -1 || !StringUtils.hasText(importSetup.getDataValues()[importSetup.getIndexPatientBirthDate()])) { importLog.addErrorMessage(lineNum, "Cannot create Patient. Date of Birth field (patient.birthDate) is missing or has no value"); return new Event(this, ERROR_FLOW_EVENT_ID); // to abort processing this import record } if (importSetup.getIndexPatientGender() == -1 || !StringUtils.hasText(importSetup.getDataValues()[importSetup.getIndexPatientGender()])) { importLog.addErrorMessage(lineNum, "Cannot create Patient. Gender field (patient.gender) is missing or has no value"); return new Event(this, ERROR_FLOW_EVENT_ID); // to abort processing this import record } // create Patient record p = createPatient(importDefinition, importSetup); // the property values will be assigned when iterating dataValues below. could assign // the "indexed" Patient properties here since those were found as part of determining whether the // Patient exists. but still need to assign other Patient properties (cannot index them // all because do not even know what all the properties could be) and since will be // iterating thru all data values just assign all properties when iterating dataValues // however, do set first name, last name, dob properties as they may be used in error log p.setFirstName(importSetup.getDataValues()[importSetup.getIndexPatientFirstName()]); p.setLastName(importSetup.getDataValues()[importSetup.getIndexPatientLastName()]); p.updateCalculatedFields(); // so can use full name in log messages // if the birthDate conversion was not done yet, i.e. 
PIDN was supplied such that a PIDN match was done (and failed) if (birthDate == null) { //look at making this date conversion into a small helper method dateOrTimeAsString = importSetup.getDataValues()[importSetup.getIndexPatientBirthDate()]; formatter = new SimpleDateFormat(importDefinition.getDateFormat() != null ? importDefinition.getDateFormat() : DEFAULT_DATE_FORMAT); formatter.setLenient(true); // to avoid exceptions; we check later to see if leniency was applied try { birthDate = formatter.parse(dateOrTimeAsString); } catch (ParseException e) { // likely will not be called with leniency applied importLog.addErrorMessage(lineNum, "Patient.birthDate is an invalid Date format, Date:" + dateOrTimeAsString); return new Event(this, ERROR_FLOW_EVENT_ID); // to abort processing this import record } } p.setBirthDate(birthDate); // at this point have already validated that patient.gender exists in import file and has a value p.setGender(importSetup.getDataValues()[importSetup.getIndexPatientGender()].toLowerCase().startsWith("m") || importSetup.getDataValues()[importSetup.getIndexPatientGender()].equals("1") ? (byte)1 : (byte)2); p.setCreated(new Date()); p.setCreatedBy("IMPORT (" + CoreSessionUtils.getCurrentUser(sessionManager, request).getLogin() + ")"); importSetup.setPatientCreated(true); importSetup.setPatient(p); } } else { // Patient already exists importSetup.setPatientExisted(true); importSetup.setPatient(p); if (importDefinition.getPatientExistRule().equals(MUST_NOT_EXIST)) { // typically with this flag the first time the import is run the Patients will not exist // so they will be created above. if there were some import data errors they would be fixed // and the script re-imported, at which point there will be these errors for all Patients that were already // created, and they will correctly not be created again // note: this is why it is important that Patient should not be persisted until EnrollmentStatus, // Visit and Instrument have all been validated for errors and successfully added. because if // Patient were persisted, then there were errors with Visit, Instrument, etc. when those errors // were fixed and script re-imported the records will be skipped because Patient now exists // note: this differs from MAY_OR_MAY_NOT_EXIST where import of the record will continue if // the Patient exists (as well as if Patient does not exist as it will be created above) importLog.addErrorMessage(lineNum, "Patient already exists, violates Import Definition MUST_NOT_EXIST setting. Patient:" + p.getFullNameWithId()); return new Event(this, ERROR_FLOW_EVENT_ID); // to abort processing this import record } } return new Event(this, SUCCESS_FLOW_EVENT_ID); } /** * Set the filter for matching patient names. Subclasses should override if they have custom * patient name matching. * * @param filter * @param importSetup */ protected void setPatientNameMatchFilter(LavaDaoFilter filter, CrmsImportSetup importSetup) { //TODO: consider Danny's Levenshtein algorithm for fuzzy matching Patient Last Name filter.addDaoParam(filter.daoEqualityParam("firstName", importSetup.getDataValues()[importSetup.getIndexPatientFirstName()])); filter.addDaoParam(filter.daoEqualityParam("lastName", importSetup.getDataValues()[importSetup.getIndexPatientLastName()])); } /** * contactInfoExistsHandling * * Determine whether ContactInfo should be created, and if so create instance. Subclasses can override to customize * logic to determine whether to create a ContactInfo instance. 
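 *
 * A hypothetical override sketch for a subclass that only creates ContactInfo when a phone value is mapped
 * (the extra check is an assumption; the index getter and event ids follow the ones used in this class):
 *
 *   protected Event contactInfoExistsHandling(RequestContext context, BindingResult errors, CrmsImportDefinition importDefinition,
 *           CrmsImportSetup importSetup, CrmsImportLog importLog, int lineNum) {
 *       if (importSetup.getIndexContactInfoPhone1() == -1) {
 *           return new Event(this, SUCCESS_FLOW_EVENT_ID); // skip ContactInfo creation entirely
 *       }
 *       return super.contactInfoExistsHandling(context, errors, importDefinition, importSetup, importLog, lineNum);
 *   }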
* * @param context * @param errors * @param importDefinition * @param importSetup * @param lineNum * @return SUCCESS Event if no import errors with current record; ERROR EVENT if errors */ protected Event contactInfoExistsHandling(RequestContext context, BindingResult errors, CrmsImportDefinition importDefinition, CrmsImportSetup importSetup, CrmsImportLog importLog, int lineNum) { // the assumption is that ContactInfo is only imported as part of a new Patient import, so if a new Patient was created // and any ContactInfo properties are mapped and have data then create a new ContactInfo record. // because currently not a use case for importing new ContactInfo for already existing Patients if (importSetup.isPatientCreated()) { // check that at least one of what are considered the key ContactInfo properties has data if ((importSetup.getIndexContactInfoAddress() != -1 && StringUtils.hasText(importSetup.getDataValues()[importSetup.getIndexContactInfoAddress()])) || (importSetup.getIndexContactInfoCity() != -1 && StringUtils.hasText(importSetup.getDataValues()[importSetup.getIndexContactInfoCity()])) || (importSetup.getIndexContactInfoState() != -1 && StringUtils.hasText(importSetup.getDataValues()[importSetup.getIndexContactInfoState()])) || (importSetup.getIndexContactInfoZip() != -1 && StringUtils.hasText(importSetup.getDataValues()[importSetup.getIndexContactInfoZip()])) || (importSetup.getIndexContactInfoPhone1() != -1 && StringUtils.hasText(importSetup.getDataValues()[importSetup.getIndexContactInfoPhone1()])) || (importSetup.getIndexContactInfoEmail() != -1 && StringUtils.hasText(importSetup.getDataValues()[importSetup.getIndexContactInfoEmail()]))) { ContactInfo contactInfo = createContactInfo(importDefinition, importSetup); contactInfo.setPatient(importSetup.getPatient()); contactInfo.setIsCaregiver(false); contactInfo.setActive((short)1); if (importSetup.getIndexContactInfoAddress() != -1) { contactInfo.setAddress(importSetup.getDataValues()[importSetup.getIndexContactInfoAddress()]); } if (importSetup.getIndexContactInfoCity() != -1) { contactInfo.setCity(importSetup.getDataValues()[importSetup.getIndexContactInfoCity()]); } if (importSetup.getIndexContactInfoState() != -1) { contactInfo.setState(importSetup.getDataValues()[importSetup.getIndexContactInfoState()]); } if (importSetup.getIndexContactInfoZip() != -1) { contactInfo.setZip(importSetup.getDataValues()[importSetup.getIndexContactInfoZip()]); } if (importSetup.getIndexContactInfoPhone1() != -1) { contactInfo.setPhone1(importSetup.getDataValues()[importSetup.getIndexContactInfoPhone1()]); } if (importSetup.getIndexContactInfoEmail() != -1) { contactInfo.setEmail(importSetup.getDataValues()[importSetup.getIndexContactInfoEmail()]); } importSetup.setContactInfoCreated(true); importSetup.setContactInfo(contactInfo); } } return new Event(this, SUCCESS_FLOW_EVENT_ID); } /** * caregiverExistsHandling * * Determine whether Caregiver exists or not and create if it does not. Can assume an exists setting * of MAY_OR_MAY_NOT_EXIST where nothing is updated if the Caregiver already exists. 
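 *
 * The returned success Event carries the results in its attribute map ("caregiver", "caregiverCreated",
 * "caregiverExisted", "caregiverContactInfo", "caregiverContactInfoCreated"), so a caller reads them back
 * from the Event rather than from importSetup. A hypothetical caller sketch (the index getter names follow
 * the indexCaregiver... fields set up in validateDataFile):
 *
 *   Event cgEvent = caregiverExistsHandling(context, errors, importDefinition, importSetup, importLog,
 *           importSetup.getIndexCaregiverFirstName(), importSetup.getIndexCaregiverLastName(),
 *           importSetup.getIndexCaregiverContactInfoAddress(), importSetup.getIndexCaregiverContactInfoCity(),
 *           importSetup.getIndexCaregiverContactInfoState(), importSetup.getIndexCaregiverContactInfoZip(),
 *           importSetup.getIndexCaregiverContactInfoPhone1(), importSetup.getIndexCaregiverContactInfoEmail(), lineNum);
 *   Caregiver caregiver = (Caregiver) cgEvent.getAttributes().get("caregiver");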
* * @param context * @param errors * @param importDefinition * @param importSetup * @param lineNum * @return SUCCESS Event if no import errors with current record; ERROR EVENT if errors */ protected Event caregiverExistsHandling(RequestContext context, BindingResult errors, CrmsImportDefinition importDefinition, CrmsImportSetup importSetup, CrmsImportLog importLog, int indexFirstName, int indexLastName, int indexContactInfoAddress, int indexContactInfoCity, int indexContactInfoState, int indexContactInfoZip, int indexContactInfoPhone1, int indexContactInfoEmail, int lineNum) { LavaDaoFilter filter = EntityBase.newFilterInstance(); // search for existing Caregiver Caregiver caregiver = null; Boolean caregiverCreated = null; Boolean caregiverExisted = null; ContactInfo caregiverContactInfo = null; Boolean caregiverContactInfoCreated = null; if (indexFirstName == -1 || !StringUtils.hasText(importSetup.getDataValues()[indexFirstName]) || indexLastName == -1 || !StringUtils.hasText(importSetup.getDataValues()[indexLastName])) { // if no caregiver data in the data file cannot check if caregiver exists or create a new // caregiver caregiverCreated = false; caregiverExisted = false; caregiverContactInfoCreated = false; } else { // only search if the Patient already exists because if Patient did not exist then Caregiver does not exist if (!importSetup.isPatientCreated()) { filter.clearDaoParams(); filter.setAlias("patient", "patient"); filter.addDaoParam(filter.daoEqualityParam("patient.id", importSetup.getPatient().getId())); // use the passed-in name indices so this method works for both caregiver and caregiver2 filter.addDaoParam(filter.daoEqualityParam("firstName", importSetup.getDataValues()[indexFirstName])); filter.addDaoParam(filter.daoEqualityParam("lastName", importSetup.getDataValues()[indexLastName])); try { caregiver = (Caregiver) Caregiver.MANAGER.getOne(filter); } // this should never happen. if re-running import of a data file, should just be one catch (IncorrectResultSizeDataAccessException ex) { importLog.addErrorMessage(lineNum, "Duplicate Caregiver records for patient " + (importSetup.isPatientExisted() ? 
importSetup.getPatient().getFullNameWithId() : importSetup.getPatient().getFullName()) + " and Caregiver firstName:" + importSetup.getDataValues()[indexFirstName] + " lastName:" + importSetup.getDataValues()[indexLastName]); return new Event(this, ERROR_FLOW_EVENT_ID); // to abort processing this import record } } // if Patient MUST_EXIST then importing assessment data, so do not deal with creating Caregivers / ContactInfo if (!importDefinition.getPatientExistRule().equals(MUST_EXIST)) { if (caregiver == null) { caregiverExisted = false; // at this point either Patient was just created in which case there cannot be a Caregiver yet, or Patient // already existed but Caregiver could not be found if ((indexFirstName != -1 && StringUtils.hasText(importSetup.getDataValues()[indexFirstName])) && (indexLastName != -1 && StringUtils.hasText(importSetup.getDataValues()[indexLastName]))) { caregiver = createCaregiver(importDefinition, importSetup); caregiver.setPatient(importSetup.getPatient()); caregiver.setFirstName(importSetup.getDataValues()[indexFirstName]); caregiver.setLastName(importSetup.getDataValues()[indexLastName]); caregiver.setActive((short)1); // any other (non required) caregiver fields will be assigned in setProperty as they are encountered // in the import record caregiverCreated = true; // if a new Caregiver is created, create a new ContactInfo record for that Caregiver if there is // ContactInfo data in the import data file // caregiverContactInfo properties are set in setPropertyHandling if ((indexContactInfoAddress != -1 && StringUtils.hasText(importSetup.getDataValues()[indexContactInfoAddress])) || (indexContactInfoCity != -1 && StringUtils.hasText(importSetup.getDataValues()[indexContactInfoCity])) || (indexContactInfoState != -1 && StringUtils.hasText(importSetup.getDataValues()[indexContactInfoState])) || (indexContactInfoZip != -1 && StringUtils.hasText(importSetup.getDataValues()[indexContactInfoZip])) || (indexContactInfoPhone1 != -1 && StringUtils.hasText(importSetup.getDataValues()[indexContactInfoPhone1])) || (indexContactInfoEmail != -1 && StringUtils.hasText(importSetup.getDataValues()[indexContactInfoEmail]))) { caregiverContactInfo = createContactInfo(importDefinition, importSetup); caregiverContactInfo.setPatient(importSetup.getPatient()); caregiverContactInfo.setIsCaregiver(true); // NOTE: had to refactor lava-crms ContactInfo to map Caregiver as an association rather than mapping caregiverId // property since do not know caregiverId at this point given that new Caregiver has not been persisted. 
ORM will // take care of assigning caregiverId to ContactInfo at persistence caregiverContactInfo.setCaregiver(caregiver); caregiverContactInfo.setActive((short)1); if (indexContactInfoAddress != -1) { caregiverContactInfo.setAddress(importSetup.getDataValues()[indexContactInfoAddress]); } if (indexContactInfoCity != -1) { caregiverContactInfo.setCity(importSetup.getDataValues()[indexContactInfoCity]); } if (indexContactInfoState != -1) { caregiverContactInfo.setState(importSetup.getDataValues()[indexContactInfoState]); } if (indexContactInfoZip != -1) { caregiverContactInfo.setZip(importSetup.getDataValues()[indexContactInfoZip]); } if (indexContactInfoPhone1 != -1) { caregiverContactInfo.setPhone1(importSetup.getDataValues()[indexContactInfoPhone1]); } if (indexContactInfoEmail != -1) { caregiverContactInfo.setEmail(importSetup.getDataValues()[indexContactInfoEmail]); } caregiverContactInfoCreated = true; } else { caregiverContactInfoCreated = false; } } else { caregiverCreated = false; caregiverContactInfoCreated = false; } } else { caregiverExisted = true; caregiverCreated = false; caregiverContactInfoCreated = false; } } else { caregiverExisted = false; caregiverCreated = false; caregiverContactInfoCreated = false; } } Map<String,Object> eventAttrMap = new HashMap<String,Object>(); eventAttrMap.put("caregiver", caregiver); eventAttrMap.put("caregiverCreated", caregiverCreated); eventAttrMap.put("caregiverExisted", caregiverExisted); eventAttrMap.put("caregiverContactInfo", caregiverContactInfo); eventAttrMap.put("caregiverContactInfoCreated", caregiverContactInfoCreated); AttributeMap attributeMap = new LocalAttributeMap(eventAttrMap); return new Event(this, SUCCESS_FLOW_EVENT_ID, attributeMap); } /** * enrollmentStatusExistsHandling * * Determine whether enrollmentStatus exists or not and act accordingly based on the importDefinition settings. * * @param context * @param errors * @param importDefinition * @param importSetup * @param lineNum * @return SUCCESS Event if no import errors with current record; ERROR EVENT if errors */ protected Event enrollmentStatusExistsHandling(RequestContext context, BindingResult errors, CrmsImportDefinition importDefinition, CrmsImportSetup importSetup, CrmsImportLog importLog, int lineNum) { HttpServletRequest request = ((ServletExternalContext)context.getExternalContext()).getRequest(); LavaDaoFilter filter = EntityBase.newFilterInstance(); SimpleDateFormat formatter; String dateOrTimeAsString; // search for existing enrollmentStatus EnrollmentStatus es = null; // if patient was just created, know that the enrollmentStatus could not exist yet, but if patient // was not just created, then check whether enrollmentStatus exists or not if (!importSetup.isPatientCreated()) { filter.clearDaoParams(); filter.setAlias("patient", "patient"); filter.addDaoParam(filter.daoEqualityParam("patient.id", importSetup.getPatient().getId())); filter.addDaoParam(filter.daoEqualityParam("projName", importSetup.getRevisedProjName())); // note: could get the list of project enrollment statuses for the given projName and then // filter on a certain set of statuses, e.g. exclude 'Withdrew'. But statuses can be custom // for each project so that is a lot of logic and probably too much for the import definition, // as can generally assume that if there is data for a patient and project that the patient // is currently enrolled try { es = (EnrollmentStatus) EnrollmentStatus.MANAGER.getOne(filter); } // this should never happen. 
if re-running import of a data file, should just be one catch (IncorrectResultSizeDataAccessException ex) { importLog.addErrorMessage(lineNum, "Duplicate EnrollmentStatus records for patient " + (importSetup.isPatientExisted() ? importSetup.getPatient().getFullNameWithId() : importSetup.getPatient().getFullName()) + " and project " + importSetup.getRevisedProjName()); return new Event(this, ERROR_FLOW_EVENT_ID); // to abort processing this import record } } String importEsStatus = importSetup.getIndexEsStatus() != -1 ? importSetup.getDataValues()[importSetup.getIndexEsStatus()] : importDefinition.getEsStatus(); if (es == null) { if (importDefinition.getEsExistRule().equals(MUST_EXIST)) { importLog.addErrorMessage(lineNum, "Patient Enrollment does not exist for Project:" + importSetup.getRevisedProjName() + " violating MUST_EXIST flag. Patient:" + importSetup.getPatient().getFullNameRevWithId()); return new Event(this, ERROR_FLOW_EVENT_ID); // to abort processing this import record } else { // for either MUST_NOT_EXIST or MAY_OR_MAY_NOT_EXIST instantiate the Enrollment Status // enrollmentStatus date will typically not be supplied in the data file, so default to visitDate if not // note if 'patientOnlyImport' flag set in importDefinition there will not be a date to use Date esDate = null; if (!((CrmsImportDefinition)importDefinition).getPatientOnlyImport()) { // note that for a patientOnlyImport, unless there is an enrollment date in the data, there will not be an enrollment // date to assign to the new enrollmentStatus. this means that the enrollmentStatus latestDesc / latestDate will not be set // because date is null dateOrTimeAsString = importSetup.getIndexEsStatusDate() != -1 ? importSetup.getDataValues()[importSetup.getIndexEsStatusDate()] : importSetup.getDataValues()[importSetup.getIndexVisitDate()]; formatter = new SimpleDateFormat(importDefinition.getDateFormat() != null ? importDefinition.getDateFormat() : DEFAULT_DATE_FORMAT); formatter.setLenient(true); // to avoid exceptions; we check later to see if leniency was applied try { esDate = formatter.parse(dateOrTimeAsString); } catch (ParseException e) { // likely will not be called with leniency applied importLog.addErrorMessage(lineNum, "Enrollment Status Date or Visit Date is an invalid Date format. Date:" + dateOrTimeAsString); return new Event(this, ERROR_FLOW_EVENT_ID); // to abort processing this import record } // because if date format is yyyy for year part, the parser will allow any date into the future, even 5 digit dates, so // have to do range checking to catch bad date errors java.util.Calendar esDateCalendar = java.util.Calendar.getInstance(); esDateCalendar.setTime(esDate); int esDateYear = esDateCalendar.get(java.util.Calendar.YEAR); java.util.Calendar nowCalendar = java.util.Calendar.getInstance(); int nowYear = nowCalendar.get(java.util.Calendar.YEAR); // allow for dates 5 years into the future if (esDateYear < (nowYear - 100) || esDateYear > (nowYear + 5)) { importLog.addErrorMessage(lineNum, "Enrollment Status Date or Visit Date has an invalid Year. Date:" + dateOrTimeAsString); return new Event(this, ERROR_FLOW_EVENT_ID); // to abort processing this import record } } else { importLog.addWarningMessage(lineNum, "Enrollment Status Date unknown for 'Patient Only Import' for:" + (importSetup.isPatientExisted() ? 
importSetup.getPatient().getFullNameWithId() : importSetup.getPatient().getFullName())); } if (!StringUtils.hasText(importEsStatus)) { if (importSetup.getIndexEsStatus() != -1) { importLog.addErrorMessage(lineNum, "Cannot create Enrollment Status. Status field in data file (column:" + importSetup.getDataCols()[importSetup.getIndexEsStatus()] + ") has no value"); } else { importLog.addErrorMessage(lineNum, "Cannot create Enrollment Status. No Status field supplied in data file and no value specified in definition"); } return new Event(this, ERROR_FLOW_EVENT_ID); // to abort processing this import record } // note that non-required fields will be set in the setPropertyHandling method which iterates thru all // property values. this could include custom, instance specific fields. // create Enrollment Status record es = createEnrollmentStatus(importDefinition, importSetup); es.setPatient(importSetup.getPatient()); es.setProjName(importSetup.getRevisedProjName()); es.setStatus(importEsStatus, esDate); // note that for a patientOnlyImport, unless there is an enrollment date in the data, there will not be an enrollment // date to assign to the new enrollmentStatus (i.e. no visit date to use). updateLatestStatusValues will not set // enrollmentStatus latestDesc / latestDate if the date is null es.updateLatestStatusValues(); importSetup.setEnrollmentStatusCreated(true); importSetup.setEnrollmentStatus(es); } } else { // EnrollmentStatus already exists // warning if current status is not the same as the status defined in the import definition. // e.g. if an existing EnrollmentStatus is matched with latestDesc="EXCLUDED" then user should definitely // be warned that data is being imported for a project from which the patient is excluded. could make // this an error, but there are many more common situations which do not warrant an error, e.g. latestDesc // is ELIGIBLE instead of ENROLLED because perhaps a coordinator forgot to update the status if (es.getLatestDesc() == null) { importLog.addWarningMessage(lineNum, "Patient:" + importSetup.getPatient().getFullName() + " does not have a current Enrollment Status and Date for Project:" + importSetup.getRevisedProjName() + ", possibly because there is no Enrollment Status Date to import"); } else if (!es.getLatestDesc().equals(importEsStatus)) { importLog.addWarningMessage(lineNum, "Patient:" + importSetup.getPatient().getFullName() + " has an Enrollment Status of: " + es.getLatestDesc() + " for Project:" + importSetup.getRevisedProjName() + ", not:" + importEsStatus); } importSetup.setEnrollmentStatusExisted(true); importSetup.setEnrollmentStatus(es); if (importDefinition.getEsExistRule().equals(MUST_NOT_EXIST)) { // typically with this flag the first time the import is run the Enrollment Status will not // exist so it will be created above. if there were some import data errors they would be fixed // and the script re-imported, at which point there will be these errors for all Enrollment Statuses that already exist, // and they will correctly not be created again // note: this differs from MAY_OR_MAY_NOT_EXIST where import of the record will continue if // the Enrollment Status exists (as well as if Enrollment Status does not exist as it will be // created above) importLog.addErrorMessage(lineNum, "Enrollment Status already exists, violates Import Definition MUST_NOT_EXIST setting. Patient:" + (importSetup.isPatientExisted() ? 
importSetup.getPatient().getFullNameWithId() : importSetup.getPatient().getFullName())); return new Event(this, ERROR_FLOW_EVENT_ID); // to abort processing this import record } } return new Event(this, SUCCESS_FLOW_EVENT_ID); } /** * visitExistHandling * * Determine whether visit exists or not and act accordingly based on the importDefinition settings. * * @param context * @param errors * @param importDefinition * @param importSetup * @param lineNum * @return SUCCESS Event if no import errors with current record; ERROR EVENT if errors */ protected Event visitExistsHandling(RequestContext context, BindingResult errors, CrmsImportDefinition importDefinition, CrmsImportSetup importSetup, CrmsImportLog importLog, int lineNum) { HttpServletRequest request = ((ServletExternalContext)context.getExternalContext()).getRequest(); LavaDaoFilter filter = EntityBase.newFilterInstance(); SimpleDateFormat formatter; String dateOrTimeAsString; // search for existing Visit Visit v = null; Date visitDate = null; Time visitTime = null; String visitType = null; // visitDate is required for both matching Visit and as a required field when creating new Visit dateOrTimeAsString = importSetup.getDataValues()[importSetup.getIndexVisitDate()]; formatter = new SimpleDateFormat(importDefinition.getDateFormat() != null ? importDefinition.getDateFormat() : DEFAULT_DATE_FORMAT); formatter.setLenient(true); // to avoid exceptions; we check later to see if leniency was applied try { visitDate = formatter.parse(dateOrTimeAsString); } catch (ParseException e) { // likely will not occur with leniency applied importLog.addErrorMessage(lineNum, "Visit.visitDate is an invalid Date format. Date:" + dateOrTimeAsString); return new Event(this, ERROR_FLOW_EVENT_ID); // to abort processing this import record } // because if date format is yyyy for year part, the parser will allow any date into the future, even 5 digit dates, so // have to do range checking to catch bad date errors java.util.Calendar visitDateCalendar = java.util.Calendar.getInstance(); visitDateCalendar.setTime(visitDate); int visitDateYear = visitDateCalendar.get(java.util.Calendar.YEAR); java.util.Calendar nowCalendar = java.util.Calendar.getInstance(); int nowYear = nowCalendar.get(java.util.Calendar.YEAR); // allow for dates 5 years into the future if (visitDateYear < (nowYear - 100) || visitDateYear > (nowYear + 5)) { importLog.addErrorMessage(lineNum, "Visit.visitDate has an invalid Year. Date:" + dateOrTimeAsString); return new Event(this, ERROR_FLOW_EVENT_ID); // to abort processing this import record } if (importSetup.getIndexVisitTime() != -1 && StringUtils.hasText(importSetup.getDataValues()[importSetup.getIndexVisitTime()])) { Date visitTimeAsDate = null; dateOrTimeAsString = importSetup.getDataValues()[importSetup.getIndexVisitTime()]; formatter = new SimpleDateFormat(importDefinition.getTimeFormat() != null ? importDefinition.getTimeFormat() : DEFAULT_TIME_FORMAT); try{ visitTimeAsDate = formatter.parse(dateOrTimeAsString); visitTime = LavaDateUtils.getTimePart(visitTimeAsDate); }catch (ParseException e){ importLog.addErrorMessage(lineNum, "Visit.visitTime is an invalid Time format. 
Time:" + dateOrTimeAsString); return new Event(this, ERROR_FLOW_EVENT_ID); // to abort processing this import record } } // visitType is a required field, not null in the database, so if new Visit will be created it must be // either supplied in the data file, or more likely, specified in the import definition (assuming it // is accurate to assign the same visitType to every visit created within the same import) visitType = importSetup.getIndexVisitType() != -1 ? importSetup.getDataValues()[importSetup.getIndexVisitType()] : importDefinition.getVisitType(); // if enrollmentStatus was just created (whether patient just created or patient already existed) then // know that the Visit could not exist yet. otherwise, check to see if Visit exists or not. if (!importSetup.isEnrollmentStatusCreated()) { filter.clearDaoParams(); filter.setAlias("patient", "patient"); filter.addDaoParam(filter.daoEqualityParam("patient.id", importSetup.getPatient().getId())); filter.addDaoParam(filter.daoEqualityParam("projName", importSetup.getRevisedProjName())); if (StringUtils.hasText(importSetup.getDataValues()[importSetup.getIndexVisitDate()])) { // do not have a flag for whether both date and time must match. just assume that whichever is provided should // match. if only date can just do a full date comparison, i.e. should not need datepart because the date in // the Visit is just the datepart and the date in the data file will just be a date // currently do not handle existing columns that have date and time in same column. not sure if this will // be encountered filter.addDaoParam(filter.daoEqualityParam("visitDate", visitDate)); if (importSetup.getIndexVisitTime() != -1 && StringUtils.hasText(importSetup.getDataValues()[importSetup.getIndexVisitTime()])) { filter.addDaoParam(filter.daoEqualityParam("visitTime", visitTime)); } // note: could also use daoDateAndTimeEqualityParam // visitType is optional for the search; it typically is not in generated data files, and if the import // is such that new Visits will not be created then it need not be specified in the definition. // however, without visitType, could match multiple visits. see more on this below if (visitType != null) { if (StringUtils.hasText(visitType)) { filter.addDaoParam(filter.daoEqualityParam("visitType", visitType)); } } filter.addDaoParam(filter.daoNot(filter.daoEqualityParam("visitStatus", "Cancelled"))); try { v = (Visit) Visit.MANAGER.getOne(filter); } // assuming visitType is supplied, this should never happen. if re-running import of a data file, should just be one instance // of a given visitType on a given date // however, if no visitType supplied in import definition, could match multiple visits on same date, in which case // would not know which one to use. so user should then modify import definition to include visitType //TODO: when start using Visit Window for Kate, figure out what to do if matches multiple visits catch (IncorrectResultSizeDataAccessException ex) { if (visitType != null) { importLog.addErrorMessage(lineNum, "Duplicate Visit records for Patient:" + (importSetup.isPatientExisted() ? importSetup.getPatient().getFullNameWithId() : importSetup.getPatient().getFullName()) + " and Visit Date:" + dateOrTimeAsString + " and Visit Type:" + visitType); } else { importLog.addErrorMessage(lineNum, "Duplicate Visit records for Patient:" + (importSetup.isPatientExisted() ? importSetup.getPatient().getFullNameWithId() : importSetup.getPatient().getFullName()) + " and Visit Date:" + dateOrTimeAsString + ". 
Specify Visit Type in Import Definition to match on single Visit"); } return new Event(this, ERROR_FLOW_EVENT_ID); // to abort processing this import record } } else { // this is not the same as Visit does not exist because do not have fields to check that the // Visit does or does not exist importLog.addErrorMessage(lineNum, "Cannot determine if Visit exists or not. Column:" + importSetup.getDataCols()[importSetup.getIndexVisitDate()] + " has no data"); return new Event(this, ERROR_FLOW_EVENT_ID); // to abort processing this import record } } if (v == null) { if (importDefinition.getVisitExistRule().equals(MUST_EXIST)) { importLog.addErrorMessage(lineNum, "Visit does not exist for Patient:" + importSetup.getPatient().getFullNameRev() + " Project:" + importSetup.getRevisedProjName() + " violating MUST_EXIST flag"); return new Event(this, ERROR_FLOW_EVENT_ID); // to abort processing this import record } else { // for either MUST_NOT_EXIST or MAY_OR_MAY_NOT_EXIST instantiate the Visit // get fields that were not already obtained for querying for existing Visit. note that these fields are // required in LAVA, but they are not required in the import definition, because it may be that a single // visitType / visitLocation / visitWith / visitStatus does not apply to all visits that are created // within a single import file String visitLoc = importSetup.getIndexVisitLoc() != -1 ? importSetup.getDataValues()[importSetup.getIndexVisitLoc()] : importDefinition.getVisitLoc(); // note there is a catch-all "Home" entry in the Visit Location list that can be used generically. could add more as needed if (!StringUtils.hasText(visitLoc)) { if (importSetup.getIndexVisitLoc() != -1) { importLog.addErrorMessage(lineNum, "Cannot create Visit. Visit Location field in data file (column:" + importSetup.getDataCols()[importSetup.getIndexVisitLoc()] + ") has no value"); } else { importLog.addErrorMessage(lineNum, "Cannot create Visit. Visit Location field not supplied in data file and no value specified in definition"); } return new Event(this, ERROR_FLOW_EVENT_ID); // to abort processing this import record } String visitWith = importSetup.getIndexVisitWith() != -1 ? importSetup.getDataValues()[importSetup.getIndexVisitWith()] : importDefinition.getVisitWith(); /** in case decide to require visitWith if (!StringUtils.hasText(visitWith)) { if (importSetup.getIndexVisitWith() != -1) { importLog.addErrorMessage(lineNum, "Cannot create Visit. Visit With field in data file (column:" + importSetup.getDataCols()[importSetup.getIndexVisitWith()] + ") has no value"); } else { importLog.addErrorMessage(lineNum, "Cannot create Visit. Visit With field not supplied in data file and no value specified in definition"); } return new Event(this, ERROR_FLOW_EVENT_ID); // to abort processing this import record } **/ String visitStatus = importSetup.getIndexVisitStatus() != -1 ? importSetup.getDataValues()[importSetup.getIndexVisitStatus()] : importDefinition.getVisitStatus(); if (!StringUtils.hasText(visitStatus)) { if (importSetup.getIndexVisitStatus() != -1) { importLog.addErrorMessage(lineNum, "Cannot create Visit. Visit Status field in data file (column:" + importSetup.getDataCols()[importSetup.getIndexVisitStatus()] + ") has no value"); } else { importLog.addErrorMessage(lineNum, "Cannot create Visit. 
Visit Status field not supplied in data file and no value specified in definition"); } return new Event(this, ERROR_FLOW_EVENT_ID); // to abort processing this import record } // create Visit record v = createVisit(importDefinition, importSetup); v.setPatient(importSetup.getPatient()); v.setProjName(importSetup.getRevisedProjName()); v.setVisitType(visitType); // visitDate and visitTime have already been converted above in search for Visit v.setVisitDate(visitDate); v.setVisitTime(visitTime); v.setVisitLocation(visitLoc); v.setVisitWith(visitWith); v.setVisitStatus(visitStatus); // note that non-required fields will be set in the setPropertyHandling method which iterates thru all // property values. this could include custom, instance specific fields. importSetup.setVisitCreated(true); importSetup.setVisit(v); } } else { // Visit already exists importSetup.setVisitExisted(true); importSetup.setVisit(v); if (importDefinition.getVisitExistRule().equals(MUST_NOT_EXIST)) { // typically with this flag the first time the import is run the Visit will not // exist so it will be created above. if there were some import data errors they would be fixed // and the script re-imported, at which point there will be these errors for all Visits that already exist, // and they will correctly not be created again // note: this differs from MAY_OR_MAY_NOT_EXIST where import of the record will continue if // the Visit exists (as well as if Visit does not exist as it will be // created above) importLog.addErrorMessage(lineNum, "Visit already exists, violates Import Definition MUST_NOT_EXIST setting. Patient:" + (importSetup.isPatientExisted() ? importSetup.getPatient().getFullNameWithId() : importSetup.getPatient().getFullName())); return new Event(this, ERROR_FLOW_EVENT_ID); // to abort processing this import record } } return new Event(this, SUCCESS_FLOW_EVENT_ID); } /** * instrumentExistsHandling * * Determine whether instrument exists or not and act accordingly based on the importDefinition * settings. * * @param context * @param errors * @param importDefinition * @param importSetup * @param lineNum * @return SUCCESS Event if no import errors with current record; ERROR EVENT if errors */ protected Event instrumentExistsHandling(RequestContext context, BindingResult errors, CrmsImportDefinition importDefinition, CrmsImportSetup importSetup, CrmsImportLog importLog, int lineNum) { HttpServletRequest request = ((ServletExternalContext)context.getExternalContext()).getRequest(); LavaDaoFilter filter = EntityBase.newFilterInstance(); SimpleDateFormat formatter, msgDateFormatter = new SimpleDateFormat(DEFAULT_DATE_FORMAT); String dateOrTimeAsString; Map<String,Object> eventAttrMap = new HashMap<String,Object>(); AttributeMap attributeMap = new LocalAttributeMap(eventAttrMap); // search for existing instrument Instrument instr = null; Class instrClazz = instrumentManager.getInstrumentClass( Instrument.getInstrTypeEncoded(importDefinition.getInstrType(), importDefinition.getInstrVer())); // determine dcDate for search // convert DCDate Date dcDate = null; // if not supplied in data file then it defaults to visit date when adding new instrument if (importSetup.getIndexInstrDcDate() != -1) { dateOrTimeAsString = importSetup.getDataValues()[importSetup.getIndexInstrDcDate()]; formatter = new SimpleDateFormat(importDefinition.getDateFormat() != null ? 
importDefinition.getDateFormat() : DEFAULT_DATE_FORMAT); formatter.setLenient(true); // to avoid exceptions; we check later to see if leniency was applied try { dcDate = formatter.parse(dateOrTimeAsString); } catch (ParseException e) { // likely will not occur with leniency applied importLog.addErrorMessage(lineNum, "Instrument.dcDate is an invalid Date format. Date:" + dateOrTimeAsString); return new Event(this, ERROR_FLOW_EVENT_ID); // to abort processing this import record } } else { dcDate = importSetup.getVisit().getVisitDate(); } // if Visit just created, know instrument could not exist. otherwise, check if instrument exists or not if (!importSetup.isVisitCreated()) { filter.clearDaoParams(); filter.setAlias("patient", "patient"); filter.addDaoParam(filter.daoEqualityParam("patient.id", importSetup.getPatient().getId())); filter.addDaoParam(filter.daoEqualityParam("projName", importSetup.getRevisedProjName())); filter.setAlias("visit", "visit"); filter.addDaoParam(filter.daoEqualityParam("visit.id", importSetup.getVisit().getId())); filter.addDaoParam(filter.daoEqualityParam("instrType", importDefinition.getInstrType())); filter.addDaoParam(filter.daoEqualityParam("dcDate", dcDate)); try { instr = (Instrument) Instrument.MANAGER.getOne(instrClazz, filter); } catch (IncorrectResultSizeDataAccessException ex) { importLog.addErrorMessage(lineNum, "Duplicate " + importDefinition.getInstrType() + " records for Patient:" + (importSetup.isPatientExisted() ? importSetup.getPatient().getFullNameWithId() : importSetup.getPatient().getFullName()) + " and Visit Date:" + msgDateFormatter.format(importSetup.getVisit().getVisitDate())); return new Event(this, ERROR_FLOW_EVENT_ID); // to abort processing this import record } } if (instr == null) { if (importDefinition.getInstrExistRule().equals(MUST_EXIST)) { importLog.addErrorMessage(lineNum, "Instrument does not exist. Patient:" + (importSetup.isPatientExisted() ? importSetup.getPatient().getFullNameWithId() : importSetup.getPatient().getFullName()) + " Visit Date:" + msgDateFormatter.format(importSetup.getVisit().getVisitDate())); return new Event(this, ERROR_FLOW_EVENT_ID); // to abort processing this import record } else { // instrument does not exist so instantiate String dcStatus = importSetup.getIndexInstrDcStatus() != -1 ? importSetup.getDataValues()[importSetup.getIndexInstrDcStatus()] : importDefinition.getInstrDcStatus(); if (!StringUtils.hasText(dcStatus)) { if (importSetup.getIndexInstrDcStatus() != -1) { importLog.addErrorMessage(lineNum, "Cannot create Instrument. Instrument DC Status field in data file (column:" + importSetup.getDataCols()[importSetup.getIndexInstrDcStatus()] + ") has no value"); } else { importLog.addErrorMessage(lineNum, "Cannot create Instrument. 
Instrument DC Status field not supplied in data file and no value specified in definition"); } return new Event(this, ERROR_FLOW_EVENT_ID); // to abort processing this import record } try { instr = createInstrument(context, importDefinition, importSetup, instrClazz, dcDate, dcStatus); instr.setDcBy(importSetup.getVisit().getVisitWith()); instr.setDeBy("IMPORTED"); instr.setDeDate(new Date()); // if import record does not have any errors then instrument will be saved so set data entry // status complete; if there are errors import record will be skipped and instrument will not // be saved, so it is ok to set data entry status "Complete" here instr.setDeStatus("Complete"); instr.setDeNotes("Data Imported by:" + CrmsSessionUtils.getCrmsCurrentUser(sessionManager, request).getShortUserNameRev()); } catch (Exception ex) { importLog.addErrorMessage(lineNum, "Error instantiating instrument. Patient:" + (importSetup.isPatientExisted() ? importSetup.getPatient().getFullNameWithId() : importSetup.getPatient().getFullName()) + " and Visit Date:" + msgDateFormatter.format(importSetup.getVisit().getVisitDate())); return new Event(this, ERROR_FLOW_EVENT_ID); // to abort processing this import record } } importSetup.setInstrCreated(true); importSetup.setInstrument(instr); } else { // instrument already exists importSetup.setInstrument(instr); if (importDefinition.getInstrExistRule().equals(MUST_NOT_EXIST)) { importLog.addErrorMessage(lineNum, "Instrument already exists violating Import Definition MUST_NOT_EXIST setting. Patient:" + (importSetup.isPatientExisted() ? importSetup.getPatient().getFullNameWithId() : importSetup.getPatient().getFullName()) + " and Visit Date:" + msgDateFormatter.format(importSetup.getVisit().getVisitDate())); return new Event(this, ERROR_FLOW_EVENT_ID); // to abort processing this import record } else { // MAY_OR_MAY_NOT_EXIST or MUST_EXIST // this is where exist flag handling differs for instruments from Patient, EnrollmentStatus, Visit. // need a further check in the case of instruments to make sure the instrument has not already been data entered, and if it // has then only proceed with import if the allowInstrUpdate flag is set in the import definition. // if allow..Update flags were used for Patient, EnrollmentStatus and Visit, then just their mere existence // would be enough to consider the flag, i.e. not whether they have been data entered or not, because // if they exist they must have been data entered because of required fields validation. instruments // can exist without being data entered. // note that this is also different from flags for updating Patient, EnrollmentStatus and Visit because // those would not affect whether the instrument data is imported or not. so while the instrument // flag can be handled at the level of the import record in terms of skipping the whole import // record or not, the Patient, EnrollmentStatus and Visit update flags would not dictate this, and so // their allow..Update flags would be enforced at the individual property setting level // using deDate to determine if instrument has been data entered. 
not looking for a specific deStatus // such as 'Complete' since data entry could have any number of deStatus values if (instr.getDeDate() == null) { importSetup.setInstrExisted(true); } else { if (importDefinition.getAllowInstrUpdate()) { importSetup.setInstrExistedWithData(true); // set an attribute on the return event so the caller can distinguish between an error and the // record already exists eventAttrMap.put("update", Boolean.TRUE); } else { // this is not an error in the sense that there was a problem; rather the ERROR Event is // returned so the current record will not be imported since data already exists, and it is likely // that a data file with this record was already imported. importSetup.setInstrExistedWithData(true); importLog.addDebugMessage(lineNum, "Instrument exists and has already been data entered. Cannot overwrite per Import Definition. Patient:" + (importSetup.isPatientExisted() ? importSetup.getPatient().getFullNameWithId() : importSetup.getPatient().getFullName()) + " and Visit Date:" + msgDateFormatter.format(importSetup.getVisit().getVisitDate())); // set an attribute on the return event so the caller can distinguish between an error and the // record already exists eventAttrMap.put("alreadyExists", Boolean.TRUE); return new Event(this, ERROR_FLOW_EVENT_ID, attributeMap); // to abort processing this import record } } } } return new Event(this, SUCCESS_FLOW_EVENT_ID, attributeMap); } /** * otherExistsHandling * * Subclasses should override to handle additional entities beyond Patient/EnrollmentStatus/ * Visit/instrument. * * @param context * @param errors * @param importDefinition * @param importSetup * @param lineNum * @return */ protected Event otherExistsHandling(RequestContext context, BindingResult errors, CrmsImportDefinition importDefinition, CrmsImportSetup importSetup, CrmsImportLog importLog, int lineNum) { // do nothing return new Event(this, SUCCESS_FLOW_EVENT_ID); } /** * Subclasses override to instantiate a custom, typically instance specific Patient subclass. * * @param importDefinition * @param importSetup * @return */ protected Patient createPatient(CrmsImportDefinition importDefinition, CrmsImportSetup importSetup) { return new Patient(); } /** * Subclasses override to instantiate a custom, typically instance specific ContactInfo subclass. * * @param importDefinition * @param importSetup * @return */ protected ContactInfo createContactInfo(CrmsImportDefinition importDefinition, CrmsImportSetup importSetup) { return new ContactInfo(); } /** * Subclasses override to instantiate a custom, typically instance specific Caregiver subclass. * * @param importDefinition * @param importSetup * @return */ protected Caregiver createCaregiver(CrmsImportDefinition importDefinition, CrmsImportSetup importSetup) { return new Caregiver(); } /** * Subclasses override to instantiate a custom, typically instance specific EnrollmentStatus subclass. * * @param importDefinition * @param importSetup * @return */ protected EnrollmentStatus createEnrollmentStatus(CrmsImportDefinition importDefinition, CrmsImportSetup importSetup) { return enrollmentManager.getEnrollmentStatusPrototype(importSetup.getRevisedProjName()); } /** * Subclasses override to instantiate a custom, typically instance specific Visit subclass. * * @param importDefinition * @param importSetup * @return */ protected Visit createVisit(CrmsImportDefinition importDefinition, CrmsImportSetup importSetup) { return new Visit(); } /** * Subclasses override if instrument creation requires custom behavior. 
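 *
 * A hypothetical override sketch that delegates to the default creation and then applies project-specific
 * initialization (the extra setter is an assumption, not part of the base API):
 *
 *   protected Instrument createInstrument(RequestContext context, CrmsImportDefinition importDefinition, CrmsImportSetup importSetup,
 *           Class instrClazz, Date dcDate, String dcStatus) {
 *       Instrument instr = super.createInstrument(context, importDefinition, importSetup, instrClazz, dcDate, dcStatus);
 *       // e.g. instr.setSomeProjectSpecificDefault(...);  // hypothetical setter on a project-specific subclass
 *       return instr;
 *   }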
* * @param importDefinition * @param importSetup * @param instrClazz * @param p * @param projName * @param v * @param instrType * @param dcDate * @param dcStatus * @return */ protected Instrument createInstrument(RequestContext context, CrmsImportDefinition importDefinition, CrmsImportSetup importSetup, Class instrClazz, Date dcDate, String dcStatus) { return Instrument.create(instrClazz, importSetup.getPatient(), importSetup.getVisit(), importSetup.getRevisedProjName(), importDefinition.getInstrType(), dcDate, dcStatus); } /** * setPropertyHandling * * Iterate over all field/property values in the current data import record, setting the * value on the entity property designated by the import mapping file column and property * with the same column index. * * This method is about matching each data value with the entity and property on which it * should be set. When the entity and property are determined then a setProperty method * is called to actually set the imported data value on an entity property. * * @param context * @param errors * @param importDefinition * @param importSetup * @param importLog * @param lineNum * @return success Event to continue processing this record, error Event to abort processing this record */ protected Event setPropertyHandling(RequestContext context, BindingResult errors, CrmsImportDefinition importDefinition, CrmsImportSetup importSetup, CrmsImportLog importLog, int lineNum) throws Exception { Event returnEvent = new Event(this, SUCCESS_FLOW_EVENT_ID); String definitionColName, definitionPropName, definitionEntityName; for (int i = 0; i < importSetup.getDataValues().length; i++) { returnEvent = new Event(this, SUCCESS_FLOW_EVENT_ID); definitionColName = importSetup.getMappingCols()[i]; definitionEntityName = importSetup.getMappingEntities()[i]; definitionPropName = importSetup.getMappingProps()[i]; logger.info("i="+i+" colName="+definitionColName+ " entityName="+definitionEntityName+" propName="+definitionPropName); // skip fields with column name or property name prefixed by XX if (definitionColName.startsWith("XX") || (definitionEntityName != null && definitionEntityName.startsWith("XX")) || (definitionPropName != null && definitionPropName.startsWith("XX"))) { // do nothing } // Set Property Values // instruments // note that if the instrument should not be updated because it has already been data entered and user // has specified not to overwrite in this case (in the import definition), the import record will have // already been skipped and we will not get to the setting of properties // shorthand can be used in the mapping file for the first instrument specified in the // importDefinition mapping file. the entity can be left blank and the first instrument will be // used as the entity. this eases creation of mapping files because: // a) most of the time there is only one instrument in a data file so no need to populate the entity // in the mapping for all instrument variables // b) an instrument may have many, many variables so this cuts down on what has to be set up in // the mapping file for each variable // note that all other entities must supply the entity in the entity row, i.e. Patient, EnrollmentStatus, // Visit, etc. 
and all instruments beyond the first one // give this first instrument first shot at determining if the property applies to it, to support // this shorthand else if (!((CrmsImportDefinition)importDefinition).getPatientOnlyImport() && (!StringUtils.hasText(definitionEntityName) || definitionEntityName.equalsIgnoreCase(importSetup.getInstrument().getInstrType()))) { //TODO: handle case where entity is an instrType to support multiple instrument data imports //mappingPropName.startsWith(..each of the importDefinition instrType (10 of them)). this could also match //the default instrument if mapping file puts in entity name (instrType) for it. //if startsWith matches, given that instrumentExistsHandler will have iterated across all importDefinition //instrTypes instantiating each, use the corresponding instantiated instrument (importSetup will have //properties instrument, instrument2, instrument3, etc. that correspond with importDefinition //instrType, instrTyp2, instrType3, etc. (and don't forget about instrVer) //could go with either having mappingEntities be instrType or instrTypeEncoded. if the latter, //instrExistsHandling will instantiate the Instrument from which instrTypeEncoded can be obtained //(or it can be obtained passing importDefinition instrType to the static getInstrTypeEncoded method //instrTypeEncoded would be a bit off for the users. instrType just have to be careful with spaces, etc. //that there is an exact match // missing entity means shorthand to use the first instrument specified in importDefinition //if (!StringUtils.hasText(definitionEntityName)) { // for all instruments, if the property name is left blank in the mapping file that means that the // column name is the same as the property name, so there is no need to redundantly specify the property // name as well. String propName = null; if (!StringUtils.hasText(definitionPropName)) { propName = definitionColName; } else { propName = definitionPropName; } // set property on the first instrument specified in importDefinition returnEvent = this.setProperty(importDefinition, importSetup, importLog, importSetup.getInstrument(), propName, i, lineNum); } //Patient properties else if (definitionEntityName.equalsIgnoreCase("patient")) { // if decide to allow updates on existing Patients, then have the flags in data model to do: // (importSetup.isPatientCreated() || importDefinition.getAllowPatientUpdates()) // same for EnrollmentStatus, Visit //thinking is that should not have Patient, EnrollmentStatus, Visit allowUpdate flags because import //will be used for either //a) creating these if they do not exist and importing assessment data, or //b) importing assessment data, //i.e. 
import is not a mechanism for updating Patient, EnrollmentStatus and Visit data if (importSetup.isPatientCreated()) { // don't need to set properties already set when Patient was created if (!definitionPropName.equalsIgnoreCase("firstName") && !definitionPropName.equalsIgnoreCase("lastName") && !definitionPropName.equalsIgnoreCase("birthDate") && !definitionPropName.equalsIgnoreCase("gender")) { returnEvent = this.setProperty(importDefinition, importSetup, importLog, importSetup.getPatient(), definitionPropName, i, lineNum); } } } //ContactInfo properties else if (definitionEntityName.equalsIgnoreCase("contactInfo")) { if (importSetup.isContactInfoCreated()) { // don't need to set properties already set when ContactInfo was created if (!definitionPropName.equalsIgnoreCase("address") && !definitionPropName.equalsIgnoreCase("city") && !definitionPropName.equalsIgnoreCase("state") && !definitionPropName.equalsIgnoreCase("zip") && !definitionPropName.equalsIgnoreCase("phone1") && !definitionPropName.equalsIgnoreCase("email")) { returnEvent = this.setProperty(importDefinition, importSetup, importLog, importSetup.getContactInfo(), definitionPropName, i, lineNum); } } } //Caregiver properties else if (definitionEntityName.equalsIgnoreCase("caregiver")) { if (importSetup.isCaregiverCreated()) { // don't need to set properties already set when Caregiver was created if (!definitionPropName.equalsIgnoreCase("firstName") && !definitionPropName.equalsIgnoreCase("lastName")) { returnEvent = this.setProperty(importDefinition, importSetup, importLog, importSetup.getCaregiver(), definitionPropName, i, lineNum); } } } //Caregiver ContactInfo properties else if (definitionEntityName.equalsIgnoreCase("caregiverContactInfo")) { if (importSetup.isCaregiverContactInfoCreated()) { // don't need to set properties already set when Caregiver ContactInfo was created if (!definitionPropName.equalsIgnoreCase("address") && !definitionPropName.equalsIgnoreCase("city") && !definitionPropName.equalsIgnoreCase("state") && !definitionPropName.equalsIgnoreCase("zip") && !definitionPropName.equalsIgnoreCase("phone1") && !definitionPropName.equalsIgnoreCase("email")) { returnEvent = this.setProperty(importDefinition, importSetup, importLog, importSetup.getCaregiverContactInfo(), definitionPropName, i, lineNum); } } } //Caregiver2 properties else if (definitionEntityName.equalsIgnoreCase("caregiver2")) { if (importSetup.isCaregiver2Created()) { // don't need to set properties already set when Caregiver2 was created if (!definitionPropName.equalsIgnoreCase("firstName") && !definitionPropName.equalsIgnoreCase("lastName")) { returnEvent = this.setProperty(importDefinition, importSetup, importLog, importSetup.getCaregiver2(), definitionPropName, i, lineNum); } } } //Caregiver2 ContactInfo properties else if (definitionEntityName.equalsIgnoreCase("caregiver2ContactInfo")) { if (importSetup.isCaregiver2ContactInfoCreated()) { // don't need to set properties already set when Caregiver2 ContactInfo was created if (!definitionPropName.equalsIgnoreCase("address") && !definitionPropName.equalsIgnoreCase("city") && !definitionPropName.equalsIgnoreCase("state") && !definitionPropName.equalsIgnoreCase("zip") && !definitionPropName.equalsIgnoreCase("phone1") && !definitionPropName.equalsIgnoreCase("email")) { returnEvent = this.setProperty(importDefinition, importSetup, importLog, importSetup.getCaregiver2ContactInfo(), definitionPropName, i, lineNum); } } } //EnrollmentStatus properties else if 
(definitionEntityName.equalsIgnoreCase("enrollmentStatus")) { if (importSetup.isEnrollmentStatusCreated()) { returnEvent = this.setProperty(importDefinition, importSetup, importLog, importSetup.getEnrollmentStatus(), definitionPropName, i, lineNum); } } //Visit properties else if (!((CrmsImportDefinition)importDefinition).getPatientOnlyImport() && definitionEntityName.equalsIgnoreCase("visit")) { if (importSetup.isVisitCreated()) { // don't need to set properties already set when Visit was created if (!definitionPropName.equalsIgnoreCase("visitDate") && !definitionPropName.equalsIgnoreCase("visitType") && !definitionPropName.equalsIgnoreCase("visitLocation") && !definitionPropName.equalsIgnoreCase("visitWith") && !definitionPropName.equalsIgnoreCase("visitStatus")) { returnEvent = this.setProperty(importDefinition, importSetup, importLog, importSetup.getVisit(), definitionPropName, i, lineNum); } } } else { // allow subclasses to set entity properties for any custom behavior returnEvent = setOtherPropertyHandling(importDefinition, importSetup, importLog, i, lineNum); } // abort import of the current record if there was an error setting the imported value on the property if (returnEvent.getId().equals(ERROR_FLOW_EVENT_ID)) { return new Event(this, ERROR_FLOW_EVENT_ID); // to abort processing this import record } } return returnEvent; } /** * setInstrumentCaregiver * * If the instrument has a caregiver (informant) property, set a caregiver on the property if the * data file contains a caregiver (i.e. first and last name). The first and last name of the caregiver * would either match an existing caregiver or populate a new caregiver. * * @param context * @param errors * @param importDefinition * @param importSetup * @param importLog * @param lineNum * @return success Event to continue processing this record, error Event to abort processing this record */ protected Event setInstrumentCaregiver(RequestContext context, BindingResult errors, CrmsImportDefinition importDefinition, CrmsImportSetup importSetup, CrmsImportLog importLog, int lineNum) throws Exception { Event returnEvent = new Event(this, SUCCESS_FLOW_EVENT_ID); //NOW THAT HAVE CHANGED to Caregiver association, just need an importdefintion flag for Caregiver instrument, and // if set then try to set the caregiver here // special handling for instruments that have a Caregiver // add "instrumentCaregiverId" as the last column of the mapping file even though there is no caregiver ID column in the data file // This pseudo column should be mapped to the instrument property that stores caregiver ID, as defined in the mapping file if (importDefinition.getInstrCaregiver().equals(1)) { // if (importSetup.getIndexInstrCaregiverId() != -1) { if (importSetup.isCaregiverCreated() || importSetup.isCaregiverExisted()) { //TODO:when support multiple instruments in a single import, each caregiver instrument could have an "instrumentCaregiverId" column mapping where the //entity would map it for a specific instrument, so would then need to check mappingEntities for which instrument to set (can assume that the Caregiver //is the same for all instruments on the same row of data). //PROBLEM is then need a separate indexInstrCaregiverId for each instrument, so that needs to be figure out in conjunction with how handling and setting //properties on multiple instruments will be done in general (e.g. 
also need separate instrDcDate, instrDcStatus properties for each instrument) // BeanUtils.setProperty(importSetup.getInstrument(), importSetup.getMappingProps()[((CrmsImportSetup)importSetup).getIndexInstrCaregiverId()], ((CrmsImportSetup)importSetup).getCaregiver().getId()); importSetup.getInstrument().setCaregiver(importSetup.getCaregiver()); } else { importLog.addWarningMessage(lineNum, "No Caregiver found or created to assign to instrument:" + importDefinition.getInstrType() + " for patient:" + (importSetup.isPatientExisted() ? importSetup.getPatient().getFullNameWithId() : importSetup.getPatient().getFullName())); } } return returnEvent; } /** * Subclasses override this if setting a property involves any custom behavior. * * @param importDefinition * @param importSetup * @param entity * @param propName * @param i * @throws Exception */ protected Event setProperty(CrmsImportDefinition importDefinition, CrmsImportSetup importSetup, CrmsImportLog importLog, LavaEntity entity, String propName, int i, int lineNum) throws Exception { // default BeanUtils converter will set empty values to a default which is not null, so change the // behavior so property is set to null // note: could just skip null values since property value is already null on new instrument, but if // this code were to be used for an import update then would need to set null values //TODO: consult the metadata for each property // 1) if the property is a string/text value then check the length of the data vs. the max string length. // add a flag to import definition about how user wants this handled: either do not import record and // create error, or truncate the string to the max length and import it and create warning // 2) validate data value by obtaining metadata for the entity.property, i.e. list of valid values if (!StringUtils.hasText(importSetup.getDataValues()[i])) { // temporarily change the conversion handling so if no value to convert, just sets property null instead of // throwing an exception // false -use a default value instead of throwing a conversion exception (for any conversions) // true - use null for the default value // -1 - array types defaulted to null this.getConvertUtilsBean().register(false, true, -1); } try { // use Apache Commons BeanUtils rather than PropertyUtils as BeanUtils will convert the data value // from String to its correct type logger.info("setting propName="+propName+" to value="+importSetup.getDataValues()[i]); BeanUtils.setProperty(entity, propName, importSetup.getDataValues()[i]); } catch (InvocationTargetException ex) { String visitDateAsString = ((CrmsImportDefinition)importDefinition).getPatientOnlyImport() ? "" : "Visit Date:" + importSetup.getVisit().getVisitDate(); importLog.addErrorMessage(lineNum, "[InvocationTargetException] Error setting property: Property:" + propName + " Value:" + importSetup.getDataValues()[i] + " Patient:" + (importSetup.isPatientExisted() ? importSetup.getPatient().getFullNameWithId() : importSetup.getPatient().getFullName()) + visitDateAsString); return new Event(this, ERROR_FLOW_EVENT_ID); } catch (IllegalAccessException ex) { String visitDateAsString = ((CrmsImportDefinition)importDefinition).getPatientOnlyImport() ? "" : "Visit Date:" + importSetup.getVisit().getVisitDate(); importLog.addErrorMessage(lineNum, "[IllegalAccessException] Error setting property: Property:" + propName + " Value:" + importSetup.getDataValues()[i] + " Patient:" + (importSetup.isPatientExisted() ? 
importSetup.getPatient().getFullNameWithId() : importSetup.getPatient().getFullName()) + visitDateAsString); return new Event(this, ERROR_FLOW_EVENT_ID); } catch (Exception ex) { String visitDateAsString = ((CrmsImportDefinition)importDefinition).getPatientOnlyImport() ? "" : "Visit Date:" + importSetup.getVisit().getVisitDate(); importLog.addErrorMessage(lineNum, "[Exception] Error setting property: Property:" + propName + " Value:" + importSetup.getDataValues()[i] + " Patient:" + (importSetup.isPatientExisted() ? importSetup.getPatient().getFullNameWithId() : importSetup.getPatient().getFullName()) + visitDateAsString); return new Event(this, ERROR_FLOW_EVENT_ID); } if (!StringUtils.hasText(importSetup.getDataValues()[i])) { // resume throwing exceptions (second and third arguments ignored in this case) this.setupBeanUtilConverters(importSetup); } return new Event(this, SUCCESS_FLOW_EVENT_ID); } /** * Subclasses override this to set a value on a property of an entity other than Patient, * Visit, EnrollmentStatus or the instrument. * * @param importDefinition * @param importSetup * @param i * @throws Exception */ protected Event setOtherPropertyHandling(CrmsImportDefinition importDefinition, CrmsImportSetup importSetup, CrmsImportLog importLog, int i, int lineNum) throws Exception { // if property was not set in setPropertyHandling then it is likely that there is a mapping problem, or it is // a custom property that a subclass should handle in an overridden setOtherPropertyHandling importLog.addErrorMessage(lineNum, "Property not set: Mapping column:" + importSetup.getMappingCols()[i] + " Mapping entity:" + importSetup.getMappingEntities()[i] + " Mapping property:" + importSetup.getMappingProps()[i]); return new Event(this, ERROR_FLOW_EVENT_ID); } /** * saveImportRecord * * Persist the import record to the applicable entities. * * Subclasses should override if they involve additional entities, such as ContactInfo or * Caregiver, or for other custom handling. Make sure they call this superclass method. */ protected Event saveImportRecord(CrmsImportDefinition importDefinition, CrmsImportSetup importSetup, CrmsImportLog importLog, int lineNum) { // for new entities must explicitly save since not associated with a Hibernate session. 
for // updates, entity was retrieved and thus attached to a session and Hibernate dirty checking should // implicitly update the entity try { if (importSetup.isPatientCreated()) { importSetup.getPatient().save(); importLog.addCreatedMessage(lineNum, "PATIENT CREATED:" + importSetup.getPatient().getFullName()); } if (importSetup.isContactInfoCreated()) { importSetup.getContactInfo().save(); importLog.addCreatedMessage(lineNum, "CONTACT INFO CREATED, Patient:" + importSetup.getPatient().getFullName()); } if (importSetup.isCaregiverCreated()) { importSetup.getCaregiver().save(); importLog.addCreatedMessage(lineNum, "CAREGIVER CREATED, Patient:" + importSetup.getPatient().getFullName() + " Caregiver:" + importSetup.getCaregiver().getFullName()); } if (importSetup.isCaregiverContactInfoCreated()) { importSetup.getCaregiverContactInfo().save(); importLog.addCreatedMessage(lineNum, "CAREGIVER CONTACT INFO CREATED, Patient:" + importSetup.getPatient().getFullName() + " Caregiver:" + importSetup.getCaregiver().getFullName()); } if (importSetup.isCaregiver2Created()) { importSetup.getCaregiver2().save(); importLog.addCreatedMessage(lineNum, "CAREGIVER CREATED, Patient:" + importSetup.getPatient().getFullName() + " Caregiver:" + importSetup.getCaregiver2().getFullName()); } if (importSetup.isCaregiver2ContactInfoCreated()) { importSetup.getCaregiver2ContactInfo().save(); importLog.addCreatedMessage(lineNum, "CAREGIVER CONTACT INFO CREATED, Patient:" + importSetup.getPatient().getFullName() + " Caregiver:" + importSetup.getCaregiver2().getFullName()); } if (importSetup.isEnrollmentStatusCreated()) { importSetup.getEnrollmentStatus().save(); importLog.addCreatedMessage(lineNum, "ENROLLMENTSTATUS CREATED, Patient:" + importSetup.getPatient().getFullName() + " Project:" + importSetup.getRevisedProjName()); } if (importSetup.isVisitCreated()) { importSetup.getVisit().save(); importLog.addCreatedMessage(lineNum, "VISIT CREATED, Patient:" + importSetup.getPatient().getFullName() + " VisitDate:" + importSetup.getVisit().getVisitDate() + " VisitType:" + importSetup.getVisit().getVisitType()); } // allowInstrUpdate is used to determine whether an already existing instrument which has already // been data entered can be updated if (importSetup.isInstrCreated() || importDefinition.getAllowInstrUpdate()) { importSetup.getInstrument().save(); importLog.addCreatedMessage(lineNum, "INSTRUMENT CREATED, Patient:" + importSetup.getPatient().getFullName() + " Instrument:" + importDefinition.getInstrType()); } } catch (Exception e) { int i = 0; // e = InvalidDataAccessResourceException // e.cause = DataException // e.cause.cause = MysqlDataTruncation // could potentially parse violating property out of cause.message e.g. "Data too long for column 'sp56_list' at row 1" // if data truncation exception iterate thru all properties, querying metadata and if property // style is: "suggest", "string" or "text", get the max length from the metadata and iterate thru // the properties comparing value against max length: // if user set flag to truncate and warn: truncate data that exceeds max length and create warning and try again // if user set flag to error out on the current record, stop processing this patient record and create error importLog.addErrorMessage(lineNum, "Exception on save. Could be incomplete import of this record." + " Patient:" + (importSetup.isPatientExisted() ? 
importSetup.getPatient().getFullNameWithId() : importSetup.getPatient().getFullName()) + "<br>Message:" + e.getMessage() + "<br>RootCause:" + ExceptionUtils.getRootCauseMessage(e)); return new Event(this, ERROR_FLOW_EVENT_ID); } return new Event(this, SUCCESS_FLOW_EVENT_ID); } /** * Called at the end of processing an import record (that was not aborted due to an error). * */ protected void updateEntityCounts(CrmsImportSetup importSetup, CrmsImportLog importLog) { if (importSetup.isPatientCreated()) { importLog.incNewPatients(); } if (importSetup.isPatientExisted()) { importLog.incExistingPatients(); } if (importSetup.isContactInfoCreated()) { importLog.incNewContactInfo(); } if (importSetup.isContactInfoExisted()) { importLog.incExistingContactInfo(); } if (importSetup.isCaregiverCreated()) { importLog.incNewCaregivers(); } if (importSetup.isCaregiverExisted()) { importLog.incExistingCaregivers(); } // note: do not keep a count of "existing" for Caregiver ContactInfo since don't check for that, i.e. Caregiver // ContactInfo is tightly bound to Caregiver so if Caregiver exists there is no check to see if ContactInfo exists. // Caregiver ContactInfo is only created when Caregiver created and there is ContactInfo data in the data file if (importSetup.isCaregiverContactInfoCreated()) { importLog.incNewCaregiverContactInfo(); } // note that caregiver2 and caregiver both count together for NewCaregivers, ExistingCaregivers and NewCaregiverContactInfo if (importSetup.isCaregiver2Created()) { importLog.incNewCaregivers(); } if (importSetup.isCaregiver2Existed()) { importLog.incExistingCaregivers(); } // see comments for caregiverContactInfo above if (importSetup.isCaregiver2ContactInfoCreated()) { importLog.incNewCaregiverContactInfo(); } if (importSetup.isEnrollmentStatusCreated()) { importLog.incNewEnrollmentStatuses(); } if (importSetup.isEnrollmentStatusExisted()) { importLog.incExistingEnrollmentStatuses(); } if (importSetup.isVisitCreated()) { importLog.incNewVisits(); } if (importSetup.isVisitExisted()) { importLog.incExistingVisits(); } if (importSetup.isInstrCreated()) { importLog.incNewInstruments(); } // both instrument existed flags will be set but "WithData" takes precedence if (importSetup.isInstrExistedWithData()) { importLog.incExistingInstrumentsWithData(); } else if (importSetup.isInstrExisted()) { importLog.incExistingInstruments(); } } @Override public Map addReferenceData(RequestContext context, Object command, BindingResult errors, Map model) { HttpServletRequest request = ((ServletExternalContext)context.getExternalContext()).getRequest(); // load up dynamic lists StateDefinition state = context.getCurrentState(); model = super.addReferenceData(context, command, errors, model); Map<String,Map<String,String>> dynamicLists = getDynamicLists(model); CrmsImportSetup crmsImportSetup = (CrmsImportSetup) ((ComponentCommand)command).getComponents().get(this.getDefaultObjectName()); if (state.getId().equals("edit")) { // note that this list is filtered via projectAuth filter. CrmsAuthUser getAuthDaoFilters determines the projects to Map<String,String> projList = listManager.getDynamicList(CrmsSessionUtils.getCrmsCurrentUser(sessionManager,request), "context.projectList"); projList = CrmsAuthUtils.filterProjectListByPermission(CrmsSessionUtils.getCrmsCurrentUser(sessionManager,request), CoreSessionUtils.getCurrentAction(sessionManager,request), projList); dynamicLists.put("context.projectList", projList); } model.put("dynamicLists", dynamicLists); return model; } }
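The setProperty() method above relies on Apache Commons BeanUtils to convert each imported String value to the target property's type, and temporarily re-registers the converters so that empty values are set to null instead of a non-null default or an exception. The standalone sketch below (not part of the original source) shows that conversion behavior in isolation; the Widget class and its score property are hypothetical, and commons-beanutils 1.8+ is assumed, with ConvertUtilsBean.register(boolean, boolean, int), the BeanUtilsBean(ConvertUtilsBean) constructor and setProperty(...) being the only real API used.

import org.apache.commons.beanutils.BeanUtilsBean;
import org.apache.commons.beanutils.ConvertUtilsBean;

public class BeanUtilsNullConversionDemo {

    /** Hypothetical target bean standing in for an imported entity/instrument. */
    public static class Widget {
        private Integer score;
        public Integer getScore() { return score; }
        public void setScore(Integer score) { this.score = score; }
    }

    public static void main(String[] args) throws Exception {
        final ConvertUtilsBean convertUtils = new ConvertUtilsBean();
        // false = do not throw on conversion failure, true = use null as the default value,
        // -1 = default array conversions to null as well (mirrors register(false, true, -1) above)
        convertUtils.register(false, true, -1);
        final BeanUtilsBean beanUtils = new BeanUtilsBean(convertUtils);

        final Widget w = new Widget();
        beanUtils.setProperty(w, "score", "");    // empty import cell -> property left null
        System.out.println(w.getScore());         // null

        beanUtils.setProperty(w, "score", "42");  // normal String-to-Integer conversion
        System.out.println(w.getScore());         // 42
    }
}

Registering the lenient converters only while an empty value is being set, and restoring the strict converters afterwards (as setProperty() does via setupBeanUtilConverters), keeps genuine conversion failures visible as errors in the import log.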
package mingzuozhibi.persist.disc; import mingzuozhibi.persist.BaseModel; import org.json.JSONObject; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.Transient; import java.time.LocalDate; import java.time.LocalDateTime; import java.util.Objects; import java.util.Optional; import java.util.Set; @Entity public class Disc extends BaseModel implements Comparable<Disc> { public enum DiscType { Cd, Dvd, Bluray, Box, Other } public enum UpdateType { Sakura, Amazon, Both, None } private String asin; private String title; private String titlePc; private String titleMo; private Integer thisRank; private Integer prevRank; private Integer nicoBook; private Integer todayPt; private Integer totalPt; private Integer guessPt; private DiscType discType; private boolean amazonLimit; private UpdateType updateType; private LocalDate releaseDate; private LocalDateTime createTime; private LocalDateTime updateTime; private LocalDateTime modifyTime; public Disc() { } public Disc(String asin, String title, DiscType discType, UpdateType updateType, boolean amazonLimit, LocalDate releaseDate) { this.asin = asin; this.title = title; this.discType = discType; this.updateType = updateType; this.amazonLimit = amazonLimit; this.releaseDate = releaseDate; this.createTime = LocalDateTime.now().withNano(0); } @Column(length = 20, nullable = false, unique = true) public String getAsin() { return asin; } public void setAsin(String asin) { this.asin = asin; } @Column(length = 500, nullable = false) public String getTitle() { return title; } public void setTitle(String title) { this.title = title; } @Column(length = 500) public String getTitlePc() { return titlePc; } public void setTitlePc(String titlePc) { this.titlePc = titlePc; } @Column(length = 100) public String getTitleMo() { return titleMo; } public void setTitleMo(String titleMo) { this.titleMo = titleMo; } @Column public Integer getThisRank() { return thisRank; } public void setThisRank(Integer thisRank) { this.thisRank = thisRank; } @Column public Integer getPrevRank() { return prevRank; } public void setPrevRank(Integer prevRank) { this.prevRank = prevRank; } @Column public Integer getNicoBook() { return nicoBook; } public void setNicoBook(Integer nicoBook) { this.nicoBook = nicoBook; } @Column public Integer getTodayPt() { return todayPt; } public void setTodayPt(Integer todayPt) { this.todayPt = todayPt; } @Column public Integer getTotalPt() { return totalPt; } public void setTotalPt(Integer totalPt) { this.totalPt = totalPt; } @Column public Integer getGuessPt() { return guessPt; } public void setGuessPt(Integer guessPt) { this.guessPt = guessPt; } @Column(nullable = false) public DiscType getDiscType() { return discType; } public void setDiscType(DiscType discType) { this.discType = discType; } @Column(nullable = false) public boolean isAmazonLimit() { return amazonLimit; } public void setAmazonLimit(boolean amazonLimit) { this.amazonLimit = amazonLimit; } @Column(nullable = false) public UpdateType getUpdateType() { return updateType; } public void setUpdateType(UpdateType updateType) { this.updateType = updateType; } @Column(nullable = false) public LocalDate getReleaseDate() { return releaseDate; } public void setReleaseDate(LocalDate releaseDate) { this.releaseDate = releaseDate; } @Column(nullable = false) public LocalDateTime getCreateTime() { return createTime; } public void setCreateTime(LocalDateTime createTime) { this.createTime = createTime; } @Column public LocalDateTime getUpdateTime() { return updateTime; 
} public void setUpdateTime(LocalDateTime updateTime) { this.updateTime = updateTime; } @Column public LocalDateTime getModifyTime() { return modifyTime; } public void setModifyTime(LocalDateTime modifyTime) { this.modifyTime = modifyTime; } @Transient public long getSurplusDays() { return getReleaseDate().toEpochDay() - LocalDate.now().toEpochDay(); } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Disc disc = (Disc) o; return Objects.equals(asin, disc.asin); } @Override public int hashCode() { return Objects.hash(asin); } @Override public int compareTo(Disc disc) { Objects.requireNonNull(disc); Integer rank1 = this.getThisRank(); Integer rank2 = disc.getThisRank(); boolean empty1 = rank1 == null; boolean empty2 = rank2 == null; if (!empty1 && !empty2) { return rank1.compareTo(rank2); } else { return empty1 && empty2 ? 0 : empty1 ? 1 : -1; } } public JSONObject toJSON() { JSONObject object = new JSONObject(); object.put("id", getId()); object.put("asin", getAsin()); object.put("title", getTitle()); object.put("titlePc", getTitlePc()); object.put("titleMo", getTitleMo()); object.put("thisRank", getThisRank()); object.put("prevRank", getPrevRank()); object.put("nicoBook", getNicoBook()); object.put("todayPt", getTodayPt()); object.put("totalPt", getTotalPt()); object.put("guessPt", getGuessPt()); object.put("amazonLimit", isAmazonLimit()); object.put("discType", getDiscType().name()); object.put("updateType", getUpdateType().name()); object.put("releaseDate", getReleaseDate().toString()); object.put("createTime", toEpochMilli(getCreateTime())); Optional.ofNullable(getUpdateTime()).ifPresent(updateTime -> { object.put("updateTime", toEpochMilli(updateTime)); }); Optional.ofNullable(getModifyTime()).ifPresent(modifyTime -> { object.put("modifyTime", toEpochMilli(modifyTime)); }); object.put("surplusDays", getSurplusDays()); return object; } public JSONObject toJSON(Set<String> columns) { JSONObject object = toJSON(); object.keys().forEachRemaining(key -> { if (!columns.contains(key)) { object.remove(key); } }); return object; } }
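Disc.compareTo() above orders discs by thisRank ascending while pushing discs without a rank to the end. As a quick illustration (not part of the original source), the same ordering can be expressed with a java.util.Comparator; the sample ASINs and titles below are made up.

import mingzuozhibi.persist.disc.Disc;

import java.time.LocalDate;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

public class DiscOrderingDemo {
    public static void main(String[] args) {
        Disc ranked = new Disc("B000000001", "Sample Title A", Disc.DiscType.Bluray, Disc.UpdateType.Both, false, LocalDate.now());
        Disc unranked = new Disc("B000000002", "Sample Title B", Disc.DiscType.Bluray, Disc.UpdateType.Both, false, LocalDate.now());
        ranked.setThisRank(120); // unranked.thisRank is left null on purpose

        List<Disc> discs = new ArrayList<>();
        discs.add(unranked);
        discs.add(ranked);

        // Equivalent to Disc.compareTo: ascending rank, null ranks sorted last
        Comparator<Integer> rankOrder = Comparator.nullsLast(Comparator.<Integer>naturalOrder());
        discs.sort(Comparator.comparing(Disc::getThisRank, rankOrder));

        System.out.println(discs.get(0).getAsin()); // B000000001 (the ranked disc comes first)
    }
}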
package net.emaze.dysfunctional; import java.util.Comparator; import java.util.Iterator; import net.emaze.dysfunctional.contracts.dbc; import net.emaze.dysfunctional.iterations.ArrayIterator; import net.emaze.dysfunctional.options.Maybe; import net.emaze.dysfunctional.order.JustBeforeNothingComparator; import net.emaze.dysfunctional.order.SequencingPolicy; import net.emaze.dysfunctional.ranges.DenseRange; import net.emaze.dysfunctional.ranges.Difference; import net.emaze.dysfunctional.ranges.Intersection; import net.emaze.dysfunctional.ranges.Range; import net.emaze.dysfunctional.ranges.Range.Endpoint; import net.emaze.dysfunctional.ranges.SymmetricDifference; import net.emaze.dysfunctional.ranges.Union; import net.emaze.dysfunctional.reductions.Reductor; /** * * @author rferranti */ public class Ranges<T> { private final Comparator<Maybe<T>> comparator; private final SequencingPolicy<T> sequencer; private final T emptyValue; public Ranges(Comparator<T> comparator, SequencingPolicy<T> sequencer, T emptyValue) { dbc.precondition(comparator != null, "cannot create Ranges<T> with a null Comparator<T>"); dbc.precondition(sequencer != null, "cannot create Ranges<T> with a null SequencingPolicy<T>"); this.comparator = new JustBeforeNothingComparator<T>(comparator); this.sequencer = sequencer; this.emptyValue = emptyValue; } public Range<T> of(Endpoint left, T lower, T upper, Endpoint right) { return new DenseRange<T>(sequencer, comparator, left, lower, Maybe.just(upper), right); } /** * returns ( lower, upper ) * * @param lower * @param upper * @return (lower, upper) */ public Range<T> open(T lower, T upper) { return new DenseRange<T>(sequencer, comparator, Endpoint.Exclude, lower, Maybe.just(upper), Endpoint.Exclude); } /** * returns [ lower, upper ] * * @param lower * @param upper * @return [ lower, upper ] */ public Range<T> closed(T lower, T upper) { return new DenseRange<T>(sequencer, comparator, Endpoint.Include, lower, Maybe.just(upper), Endpoint.Include); } /** * Creates a singleton Range with the passed value. 
* returns [ value, value ] * * @param value * @return [ lower, upper ] */ public Range<T> degenerate(T value) { return new DenseRange<T>(sequencer, comparator, Endpoint.Include, value, Maybe.just(value), Endpoint.Include); } /** * returns [ emptyValue, emptyValue ) * * @param emptyValue * @param emptyValue * @return [ emptyValue, emptyValue ) */ public Range<T> empty() { return new DenseRange<T>(this.sequencer, this.comparator, Endpoint.Include, emptyValue, Maybe.just(emptyValue), Endpoint.Exclude); } public Range<T> union(Range<T> lhs, Range<T> rhs) { final Union<T> union = new Union<T>(sequencer, comparator, emptyValue); return union.perform(lhs, rhs); } public Range<T> union(Range<T> first, Range<T> second, Range<T> third) { final Union<T> union = new Union<T>(sequencer, comparator, emptyValue); return union.perform(union.perform(first, second), third); } public Range<T> union(Iterator<Range<T>> ranges) { dbc.precondition(ranges != null, "cannot evaluate union for a null iterator of ranges"); dbc.precondition(ranges.hasNext(), "cannot evaluate union for an empty iterator of ranges"); final Union<T> union = new Union<T>(sequencer, comparator, emptyValue); return new Reductor<Range<T>, Range<T>>(union, ranges.next()).perform(ranges); } public Range<T> union(Iterable<Range<T>> ranges) { dbc.precondition(ranges != null, "cannot evaluate union for a null iterable of ranges"); dbc.precondition(ranges.iterator().hasNext(), "cannot evaluate union for an empty iterable of ranges"); final Iterator<Range<T>> iterator = ranges.iterator(); final Union<T> union = new Union<T>(sequencer, comparator, emptyValue); return new Reductor<Range<T>, Range<T>>(union, iterator.next()).perform(iterator); } public Range<T> union(Range<T>... ranges) { dbc.precondition(ranges != null, "cannot evaluate union for a null array of ranges"); dbc.precondition(ranges.length != 0, "cannot evaluate union for an empty array of ranges"); final Iterator<Range<T>> iterator = new ArrayIterator<Range<T>>(ranges); final Union<T> union = new Union<T>(sequencer, comparator, emptyValue); return new Reductor<Range<T>, Range<T>>(union, iterator.next()).perform(iterator); } public Range<T> intersect(Range<T> lhs, Range<T> rhs) { final Intersection<T> intersection = new Intersection<T>(sequencer, comparator, emptyValue); return intersection.perform(lhs, rhs); } public Range<T> intersect(Range<T> first, Range<T> second, Range<T> third) { final Intersection<T> intersection = new Intersection<T>(sequencer, comparator, emptyValue); return intersection.perform(intersection.perform(first, second), third); } public Range<T> intersect(Iterator<Range<T>> ranges) { dbc.precondition(ranges != null, "cannot intersection a null iterator of ranges"); dbc.precondition(ranges.hasNext(), "cannot intersection an empty iterator of ranges"); final Intersection<T> intersection = new Intersection<T>(sequencer, comparator, emptyValue); return new Reductor<Range<T>, Range<T>>(intersection, ranges.next()).perform(ranges); } public Range<T> intersect(Iterable<Range<T>> ranges) { dbc.precondition(ranges != null, "cannot intersect a null iterable of ranges"); dbc.precondition(ranges.iterator().hasNext(), "cannot intersect an empty iterable of ranges"); final Intersection<T> intersection = new Intersection<T>(sequencer, comparator, emptyValue); final Iterator<Range<T>> iterator = ranges.iterator(); return new Reductor<Range<T>, Range<T>>(intersection, iterator.next()).perform(iterator); } public Range<T> intersect(Range<T>... 
ranges) { dbc.precondition(ranges != null, "cannot intersect a null array of ranges"); dbc.precondition(ranges.length != 0, "cannot intersect an empty array of ranges"); final Intersection<T> intersection = new Intersection<T>(sequencer, comparator, emptyValue); final Iterator<Range<T>> iterator = new ArrayIterator<Range<T>>(ranges); return new Reductor<Range<T>, Range<T>>(intersection, iterator.next()).perform(iterator); } public Range<T> symmetricDifference(Range<T> lhs, Range<T> rhs) { final SymmetricDifference<T> symmetricDifference = new SymmetricDifference<T>(sequencer, comparator, emptyValue); return symmetricDifference.perform(lhs, rhs); } public Range<T> symmetricDifference(Range<T> first, Range<T> second, Range<T> third) { final SymmetricDifference<T> symmetricDifference = new SymmetricDifference<T>(sequencer, comparator, emptyValue); return symmetricDifference.perform(symmetricDifference.perform(first, second), third); } public Range<T> symmetricDifference(Iterator<Range<T>> ranges) { dbc.precondition(ranges != null, "cannot evaluate symmetric difference for a null iterator of ranges"); dbc.precondition(ranges.hasNext(), "cannot evaluate symmetric difference for an empty iterator of ranges"); final SymmetricDifference<T> symmetricDifference = new SymmetricDifference<T>(sequencer, comparator, emptyValue); return new Reductor<Range<T>, Range<T>>(symmetricDifference, ranges.next()).perform(ranges); } public Range<T> symmetricDifference(Iterable<Range<T>> ranges) { dbc.precondition(ranges != null, "cannot evaluate symmetric difference for a null iterable of ranges"); dbc.precondition(ranges.iterator().hasNext(), "cannot evaluate symmetric difference for an empty iterable of ranges"); final Iterator<Range<T>> iterator = ranges.iterator(); final SymmetricDifference<T> symmetricDifference = new SymmetricDifference<T>(sequencer, comparator, emptyValue); return new Reductor<Range<T>, Range<T>>(symmetricDifference, iterator.next()).perform(iterator); } public Range<T> symmetricDifference(Range<T>... ranges) { dbc.precondition(ranges != null, "cannot evaluate symmetric difference for a null array of ranges"); dbc.precondition(ranges.length != 0, "cannot evaluate symmetric difference for an empty array of ranges"); final Iterator<Range<T>> iterator = new ArrayIterator<Range<T>>(ranges); final SymmetricDifference<T> symmetricDifference = new SymmetricDifference<T>(sequencer, comparator, emptyValue); return new Reductor<Range<T>, Range<T>>(symmetricDifference, iterator.next()).perform(iterator); } public Range<T> difference(Range<T> lhs, Range<T> rhs) { final Difference<T> difference = new Difference<T>(sequencer, comparator, emptyValue); return difference.perform(lhs, rhs); } }
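Each union/intersect/symmetricDifference overload above reduces a sequence of ranges with a Reductor seeded by the first element, i.e. a plain left fold. The sketch below (not part of the original source) shows the same shape against JDK types only, with string concatenation standing in for the Union/Intersection operators.

import java.util.Arrays;
import java.util.Iterator;
import java.util.function.BinaryOperator;

public class LeftFoldDemo {

    /** Fold an iterator using its first element as the seed; the iterator must be non-empty. */
    static <T> T foldFirst(Iterator<T> it, BinaryOperator<T> merge) {
        T accumulator = it.next();
        while (it.hasNext()) {
            accumulator = merge.apply(accumulator, it.next());
        }
        return accumulator;
    }

    public static void main(String[] args) {
        BinaryOperator<String> merge = (lhs, rhs) -> "(" + lhs + " u " + rhs + ")";
        String result = foldFirst(Arrays.asList("[1,3]", "[2,5]", "[7,9]").iterator(), merge);
        System.out.println(result); // (([1,3] u [2,5]) u [7,9])
    }
}

The precondition checks in Ranges guard exactly this seeding step: a null or empty sequence of ranges has no first element to start the fold from.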
package net.imagej.patcher; import java.awt.event.KeyEvent; import java.io.ByteArrayInputStream; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.PrintWriter; import java.io.StringWriter; import java.net.MalformedURLException; import java.net.URL; import java.net.URLClassLoader; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.Enumeration; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.jar.Attributes.Name; import java.util.jar.JarFile; import java.util.jar.Manifest; import java.util.regex.Pattern; /** * Extension points for ImageJ 1.x. * <p> * These extension points will be patched into ImageJ 1.x by the * {@link CodeHacker}. To override the behavior of ImageJ 1.x, a new instance of * this class needs to be installed into <code>ij.IJ._hooks</code>. * </p> * <p> * The essential functionality of the hooks is provided in the * {@link EssentialLegacyHooks} class, which makes an excellent base class for * project-specific implementations. * </p> * * @author Johannes Schindelin */ public abstract class LegacyHooks { /** * Determines whether the image windows should be displayed or not. * * @return false if ImageJ 1.x should be prevented from opening image windows. */ public boolean isLegacyMode() { return true; } /** * Returns the current context, if any. * <p> * For ImageJ2-specific hooks, the returned object will be the current SciJava * context, or null if the context is not yet initialized. * </p> * * @return the context, or null */ public Object getContext() { return null; } /** * Allows interception of ImageJ 1.x's {@link ij.ImageJ#quit()} method. * * @return whether ImageJ 1.x should proceed with its usual quitting routine */ public boolean quit() { return true; } /** * Runs when the hooks are installed into an existing legacy environment. */ public void installed() { // ignore } /** * Disposes of the hooks. * <p> * This method is called when ImageJ 1.x is quitting or when new hooks are * installed. * </p> */ public void dispose() { // ignore } /** * Intercepts the call to {@link ij.IJ#runPlugIn(String, String)}. * * @param className the class name * @param arg the argument passed to the {@code runPlugIn} method * @return the object to return, or null to let ImageJ 1.x handle the call */ public Object interceptRunPlugIn(final String className, final String arg) { return null; } /** * Updates the progress bar, where 0 <= progress <= 1.0. * * @param progress between 0.0 and 1.0 */ public void showProgress(final double progress) {} /** * Updates the progress bar, where the length of the bar is set to ( * <code>currentValue + 1) / finalValue</code> of the maximum bar length. The * bar is erased if <code>currentValue &gt;= finalValue</code>. * * @param currentIndex the step that was just started * @param finalIndex the final step. */ public void showProgress(final int currentIndex, final int finalIndex) {} /** * Shows a status message. * * @param status the message */ public void showStatus(final String status) {} /** * Logs a message. * * @param message the message */ public void log(final String message) {} /** * Registers an image (possibly not seen before). * * @param image the new image */ public void registerImage(final Object image) {} /** * Releases an image. 
* * @param image the image */ public void unregisterImage(final Object image) {} /** * Logs a debug message (to be shown only in debug mode). * * @param string the debug message */ public void debug(final String string) { System.err.println(string); } /** * Shows an exception. * * @param t the exception */ public void error(final Throwable t) { // ignore } /** * Returns the name to use in place of "ImageJ". * * @return the application name */ public String getAppName() { return "ImageJ"; } /** * Returns the version to use in place of the legacy version. * * @return the application version, or null if we do not override */ public String getAppVersion() { return null; } /** * Returns the icon to use in place of the ImageJ microscope. * * @return the URL to the icon to use, or null */ public URL getIconURL() { return null; } /** * Extension point to override ImageJ 1.x' editor. * * @param path the path to the file to open * @return true if the hook opened a different editor */ public boolean openInEditor(final String path) { return false; } /** * Extension point to override ImageJ 1.x' editor. * * @param fileName the name of the new file * @param content the initial content * @return true if the hook opened a different editor */ public boolean createInEditor(final String fileName, final String content) { return false; } private boolean enableIJ1PluginDirs = true; protected void enableIJ1PluginDirs(final boolean enable) { enableIJ1PluginDirs = enable; } final private Collection<File> pluginClasspath = new LinkedHashSet<File>(); protected void addPluginClasspath(final File file) { pluginClasspath.add(file); } /** * Extension point to add to ImageJ 1.x' PluginClassLoader's class path. * * @return a list of class path elements to add */ public List<File> handleExtraPluginJars() { final List<File> result = new ArrayList<File>(); result.addAll(pluginClasspath); if (!enableIJ1PluginDirs) return result; final String extraPluginDirs = System.getProperty("ij1.plugin.dirs"); if (extraPluginDirs != null) { for (final String dir : extraPluginDirs.split(File.pathSeparator)) { final File directory = new File(dir); if (directory.isDirectory()) { result.add(directory); handleExtraPluginJars(directory, result); } } return result; } final String userHome = System.getProperty("user.home"); if (userHome != null) { final File dir = new File(userHome, ".plugins"); if (dir.isDirectory()) { result.add(dir); handleExtraPluginJars(dir, result); } } return result; } private void handleExtraPluginJars(final File directory, final List<File> result) { final File[] list = directory.listFiles(); if (list == null) return; for (final File file : list) { if (file.isDirectory()) handleExtraPluginJars(file, result); else if (file.isFile() && file.getName().endsWith(".jar")) { result.add(file); } } } /** * Extension point to run after <i>Help&gt;Refresh Menus</i> */ public void runAfterRefreshMenus() { // ignore } /** * Extension point to enhance ImageJ 1.x' error reporting upon * {@link NoSuchMethodError}. 
* * @param e the exception to handle * @return true if the error was handled by the legacy hook */ public boolean handleNoSuchMethodError(final NoSuchMethodError error) { String message = error.getMessage(); int paren = message.indexOf("("); if (paren < 0) return false; int dot = message.lastIndexOf(".", paren); if (dot < 0) return false; String path = message.substring(0, dot).replace('.', '/') + ".class"; Set<String> urls = new LinkedHashSet<String>(); final ClassLoader loader = Thread.currentThread().getContextClassLoader(); try { Enumeration<URL> e = loader.getResources(path); while (e.hasMoreElements()) { urls.add(e.nextElement().toString()); } e = loader.getResources("/" + path); while (e.hasMoreElements()) { urls.add(e.nextElement().toString()); } } catch (Throwable t) { t.printStackTrace(); return false; } if (urls.size() == 0) return false; StringBuilder buffer = new StringBuilder(); buffer.append("There was a problem with the class "); buffer.append(message.substring(0, dot)); buffer.append(" which can be found here:\n"); for (String url : urls) { if (url.startsWith("jar:")) url = url.substring(4); if (url.startsWith("file:")) url = url.substring(5); int bang = url.indexOf("!"); if (bang < 0) buffer.append(url); else buffer.append(url.substring(0, bang)); buffer.append("\n"); } if (urls.size() > 1) { buffer.append("\nWARNING: multiple locations found!\n"); } StringWriter writer = new StringWriter(); error.printStackTrace(new PrintWriter(writer)); buffer.append(writer.toString()); System.out.println(buffer.toString()); final NoSuchMethodException throwable = new NoSuchMethodException("Could not find method " + message + "\n" + buffer); throwable.setStackTrace(error.getStackTrace()); error(throwable); return true; } /** * Extension point to run after a new PluginClassLoader was initialized. * * @param loader the PluginClassLoader instance */ public void newPluginClassLoader(final ClassLoader loader) { // do nothing } /** * Extension point to modify the order in which .jar files are added to the * PluginClassLoader. * <p> * There is a problem which only strikes large distributions of ImageJ such as * Fiji: some .jar files try to be helpful and bundle classes which are * actually not theirs, causing problems when newer versions of those .jar * files which they shadow are present in the <i>plugins/</i> or <i>jars/</i> * directory but are not respected by the class loader. * </p> * <p> * The default hook of this extension point therefore hard-codes a few file * names of known offenders (which we politely will call fat .jar files * normally) and just pushes them back to the end of the list. * </p> * * @param directory the directory which ImageJ 1.x looked at * @param names the list of file names in the order ImageJ 1.x discovered them * @return the ordered, filtered and/or augmented list */ public String[] addPluginDirectory(final File directory, final String[] names) { if (names != null) { /* Note that this code is replicated in imagej-launcher's ClassLoaderPlus class. Improvements to this Pattern string should also be mirrored there. */ final Pattern pattern = Pattern.compile("(batik|jython|jruby)(-[0-9].*)?\\.jar"); Arrays.sort(names, new FatJarNameComparator(pattern)); } return names; } /** * Comparator to ensure that problematic fat JARs are sorted <em>last</em>. * It is intended to be used with a {@link Pattern} that filters things this * way. 
*/ public final static class FatJarNameComparator implements Comparator<String> { private final Pattern pattern; private FatJarNameComparator(Pattern pattern) { this.pattern = pattern; } @Override public int compare(final String a, final String b) { return (pattern.matcher(a).matches() ? 1 : 0) - (pattern.matcher(b).matches() ? 1 : 0); } } /** * First extension point to run just after ImageJ 1.x spun up. */ public void initialized() { // do nothing by default } public InputStream autoGenerateConfigFile(final File directory) { // skip unpacked ImageJ 1.x if (new File(directory, "IJ_Props.txt").exists()) return null; return new ByteArrayInputStream(autoGenerateConfigFile(directory, directory, "Plugins", "", new StringBuilder()).toString().getBytes()); } protected StringBuilder autoGenerateConfigFile(final File topLevelDirectory, final File directory, final String menuPath, final String packageName, final StringBuilder builder) { final File[] list = directory.listFiles(); if (list == null) return builder; // make order consistent Arrays.sort(list); for (final File file : list) { String name = file.getName(); if (name.startsWith("_")) continue; if (file.isDirectory()) { autoGenerateConfigFile(topLevelDirectory, file, menuPath + ">" + name.replace('_', ' '), packageName + name + ".", builder); } else if (name.endsWith(".class") && name.contains("_") && !name.contains("$")) { if (topLevelDirectory == directory && Character.isLowerCase(name.charAt(0))) continue; final String className = packageName + name.substring(0, name.length() - 6); name = name.substring(0, name.length() - 6).replace('_', ' '); builder.append(menuPath + ", \"" + name + "\", " + className + "\n"); } } return builder; } private Map<String, String> menuStructure = new LinkedHashMap<String, String>(); /** * Callback for ImageJ 1.x' menu parsing machinery. * <p> * This method is called whenever ImageJ 1.x adds a command to the menu structure. * </p> * * @param menuPath the menu path of the menu item, or null when reinitializing * @param command the command associated with the menu item, or null when reinitializing */ public void addMenuItem(final String menuPath, final String command) { if (menuPath == null) { menuStructure.clear(); } else if (menuPath.endsWith(">-")) { int i = 1; while (menuStructure.containsKey(menuPath + i)) { i++; } menuStructure.put(menuPath + i, command); } else { menuStructure.put(menuPath, command); } } /** * Returns ImageJ 1.x' menu structure as a map. * * @return the menu structure */ public Map<String, String> getMenuStructure() { return Collections.unmodifiableMap(menuStructure); } /** * Optionally override opening resources via legacy hooks. * <p> * This is intended as a "HandleExtraFileTypesPlus". * </p> * * @param path the path to the resource to open, or {@code null} if a dialog * needs to be shown * @param planeIndex * If applicable - the index of plane to open or -1 for all planes * @param display * if true, the opened object should be displayed before returning * @return The opened object, or {@code null} to let ImageJ 1.x open the path. * @deprecated this will be removed before ij1-patcher 1.0.0 */ @Deprecated public Object interceptOpen(final String path, final int planeIndex, final boolean display) { return null; } /** * Optionally override opening resources via legacy hooks. * <p> * This is intended as a "HandleExtraFileTypesPlus". 
* </p> * * @param path the path to the resource to open, or {@code null} if a dialog * needs to be shown * @return The opened object, or {@code null} to let ImageJ 1.x open the resource. */ public Object interceptFileOpen(final String path) { return null; } /** * Optionally override opening images via legacy hooks. * <p> * This is intended as a "HandleExtraFileTypesPlus". * </p> * * @param path the path to the image to open, or {@code null} if a dialog * needs to be shown * @param planeIndex * If applicable - the index of plane to open or -1 for all planes * @return The opened image, or {@code null} to let ImageJ 1.x open the image. */ public Object interceptOpenImage(final String path, final int planeIndex) { return null; } /** * Optionally override opening recent images via legacy hooks. * * @param path the path to the recent image to open * @return The opened object, or {@code null} to let ImageJ 1.x open the image. */ public Object interceptOpenRecent(final String path) { return null; } /** * Optionally override opening drag-and-dropped files via legacy hooks. * * @param f the file that was dragged onto the IJ UI * @return The opened object, or {@code null} to let ImageJ 1.x open the file * as normal. */ public Object interceptDragAndDropFile(final File f) { return null; } /** * Do not use: for internal use only. */ public static Collection<File> getClasspathElements( final ClassLoader fromClassLoader, final StringBuilder errors, final ClassLoader... excludeClassLoaders) { final Set<ClassLoader> exclude = new HashSet<ClassLoader>(Arrays.asList(excludeClassLoaders)); final List<File> result = new ArrayList<File>(); for (ClassLoader loader = fromClassLoader; loader != null; loader = loader.getParent()) { if (exclude.contains(loader)) break; if (!(loader instanceof URLClassLoader)) { errors.append("Cannot add class path from ClassLoader of type ") .append(fromClassLoader.getClass().getName()).append("\n"); continue; } for (final URL url : ((URLClassLoader) loader).getURLs()) { if (!"file".equals(url.getProtocol())) { errors.append("Not a file URL! ").append(url).append("\n"); continue; } result.add(new File(url.getPath())); final String path = url.getPath(); if (path.matches(".*/target/surefire/surefirebooter[0-9]*\\.jar")) try { final JarFile jar = new JarFile(path); final Manifest manifest = jar.getManifest(); if (manifest != null) { final String classPath = manifest.getMainAttributes().getValue(Name.CLASS_PATH); if (classPath != null) { for (final String element : classPath.split(" +")) try { final URL url2 = new URL(element); if (!"file".equals(url2.getProtocol())) { errors.append("Not a file URL! ").append(url2).append("\n"); continue; } result.add(new File(url2.getPath())); } catch (final MalformedURLException e) { e.printStackTrace(); } } } } catch (final IOException e) { System.err .println("Warning: could not add plugin class path due to "); e.printStackTrace(); } } } return result; } /** * Intercepts keyboard events sent to ImageJ 1.x. * * @param e the keyboard event * @return whether the event was intercepted */ public boolean interceptKeyPressed(final KeyEvent e) { return false; } /** * Iterates through the current thread's ancestors. * <p> * ImageJ 1.x' macro options are thread-local. Unfortunately, this does not * take into account thread relationships e.g. when threads are spawned in * parallel. * </p> * <p> * By overriding this method, legacy hooks can force ImageJ 1.x to look harder * for macro options. 
* </p> * * @return the ancestor(s) of the current thread, or null */ public Iterable<Thread> getThreadAncestors() { return null; } /** * Allows closing additional windows at the end of * {@link ij.WindowManager#closeAllWindows()}. * <p> * When returning {@code false}, ImageJ 1.x will be disallowed from quitting. * </p> * * @return whether it is okay to quit */ public boolean interceptCloseAllWindows() { return true; } /** * Allows interception of ImageJ 1.x's disposal routine while quitting. * <p> * This method is called after it has been confirmed that quitting should * proceed. That is, the user OKed all the windows being closed, etc. * This method provides one final chance to cancel the quit operation by * returning false; otherwise, it performs any needed disposal and cleanup. * </p> * * @return whether ImageJ 1.x should proceed in quitting * @see ij.ImageJ#run() which is where ImageJ 1.x actually quits */ public boolean disposing() { return true; } }
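addPluginDirectory() above reorders ImageJ 1.x plugin .jar names so that known fat jars (batik/jython/jruby) end up last on the class path. A small demonstration of that ordering, using made-up file names (not part of the original source):

import java.util.Arrays;
import java.util.Comparator;
import java.util.regex.Pattern;

public class FatJarOrderingDemo {
    public static void main(String[] args) {
        final Pattern pattern = Pattern.compile("(batik|jython|jruby)(-[0-9].*)?\\.jar");
        final String[] names = { "jython-2.7.0.jar", "my_plugin.jar", "batik.jar", "analysis_tools.jar" };

        // Same idea as FatJarNameComparator: names matching the pattern compare greater, so they sort last;
        // the sort is stable, so the relative order of the remaining jars is preserved.
        final Comparator<String> fatJarsLast = Comparator.comparingInt(n -> pattern.matcher(n).matches() ? 1 : 0);
        Arrays.sort(names, fatJarsLast);

        System.out.println(Arrays.toString(names));
        // [my_plugin.jar, analysis_tools.jar, jython-2.7.0.jar, batik.jar]
    }
}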
package com.matthewtamlin.spyglass.library.core; import android.content.Context; import android.content.res.TypedArray; import android.os.Looper; import android.util.AttributeSet; import android.view.View; import com.matthewtamlin.spyglass.library.default_adapters.DefaultAdapter; import com.matthewtamlin.spyglass.library.handler_adapters.HandlerAdapter; import com.matthewtamlin.spyglass.library.handler_adapters.HandlerAdapter.TypedArrayAccessor; import com.matthewtamlin.spyglass.library.handler_annotations.EnumConstantHandler; import com.matthewtamlin.spyglass.library.use_adapters.UseAdapter; import com.matthewtamlin.spyglass.library.util.AdapterUtil; import com.matthewtamlin.spyglass.library.util.AnnotationUtil; import java.lang.annotation.Annotation; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.util.Arrays; import java.util.HashMap; import java.util.Map; import java.util.TreeMap; import static com.matthewtamlin.java_utilities.checkers.NullChecker.checkNotNull; import static com.matthewtamlin.spyglass.library.util.AdapterUtil.getDefaultAdapter; import static com.matthewtamlin.spyglass.library.util.AdapterUtil.getHandlerAdapter; import static com.matthewtamlin.spyglass.library.util.AnnotationUtil.getDefaultAnnotation; import static com.matthewtamlin.spyglass.library.util.AnnotationUtil.getHandlerAnnotation; import static com.matthewtamlin.spyglass.library.util.ValidationUtil.validateField; import static com.matthewtamlin.spyglass.library.util.ValidationUtil.validateMethod; public class Spyglass { private View view; private Context context; private TypedArray attrSource; private Spyglass(final Builder builder) { this.view = builder.view; this.context = builder.context; this.attrSource = view.getContext().obtainStyledAttributes( builder.attributeSet, builder.styleableRes, builder.defStyleAttr, builder.defStyleRes); } public void bindDataToFields() { checkMainThread(); for (final Field f : view.getClass().getDeclaredFields()) { validateField(f); processField(f); } } public void passDataToMethods() { checkMainThread(); for (final Method m : view.getClass().getDeclaredMethods()) { validateMethod(m); processMethod(m); } } private void checkMainThread() { if (Looper.myLooper() != Looper.getMainLooper()) { throw new IllegalThreadException("Spyglasses must only be touched by the UI thread."); } } private void processField(final Field field) { field.setAccessible(true); final Annotation handlerAnnotation = getHandlerAnnotation(field); if (handlerAnnotation != null) { final HandlerAdapter<?, Annotation> handlerAdapter = getHandlerAdapter(field); final TypedArrayAccessor<?> accessor = handlerAdapter.getAccessor(handlerAnnotation); if (accessor.valueExistsInArray(attrSource)) { bindDataToField(field, accessor.getValueFromArray(attrSource)); } else if (getDefaultAnnotation(field) != null) { final DefaultAdapter<?, Annotation> defaultAdapter = getDefaultAdapter(field); bindDataToField( field, defaultAdapter.getDefault(getDefaultAnnotation(field), context)); } else { final String message = "Missing mandatory attribute %1$s in view %2$s."; final int resId = handlerAdapter.getAttributeId(handlerAnnotation); final String resIdName = context.getResources().getResourceEntryName(resId); throw new MandatoryAttributeMissingException( String.format(message, resIdName, view)); } } } private void processMethod(final Method method) { method.setAccessible(true); final Annotation handlerAnnotation = getHandlerAnnotation(method); if (handlerAnnotation != null) { if (handlerAnnotation 
instanceof EnumConstantHandler) {
                processMethodEnumConstantCase(method);
            } else {
                processMethodStandardCase(method);
            }
        }
    }

    private void processMethodEnumConstantCase(final Method method) {
        final Annotation handlerAnnotation = getHandlerAnnotation(method);
        final HandlerAdapter<?, Annotation> handlerAdapter = getHandlerAdapter(method);
        final TypedArrayAccessor<?> accessor = handlerAdapter.getAccessor(handlerAnnotation);

        if (accessor.valueExistsInArray(attrSource)) {
            final TreeMap<Integer, Object> args = new TreeMap<>(getArgsFromUseAnnotations(method));
            callMethod(method, args.values().toArray());
        }
    }

    private void processMethodStandardCase(final Method method) {
        final Annotation handlerAnnotation = getHandlerAnnotation(method);
        final HandlerAdapter<?, Annotation> handlerAdapter = getHandlerAdapter(method);
        final TypedArrayAccessor<?> accessor = handlerAdapter.getAccessor(handlerAnnotation);

        if (accessor.valueExistsInArray(attrSource)) {
            final Object value = accessor.getValueFromArray(attrSource);
            final TreeMap<Integer, Object> args = new TreeMap<>(getArgsFromUseAnnotations(method));

            addValueAtEmptyPosition(args, value);
            callMethod(method, args.values().toArray());
        } else if (getDefaultAnnotation(method) != null) {
            // The attribute is absent, so fall back to the declared default value. This mirrors the
            // field handling above and assumes AdapterUtil.getDefaultAdapter has a Method overload.
            final DefaultAdapter<?, Annotation> defaultAdapter = getDefaultAdapter(method);
            final Object value = defaultAdapter.getDefault(getDefaultAnnotation(method), context);
            final TreeMap<Integer, Object> args = new TreeMap<>(getArgsFromUseAnnotations(method));

            addValueAtEmptyPosition(args, value);
            callMethod(method, args.values().toArray());
        } else {
            final String message = "Missing mandatory attribute %1$s in view %2$s.";
            final int resId = handlerAdapter.getAttributeId(handlerAnnotation);
            final String resIdName = context.getResources().getResourceEntryName(resId);

            throw new MandatoryAttributeMissingException(String.format(message, resIdName, view));
        }
    }

    private void bindDataToField(final Field field, final Object value) {
        try {
            field.set(view, value);
        } catch (final Exception e) {
            final String message = "Failed to bind data to field %1$s.";
            throw new SpyglassFieldBindException(String.format(message, field), e);
        }
    }

    private void callMethod(final Method method, Object[] arguments) {
        try {
            method.invoke(view, arguments);
        } catch (final Exception e) {
            final String message = "Failed to call method %1$s with arguments %2$s.";
            throw new SpyglassMethodCallException(
                    String.format(message, method, Arrays.toString(arguments)), e);
        }
    }

    private Map<Integer, Object> getArgsFromUseAnnotations(final Method method) {
        final Map<Integer, Object> args = new HashMap<>();

        final Map<Integer, Annotation> annotations = AnnotationUtil.getUseAnnotations(method);
        final Map<Integer, UseAdapter> adapters = AdapterUtil.getUseAdapters(method);

        for (final Integer i : annotations.keySet()) {
            final Object value = adapters.get(i).getValue(annotations.get(i));
            args.put(i, value);
        }

        return args;
    }

    private void addValueAtEmptyPosition(final Map<Integer, Object> args, final Object value) {
        // Use size + 1 so as to handle the case where the existing values have consecutive keys.
        // For example, [0 = a, 1 = b, 2 = c] becomes [0 = a, 1 = b, 2 = c, 3 = value].
        for (int i = 0; i < args.size() + 1; i++) {
            if (!args.containsKey(i)) {
                args.put(i, value);
                // Stop after filling the first gap, otherwise the value would be inserted at every
                // later empty position and the growing map would keep the loop from terminating.
                return;
            }
        }
    }

    public static Builder builder() {
        return new Builder();
    }

    public static class Builder {
        private View view;

        private Context context;

        private int[] styleableRes;

        private AttributeSet attributeSet;

        private int defStyleAttr;

        private int defStyleRes;

        private Builder() {}

        public void forView(final View view) {
            this.view = view;
        }

        public void withContext(final Context context) {
            this.context = context;
        }

        public void withStyleableResource(final int[] styleableRes) {
            this.styleableRes = styleableRes;
        }

        public void withAttributeSet(final AttributeSet attributeSet) {
            this.attributeSet = attributeSet;
        }

        public void withDefStyleAttr(final int defStyleAttr) {
            this.defStyleAttr = defStyleAttr;
        }

        public void withDefStyleRes(final int defStyleRes) {
            this.defStyleRes = defStyleRes;
        }

        public Spyglass build() {
            checkNotNull(view, new InvalidBuilderStateException("Unable to build a Spyglass " +
                    "without a view. Call method forView(View) before calling build()."));

            checkNotNull(context, new InvalidBuilderStateException("Unable to build a Spyglass " +
                    "without a context. Call method withContext(Context) before calling build()."));

            checkNotNull(styleableRes, new InvalidBuilderStateException("Unable to build a " +
                    "Spyglass without a styleable resource. Call method " +
                    "withStyleableResource(int[]) before calling build()."));

            return new Spyglass(this);
        }
    }
}
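/*
 * Usage sketch (added for illustration, not part of the original source): shows how a custom View
 * could wire up a Spyglass in its constructor using the builder above. The view class, the
 * R.styleable entry and the example package are assumptions made purely for this example.
 */
package com.matthewtamlin.spyglass.library.example;

import android.content.Context;
import android.util.AttributeSet;
import android.view.View;

import com.matthewtamlin.spyglass.library.core.Spyglass;

public class SpyglassExampleView extends View {
    public SpyglassExampleView(final Context context, final AttributeSet attrs) {
        super(context, attrs);

        final Spyglass.Builder builder = Spyglass.builder();
        builder.forView(this);
        builder.withContext(context);
        builder.withStyleableResource(R.styleable.SpyglassExampleView); // assumed styleable resource
        builder.withAttributeSet(attrs);

        final Spyglass spyglass = builder.build();

        // Both calls must run on the UI thread: handler-annotated fields and methods are populated
        // from the resolved TypedArray, or from their default annotations when an attribute is absent.
        spyglass.bindDataToFields();
        spyglass.passDataToMethods();
    }
}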
package net.imglib2.meta; import java.util.ArrayList; import net.imglib2.EuclideanSpace; /** * A {@code CombinedSpace} is a {@link EuclideanSpace} (specifically a * {@link TypedSpace}) which is a union of other {@link TypedSpace}s. Common * axes are merged as appropriate by matching the {@link AxisType}s of each * {@link TypedAxis}. * <p> * For example, combining three spaces with dimensions (X, Y, Z, CHANNEL), (X, * Y, CHANNEL, TIME) and (X, Z, LIFETIME, TIME) will result in a coordinate * space with dimensions (X, Y, Z, CHANNEL, TIME, LIFETIME). * </p> * * @author Curtis Rueden */ public class CombinedSpace<A extends TypedAxis, S extends TypedSpace<A>> extends ArrayList<S> implements TypedSpace<A> { /** List of axis types for the combined space. */ private final ArrayList<AxisType> axisTypes = new ArrayList<AxisType>(); // -- CombinedSpace methods -- /** Recomputes the combined space based on its current constituents. */ public void update() { axisTypes.clear(); for (final TypedSpace<A> space : this) { for (int d = 0; d < space.numDimensions(); d++) { final AxisType axisType = space.axis(d).type(); if (!axisTypes.contains(axisType)) { // new axis; add to the list axisTypes.add(axisType); } } } } // -- TypedSpace methods -- @Override public int dimensionIndex(final AxisType axis) { return axisTypes.indexOf(axis); } // -- AnnotatedSpace methods -- @Override public A axis(final int d) { final AxisType type = axisTypes.get(d); // find the first axis of a constituent space that matches the type for (final TypedSpace<A> space : this) { final int id = space.dimensionIndex(type); if (id < 0) continue; return space.axis(id); } throw new IllegalStateException("No compatible constituent space"); } @Override public void axes(final A[] axes) { for (int i = 0; i < axes.length; i++) { axes[i] = axis(i); } } @Override public void setAxis(final A axis, final int d) { final AxisType type = axisTypes.get(d); // assign the axis to all constituent spaces of matching type for (final TypedSpace<A> space : this) { final int id = space.dimensionIndex(type); if (id < 0) continue; space.setAxis(axis, id); } } // -- EuclideanSpace methods -- @Override public int numDimensions() { return axisTypes.size(); } }
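/*
 * Usage sketch (added for illustration, not part of the original source): demonstrates how a
 * CombinedSpace merges the axes of its constituents, as described in the class javadoc. The
 * concrete TypedSpace instances are assumed to be supplied by the caller; no particular
 * implementation is implied, and the example package name is hypothetical.
 */
package net.imglib2.meta.example;

import net.imglib2.meta.AxisType;
import net.imglib2.meta.CombinedSpace;
import net.imglib2.meta.TypedAxis;
import net.imglib2.meta.TypedSpace;

public class CombinedSpaceExample {

    /**
     * Combines two spaces, e.g. (X, Y, Z, CHANNEL) and (X, Y, CHANNEL, TIME), and returns the
     * dimension index of the given axis type in the merged space (X, Y, Z, CHANNEL, TIME).
     */
    public static int mergedDimensionIndex(final TypedSpace<TypedAxis> first,
        final TypedSpace<TypedAxis> second, final AxisType axis)
    {
        final CombinedSpace<TypedAxis, TypedSpace<TypedAxis>> combined =
            new CombinedSpace<TypedAxis, TypedSpace<TypedAxis>>();
        combined.add(first);
        combined.add(second);

        // CombinedSpace extends ArrayList, so update() must be called after changing constituents.
        combined.update();

        return combined.dimensionIndex(axis); // -1 if the axis is absent from all constituents
    }
}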
package peergos.server.tests; import org.junit.*; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import peergos.server.crypto.JniTweetNacl; import java.util.*; import java.util.stream.Collectors; @RunWith(Parameterized.class) public class TestJniTweetNacl { private static JniTweetNacl.Signer signer; private static JniTweetNacl.Symmetric symmetric; private static Random random = new Random(1337); @BeforeClass public static void init() { JniTweetNacl instance = JniTweetNacl.build(); signer = new JniTweetNacl.Signer(instance); symmetric = new JniTweetNacl.Symmetric(instance); random.setSeed(1337); } public final int messageLength; public TestJniTweetNacl(int messageLength) { this.messageLength = messageLength; } @Parameterized.Parameters(name = "{0}") public static Collection<Object[]> parameters() { //spiral out int i=1, j=1; int cutoff = 1024 * 1024; List<Integer> fibs = new ArrayList<>(); while (i < cutoff) { int k = j; j = j+i; i = k; fibs.add(i); } return fibs.stream().map(e -> new Object[]{e}) .collect(Collectors.toList()); } @Test public void testSigningIdentity() { byte[] secretSignBytes = new byte[64]; byte[] publicSignBytes = new byte[32]; signer.crypto_sign_keypair(publicSignBytes, secretSignBytes); byte[] message = new byte[messageLength]; random.nextBytes(message); byte[] signed = signer.crypto_sign(message, secretSignBytes); byte[] unsigned = signer.crypto_sign_open(signed, publicSignBytes); Assert.assertArrayEquals(message, unsigned); Assert.assertFalse(Arrays.equals(message, signed)); Assert.assertFalse(Arrays.equals(signed, unsigned)); } @Test public void testSecretboxIdentity() { byte[] key = new byte[32]; byte[] nonce = new byte[32]; random.nextBytes(key); random.nextBytes(nonce); byte[] message = new byte[messageLength]; random.nextBytes(message); byte[] boxed = symmetric.secretbox(message, nonce, key); byte[] unboxed = symmetric.secretbox_open(boxed, nonce, key); Assert.assertArrayEquals(message, unboxed); Assert.assertFalse(Arrays.equals(message, boxed)); Assert.assertFalse(Arrays.equals(boxed, unboxed)); } @Test public void testSecretboxAsyncIdentity() { byte[] key = new byte[32]; byte[] nonce = new byte[32]; random.nextBytes(key); random.nextBytes(nonce); byte[] message = new byte[messageLength]; random.nextBytes(message); byte[] boxed = symmetric.secretboxAsync(message, nonce, key).join(); byte[] unboxed = symmetric.secretbox_openAsync(boxed, nonce, key).join(); Assert.assertArrayEquals(message, unboxed); Assert.assertFalse(Arrays.equals(message, boxed)); Assert.assertFalse(Arrays.equals(boxed, unboxed)); } }
package com.mikepenz.materialdrawer; import android.app.Activity; import android.content.Context; import android.graphics.Typeface; import android.graphics.drawable.Drawable; import android.os.Build; import android.os.Bundle; import android.os.Handler; import android.support.annotation.ColorInt; import android.support.annotation.ColorRes; import android.support.annotation.DimenRes; import android.support.annotation.DrawableRes; import android.support.annotation.LayoutRes; import android.support.annotation.NonNull; import android.text.TextUtils; import android.view.View; import android.view.ViewGroup; import android.widget.FrameLayout; import android.widget.ImageView; import android.widget.TextView; import com.mikepenz.fastadapter.utils.IdDistributor; import com.mikepenz.iconics.IconicsDrawable; import com.mikepenz.materialdrawer.holder.ColorHolder; import com.mikepenz.materialdrawer.holder.DimenHolder; import com.mikepenz.materialdrawer.holder.ImageHolder; import com.mikepenz.materialdrawer.holder.StringHolder; import com.mikepenz.materialdrawer.icons.MaterialDrawerFont; import com.mikepenz.materialdrawer.model.interfaces.IDrawerItem; import com.mikepenz.materialdrawer.model.interfaces.IProfile; import com.mikepenz.materialdrawer.util.DrawerImageLoader; import com.mikepenz.materialdrawer.util.DrawerUIUtils; import com.mikepenz.materialdrawer.view.BezelImageView; import com.mikepenz.materialize.util.UIUtils; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Stack; public class AccountHeaderBuilder { // global references to views we need later protected View mAccountHeader; protected ImageView mAccountHeaderBackground; protected BezelImageView mCurrentProfileView; protected View mAccountHeaderTextSection; protected ImageView mAccountSwitcherArrow; protected TextView mCurrentProfileName; protected TextView mCurrentProfileEmail; protected BezelImageView mProfileFirstView; protected BezelImageView mProfileSecondView; protected BezelImageView mProfileThirdView; // global references to the profiles protected IProfile mCurrentProfile; protected IProfile mProfileFirst; protected IProfile mProfileSecond; protected IProfile mProfileThird; // global stuff protected boolean mSelectionListShown = false; protected int mAccountHeaderTextSectionBackgroundResource = -1; // the activity to use protected Activity mActivity; /** * Pass the activity you use the drawer in ;) * * @param activity * @return */ public AccountHeaderBuilder withActivity(@NonNull Activity activity) { this.mActivity = activity; return this; } // defines if we use the compactStyle protected boolean mCompactStyle = false; /** * Defines if we should use the compact style for the header. * * @param compactStyle * @return */ public AccountHeaderBuilder withCompactStyle(boolean compactStyle) { this.mCompactStyle = compactStyle; return this; } // the typeface used for textViews within the AccountHeader protected Typeface mTypeface; // the typeface used for name textView only. overrides mTypeface protected Typeface mNameTypeface; // the typeface used for email textView only. overrides mTypeface protected Typeface mEmailTypeface; /** * Define the typeface which will be used for all textViews in the AccountHeader * * @param typeface * @return */ public AccountHeaderBuilder withTypeface(@NonNull Typeface typeface) { this.mTypeface = typeface; return this; } /** * Define the typeface which will be used for name textView in the AccountHeader. 
     * Overrides typeface supplied to {@link AccountHeaderBuilder#withTypeface(android.graphics.Typeface)}
     *
     * @param typeface
     * @return
     * @see #withTypeface(android.graphics.Typeface)
     */
    public AccountHeaderBuilder withNameTypeface(@NonNull Typeface typeface) {
        this.mNameTypeface = typeface;
        return this;
    }

    /**
     * Define the typeface which will be used for email textView in the AccountHeader.
     * Overrides typeface supplied to {@link AccountHeaderBuilder#withTypeface(android.graphics.Typeface)}
     *
     * @param typeface
     * @return
     * @see #withTypeface(android.graphics.Typeface)
     */
    public AccountHeaderBuilder withEmailTypeface(@NonNull Typeface typeface) {
        this.mEmailTypeface = typeface;
        return this;
    }

    // set the account header height
    protected DimenHolder mHeight;

    /**
     * set the height for the header
     *
     * @param heightPx
     * @return
     */
    public AccountHeaderBuilder withHeightPx(int heightPx) {
        this.mHeight = DimenHolder.fromPixel(heightPx);
        return this;
    }

    /**
     * set the height for the header
     *
     * @param heightDp
     * @return
     */
    public AccountHeaderBuilder withHeightDp(int heightDp) {
        this.mHeight = DimenHolder.fromDp(heightDp);
        return this;
    }

    /**
     * set the height for the header by resource
     *
     * @param heightRes
     * @return
     */
    public AccountHeaderBuilder withHeightRes(@DimenRes int heightRes) {
        this.mHeight = DimenHolder.fromResource(heightRes);
        return this;
    }

    // the text color used for the selection section of the header
    protected ColorHolder mTextColor;

    /**
     * set the text color for the header selection section as color
     *
     * @param textColor
     * @return
     */
    public AccountHeaderBuilder withTextColor(@ColorInt int textColor) {
        this.mTextColor = ColorHolder.fromColor(textColor);
        return this;
    }

    /**
     * set the text color for the header selection section as color resource
     *
     * @param textColorRes
     * @return
     */
    public AccountHeaderBuilder withTextColorRes(@ColorRes int textColorRes) {
        this.mTextColor = ColorHolder.fromColorRes(textColorRes);
        return this;
    }

    // whether the current selected profile is hidden from the selection list (visible by default)
    protected boolean mCurrentHiddenInList = false;

    /**
     * hide the current selected profile from the list
     *
     * @param currentProfileHiddenInList
     * @return
     */
    public AccountHeaderBuilder withCurrentProfileHiddenInList(boolean currentProfileHiddenInList) {
        mCurrentHiddenInList = currentProfileHiddenInList;
        return this;
    }

    // set to hide the first or second line
    protected boolean mSelectionFirstLineShown = true;
    protected boolean mSelectionSecondLineShown = true;

    /**
     * set this to false if you want to hide the first line of the selection box in the header (first line would be the name)
     *
     * @param selectionFirstLineShown
     * @return
     * @deprecated replaced by {@link #withSelectionFirstLineShown}
     */
    @Deprecated
    public AccountHeaderBuilder withSelectionFistLineShown(boolean selectionFirstLineShown) {
        this.mSelectionFirstLineShown = selectionFirstLineShown;
        return this;
    }

    /**
     * set this to false if you want to hide the first line of the selection box in the header (first line would be the name)
     *
     * @param selectionFirstLineShown
     * @return
     */
    public AccountHeaderBuilder withSelectionFirstLineShown(boolean selectionFirstLineShown) {
        this.mSelectionFirstLineShown = selectionFirstLineShown;
        return this;
    }

    /**
     * set this to false if you want to hide the second line of the selection box in the header (second line would be the e-mail)
     *
     * @param selectionSecondLineShown
     * @return
     */
    public AccountHeaderBuilder withSelectionSecondLineShown(boolean selectionSecondLineShown) {
        this.mSelectionSecondLineShown = selectionSecondLineShown;
        return this;
    }
    // set one of these to define the text in the first or second line within the account selector
    protected String mSelectionFirstLine;
    protected String mSelectionSecondLine;

    /**
     * set this to define the first line in the selection area if there is no profile
     * note this will block any values from profiles!
     *
     * @param selectionFirstLine
     * @return
     */
    public AccountHeaderBuilder withSelectionFirstLine(String selectionFirstLine) {
        this.mSelectionFirstLine = selectionFirstLine;
        return this;
    }

    /**
     * set this to define the second line in the selection area if there is no profile
     * note this will block any values from profiles!
     *
     * @param selectionSecondLine
     * @return
     */
    public AccountHeaderBuilder withSelectionSecondLine(String selectionSecondLine) {
        this.mSelectionSecondLine = selectionSecondLine;
        return this;
    }

    // set no padding below the header
    protected boolean mPaddingBelowHeader = true;

    /**
     * Set this to false if you want no padding below the Header
     *
     * @param paddingBelowHeader
     * @return
     */
    public AccountHeaderBuilder withPaddingBelowHeader(boolean paddingBelowHeader) {
        this.mPaddingBelowHeader = paddingBelowHeader;
        return this;
    }

    // set no divider below the header
    protected boolean mDividerBelowHeader = true;

    /**
     * Set this to false if you want no divider below the Header
     *
     * @param dividerBelowHeader
     * @return
     */
    public AccountHeaderBuilder withDividerBelowHeader(boolean dividerBelowHeader) {
        this.mDividerBelowHeader = dividerBelowHeader;
        return this;
    }

    // set translucent statusBar mode
    protected boolean mTranslucentStatusBar = true;

    /**
     * Enable or disable this if you use a translucent statusbar
     *
     * @param translucentStatusBar
     * @return
     */
    public AccountHeaderBuilder withTranslucentStatusBar(boolean translucentStatusBar) {
        this.mTranslucentStatusBar = translucentStatusBar;
        return this;
    }

    // the background for the header
    protected ImageHolder mHeaderBackground;

    /**
     * set the background for the header as drawable
     *
     * @param headerBackground
     * @return
     */
    public AccountHeaderBuilder withHeaderBackground(Drawable headerBackground) {
        this.mHeaderBackground = new ImageHolder(headerBackground);
        return this;
    }

    /**
     * set the background for the header as resource
     *
     * @param headerBackgroundRes
     * @return
     */
    public AccountHeaderBuilder withHeaderBackground(@DrawableRes int headerBackgroundRes) {
        this.mHeaderBackground = new ImageHolder(headerBackgroundRes);
        return this;
    }

    /**
     * set the background for the header via the ImageHolder class
     *
     * @param headerBackground
     * @return
     */
    public AccountHeaderBuilder withHeaderBackground(ImageHolder headerBackground) {
        this.mHeaderBackground = headerBackground;
        return this;
    }

    // background scale type
    protected ImageView.ScaleType mHeaderBackgroundScaleType = null;

    /**
     * define the ScaleType for the header background
     *
     * @param headerBackgroundScaleType
     * @return
     */
    public AccountHeaderBuilder withHeaderBackgroundScaleType(ImageView.ScaleType headerBackgroundScaleType) {
        this.mHeaderBackgroundScaleType = headerBackgroundScaleType;
        return this;
    }

    // profile images in the header are shown or not
    protected boolean mProfileImagesVisible = true;

    /**
     * define if the profile images in the header are shown or not
     *
     * @param profileImagesVisible
     * @return
     */
    public AccountHeaderBuilder withProfileImagesVisible(boolean profileImagesVisible) {
        this.mProfileImagesVisible = profileImagesVisible;
        return this;
    }

    // only the main profile image is visible
    protected boolean mOnlyMainProfileImageVisible = false;

    /**
     * define if only the main (current selected) profile image should be visible
     *
     * @param onlyMainProfileImageVisible
     * @return
     */
    public
AccountHeaderBuilder withOnlyMainProfileImageVisible(boolean onlyMainProfileImageVisible) { this.mOnlyMainProfileImageVisible = onlyMainProfileImageVisible; return this; } //show small profile images but hide MainProfileImage protected boolean mOnlySmallProfileImagesVisible = false; /** * define if only the small profile images should be visible * * @param onlySmallProfileImagesVisible * @return */ public AccountHeaderBuilder withOnlySmallProfileImagesVisible(boolean onlySmallProfileImagesVisible) { this.mOnlySmallProfileImagesVisible = onlySmallProfileImagesVisible; return this; } //close the drawer after a profile was clicked in the list protected Boolean mCloseDrawerOnProfileListClick = null; /** * define if the drawer should close if the user clicks on a profile item if the selection list is shown * * @param closeDrawerOnProfileListClick * @return */ public AccountHeaderBuilder withCloseDrawerOnProfileListClick(boolean closeDrawerOnProfileListClick) { this.mCloseDrawerOnProfileListClick = closeDrawerOnProfileListClick; return this; } //reset the drawer list to the main drawer list after the profile was clicked in the list protected boolean mResetDrawerOnProfileListClick = true; /** * define if the drawer selection list should be reseted after the user clicks on a profile item if the selection list is shown * * @param resetDrawerOnProfileListClick * @return */ public AccountHeaderBuilder withResetDrawerOnProfileListClick(boolean resetDrawerOnProfileListClick) { this.mResetDrawerOnProfileListClick = resetDrawerOnProfileListClick; return this; } // set the profile images clickable or not protected boolean mProfileImagesClickable = true; /** * enable or disable the profile images to be clickable * * @param profileImagesClickable * @return */ public AccountHeaderBuilder withProfileImagesClickable(boolean profileImagesClickable) { this.mProfileImagesClickable = profileImagesClickable; return this; } // set to use the alternative profile header switching protected boolean mAlternativeProfileHeaderSwitching = false; /** * enable the alternative profile header switching * * @param alternativeProfileHeaderSwitching * @return */ public AccountHeaderBuilder withAlternativeProfileHeaderSwitching(boolean alternativeProfileHeaderSwitching) { this.mAlternativeProfileHeaderSwitching = alternativeProfileHeaderSwitching; return this; } // enable 3 small header previews protected boolean mThreeSmallProfileImages = false; /** * enable the extended profile icon view with 3 small header images instead of two * * @param threeSmallProfileImages * @return */ public AccountHeaderBuilder withThreeSmallProfileImages(boolean threeSmallProfileImages) { this.mThreeSmallProfileImages = threeSmallProfileImages; return this; } //the delay which is waited before the drawer is closed protected int mOnProfileClickDrawerCloseDelay = 100; /** * Define the delay for the drawer close operation after a click. * This is a small trick to improve the speed (and remove lag) if you open a new activity after a DrawerItem * was selected. 
* NOTE: Disable this by passing -1 * * @param onProfileClickDrawerCloseDelay the delay in MS (-1 to disable) * @return */ public AccountHeaderBuilder withOnProfileClickDrawerCloseDelay(int onProfileClickDrawerCloseDelay) { this.mOnProfileClickDrawerCloseDelay = onProfileClickDrawerCloseDelay; return this; } // the onAccountHeaderProfileImageListener to set protected AccountHeader.OnAccountHeaderProfileImageListener mOnAccountHeaderProfileImageListener; /** * set click / longClick listener for the header images * * @param onAccountHeaderProfileImageListener * @return */ public AccountHeaderBuilder withOnAccountHeaderProfileImageListener(AccountHeader.OnAccountHeaderProfileImageListener onAccountHeaderProfileImageListener) { this.mOnAccountHeaderProfileImageListener = onAccountHeaderProfileImageListener; return this; } // the onAccountHeaderSelectionListener to set protected AccountHeader.OnAccountHeaderSelectionViewClickListener mOnAccountHeaderSelectionViewClickListener; /** * set a onSelection listener for the selection box * * @param onAccountHeaderSelectionViewClickListener * @return */ public AccountHeaderBuilder withOnAccountHeaderSelectionViewClickListener(AccountHeader.OnAccountHeaderSelectionViewClickListener onAccountHeaderSelectionViewClickListener) { this.mOnAccountHeaderSelectionViewClickListener = onAccountHeaderSelectionViewClickListener; return this; } //set the selection list enabled if there is only a single profile protected boolean mSelectionListEnabledForSingleProfile = true; /** * enable or disable the selection list if there is only a single profile * * @param selectionListEnabledForSingleProfile * @return */ public AccountHeaderBuilder withSelectionListEnabledForSingleProfile(boolean selectionListEnabledForSingleProfile) { this.mSelectionListEnabledForSingleProfile = selectionListEnabledForSingleProfile; return this; } //set the selection enabled disabled protected boolean mSelectionListEnabled = true; /** * enable or disable the selection list * * @param selectionListEnabled * @return */ public AccountHeaderBuilder withSelectionListEnabled(boolean selectionListEnabled) { this.mSelectionListEnabled = selectionListEnabled; return this; } // the drawerLayout to use protected View mAccountHeaderContainer; /** * You can pass a custom view for the drawer lib. note this requires the same structure as the drawer.xml * * @param accountHeader * @return */ public AccountHeaderBuilder withAccountHeader(@NonNull View accountHeader) { this.mAccountHeaderContainer = accountHeader; return this; } /** * You can pass a custom layout for the drawer lib. 
see the drawer.xml in layouts of this lib on GitHub * * @param resLayout * @return */ public AccountHeaderBuilder withAccountHeader(@LayoutRes int resLayout) { if (mActivity == null) { throw new RuntimeException("please pass an activity first to use this call"); } if (resLayout != -1) { this.mAccountHeaderContainer = mActivity.getLayoutInflater().inflate(resLayout, null, false); } else { if (mCompactStyle) { this.mAccountHeaderContainer = mActivity.getLayoutInflater().inflate(R.layout.material_drawer_compact_header, null, false); } else { this.mAccountHeaderContainer = mActivity.getLayoutInflater().inflate(R.layout.material_drawer_header, null, false); } } return this; } // the profiles to display protected List<IProfile> mProfiles; /** * set the arrayList of DrawerItems for the drawer * * @param profiles * @return */ public AccountHeaderBuilder withProfiles(@NonNull List<IProfile> profiles) { this.mProfiles = IdDistributor.checkIds(profiles); return this; } /** * add single ore more DrawerItems to the Drawer * * @param profiles * @return */ public AccountHeaderBuilder addProfiles(@NonNull IProfile... profiles) { if (this.mProfiles == null) { this.mProfiles = new ArrayList<>(); } Collections.addAll(this.mProfiles, IdDistributor.checkIds(profiles)); return this; } // the click listener to be fired on profile or selection click protected AccountHeader.OnAccountHeaderListener mOnAccountHeaderListener; /** * add a listener for the accountHeader * * @param onAccountHeaderListener * @return */ public AccountHeaderBuilder withOnAccountHeaderListener(AccountHeader.OnAccountHeaderListener onAccountHeaderListener) { this.mOnAccountHeaderListener = onAccountHeaderListener; return this; } //the on long click listener to be fired on profile longClick inside the list protected AccountHeader.OnAccountHeaderItemLongClickListener mOnAccountHeaderItemLongClickListener; /** * the on long click listener to be fired on profile longClick inside the list * * @param onAccountHeaderItemLongClickListener * @return */ public AccountHeaderBuilder withOnAccountHeaderItemLongClickListener(AccountHeader.OnAccountHeaderItemLongClickListener onAccountHeaderItemLongClickListener) { this.mOnAccountHeaderItemLongClickListener = onAccountHeaderItemLongClickListener; return this; } // the drawer to set the AccountSwitcher for protected Drawer mDrawer; /** * @param drawer * @return */ public AccountHeaderBuilder withDrawer(@NonNull Drawer drawer) { this.mDrawer = drawer; return this; } // savedInstance to restore state protected Bundle mSavedInstance; /** * create the drawer with the values of a savedInstance * * @param savedInstance * @return */ public AccountHeaderBuilder withSavedInstance(Bundle savedInstance) { this.mSavedInstance = savedInstance; return this; } /** * helper method to set the height for the header! 
* * @param height */ private void setHeaderHeight(int height) { if (mAccountHeaderContainer != null) { ViewGroup.LayoutParams params = mAccountHeaderContainer.getLayoutParams(); if (params != null) { params.height = height; mAccountHeaderContainer.setLayoutParams(params); } View accountHeader = mAccountHeaderContainer.findViewById(R.id.material_drawer_account_header); if (accountHeader != null) { params = accountHeader.getLayoutParams(); params.height = height; accountHeader.setLayoutParams(params); } View accountHeaderBackground = mAccountHeaderContainer.findViewById(R.id.material_drawer_account_header_background); if (accountHeaderBackground != null) { params = accountHeaderBackground.getLayoutParams(); params.height = height; accountHeaderBackground.setLayoutParams(params); } } } /** * a small helper to handle the selectionView * * @param on */ private void handleSelectionView(IProfile profile, boolean on) { if (on) { if (Build.VERSION.SDK_INT >= 21) { ((FrameLayout) mAccountHeaderContainer).setForeground(UIUtils.getCompatDrawable(mAccountHeaderContainer.getContext(), mAccountHeaderTextSectionBackgroundResource)); mAccountHeaderContainer.setOnClickListener(onSelectionClickListener); mAccountHeaderContainer.setTag(R.id.material_drawer_profile_header, profile); } else { mAccountHeaderTextSection.setBackgroundResource(mAccountHeaderTextSectionBackgroundResource); mAccountHeaderTextSection.setOnClickListener(onSelectionClickListener); mAccountHeaderTextSection.setTag(R.id.material_drawer_profile_header, profile); } } else { if (Build.VERSION.SDK_INT >= 21) { ((FrameLayout) mAccountHeaderContainer).setForeground(null); mAccountHeaderContainer.setOnClickListener(null); } else { UIUtils.setBackground(mAccountHeaderTextSection, null); mAccountHeaderTextSection.setOnClickListener(null); } } } /** * method to build the header view * * @return */ public AccountHeader build() { // if the user has not set a accountHeader use the default one :D if (mAccountHeaderContainer == null) { withAccountHeader(-1); } // get the header view within the container mAccountHeader = mAccountHeaderContainer.findViewById(R.id.material_drawer_account_header); //the default min header height by default 148dp int defaultHeaderMinHeight = mActivity.getResources().getDimensionPixelSize(R.dimen.material_drawer_account_header_height); int statusBarHeight = UIUtils.getStatusBarHeight(mActivity, true); // handle the height for the header int height; if (mHeight != null) { height = mHeight.asPixel(mActivity); } else { if (mCompactStyle) { height = mActivity.getResources().getDimensionPixelSize(R.dimen.material_drawer_account_header_height_compact); } else { //calculate the header height by getting the optimal drawer width and calculating it * 9 / 16 height = (int) (DrawerUIUtils.getOptimalDrawerWidth(mActivity) * AccountHeader.NAVIGATION_DRAWER_ACCOUNT_ASPECT_RATIO); //if we are lower than api 19 (>= 19 we have a translucentStatusBar) the height should be a bit lower //probably even if we are non translucent on > 19 devices? 
if (Build.VERSION.SDK_INT < 19) { int tempHeight = height - statusBarHeight; //if we are lower than api 19 we are not able to have a translucent statusBar so we remove the height of the statusBar from the padding //to prevent display issues we only reduce the height if we still fit the required minHeight of 148dp (R.dimen.material_drawer_account_header_height) //we remove additional 8dp from the defaultMinHeaderHeight as there is some buffer in the header and to prevent to large spacings if (tempHeight > defaultHeaderMinHeight - UIUtils.convertDpToPixel(8, mActivity)) { height = tempHeight; } } } } // handle everything if we have a translucent status bar which only is possible on API >= 19 if (mTranslucentStatusBar && Build.VERSION.SDK_INT >= 21) { mAccountHeader.setPadding(mAccountHeader.getPaddingLeft(), mAccountHeader.getPaddingTop() + statusBarHeight, mAccountHeader.getPaddingRight(), mAccountHeader.getPaddingBottom()); //in fact it makes no difference if we have a translucent statusBar or not. we want 9/16 just if we are not compact if (mCompactStyle) { height = height + statusBarHeight; } else if ((height - statusBarHeight) <= defaultHeaderMinHeight) { //if the height + statusBar of the header is lower than the required 148dp + statusBar we change the height to be able to display all the data height = defaultHeaderMinHeight + statusBarHeight; } } //set the height for the header setHeaderHeight(height); // get the background view mAccountHeaderBackground = (ImageView) mAccountHeaderContainer.findViewById(R.id.material_drawer_account_header_background); // set the background ImageHolder.applyTo(mHeaderBackground, mAccountHeaderBackground, DrawerImageLoader.Tags.ACCOUNT_HEADER.name()); if (mHeaderBackgroundScaleType != null) { mAccountHeaderBackground.setScaleType(mHeaderBackgroundScaleType); } // get the text color to use for the text section int textColor = ColorHolder.color(mTextColor, mActivity, R.attr.material_drawer_header_selection_text, R.color.material_drawer_header_selection_text); // set the background for the section if (mCompactStyle) { mAccountHeaderTextSection = mAccountHeader; } else { mAccountHeaderTextSection = mAccountHeaderContainer.findViewById(R.id.material_drawer_account_header_text_section); } mAccountHeaderTextSectionBackgroundResource = UIUtils.getSelectableBackgroundRes(mActivity); handleSelectionView(mCurrentProfile, true); // set the arrow :D mAccountSwitcherArrow = (ImageView) mAccountHeaderContainer.findViewById(R.id.material_drawer_account_header_text_switcher); mAccountSwitcherArrow.setImageDrawable(new IconicsDrawable(mActivity, MaterialDrawerFont.Icon.mdf_arrow_drop_down).sizeRes(R.dimen.material_drawer_account_header_dropdown).paddingRes(R.dimen.material_drawer_account_header_dropdown_padding).color(textColor)); //get the fields for the name mCurrentProfileView = (BezelImageView) mAccountHeader.findViewById(R.id.material_drawer_account_header_current); mCurrentProfileName = (TextView) mAccountHeader.findViewById(R.id.material_drawer_account_header_name); mCurrentProfileEmail = (TextView) mAccountHeader.findViewById(R.id.material_drawer_account_header_email); //set the typeface for the AccountHeader if (mNameTypeface != null) { mCurrentProfileName.setTypeface(mNameTypeface); } else if (mTypeface != null) { mCurrentProfileName.setTypeface(mTypeface); } if (mEmailTypeface != null) { mCurrentProfileEmail.setTypeface(mEmailTypeface); } else if (mTypeface != null) { mCurrentProfileEmail.setTypeface(mTypeface); } mCurrentProfileName.setTextColor(textColor); 
mCurrentProfileEmail.setTextColor(textColor); mProfileFirstView = (BezelImageView) mAccountHeader.findViewById(R.id.material_drawer_account_header_small_first); mProfileSecondView = (BezelImageView) mAccountHeader.findViewById(R.id.material_drawer_account_header_small_second); mProfileThirdView = (BezelImageView) mAccountHeader.findViewById(R.id.material_drawer_account_header_small_third); //calculate the profiles to set calculateProfiles(); //process and build the profiles buildProfiles(); // try to restore all saved values again if (mSavedInstance != null) { int selection = mSavedInstance.getInt(AccountHeader.BUNDLE_SELECTION_HEADER, -1); if (selection != -1) { //predefine selection (should be the first element if (mProfiles != null && (selection) > -1 && selection < mProfiles.size()) { switchProfiles(mProfiles.get(selection)); } } } //everything created. now set the header if (mDrawer != null) { mDrawer.setHeader(mAccountHeaderContainer, mPaddingBelowHeader, mDividerBelowHeader); } //forget the reference to the activity mActivity = null; return new AccountHeader(this); } /** * helper method to calculate the order of the profiles */ protected void calculateProfiles() { if (mProfiles == null) { mProfiles = new ArrayList<>(); } if (mCurrentProfile == null) { int setCount = 0; for (int i = 0; i < mProfiles.size(); i++) { if (mProfiles.size() > i && mProfiles.get(i).isSelectable()) { if (setCount == 0 && (mCurrentProfile == null)) { mCurrentProfile = mProfiles.get(i); } else if (setCount == 1 && (mProfileFirst == null)) { mProfileFirst = mProfiles.get(i); } else if (setCount == 2 && (mProfileSecond == null)) { mProfileSecond = mProfiles.get(i); } else if (setCount == 3 && (mProfileThird == null)) { mProfileThird = mProfiles.get(i); } setCount++; } } return; } IProfile[] previousActiveProfiles = new IProfile[]{ mCurrentProfile, mProfileFirst, mProfileSecond, mProfileThird }; IProfile[] newActiveProfiles = new IProfile[4]; Stack<IProfile> unusedProfiles = new Stack<>(); // try to keep existing active profiles in the same positions for (int i = 0; i < mProfiles.size(); i++) { IProfile p = mProfiles.get(i); if (p.isSelectable()) { boolean used = false; for (int j = 0; j < 4; j++) { if (previousActiveProfiles[j] == p) { newActiveProfiles[j] = p; used = true; break; } } if (!used) { unusedProfiles.push(p); } } } Stack<IProfile> activeProfiles = new Stack<>(); // try to fill the gaps with new available profiles for (int i = 0; i < 4; i++) { if (newActiveProfiles[i] != null) { activeProfiles.push(newActiveProfiles[i]); } else if (!unusedProfiles.isEmpty()) { activeProfiles.push(unusedProfiles.pop()); } } Stack<IProfile> reversedActiveProfiles = new Stack<>(); while (!activeProfiles.empty()) { reversedActiveProfiles.push(activeProfiles.pop()); } // reassign active profiles if (reversedActiveProfiles.isEmpty()) { mCurrentProfile = null; } else { mCurrentProfile = reversedActiveProfiles.pop(); } if (reversedActiveProfiles.isEmpty()) { mProfileFirst = null; } else { mProfileFirst = reversedActiveProfiles.pop(); } if (reversedActiveProfiles.isEmpty()) { mProfileSecond = null; } else { mProfileSecond = reversedActiveProfiles.pop(); } if (reversedActiveProfiles.isEmpty()) { mProfileThird = null; } else { mProfileThird = reversedActiveProfiles.pop(); } } /** * helper method to switch the profiles * * @param newSelection * @return true if the new selection was the current profile */ protected boolean switchProfiles(IProfile newSelection) { if (newSelection == null) { return false; } if (mCurrentProfile == 
newSelection) { return true; } if (mAlternativeProfileHeaderSwitching) { int prevSelection = -1; if (mProfileFirst == newSelection) { prevSelection = 1; } else if (mProfileSecond == newSelection) { prevSelection = 2; } else if (mProfileThird == newSelection) { prevSelection = 3; } IProfile tmp = mCurrentProfile; mCurrentProfile = newSelection; if (prevSelection == 1) { mProfileFirst = tmp; } else if (prevSelection == 2) { mProfileSecond = tmp; } else if (prevSelection == 3) { mProfileThird = tmp; } } else { if (mProfiles != null) { ArrayList<IProfile> previousActiveProfiles = new ArrayList<>(Arrays.asList(mCurrentProfile, mProfileFirst, mProfileSecond, mProfileThird)); if (previousActiveProfiles.contains(newSelection)) { int position = -1; for (int i = 0; i < 4; i++) { if (previousActiveProfiles.get(i) == newSelection) { position = i; break; } } if (position != -1) { previousActiveProfiles.remove(position); previousActiveProfiles.add(0, newSelection); mCurrentProfile = previousActiveProfiles.get(0); mProfileFirst = previousActiveProfiles.get(1); mProfileSecond = previousActiveProfiles.get(2); mProfileThird = previousActiveProfiles.get(3); } } else { mProfileThird = mProfileSecond; mProfileSecond = mProfileFirst; mProfileFirst = mCurrentProfile; mCurrentProfile = newSelection; } } } //if we only show the small profile images we have to make sure the first (would be the current selected) profile is also shown if (mOnlySmallProfileImagesVisible) { mProfileThird = mProfileSecond; mProfileSecond = mProfileFirst; mProfileFirst = mCurrentProfile; mCurrentProfile = mProfileThird; } buildProfiles(); return false; } /** * helper method to build the views for the ui */ protected void buildProfiles() { mCurrentProfileView.setVisibility(View.INVISIBLE); mAccountHeaderTextSection.setVisibility(View.INVISIBLE); mAccountSwitcherArrow.setVisibility(View.INVISIBLE); mProfileFirstView.setVisibility(View.GONE); mProfileFirstView.setOnClickListener(null); mProfileSecondView.setVisibility(View.GONE); mProfileSecondView.setOnClickListener(null); mProfileThirdView.setVisibility(View.GONE); mProfileThirdView.setOnClickListener(null); mCurrentProfileName.setText(""); mCurrentProfileEmail.setText(""); handleSelectionView(mCurrentProfile, true); if (mCurrentProfile != null) { if ((mProfileImagesVisible || mOnlyMainProfileImageVisible) && !mOnlySmallProfileImagesVisible) { setImageOrPlaceholder(mCurrentProfileView, mCurrentProfile.getIcon()); if (mProfileImagesClickable) { mCurrentProfileView.setOnClickListener(onCurrentProfileClickListener); mCurrentProfileView.setOnLongClickListener(onCurrentProfileLongClickListener); mCurrentProfileView.disableTouchFeedback(false); } else { mCurrentProfileView.disableTouchFeedback(true); } mCurrentProfileView.setVisibility(View.VISIBLE); mCurrentProfileView.invalidate(); } else if (mCompactStyle) { mCurrentProfileView.setVisibility(View.GONE); } mAccountHeaderTextSection.setVisibility(View.VISIBLE); handleSelectionView(mCurrentProfile, true); mAccountSwitcherArrow.setVisibility(View.VISIBLE); mCurrentProfileView.setTag(R.id.material_drawer_profile_header, mCurrentProfile); StringHolder.applyTo(mCurrentProfile.getName(), mCurrentProfileName); StringHolder.applyTo(mCurrentProfile.getEmail(), mCurrentProfileEmail); if (mProfileFirst != null && mProfileImagesVisible && !mOnlyMainProfileImageVisible) { setImageOrPlaceholder(mProfileFirstView, mProfileFirst.getIcon()); mProfileFirstView.setTag(R.id.material_drawer_profile_header, mProfileFirst); if (mProfileImagesClickable) { 
mProfileFirstView.setOnClickListener(onProfileClickListener); mProfileFirstView.setOnLongClickListener(onProfileLongClickListener); mProfileFirstView.disableTouchFeedback(false); } else { mProfileFirstView.disableTouchFeedback(true); } mProfileFirstView.setVisibility(View.VISIBLE); mProfileFirstView.invalidate(); } if (mProfileSecond != null && mProfileImagesVisible && !mOnlyMainProfileImageVisible) { setImageOrPlaceholder(mProfileSecondView, mProfileSecond.getIcon()); mProfileSecondView.setTag(R.id.material_drawer_profile_header, mProfileSecond); if (mProfileImagesClickable) { mProfileSecondView.setOnClickListener(onProfileClickListener); mProfileSecondView.setOnLongClickListener(onProfileLongClickListener); mProfileSecondView.disableTouchFeedback(false); } else { mProfileSecondView.disableTouchFeedback(true); } mProfileSecondView.setVisibility(View.VISIBLE); mProfileSecondView.invalidate(); } if (mProfileThird != null && mThreeSmallProfileImages && mProfileImagesVisible && !mOnlyMainProfileImageVisible) { setImageOrPlaceholder(mProfileThirdView, mProfileThird.getIcon()); mProfileThirdView.setTag(R.id.material_drawer_profile_header, mProfileThird); if (mProfileImagesClickable) { mProfileThirdView.setOnClickListener(onProfileClickListener); mProfileThirdView.setOnLongClickListener(onProfileLongClickListener); mProfileThirdView.disableTouchFeedback(false); } else { mProfileThirdView.disableTouchFeedback(true); } mProfileThirdView.setVisibility(View.VISIBLE); mProfileThirdView.invalidate(); } } else if (mProfiles != null && mProfiles.size() > 0) { IProfile profile = mProfiles.get(0); mAccountHeaderTextSection.setTag(R.id.material_drawer_profile_header, profile); mAccountHeaderTextSection.setVisibility(View.VISIBLE); handleSelectionView(mCurrentProfile, true); mAccountSwitcherArrow.setVisibility(View.VISIBLE); if (mCurrentProfile != null) { StringHolder.applyTo(mCurrentProfile.getName(), mCurrentProfileName); StringHolder.applyTo(mCurrentProfile.getEmail(), mCurrentProfileEmail); } } if (!mSelectionFirstLineShown) { mCurrentProfileName.setVisibility(View.GONE); } if (!TextUtils.isEmpty(mSelectionFirstLine)) { mCurrentProfileName.setText(mSelectionFirstLine); mAccountHeaderTextSection.setVisibility(View.VISIBLE); } if (!mSelectionSecondLineShown) { mCurrentProfileEmail.setVisibility(View.GONE); } if (!TextUtils.isEmpty(mSelectionSecondLine)) { mCurrentProfileEmail.setText(mSelectionSecondLine); mAccountHeaderTextSection.setVisibility(View.VISIBLE); } //if we disabled the list if (!mSelectionListEnabled) { mAccountSwitcherArrow.setVisibility(View.INVISIBLE); handleSelectionView(null, false); } if (!mSelectionListEnabledForSingleProfile && mProfileFirst == null && (mProfiles == null || mProfiles.size() == 1)) { mAccountSwitcherArrow.setVisibility(View.INVISIBLE); handleSelectionView(null, false); } //if we disabled the list but still have set a custom listener if (mOnAccountHeaderSelectionViewClickListener != null) { handleSelectionView(mCurrentProfile, true); } } /** * small helper method to set an profile image or a placeholder * * @param iv * @param imageHolder */ private void setImageOrPlaceholder(ImageView iv, ImageHolder imageHolder) { //cancel previous started image loading processes DrawerImageLoader.getInstance().cancelImage(iv); //set the placeholder iv.setImageDrawable(DrawerUIUtils.getPlaceHolder(iv.getContext())); //set the real image (probably also the uri) ImageHolder.applyTo(imageHolder, iv, DrawerImageLoader.Tags.PROFILE.name()); } /** * onProfileClickListener to notify onClick 
on the current profile image */ private View.OnClickListener onCurrentProfileClickListener = new View.OnClickListener() { @Override public void onClick(final View v) { onProfileImageClick(v, true); } }; /** * onProfileClickListener to notify onClick on a profile image */ private View.OnClickListener onProfileClickListener = new View.OnClickListener() { @Override public void onClick(final View v) { onProfileImageClick(v, false); } }; /** * calls the mOnAccountHEaderProfileImageListener and continues with the actions afterwards * * @param v * @param current */ private void onProfileImageClick(View v, boolean current) { IProfile profile = (IProfile) v.getTag(R.id.material_drawer_profile_header); boolean consumed = false; if (mOnAccountHeaderProfileImageListener != null) { consumed = mOnAccountHeaderProfileImageListener.onProfileImageClick(v, profile, current); } //if the event was already consumed by the click don't continue. note that this will also stop the profile change event if (!consumed) { onProfileClick(v, current); } } /** * onProfileLongClickListener to call the onProfileImageLongClick on the current profile image */ private View.OnLongClickListener onCurrentProfileLongClickListener = new View.OnLongClickListener() { @Override public boolean onLongClick(View v) { if (mOnAccountHeaderProfileImageListener != null) { IProfile profile = (IProfile) v.getTag(R.id.material_drawer_profile_header); return mOnAccountHeaderProfileImageListener.onProfileImageLongClick(v, profile, true); } return false; } }; /** * onProfileLongClickListener to call the onProfileImageLongClick on a profile image */ private View.OnLongClickListener onProfileLongClickListener = new View.OnLongClickListener() { @Override public boolean onLongClick(View v) { if (mOnAccountHeaderProfileImageListener != null) { IProfile profile = (IProfile) v.getTag(R.id.material_drawer_profile_header); return mOnAccountHeaderProfileImageListener.onProfileImageLongClick(v, profile, false); } return false; } }; protected void onProfileClick(View v, boolean current) { final IProfile profile = (IProfile) v.getTag(R.id.material_drawer_profile_header); switchProfiles(profile); //reset the drawer content resetDrawerContent(v.getContext()); //notify the MiniDrawer about the clicked profile (only if one exists and is hooked to the Drawer if (mDrawer != null && mDrawer.getDrawerBuilder() != null && mDrawer.getDrawerBuilder().mMiniDrawer != null) { mDrawer.getDrawerBuilder().mMiniDrawer.onProfileClick(); } //notify about the changed profile boolean consumed = false; if (mOnAccountHeaderListener != null) { consumed = mOnAccountHeaderListener.onProfileChanged(v, profile, current); } if (!consumed) { if (mOnProfileClickDrawerCloseDelay > 0) { new Handler().postDelayed(new Runnable() { @Override public void run() { if (mDrawer != null) { mDrawer.closeDrawer(); } } }, mOnProfileClickDrawerCloseDelay); } else { if (mDrawer != null) { mDrawer.closeDrawer(); } } } } /** * get the current selection * * @return */ protected int getCurrentSelection() { if (mCurrentProfile != null && mProfiles != null) { int i = 0; for (IProfile profile : mProfiles) { if (profile == mCurrentProfile) { return i; } i++; } } return -1; } /** * onSelectionClickListener to notify the onClick on the checkbox */ private View.OnClickListener onSelectionClickListener = new View.OnClickListener() { @Override public void onClick(View v) { boolean consumed = false; if (mOnAccountHeaderSelectionViewClickListener != null) { consumed = mOnAccountHeaderSelectionViewClickListener.onClick(v, 
(IProfile) v.getTag(R.id.material_drawer_profile_header)); } if (mAccountSwitcherArrow.getVisibility() == View.VISIBLE && !consumed) { toggleSelectionList(v.getContext()); } } }; /** * helper method to toggle the collection * * @param ctx */ protected void toggleSelectionList(Context ctx) { if (mDrawer != null) { //if we already show the list. reset everything instead if (mDrawer.switchedDrawerContent()) { resetDrawerContent(ctx); mSelectionListShown = false; } else { //build and set the drawer selection list buildDrawerSelectionList(); // update the arrow image within the drawer mAccountSwitcherArrow.setImageDrawable(new IconicsDrawable(ctx, MaterialDrawerFont.Icon.mdf_arrow_drop_up).sizeRes(R.dimen.material_drawer_account_header_dropdown).paddingRes(R.dimen.material_drawer_account_header_dropdown_padding).color(ColorHolder.color(mTextColor, ctx, R.attr.material_drawer_header_selection_text, R.color.material_drawer_header_selection_text))); mSelectionListShown = true; } } } /** * helper method to build and set the drawer selection list */ protected void buildDrawerSelectionList() { int selectedPosition = -1; int position = 0; ArrayList<IDrawerItem> profileDrawerItems = new ArrayList<>(); if (mProfiles != null) { for (IProfile profile : mProfiles) { if (profile == mCurrentProfile) { if (mCurrentHiddenInList) { continue; } else { selectedPosition = mDrawer.mDrawerBuilder.getItemAdapter().getGlobalPosition(position); } } if (profile instanceof IDrawerItem) { ((IDrawerItem) profile).withSetSelected(false); profileDrawerItems.add((IDrawerItem) profile); } position = position + 1; } } mDrawer.switchDrawerContent(onDrawerItemClickListener, onDrawerItemLongClickListener, profileDrawerItems, selectedPosition); } /** * onDrawerItemClickListener to catch the selection for the new profile! */ private Drawer.OnDrawerItemClickListener onDrawerItemClickListener = new Drawer.OnDrawerItemClickListener() { @Override public boolean onItemClick(final View view, int position, final IDrawerItem drawerItem) { final boolean isCurrentSelectedProfile; if (drawerItem != null && drawerItem instanceof IProfile && drawerItem.isSelectable()) { isCurrentSelectedProfile = switchProfiles((IProfile) drawerItem); } else { isCurrentSelectedProfile = false; } if (mResetDrawerOnProfileListClick) { mDrawer.setOnDrawerItemClickListener(null); } //wrap the onSelection call and the reset stuff within a handler to prevent lag if (mResetDrawerOnProfileListClick && mDrawer != null && view != null && view.getContext() != null) { resetDrawerContent(view.getContext()); } //notify the MiniDrawer about the clicked profile (only if one exists and is hooked to the Drawer if (mDrawer != null && mDrawer.getDrawerBuilder() != null && mDrawer.getDrawerBuilder().mMiniDrawer != null) { mDrawer.getDrawerBuilder().mMiniDrawer.onProfileClick(); } boolean consumed = false; if (drawerItem != null && drawerItem instanceof IProfile) { if (mOnAccountHeaderListener != null) { consumed = mOnAccountHeaderListener.onProfileChanged(view, (IProfile) drawerItem, isCurrentSelectedProfile); } } //if a custom behavior was chosen via the CloseDrawerOnProfileListClick then use this. 
else react on the result of the onProfileChanged listener if (mCloseDrawerOnProfileListClick != null) { consumed = consumed && !mCloseDrawerOnProfileListClick; } //totally custom handling of the drawer behavior as otherwise the selection of the profile list is set to the Drawer if (mDrawer != null && !consumed) { //close the drawer after click mDrawer.mDrawerBuilder.closeDrawerDelayed(); } //consume the event to prevent setting the clicked item as selected in the already switched item list return true; } }; /** * onDrawerItemLongClickListener to catch the longClick for a profile */ private Drawer.OnDrawerItemLongClickListener onDrawerItemLongClickListener = new Drawer.OnDrawerItemLongClickListener() { @Override public boolean onItemLongClick(View view, int position, IDrawerItem drawerItem) { //if a longClickListener was defined use it if (mOnAccountHeaderItemLongClickListener != null) { final boolean isCurrentSelectedProfile; isCurrentSelectedProfile = drawerItem != null && drawerItem.isSelected(); if (drawerItem != null && drawerItem instanceof IProfile) { return mOnAccountHeaderItemLongClickListener.onProfileLongClick(view, (IProfile) drawerItem, isCurrentSelectedProfile); } } return false; } }; /** * helper method to reset the drawer content */ private void resetDrawerContent(Context ctx) { if (mDrawer != null) { mDrawer.resetDrawerContent(); } mAccountSwitcherArrow.setImageDrawable(new IconicsDrawable(ctx, MaterialDrawerFont.Icon.mdf_arrow_drop_down).sizeRes(R.dimen.material_drawer_account_header_dropdown).paddingRes(R.dimen.material_drawer_account_header_dropdown_padding).color(ColorHolder.color(mTextColor, ctx, R.attr.material_drawer_header_selection_text, R.color.material_drawer_header_selection_text))); } /** * small helper class to update the header and the list */ protected void updateHeaderAndList() { //recalculate the profiles calculateProfiles(); //update the profiles in the header buildProfiles(); //if we currently show the list add the new item directly to it if (mSelectionListShown) { buildDrawerSelectionList(); } } }
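/*
 * Usage sketch (added for illustration, not part of the original source): a typical way to build an
 * AccountHeader with the builder above inside an Activity. The sample package, activity, profile
 * data and drawable resource are assumptions; ProfileDrawerItem is the IProfile implementation
 * shipped with this library, and the resulting header is normally passed to a DrawerBuilder via
 * withAccountHeader().
 */
package com.mikepenz.materialdrawer.sample;

import android.app.Activity;
import android.os.Bundle;

import com.mikepenz.materialdrawer.AccountHeader;
import com.mikepenz.materialdrawer.AccountHeaderBuilder;
import com.mikepenz.materialdrawer.model.ProfileDrawerItem;

public class AccountHeaderSampleActivity extends Activity {

    private AccountHeader mHeader;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        mHeader = new AccountHeaderBuilder()
                .withActivity(this)
                .withTranslucentStatusBar(true)
                .withHeaderBackground(R.drawable.header_background) // assumed drawable resource
                .addProfiles(
                        new ProfileDrawerItem().withName("Jane Doe").withEmail("jane@example.com"),
                        new ProfileDrawerItem().withName("John Doe").withEmail("john@example.com"))
                .withSavedInstance(savedInstanceState)
                .build();

        // mHeader can now be supplied to a DrawerBuilder via withAccountHeader(mHeader).
    }
}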