| lang (stringclasses, 1 value) | license (stringclasses, 13 values) | stderr (stringlengths, 0-350) | commit (stringlengths, 40-40) | returncode (int64, 0-128) | repos (stringlengths, 7-45.1k) | new_contents (stringlengths, 0-1.87M) | new_file (stringlengths, 6-292) | old_contents (stringlengths, 0-1.87M) | message (stringlengths, 6-9.26k) | old_file (stringlengths, 6-292) | subject (stringlengths, 0-4.45k) |
|---|---|---|---|---|---|---|---|---|---|---|---|
Java
|
apache-2.0
|
745b78964854ba35048f69aba19d339d26e8f7b9
| 0
|
cloudsmith/orientdb,cloudsmith/orientdb,cloudsmith/orientdb,cloudsmith/orientdb
|
/*
* Copyright 1999-2010 Luca Garulli (l.garulli--at--orientechnologies.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.orientechnologies.orient.core.index;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map.Entry;
import java.util.Set;
import com.orientechnologies.common.collection.ONavigableMap;
import com.orientechnologies.common.concur.resource.OSharedResourceAbstract;
import com.orientechnologies.common.listener.OProgressListener;
import com.orientechnologies.common.log.OLogManager;
import com.orientechnologies.common.profiler.OProfiler;
import com.orientechnologies.common.profiler.OProfiler.OProfilerHookValue;
import com.orientechnologies.orient.core.OMemoryWatchDog.Listener;
import com.orientechnologies.orient.core.Orient;
import com.orientechnologies.orient.core.annotation.ODocumentInstance;
import com.orientechnologies.orient.core.db.ODatabase;
import com.orientechnologies.orient.core.db.ODatabaseListener;
import com.orientechnologies.orient.core.db.ODatabaseRecordThreadLocal;
import com.orientechnologies.orient.core.db.record.ODatabaseRecord;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.db.record.ORecordElement;
import com.orientechnologies.orient.core.db.record.ORecordLazySet;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.intent.OIntentMassiveInsert;
import com.orientechnologies.orient.core.metadata.schema.OType;
import com.orientechnologies.orient.core.record.ORecord;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.record.impl.ORecordBytes;
import com.orientechnologies.orient.core.serialization.serializer.stream.OStreamSerializerListRID;
import com.orientechnologies.orient.core.serialization.serializer.stream.OStreamSerializerLiteral;
import com.orientechnologies.orient.core.tx.OTransactionIndexChanges.OPERATION;
import com.orientechnologies.orient.core.type.tree.OMVRBTreeDatabaseLazySave;
/**
* Handles indexing when records change.
*
* @author Luca Garulli
*
*/
public abstract class OIndexMVRBTreeAbstract extends OSharedResourceAbstract implements OIndexInternal, ODatabaseListener {
protected static final String CONFIG_MAP_RID = "mapRid";
protected static final String CONFIG_CLUSTERS = "clusters";
protected String name;
protected String type;
protected OMVRBTreeDatabaseLazySave<Object, Set<OIdentifiable>> map;
protected Set<String> clustersToIndex = new LinkedHashSet<String>();
protected OIndexCallback callback;
protected boolean automatic;
@ODocumentInstance
protected ODocument configuration;
private Listener watchDog;
private volatile int optimization = 0;
public OIndexMVRBTreeAbstract(final String iType) {
type = iType;
watchDog = new Listener() {
public void memoryUsageLow(final TYPE iType, final long usedMemory, final long maxMemory) {
if (iType == TYPE.JVM)
optimization = 1;
}
public void memoryUsageCritical(final TYPE iType, final long usedMemory, final long maxMemory) {
if (iType == TYPE.JVM)
optimization = 2;
}
};
}
public void flush() {
lazySave();
}
/**
* Creates the index.
*
* @param iDatabase
* Current Database instance
* @param iProperty
* Owner property
* @param iClusterIndexName
* Cluster name where to place the TreeMap
* @param iProgressListener
* Listener to get called on progress
*/
public OIndexInternal create(final String iName, final ODatabaseRecord iDatabase, final String iClusterIndexName,
final int[] iClusterIdsToIndex, final OProgressListener iProgressListener, final boolean iAutomatic) {
acquireExclusiveLock();
try {
name = iName;
configuration = new ODocument(iDatabase);
automatic = iAutomatic;
if (iClusterIdsToIndex != null)
for (int id : iClusterIdsToIndex)
clustersToIndex.add(iDatabase.getClusterNameById(id));
map = new OMVRBTreeDatabaseLazySave<Object, Set<OIdentifiable>>(iDatabase, iClusterIndexName,
OStreamSerializerLiteral.INSTANCE, OStreamSerializerListRID.INSTANCE);
installHooks(iDatabase);
rebuild(iProgressListener);
updateConfiguration();
return this;
} finally {
releaseExclusiveLock();
}
}
public OIndexInternal loadFromConfiguration(final ODocument iConfig) {
acquireExclusiveLock();
try {
final ORID rid = (ORID) iConfig.field(CONFIG_MAP_RID, ORID.class);
if (rid == null)
return null;
configuration = iConfig;
name = configuration.field(OIndexInternal.CONFIG_NAME);
automatic = (Boolean) (configuration.field(OIndexInternal.CONFIG_AUTOMATIC) != null ? configuration
.field(OIndexInternal.CONFIG_AUTOMATIC) : true);
clustersToIndex.clear();
final Collection<? extends String> clusters = configuration.field(CONFIG_CLUSTERS);
if (clusters != null)
clustersToIndex.addAll(clusters);
map = new OMVRBTreeDatabaseLazySave<Object, Set<OIdentifiable>>(getDatabase(), rid);
map.load();
installHooks(iConfig.getDatabase());
return this;
} finally {
releaseExclusiveLock();
}
}
public Set<OIdentifiable> get(final Object iKey) {
checkForOptimization();
acquireExclusiveLock();
try {
final ORecordLazySet values = (ORecordLazySet) map.get(iKey);
if (values != null)
values.setDatabase(ODatabaseRecordThreadLocal.INSTANCE.get());
if (values == null)
return ORecordLazySet.EMPTY_SET;
return values;
} finally {
releaseExclusiveLock();
}
}
public boolean contains(final Object iKey) {
checkForOptimization();
acquireExclusiveLock();
try {
return map.containsKey(iKey);
} finally {
releaseExclusiveLock();
}
}
/**
* Returns a set of records with key between the range passed as parameter. Range bounds are included.
*
* @param iRangeFrom
* Starting range
* @param iRangeTo
* Ending range
* @see #getBetween(Object, Object, boolean)
* @return
*/
public Set<OIdentifiable> getBetween(final Object iRangeFrom, final Object iRangeTo) {
return getBetween(iRangeFrom, iRangeTo, true);
}
/**
* Returns a set of records with key between the range passed as parameter.
*
* @param iRangeFrom
* Starting range
* @param iRangeTo
* Ending range
* @param iInclusive
* Include from/to bounds
* @see #getBetween(Object, Object)
* @return
*/
public Set<OIdentifiable> getBetween(final Object iRangeFrom, final Object iRangeTo, final boolean iInclusive) {
if (iRangeFrom.getClass() != iRangeTo.getClass())
throw new IllegalArgumentException("Range from-to parameters are of different types");
checkForOptimization();
acquireExclusiveLock();
try {
final ONavigableMap<Object, Set<OIdentifiable>> subSet = map.subMap(iRangeFrom, iInclusive, iRangeTo, iInclusive);
if (subSet == null)
return ORecordLazySet.EMPTY_SET;
final Set<OIdentifiable> result = new ORecordLazySet(configuration.getDatabase());
for (Set<OIdentifiable> v : subSet.values()) {
result.addAll(v);
}
return result;
} finally {
releaseExclusiveLock();
}
}
public ORID getIdentity() {
return map.getRecord().getIdentity();
}
public OIndexInternal rebuild() {
return rebuild(null);
}
/**
* Populates the index with all the existent records. Uses the massive insert intent to speed up and keep the consumed memory low.
*/
public OIndexInternal rebuild(final OProgressListener iProgressListener) {
clear();
getDatabase().declareIntent(new OIntentMassiveInsert());
checkForOptimization();
acquireExclusiveLock();
try {
int documentIndexed = 0;
int documentNum = 0;
long documentTotal = 0;
for (String cluster : clustersToIndex)
documentTotal += getDatabase().countClusterElements(cluster);
if (iProgressListener != null)
iProgressListener.onBegin(this, documentTotal);
for (String clusterName : clustersToIndex)
for (ORecord<?> record : getDatabase().browseCluster(clusterName)) {
if (record instanceof ODocument) {
final ODocument doc = (ODocument) record;
final Object fieldValue = callback.getDocumentValueToIndex(doc);
if (fieldValue != null) {
put(fieldValue, doc);
++documentIndexed;
}
}
documentNum++;
if (iProgressListener != null)
iProgressListener.onProgress(this, documentNum, documentNum * 100f / documentTotal);
}
lazySave();
if (iProgressListener != null)
iProgressListener.onCompletition(this, true);
} catch (Exception e) {
if (iProgressListener != null)
iProgressListener.onCompletition(this, false);
clear();
throw new OIndexException("Error on rebuilding the index for clusters: " + clustersToIndex, e);
} finally {
getDatabase().declareIntent(null);
releaseExclusiveLock();
}
return this;
}
public boolean remove(final Object iKey, final OIdentifiable iValue) {
return remove(iKey);
}
public boolean remove(final Object key) {
checkForOptimization();
acquireExclusiveLock();
try {
return map.remove(key) != null;
} finally {
releaseExclusiveLock();
}
}
public int remove(final OIdentifiable iRecord) {
checkForOptimization();
acquireExclusiveLock();
try {
int tot = 0;
Set<OIdentifiable> rids;
for (Entry<Object, Set<OIdentifiable>> entries : map.entrySet()) {
rids = entries.getValue();
if (rids != null) {
if (rids.contains(iRecord)) {
remove(entries.getKey(), iRecord);
++tot;
}
}
}
return tot;
} finally {
releaseExclusiveLock();
}
}
public int count(final OIdentifiable iRecord) {
checkForOptimization();
acquireExclusiveLock();
try {
Set<OIdentifiable> rids;
int tot = 0;
for (Entry<Object, Set<OIdentifiable>> entries : map.entrySet()) {
rids = entries.getValue();
if (rids != null) {
if (rids.contains(iRecord)) {
++tot;
}
}
}
return tot;
} finally {
releaseExclusiveLock();
}
}
public OIndex clear() {
checkForOptimization();
acquireExclusiveLock();
try {
map.clear();
return this;
} finally {
releaseExclusiveLock();
}
}
public OIndexInternal delete() {
checkForOptimization();
acquireExclusiveLock();
try {
map.delete();
return this;
} finally {
releaseExclusiveLock();
}
}
public OIndexInternal lazySave() {
checkForOptimization();
acquireExclusiveLock();
try {
map.lazySave();
return this;
} finally {
releaseExclusiveLock();
}
}
public ORecordBytes getRecord() {
return map.getRecord();
}
public Iterator<Entry<Object, Set<OIdentifiable>>> iterator() {
checkForOptimization();
acquireExclusiveLock();
try {
return map.entrySet().iterator();
} finally {
releaseExclusiveLock();
}
}
public Iterable<Object> keys() {
checkForOptimization();
acquireExclusiveLock();
try {
return map.keySet();
} finally {
releaseExclusiveLock();
}
}
public long getSize() {
checkForOptimization();
acquireSharedLock();
try {
return map.size();
} finally {
releaseSharedLock();
}
}
public String getName() {
return name;
}
public String getType() {
return type;
}
@Override
public String toString() {
return name + " (" + (type != null ? type : "?") + ")" + (map != null ? " " + map : "");
}
public OIndexInternal getInternal() {
return this;
}
public OIndexCallback getCallback() {
return callback;
}
public void setCallback(final OIndexCallback callback) {
this.callback = callback;
}
public Set<String> getClusters() {
checkForOptimization();
acquireSharedLock();
try {
return Collections.unmodifiableSet(clustersToIndex);
} finally {
releaseSharedLock();
}
}
public OIndexMVRBTreeAbstract addCluster(final String iClusterName) {
checkForOptimization();
acquireExclusiveLock();
try {
clustersToIndex.add(iClusterName);
return this;
} finally {
releaseSharedLock();
}
}
public void checkEntry(final OIdentifiable iRecord, final Object iKey) {
}
public void unload() {
checkForOptimization();
acquireExclusiveLock();
try {
map.unload();
} finally {
releaseExclusiveLock();
}
}
public ODocument updateConfiguration() {
checkForOptimization();
acquireExclusiveLock();
try {
configuration.setStatus(ORecordElement.STATUS.UNMARSHALLING);
try {
configuration.field(OIndexInternal.CONFIG_TYPE, type);
configuration.field(OIndexInternal.CONFIG_NAME, name);
configuration.field(OIndexInternal.CONFIG_AUTOMATIC, automatic);
configuration.field(CONFIG_CLUSTERS, clustersToIndex, OType.EMBEDDEDSET);
configuration.field(CONFIG_MAP_RID, map.getRecord().getIdentity());
} finally {
configuration.setStatus(ORecordElement.STATUS.LOADED);
}
} finally {
releaseExclusiveLock();
}
return configuration;
}
@SuppressWarnings("unchecked")
public void commit(final ODocument iDocument) {
if (iDocument == null)
return;
checkForOptimization();
acquireExclusiveLock();
try {
final Boolean clearAll = (Boolean) iDocument.field("clear");
if (clearAll != null && clearAll)
clear();
final ODocument entries = iDocument.field("entries");
for (Entry<String, Object> entry : entries) {
final Object key = entry.getKey();
final List<ODocument> operations = (List<ODocument>) entry.getValue();
if (operations != null) {
for (ODocument op : operations) {
final int operation = (Integer) op.rawField("o");
final OIdentifiable value = op.field("v");
if (operation == OPERATION.PUT.ordinal())
put(key, value);
else if (operation == OPERATION.REMOVE.ordinal()) {
if (key.equals("*"))
remove(value);
else if (value == null)
remove(key);
else
remove(key, value);
}
}
}
}
} finally {
releaseExclusiveLock();
}
}
public ODocument getConfiguration() {
return configuration;
}
public boolean isAutomatic() {
return automatic;
}
protected void installHooks(final ODatabaseRecord iDatabase) {
OProfiler.getInstance().registerHookValue("index." + name + ".items", new OProfilerHookValue() {
public Object getValue() {
acquireSharedLock();
try {
return map != null ? map.size() : "-";
} finally {
releaseSharedLock();
}
}
});
OProfiler.getInstance().registerHookValue("index." + name + ".entryPointSize", new OProfilerHookValue() {
public Object getValue() {
return map != null ? map.getEntryPointSize() : "-";
}
});
OProfiler.getInstance().registerHookValue("index." + name + ".maxUpdateBeforeSave", new OProfilerHookValue() {
public Object getValue() {
return map != null ? map.getMaxUpdatesBeforeSave() : "-";
}
});
OProfiler.getInstance().registerHookValue("index." + name + ".optimizationThreshold", new OProfilerHookValue() {
public Object getValue() {
return map != null ? map.getOptimizeThreshold() : "-";
}
});
Orient.instance().getMemoryWatchDog().addListener(watchDog);
iDatabase.registerListener(this);
}
protected void uninstallHooks(final ODatabaseRecord iDatabase) {
OProfiler.getInstance().unregisterHookValue("index." + name + ".items");
OProfiler.getInstance().unregisterHookValue("index." + name + ".entryPointSize");
OProfiler.getInstance().unregisterHookValue("index." + name + ".maxUpdateBeforeSave");
OProfiler.getInstance().unregisterHookValue("index." + name + ".optimizationThreshold");
Orient.instance().getMemoryWatchDog().removeListener(watchDog);
iDatabase.unregisterListener(this);
}
public void onCreate(ODatabase iDatabase) {
}
public void onDelete(ODatabase iDatabase) {
}
public void onOpen(ODatabase iDatabase) {
}
public void onBeforeTxBegin(ODatabase iDatabase) {
}
public void onBeforeTxRollback(final ODatabase iDatabase) {
}
public void onAfterTxRollback(final ODatabase iDatabase) {
checkForOptimization();
acquireExclusiveLock();
try {
map.unload();
} finally {
releaseExclusiveLock();
}
}
public void onBeforeTxCommit(final ODatabase iDatabase) {
checkForOptimization();
acquireExclusiveLock();
try {
map.commitChanges();
} finally {
releaseExclusiveLock();
}
}
public void onAfterTxCommit(final ODatabase iDatabase) {
checkForOptimization();
acquireExclusiveLock();
try {
map.onAfterTxCommit();
} finally {
releaseExclusiveLock();
}
}
public void onClose(final ODatabase iDatabase) {
checkForOptimization();
acquireExclusiveLock();
try {
map.commitChanges();
Orient.instance().getMemoryWatchDog().removeListener(watchDog);
} finally {
releaseExclusiveLock();
}
}
protected void optimize(final boolean iHardMode) {
if (map == null)
return;
acquireExclusiveLock();
try {
OLogManager.instance().warn(this,
"Forcing " + (iHardMode ? "hard" : "soft") + " optimization of Index %s (%d items). Found %d entries in memory...", name,
map.size(), map.getInMemoryEntries());
final int freed = map.optimize(true);
OLogManager.instance().warn(this, "Completed! Freed %d entries and now %d entries reside in memory", freed,
map.getInMemoryEntries());
} finally {
releaseExclusiveLock();
}
}
protected void checkForOptimization() {
if (optimization > 0) {
final boolean hardMode = optimization == 2;
optimize(hardMode);
optimization = 0;
}
}
protected ODatabaseRecord getDatabase() {
return ODatabaseRecordThreadLocal.INSTANCE.get();
}
}
|
core/src/main/java/com/orientechnologies/orient/core/index/OIndexMVRBTreeAbstract.java
|
/*
* Copyright 1999-2010 Luca Garulli (l.garulli--at--orientechnologies.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.orientechnologies.orient.core.index;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map.Entry;
import java.util.Set;
import com.orientechnologies.common.collection.ONavigableMap;
import com.orientechnologies.common.concur.resource.OSharedResourceAbstract;
import com.orientechnologies.common.listener.OProgressListener;
import com.orientechnologies.common.log.OLogManager;
import com.orientechnologies.common.profiler.OProfiler;
import com.orientechnologies.common.profiler.OProfiler.OProfilerHookValue;
import com.orientechnologies.orient.core.OMemoryWatchDog.Listener;
import com.orientechnologies.orient.core.Orient;
import com.orientechnologies.orient.core.annotation.ODocumentInstance;
import com.orientechnologies.orient.core.db.ODatabase;
import com.orientechnologies.orient.core.db.ODatabaseListener;
import com.orientechnologies.orient.core.db.ODatabaseRecordThreadLocal;
import com.orientechnologies.orient.core.db.record.ODatabaseRecord;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.db.record.ORecordElement;
import com.orientechnologies.orient.core.db.record.ORecordLazySet;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.intent.OIntentMassiveInsert;
import com.orientechnologies.orient.core.metadata.schema.OType;
import com.orientechnologies.orient.core.record.ORecord;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.record.impl.ORecordBytes;
import com.orientechnologies.orient.core.serialization.serializer.stream.OStreamSerializerListRID;
import com.orientechnologies.orient.core.serialization.serializer.stream.OStreamSerializerLiteral;
import com.orientechnologies.orient.core.tx.OTransactionIndexChanges.OPERATION;
import com.orientechnologies.orient.core.type.tree.OMVRBTreeDatabaseLazySave;
/**
* Handles indexing when records change.
*
* @author Luca Garulli
*
*/
public abstract class OIndexMVRBTreeAbstract extends OSharedResourceAbstract implements OIndexInternal, ODatabaseListener {
protected static final String CONFIG_MAP_RID = "mapRid";
protected static final String CONFIG_CLUSTERS = "clusters";
protected String name;
protected String type;
protected OMVRBTreeDatabaseLazySave<Object, Set<OIdentifiable>> map;
protected Set<String> clustersToIndex = new LinkedHashSet<String>();
protected OIndexCallback callback;
protected boolean automatic;
@ODocumentInstance
protected ODocument configuration;
private Listener watchDog;
private volatile int optimization = 0;
public OIndexMVRBTreeAbstract(final String iType) {
type = iType;
watchDog = new Listener() {
public void memoryUsageLow(final TYPE iType, final long usedMemory, final long maxMemory) {
if (iType == TYPE.JVM)
optimization = 1;
}
public void memoryUsageCritical(final TYPE iType, final long usedMemory, final long maxMemory) {
if (iType == TYPE.JVM)
optimization = 2;
}
};
}
public void flush() {
lazySave();
}
/**
* Creates the index.
*
* @param iDatabase
* Current Database instance
* @param iProperty
* Owner property
* @param iClusterIndexName
* Cluster name where to place the TreeMap
* @param iProgressListener
* Listener to get called on progress
*/
public OIndexInternal create(final String iName, final ODatabaseRecord iDatabase, final String iClusterIndexName,
final int[] iClusterIdsToIndex, final OProgressListener iProgressListener, final boolean iAutomatic) {
acquireExclusiveLock();
try {
name = iName;
configuration = new ODocument(iDatabase);
automatic = iAutomatic;
if (iClusterIdsToIndex != null)
for (int id : iClusterIdsToIndex)
clustersToIndex.add(iDatabase.getClusterNameById(id));
map = new OMVRBTreeDatabaseLazySave<Object, Set<OIdentifiable>>(iDatabase, iClusterIndexName,
OStreamSerializerLiteral.INSTANCE, OStreamSerializerListRID.INSTANCE);
installHooks(iDatabase);
rebuild(iProgressListener);
updateConfiguration();
return this;
} finally {
releaseExclusiveLock();
}
}
public OIndexInternal loadFromConfiguration(final ODocument iConfig) {
acquireExclusiveLock();
try {
final ORID rid = (ORID) iConfig.field(CONFIG_MAP_RID, ORID.class);
if (rid == null)
return null;
configuration = iConfig;
name = configuration.field(OIndexInternal.CONFIG_NAME);
automatic = (Boolean) (configuration.field(OIndexInternal.CONFIG_AUTOMATIC) != null ? configuration
.field(OIndexInternal.CONFIG_AUTOMATIC) : true);
clustersToIndex.clear();
final Collection<? extends String> clusters = configuration.field(CONFIG_CLUSTERS);
if (clusters != null)
clustersToIndex.addAll(clusters);
map = new OMVRBTreeDatabaseLazySave<Object, Set<OIdentifiable>>(getDatabase(), rid);
map.load();
installHooks(iConfig.getDatabase());
return this;
} finally {
releaseExclusiveLock();
}
}
public Set<OIdentifiable> get(final Object iKey) {
checkForOptimization();
acquireExclusiveLock();
try {
final ORecordLazySet values = (ORecordLazySet) map.get(iKey);
if (values != null)
values.setDatabase(ODatabaseRecordThreadLocal.INSTANCE.get());
if (values == null)
return ORecordLazySet.EMPTY_SET;
return values;
} finally {
releaseExclusiveLock();
}
}
public boolean contains(final Object iKey) {
checkForOptimization();
acquireExclusiveLock();
try {
return map.containsKey(iKey);
} finally {
releaseExclusiveLock();
}
}
/**
* Returns a set of records with key between the range passed as parameter. Range bounds are included.
*
* @param iRangeFrom
* Starting range
* @param iRangeTo
* Ending range
* @see #getBetween(Object, Object, boolean)
* @return
*/
public Set<OIdentifiable> getBetween(final Object iRangeFrom, final Object iRangeTo) {
return getBetween(iRangeFrom, iRangeTo, true);
}
/**
* Returns a set of records with key between the range passed as parameter.
*
* @param iRangeFrom
* Starting range
* @param iRangeTo
* Ending range
* @param iInclusive
* Include from/to bounds
* @see #getBetween(Object, Object)
* @return
*/
public Set<OIdentifiable> getBetween(final Object iRangeFrom, final Object iRangeTo, final boolean iInclusive) {
if (iRangeFrom.getClass() != iRangeTo.getClass())
throw new IllegalArgumentException("Range from-to parameters are of different types");
checkForOptimization();
acquireExclusiveLock();
try {
final ONavigableMap<Object, Set<OIdentifiable>> subSet = map.subMap(iRangeFrom, iInclusive, iRangeTo, iInclusive);
if (subSet == null)
return ORecordLazySet.EMPTY_SET;
final Set<OIdentifiable> result = new ORecordLazySet(configuration.getDatabase());
for (Set<OIdentifiable> v : subSet.values()) {
result.addAll(v);
}
return result;
} finally {
releaseExclusiveLock();
}
}
public ORID getIdentity() {
return map.getRecord().getIdentity();
}
public OIndexInternal rebuild() {
return rebuild(null);
}
/**
* Populates the index with all the existent records. Uses the massive insert intent to speed up and keep the consumed memory low.
*/
public OIndexInternal rebuild(final OProgressListener iProgressListener) {
clear();
getDatabase().declareIntent(new OIntentMassiveInsert());
checkForOptimization();
acquireExclusiveLock();
try {
int documentIndexed = 0;
int documentNum = 0;
long documentTotal = 0;
for (String cluster : clustersToIndex)
documentTotal += getDatabase().countClusterElements(cluster);
if (iProgressListener != null)
iProgressListener.onBegin(this, documentTotal);
for (String clusterName : clustersToIndex)
for (ORecord<?> record : getDatabase().browseCluster(clusterName)) {
if (record instanceof ODocument) {
final ODocument doc = (ODocument) record;
final Object fieldValue = callback.getDocumentValueToIndex(doc);
if (fieldValue != null) {
put(fieldValue, doc);
++documentIndexed;
}
}
documentNum++;
if (iProgressListener != null)
iProgressListener.onProgress(this, documentNum, documentNum * 100f / documentTotal);
}
lazySave();
if (iProgressListener != null)
iProgressListener.onCompletition(this, true);
} catch (Exception e) {
if (iProgressListener != null)
iProgressListener.onCompletition(this, false);
clear();
throw new OIndexException("Error on rebuilding the index for clusters: " + clustersToIndex, e);
} finally {
getDatabase().declareIntent(null);
releaseExclusiveLock();
}
return this;
}
public boolean remove(final Object iKey, final OIdentifiable iValue) {
return remove(iKey);
}
public boolean remove(final Object key) {
checkForOptimization();
acquireExclusiveLock();
try {
return map.remove(key) != null;
} finally {
releaseExclusiveLock();
}
}
public int remove(final OIdentifiable iRecord) {
checkForOptimization();
acquireExclusiveLock();
try {
int tot = 0;
Set<OIdentifiable> rids;
for (Entry<Object, Set<OIdentifiable>> entries : map.entrySet()) {
rids = entries.getValue();
if (rids != null) {
if (rids.contains(iRecord)) {
remove(entries.getKey(), iRecord);
++tot;
}
}
}
return tot;
} finally {
releaseExclusiveLock();
}
}
public int count(final OIdentifiable iRecord) {
checkForOptimization();
acquireExclusiveLock();
try {
Set<OIdentifiable> rids;
int tot = 0;
for (Entry<Object, Set<OIdentifiable>> entries : map.entrySet()) {
rids = entries.getValue();
if (rids != null) {
if (rids.contains(iRecord)) {
++tot;
}
}
}
return tot;
} finally {
releaseExclusiveLock();
}
}
public OIndex clear() {
checkForOptimization();
acquireExclusiveLock();
try {
map.clear();
return this;
} finally {
releaseExclusiveLock();
}
}
public OIndexInternal delete() {
checkForOptimization();
acquireExclusiveLock();
try {
map.delete();
return this;
} finally {
releaseExclusiveLock();
}
}
public OIndexInternal lazySave() {
checkForOptimization();
acquireExclusiveLock();
try {
map.lazySave();
return this;
} finally {
releaseExclusiveLock();
}
}
public ORecordBytes getRecord() {
return map.getRecord();
}
public Iterator<Entry<Object, Set<OIdentifiable>>> iterator() {
checkForOptimization();
acquireExclusiveLock();
try {
return map.entrySet().iterator();
} finally {
releaseExclusiveLock();
}
}
public Iterable<Object> keys() {
checkForOptimization();
acquireExclusiveLock();
try {
return map.keySet();
} finally {
releaseExclusiveLock();
}
}
public long getSize() {
checkForOptimization();
acquireSharedLock();
try {
return map.size();
} finally {
releaseSharedLock();
}
}
public String getName() {
return name;
}
public String getType() {
return type;
}
@Override
public String toString() {
return name + " (" + (type != null ? type : "?") + ")" + (map != null ? " " + map : "");
}
public OIndexInternal getInternal() {
return this;
}
public OIndexCallback getCallback() {
return callback;
}
public void setCallback(final OIndexCallback callback) {
this.callback = callback;
}
public Set<String> getClusters() {
checkForOptimization();
acquireSharedLock();
try {
return Collections.unmodifiableSet(clustersToIndex);
} finally {
releaseSharedLock();
}
}
public OIndexMVRBTreeAbstract addCluster(final String iClusterName) {
checkForOptimization();
acquireExclusiveLock();
try {
clustersToIndex.add(iClusterName);
return this;
} finally {
releaseSharedLock();
}
}
public void checkEntry(final OIdentifiable iRecord, final Object iKey) {
}
public void unload() {
checkForOptimization();
acquireExclusiveLock();
try {
map.unload();
} finally {
releaseExclusiveLock();
}
}
public ODocument updateConfiguration() {
checkForOptimization();
acquireExclusiveLock();
try {
configuration.setStatus(ORecordElement.STATUS.UNMARSHALLING);
try {
configuration.field(OIndexInternal.CONFIG_TYPE, type);
configuration.field(OIndexInternal.CONFIG_NAME, name);
configuration.field(OIndexInternal.CONFIG_AUTOMATIC, automatic);
configuration.field(CONFIG_CLUSTERS, clustersToIndex, OType.EMBEDDEDSET);
configuration.field(CONFIG_MAP_RID, map.getRecord().getIdentity());
} finally {
configuration.setStatus(ORecordElement.STATUS.LOADED);
}
} finally {
releaseExclusiveLock();
}
return configuration;
}
@SuppressWarnings("unchecked")
public void commit(final ODocument iDocument) {
if (iDocument == null)
return;
checkForOptimization();
acquireExclusiveLock();
try {
final Boolean clearAll = (Boolean) iDocument.field("clear");
if (clearAll != null && clearAll)
clear();
final ODocument entries = iDocument.field("entries");
for (Entry<String, Object> entry : entries) {
final Object key = entry.getKey();
final List<ODocument> operations = (List<ODocument>) entry.getValue();
if (operations != null) {
for (ODocument op : operations) {
final int operation = (Integer) op.rawField("o");
final OIdentifiable value = op.field("v");
if (operation == OPERATION.PUT.ordinal())
put(key, value);
else if (operation == OPERATION.REMOVE.ordinal()) {
if (key.equals("*"))
remove(value);
else if (value == null)
remove(key);
else
remove(key, value);
}
}
}
}
} finally {
releaseExclusiveLock();
}
}
public ODocument getConfiguration() {
return configuration;
}
public boolean isAutomatic() {
return automatic;
}
protected void installHooks(final ODatabaseRecord iDatabase) {
OProfiler.getInstance().registerHookValue("index." + name + ".items", new OProfilerHookValue() {
public Object getValue() {
acquireSharedLock();
try {
return map != null ? map.size() : "-";
} finally {
releaseSharedLock();
}
}
});
OProfiler.getInstance().registerHookValue("index." + name + ".entryPointSize", new OProfilerHookValue() {
public Object getValue() {
return map != null ? map.getEntryPointSize() : "-";
}
});
OProfiler.getInstance().registerHookValue("index." + name + ".maxUpdateBeforeSave", new OProfilerHookValue() {
public Object getValue() {
return map != null ? map.getMaxUpdatesBeforeSave() : "-";
}
});
OProfiler.getInstance().registerHookValue("index." + name + ".optimizationThreshold", new OProfilerHookValue() {
public Object getValue() {
return map != null ? map.getOptimizeThreshold() : "-";
}
});
Orient.instance().getMemoryWatchDog().addListener(watchDog);
iDatabase.registerListener(this);
}
protected void uninstallHooks(final ODatabaseRecord iDatabase) {
OProfiler.getInstance().unregisterHookValue("index." + name + ".items");
OProfiler.getInstance().unregisterHookValue("index." + name + ".entryPointSize");
OProfiler.getInstance().unregisterHookValue("index." + name + ".maxUpdateBeforeSave");
OProfiler.getInstance().unregisterHookValue("index." + name + ".optimizationThreshold");
Orient.instance().getMemoryWatchDog().removeListener(watchDog);
iDatabase.unregisterListener(this);
}
public void onCreate(ODatabase iDatabase) {
}
public void onDelete(ODatabase iDatabase) {
}
public void onOpen(ODatabase iDatabase) {
}
public void onBeforeTxBegin(ODatabase iDatabase) {
}
public void onBeforeTxRollback(final ODatabase iDatabase) {
}
public void onAfterTxRollback(final ODatabase iDatabase) {
checkForOptimization();
acquireExclusiveLock();
try {
map.unload();
} finally {
releaseExclusiveLock();
}
}
public void onBeforeTxCommit(final ODatabase iDatabase) {
checkForOptimization();
acquireExclusiveLock();
try {
map.commitChanges();
} finally {
releaseExclusiveLock();
}
}
public void onAfterTxCommit(final ODatabase iDatabase) {
checkForOptimization();
acquireExclusiveLock();
try {
map.onAfterTxCommit();
} finally {
releaseExclusiveLock();
}
}
public void onClose(final ODatabase iDatabase) {
checkForOptimization();
acquireExclusiveLock();
try {
map.commitChanges();
Orient.instance().getMemoryWatchDog().removeListener(watchDog);
} finally {
releaseExclusiveLock();
}
}
protected void optimize(final boolean iHardMode) {
if (map == null)
return;
acquireExclusiveLock();
try {
OLogManager.instance().warn(this,
"Forcing " + (iHardMode ? "hard" : "soft") + " optimization of Index %s (%d items). Found %d entries in memory...", name,
map.size(), map.getInMemoryEntries());
final int freed = map.optimize(true);
OLogManager.instance().warn(this, "Completed! Freed %d entries and now %d entries reside in memory", freed,
map.getInMemoryEntries());
} finally {
releaseExclusiveLock();
}
}
protected void checkForOptimization() {
if (optimization > 0) {
final boolean hardMode = optimization == 2;
optimization = 0;
optimize(hardMode);
}
}
protected ODatabaseRecord getDatabase() {
return ODatabaseRecordThreadLocal.INSTANCE.get();
}
}
|
Index: moved optimization reset just after optimization is completed
|
core/src/main/java/com/orientechnologies/orient/core/index/OIndexMVRBTreeAbstract.java
|
Index: moved optimization reset just after optimization is completed
|
|
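A note on the row above: the only difference between the old and new contents is the ordering inside checkForOptimization(), matching the commit message. Below is a minimal standalone sketch (not the original class) contrasting the two orderings; one plausible reading is that, with the new ordering, a failed optimize() call leaves the request flag set so the next call retries.

```java
// Standalone sketch of the two checkForOptimization() orderings from the row above.
public class CheckForOptimizationSketch {
    private volatile int optimization = 2; // 2 = hard optimization requested

    // Old ordering: the flag is cleared before optimize() runs, so a failure
    // inside optimize() discards the pending request.
    void checkForOptimizationOld() {
        if (optimization > 0) {
            final boolean hardMode = optimization == 2;
            optimization = 0;
            optimize(hardMode);
        }
    }

    // New ordering (this commit): the flag is reset only after the
    // optimization has completed.
    void checkForOptimizationNew() {
        if (optimization > 0) {
            final boolean hardMode = optimization == 2;
            optimize(hardMode);
            optimization = 0;
        }
    }

    void optimize(boolean hardMode) {
        // placeholder for the real MVRB-tree optimization
    }
}
```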
Java
|
apache-2.0
|
c5836479cf04a23bc24f097ea606153b572eb76b
| 0
|
kuujo/onos,kuujo/onos,gkatsikas/onos,kuujo/onos,oplinkoms/onos,kuujo/onos,opennetworkinglab/onos,opennetworkinglab/onos,kuujo/onos,oplinkoms/onos,gkatsikas/onos,oplinkoms/onos,gkatsikas/onos,oplinkoms/onos,gkatsikas/onos,opennetworkinglab/onos,opennetworkinglab/onos,opennetworkinglab/onos,oplinkoms/onos,kuujo/onos,oplinkoms/onos,gkatsikas/onos,gkatsikas/onos,kuujo/onos,oplinkoms/onos,opennetworkinglab/onos
|
/*
* Copyright 2018-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onlab.packet;
import java.nio.ByteBuffer;
import static org.onlab.packet.PacketUtils.checkInput;
import static com.google.common.base.MoreObjects.toStringHelper;
/**
* ICMP packet class for echo purpose.
*/
public class ICMPEcho extends BasePacket {
private short identifier;
private short sequenceNum;
public static final short ICMP_ECHO_HEADER_LENGTH = 4;
/**
* Sets the identifier.
*
* @param identifier identifier
* @return this
*/
public ICMPEcho setIdentifier(final short identifier) {
this.identifier = identifier;
return this;
}
/**
* Gets the identifier.
*
* @return identifier
*/
public short getIdentifier() {
return this.identifier;
}
/**
* Sets the sequencer number.
*
* @param sequenceNum sequence number
* @return this
*/
public ICMPEcho setSequenceNum(final short sequenceNum) {
this.sequenceNum = sequenceNum;
return this;
}
/**
* Gets the sequence number.
*
* @return sequence number
*/
public short getSequenceNum() {
return this.sequenceNum;
}
/**
* Serializes the packet. Will compute and set the following fields if they
* are set to specific values at the time serialize is called: -checksum : 0
* -length : 0
*/
@Override
public byte[] serialize() {
int length = ICMP_ECHO_HEADER_LENGTH;
byte[] payloadData = null;
if (this.payload != null) {
this.payload.setParent(this);
payloadData = this.payload.serialize();
length += payloadData.length;
}
final byte[] data = new byte[length];
final ByteBuffer bb = ByteBuffer.wrap(data);
bb.putShort(this.identifier);
bb.putShort(this.sequenceNum);
if (payloadData != null) {
bb.put(payloadData);
}
return data;
}
/*
* (non-Javadoc)
*
* @see java.lang.Object#hashCode()
*/
@Override
public int hashCode() {
final int prime = 5807;
int result = super.hashCode();
result = prime * result + this.identifier;
result = prime * result + this.sequenceNum;
return result;
}
/*
* (non-Javadoc)
*
* @see java.lang.Object#equals(java.lang.Object)
*/
@Override
public boolean equals(final Object obj) {
if (this == obj) {
return true;
}
if (!super.equals(obj)) {
return false;
}
if (!(obj instanceof ICMPEcho)) {
return false;
}
final ICMPEcho other = (ICMPEcho) obj;
if (this.identifier != other.identifier) {
return false;
}
if (this.sequenceNum != other.sequenceNum) {
return false;
}
return true;
}
/**
* Deserializer function for ICMPEcho packets.
*
* @return deserializer function
*/
public static Deserializer<ICMPEcho> deserializer() {
return (data, offset, length) -> {
checkInput(data, offset, length, ICMP_ECHO_HEADER_LENGTH);
ICMPEcho icmp = new ICMPEcho();
final ByteBuffer bb = ByteBuffer.wrap(data, offset, length);
icmp.identifier = bb.getShort();
icmp.sequenceNum = bb.getShort();
return icmp;
};
}
@Override
public String toString() {
return toStringHelper(getClass())
.add("ICMP echo identifier", Short.toString(identifier))
.add("ICMP echo sequenceNumber", Short.toString(sequenceNum))
.toString();
}
}
|
utils/misc/src/main/java/org/onlab/packet/ICMPEcho.java
|
/*
* Copyright 2018-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onlab.packet;
import java.nio.ByteBuffer;
import static org.onlab.packet.PacketUtils.checkInput;
import static com.google.common.base.MoreObjects.toStringHelper;
import static com.google.common.base.Preconditions.checkNotNull;
/**
* ICMP packet class for echo purpose.
*/
public class ICMPEcho extends BasePacket {
private short identifier;
private short sequenceNum;
public static final short ICMP_ECHO_HEADER_LENGTH = 4;
/**
* Sets the identifier.
*
* @param identifier identifier
* @return this
*/
public ICMPEcho setIdentifier(final short identifier) {
this.identifier = identifier;
return this;
}
/**
* Gets the identifier.
*
* @return identifier
*/
public short getIdentifier() {
return this.identifier;
}
/**
* Sets the sequencer number.
*
* @param sequenceNum sequence number
* @return this
*/
public ICMPEcho setSequenceNum(final short sequenceNum) {
this.sequenceNum = sequenceNum;
return this;
}
/**
* Gets the sequence number.
*
* @return sequence number
*/
public short getSequenceNum() {
return this.sequenceNum;
}
/**
* Serializes the packet. Will compute and set the following fields if they
* are set to specific values at the time serialize is called: -checksum : 0
* -length : 0
*/
@Override
public byte[] serialize() {
checkNotNull(this.identifier);
checkNotNull(this.sequenceNum);
int length = ICMP_ECHO_HEADER_LENGTH;
byte[] payloadData = null;
if (this.payload != null) {
this.payload.setParent(this);
payloadData = this.payload.serialize();
length += payloadData.length;
}
final byte[] data = new byte[length];
final ByteBuffer bb = ByteBuffer.wrap(data);
bb.putShort(this.identifier);
bb.putShort(this.sequenceNum);
if (payloadData != null) {
bb.put(payloadData);
}
return data;
}
/*
* (non-Javadoc)
*
* @see java.lang.Object#hashCode()
*/
@Override
public int hashCode() {
final int prime = 5807;
int result = super.hashCode();
result = prime * result + this.identifier;
result = prime * result + this.sequenceNum;
return result;
}
/*
* (non-Javadoc)
*
* @see java.lang.Object#equals(java.lang.Object)
*/
@Override
public boolean equals(final Object obj) {
if (this == obj) {
return true;
}
if (!super.equals(obj)) {
return false;
}
if (!(obj instanceof ICMPEcho)) {
return false;
}
final ICMPEcho other = (ICMPEcho) obj;
if (this.identifier != other.identifier) {
return false;
}
if (this.sequenceNum != other.sequenceNum) {
return false;
}
return true;
}
/**
* Deserializer function for ICMPEcho packets.
*
* @return deserializer function
*/
public static Deserializer<ICMPEcho> deserializer() {
return (data, offset, length) -> {
checkInput(data, offset, length, ICMP_ECHO_HEADER_LENGTH);
ICMPEcho icmp = new ICMPEcho();
final ByteBuffer bb = ByteBuffer.wrap(data, offset, length);
icmp.identifier = bb.getShort();
icmp.sequenceNum = bb.getShort();
return icmp;
};
}
@Override
public String toString() {
return toStringHelper(getClass())
.add("ICMP echo identifier", Short.toString(identifier))
.add("ICMP echo sequenceNumber", Short.toString(sequenceNum))
.toString();
}
}
|
Fixing errorprone's warning.
Change-Id: I8bb35ca74df50c7c9d689272c1f7b24274303540
|
utils/misc/src/main/java/org/onlab/packet/ICMPEcho.java
|
Fixing errorprone's warning.
|
|
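For the row above, the lines removed in the new contents are the two checkNotNull calls on identifier and sequenceNum. Both fields are primitive shorts, so they are autoboxed at the call site and can never be null, which is presumably the warning error-prone raised. A minimal sketch (hypothetical class and field, Guava assumed on the classpath) of why such a check is vacuous:

```java
import static com.google.common.base.Preconditions.checkNotNull;

// Hypothetical class illustrating the vacuous check: a primitive short is
// autoboxed to a Short that can never be null, so checkNotNull always passes.
public class PrimitiveCheckNotNullExample {
    private short identifier = 42;

    public short getIdentifierChecked() {
        checkNotNull(this.identifier); // boxing: Short.valueOf(identifier) is never null
        return identifier;             // the new ICMPEcho.serialize() simply drops the check
    }

    public static void main(String[] args) {
        System.out.println(new PrimitiveCheckNotNullExample().getIdentifierChecked());
    }
}
```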
Java
|
apache-2.0
|
2a7b236c1d0642b6885e609b5e3267e4ae2cc662
| 0
|
lynring24/ITimeU
|
package com.itti7.itimeu;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.widget.EditText;
import android.widget.SeekBar;
public class SettingActivity extends AppCompatActivity {
private SeekBar worksb, breaksb, longBreaksb, sessionNumsb;
private int workTime = 25;
private int breakTime = 5;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_setting);
worksb = (SeekBar) findViewById(R.id.work_seek);
breaksb = (SeekBar) findViewById(R.id.break_seek);
worksb.setProgress(workTime);
breaksb.setProgress(breakTime);
worksb.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
printSelected(seekBar, progress);
}
public void onStartTrackingTouch(SeekBar seekBar) {}
public void onStopTrackingTouch(SeekBar seekBar) {
doAfterTrack(seekBar);
}
});
breaksb.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
printSelected(seekBar, progress);
}
public void onStartTrackingTouch(SeekBar seekBar) {}
public void onStopTrackingTouch(SeekBar seekBar) {
doAfterTrack(seekBar);
}
});
}
public void printSelected(SeekBar bar, int value) {
EditText et;
switch (bar.getId()) {
case R.id.work_seek:
et = (EditText) findViewById(R.id.work_time);
et.setText(String.valueOf(value));
break;
case R.id.break_seek:
et = (EditText) findViewById(R.id.break_time);
et.setText(String.valueOf(value));
break;
}
}
public void doAfterTrack(SeekBar bar) {
EditText et;
switch (bar.getId()) {
case R.id.work_seek:
et = (EditText) findViewById(R.id.work_time);
et.setText(et.getText());
break;
case R.id.break_seek:
et = (EditText) findViewById(R.id.break_time);
et.setText(et.getText());
break;
}
}
}
|
ITimeU/app/src/main/java/com/itti7/itimeu/SettingActivity.java
|
package com.itti7.itimeu;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.widget.EditText;
import android.widget.SeekBar;
public class SettingActivity extends AppCompatActivity {
private SeekBar worksb, breaksb, longBreaksb, sessionNumsb;
private int workTime = 25;
private int breakTime = 5;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_setting);
worksb = (SeekBar) findViewById(R.id.work_seek);
breaksb = (SeekBar) findViewById(R.id.break_seek);
worksb.setProgress(workTime);
breaksb.setProgress(breakTime);
worksb.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
printSelected(seekBar, progress);
}
public void onStartTrackingTouch(SeekBar seekBar) {}
public void onStopTrackingTouch(SeekBar seekBar) {
doAfterTrack(seekBar);
}
});
}
public void printSelected(SeekBar bar, int value) {
switch (bar.getId()) {
case R.id.work_seek:
EditText et = (EditText) findViewById(R.id.work_time);
et.setText(String.valueOf(value));
}
}
public void doAfterTrack(SeekBar bar) {
switch (bar.getId()) {
case R.id.work_seek:
EditText et = (EditText) findViewById(R.id.work_time);
et.setText(et.getText());
}
}
}
|
Hook up the break SeekBar in Java
|
ITimeU/app/src/main/java/com/itti7/itimeu/SettingActivity.java
|
Hook up the break SeekBar in Java
|
|
Java
|
apache-2.0
|
739e1a8a9b0791810ff0bff5ee6b5f026f11ea6a
| 0
|
TNG/JGiven,TNG/JGiven,TNG/JGiven,TNG/JGiven
|
package com.tngtech.jgiven.testng;
import static com.tngtech.jgiven.testng.ScenarioTestListener.SCENARIO_ATTRIBUTE;
import com.beust.jcommander.internal.Lists;
import com.tngtech.jgiven.impl.Config;
import com.tngtech.jgiven.impl.ScenarioBase;
import com.tngtech.jgiven.report.model.ReportModel;
import com.tngtech.jgiven.testframework.TestExecutionResult;
import com.tngtech.jgiven.testframework.TestExecutor;
import java.util.List;
import org.testng.ITestResult;
import org.testng.TestListenerAdapter;
import org.testng.TestNG;
public class TestNgExecutor extends TestExecutor {
public static String methodName;
@Override
public TestExecutionResult execute( Class<?> testClass, String testMethod ) {
TestNgExecutionResult result = new TestNgExecutionResult();
ScenarioTestListenerAdapter testListenerAdapter = new ScenarioTestListenerAdapter();
TestNG testng = new TestNG();
testng.setTestClasses( new Class<?>[] { testClass } );
if( testMethod != null ) {
methodName = testMethod;
testng.addMethodSelector( MethodSelector.class.getName(), 175 );
}
testng.addListener( testListenerAdapter );
Config.config().setReportEnabled( false );
testng.run();
Config.config().setReportEnabled( true );
result.reportModel = testListenerAdapter.reportModel;
result.testResults = testListenerAdapter.testResults;
return result;
}
@Override
public TestExecutionResult execute( Class<?> testClass ) {
return execute( testClass, null );
}
static class ScenarioTestListenerAdapter extends TestListenerAdapter {
ReportModel reportModel;
List<ITestResult> testResults = Lists.newArrayList();
@Override
public void onTestSuccess( ITestResult tr ) {
setTestResult( tr );
}
@Override
public void onTestFailure( ITestResult tr ) {
setTestResult( tr );
}
@Override
public void onTestSkipped( ITestResult tr ) {
setTestResult( tr );
}
private void setTestResult( ITestResult tr ) {
testResults.add( tr );
reportModel = ((ScenarioBase)tr.getAttribute (SCENARIO_ATTRIBUTE)).getModel();
}
}
}
|
jgiven-tests/src/test/java/com/tngtech/jgiven/testng/TestNgExecutor.java
|
package com.tngtech.jgiven.testng;
import com.beust.jcommander.internal.Lists;
import com.tngtech.jgiven.impl.Config;
import com.tngtech.jgiven.impl.ScenarioBase;
import com.tngtech.jgiven.report.model.ReportModel;
import com.tngtech.jgiven.testframework.TestExecutionResult;
import com.tngtech.jgiven.testframework.TestExecutor;
import org.testng.ITestResult;
import org.testng.TestListenerAdapter;
import org.testng.TestNG;
import java.util.List;
import static com.tngtech.jgiven.testng.ScenarioTestListener.SCENARIO_ATTRIBUTE;
public class TestNgExecutor extends TestExecutor {
public static String methodName;
@Override
public TestExecutionResult execute( Class<?> testClass, String testMethod ) {
TestNgExecutionResult result = new TestNgExecutionResult();
ScenarioTestListenerAdapter testListenerAdapter = new ScenarioTestListenerAdapter();
TestNG testng = new TestNG();
testng.setTestClasses( new Class<?>[] { testClass } );
if( testMethod != null ) {
methodName = testMethod;
testng.addMethodSelector( MethodSelector.class.getName(), 10 );
}
testng.addListener( testListenerAdapter );
Config.config().setReportEnabled( false );
testng.run();
Config.config().setReportEnabled( true );
result.reportModel = testListenerAdapter.reportModel;
result.testResults = testListenerAdapter.testResults;
return result;
}
@Override
public TestExecutionResult execute( Class<?> testClass ) {
return execute( testClass, null );
}
static class ScenarioTestListenerAdapter extends TestListenerAdapter {
ReportModel reportModel;
List<ITestResult> testResults = Lists.newArrayList();
@Override
public void onTestSuccess( ITestResult tr ) {
setTestResult( tr );
}
@Override
public void onTestFailure( ITestResult tr ) {
setTestResult( tr );
}
@Override
public void onTestSkipped( ITestResult tr ) {
setTestResult( tr );
}
private void setTestResult( ITestResult tr ) {
testResults.add( tr );
reportModel = ((ScenarioBase)tr.getAttribute (SCENARIO_ATTRIBUTE)).getModel();
}
}
}
|
increase priority of method selector in testng framework test
TestNG replaced the list of method selectors in its method-inclusion logic with a set.
TestNG also adds a default XMLMethodSelector with priority 10 to the method selector collection; this default XMLMethodSelector is always evaluated first.
Unlike list addition, set addition uses a comparator attached to the selection method, and that comparator compares only the priorities of the given keys to decide equality. The employed TreeSet, or rather its backing TreeMap, replaces the value when two keys compare as equal; in the case of the TreeSet that value is a dummy, so adding a new method selector to a set that already contains one with the same priority is silently ignored (see the sketch after this row).
The issue has been reported under https://github.com/cbeust/testng/issues/2595
Signed-off-by: l-1sqared <927badf1468b689b17391b1b1cb5895e9b797b96@users.noreply.github.com>
|
jgiven-tests/src/test/java/com/tngtech/jgiven/testng/TestNgExecutor.java
|
increase priority of method selector in testng framework test
|
|
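The sketch referenced in the commit message above: a standalone illustration (class and names are hypothetical, not TestNG internals) of how a TreeSet whose comparator looks only at a priority field silently ignores a second element with the same priority, which is why the test now registers its MethodSelector with priority 175 instead of 10, the priority of the default XMLMethodSelector.

```java
import java.util.Comparator;
import java.util.TreeSet;

// A TreeSet whose comparator compares only priorities treats two selectors
// with the same priority as equal, so the second add() is silently ignored.
public class PrioritySetExample {

    static final class Selector {
        final String name;
        final int priority;

        Selector(String name, int priority) {
            this.name = name;
            this.priority = priority;
        }
    }

    public static void main(String[] args) {
        TreeSet<Selector> selectors =
                new TreeSet<>(Comparator.comparingInt((Selector s) -> s.priority));

        // The "default" selector is registered first with priority 10.
        selectors.add(new Selector("XMLMethodSelector", 10));

        // Adding another selector with the same priority returns false: the set
        // already contains an "equal" key, so this one is dropped.
        boolean added = selectors.add(new Selector("MyMethodSelector", 10));
        System.out.println("added with priority 10: " + added);        // false

        // A distinct priority (e.g. 175, as in the new test code) is kept.
        boolean addedHigher = selectors.add(new Selector("MyMethodSelector", 175));
        System.out.println("added with priority 175: " + addedHigher); // true
        System.out.println("size: " + selectors.size());               // 2
    }
}
```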
Java
|
bsd-3-clause
|
285a8b8bee7d61631895e84678bb013b734818f7
| 0
|
bwaldvogel/mongo-java-server,bwaldvogel/mongo-java-server,bwaldvogel/mongo-java-server
|
package de.bwaldvogel.mongo.backend;
import static com.mongodb.client.model.Filters.and;
import static com.mongodb.client.model.Filters.eq;
import static com.mongodb.client.model.Filters.exists;
import static com.mongodb.client.model.Filters.gt;
import static com.mongodb.client.model.Filters.in;
import static com.mongodb.client.model.Filters.lt;
import static com.mongodb.client.model.Filters.ne;
import static com.mongodb.client.model.Filters.nor;
import static com.mongodb.client.model.Filters.or;
import static com.mongodb.client.model.Updates.addEachToSet;
import static com.mongodb.client.model.Updates.pull;
import static com.mongodb.client.model.Updates.pullByFilter;
import static com.mongodb.client.model.Updates.set;
import static de.bwaldvogel.mongo.backend.TestUtils.getCollectionStatistics;
import static de.bwaldvogel.mongo.backend.TestUtils.json;
import static de.bwaldvogel.mongo.backend.TestUtils.toArray;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;
import java.net.InetSocketAddress;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Queue;
import java.util.Random;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern;
import org.bson.BsonObjectId;
import org.bson.BsonTimestamp;
import org.bson.Document;
import org.bson.conversions.Bson;
import org.bson.types.ObjectId;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.mongodb.DBRef;
import com.mongodb.MongoCommandException;
import com.mongodb.MongoException;
import com.mongodb.MongoNamespace;
import com.mongodb.MongoQueryException;
import com.mongodb.MongoWriteException;
import com.mongodb.ServerAddress;
import com.mongodb.WriteConcern;
import com.mongodb.async.SingleResultCallback;
import com.mongodb.async.client.MongoClients;
import com.mongodb.bulk.BulkWriteResult;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoCursor;
import com.mongodb.client.MongoDatabase;
import com.mongodb.client.model.BulkWriteOptions;
import com.mongodb.client.model.CountOptions;
import com.mongodb.client.model.CreateCollectionOptions;
import com.mongodb.client.model.DeleteManyModel;
import com.mongodb.client.model.EstimatedDocumentCountOptions;
import com.mongodb.client.model.FindOneAndUpdateOptions;
import com.mongodb.client.model.IndexOptions;
import com.mongodb.client.model.InsertOneModel;
import com.mongodb.client.model.RenameCollectionOptions;
import com.mongodb.client.model.ReplaceOptions;
import com.mongodb.client.model.ReturnDocument;
import com.mongodb.client.model.UpdateManyModel;
import com.mongodb.client.model.UpdateOneModel;
import com.mongodb.client.model.UpdateOptions;
import com.mongodb.client.model.Updates;
import com.mongodb.client.model.WriteModel;
import com.mongodb.client.result.DeleteResult;
import com.mongodb.client.result.UpdateResult;
import de.bwaldvogel.mongo.MongoBackend;
import de.bwaldvogel.mongo.MongoServer;
public abstract class AbstractBackendTest {
private static final Logger log = LoggerFactory.getLogger(AbstractBackendTest.class);
protected static final String TEST_DATABASE_NAME = "testdb";
protected static final String OTHER_TEST_DATABASE_NAME = "bar";
private MongoServer mongoServer;
protected com.mongodb.MongoClient syncClient;
private com.mongodb.async.client.MongoClient asyncClient;
protected MongoDatabase db;
protected MongoCollection<Document> collection;
private com.mongodb.async.client.MongoCollection<Document> asyncCollection;
private Document runCommand(String commandName) {
return runCommand(new Document(commandName, Integer.valueOf(1)));
}
private Document runCommand(Document command) {
return getAdminDb().runCommand(command);
}
protected MongoCollection<Document> getCollection(String collectionName) {
return db.getCollection(collectionName);
}
protected MongoDatabase getAdminDb() {
return syncClient.getDatabase("admin");
}
protected abstract MongoBackend createBackend() throws Exception;
@Before
public void setUp() throws Exception {
spinUpServer();
}
@After
public void tearDown() {
shutdownServer();
}
protected void spinUpServer() throws Exception {
MongoBackend backend = createBackend();
mongoServer = new MongoServer(backend);
InetSocketAddress serverAddress = mongoServer.bind();
syncClient = new com.mongodb.MongoClient(new ServerAddress(serverAddress));
asyncClient = MongoClients.create("mongodb://" + serverAddress.getHostName() + ":" + serverAddress.getPort());
db = syncClient.getDatabase(TEST_DATABASE_NAME);
collection = db.getCollection("testcoll");
MongoNamespace namespace = collection.getNamespace();
com.mongodb.async.client.MongoDatabase asyncDb = asyncClient.getDatabase(namespace.getDatabaseName());
asyncCollection = asyncDb.getCollection(namespace.getCollectionName());
}
protected void shutdownServer() {
syncClient.close();
asyncClient.close();
mongoServer.shutdownNow();
}
@Test
public void testSimpleInsert() throws Exception {
collection.insertOne(json("_id: 1"));
}
@Test
public void testSimpleInsertDelete() throws Exception {
collection.insertOne(json("_id: 1"));
collection.deleteOne(json("_id: 1"));
}
@Test
public void testCreateCollection() throws Exception {
String newCollectionName = "some-collection";
assertThat(toArray(db.listCollectionNames())).doesNotContain(newCollectionName);
db.createCollection(newCollectionName, new CreateCollectionOptions());
assertThat(toArray(db.listCollectionNames())).contains(newCollectionName);
}
@Test
public void testCreateCollectionAlreadyExists() throws Exception {
db.createCollection("some-collection", new CreateCollectionOptions());
assertThatExceptionOfType(MongoCommandException.class)
.isThrownBy(() -> db.createCollection("some-collection", new CreateCollectionOptions()))
.withMessageContaining("Command failed with error 48: 'collection already exists'");
}
@Test
public void testUnsupportedModifier() throws Exception {
collection.insertOne(json("{}"));
assertThatExceptionOfType(MongoCommandException.class)
.isThrownBy(() -> collection.updateOne(json("{}"), json("$foo: {}")))
.withMessageContaining("Command failed with error 10147: 'Invalid modifier specified: $foo'");
}
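    // on an upsert, the (compound) query document becomes the base of the newly inserted
    // document and the dotted $inc paths are created on top of it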
@Test
public void testUpsertWithInc() {
Document query = json("_id:{ f: 'ca', '1': { l: 2 }, t: { t: 11 } }");
Document update = json("'$inc': { 'n.!' : 1 , 'n.a.b:false' : 1}");
collection.updateOne(query, update, new UpdateOptions().upsert(true));
query.putAll(json("n: {'!': 1, a: {'b:false': 1}}"));
assertThat(collection.find().first()).isEqualTo(query);
}
@Test
public void testBasicUpdate() {
collection.insertOne(json("_id:1"));
collection.insertOne(json("_id:2, b:5"));
collection.insertOne(json("_id:3"));
collection.insertOne(json("_id:4"));
collection.replaceOne(json("_id:2"), json("_id:2, a:5"));
assertThat(collection.find(json("_id:2")).first()).isEqualTo(json("_id:2, a:5"));
}
@Test
public void testCollectionStats() throws Exception {
assertThatExceptionOfType(MongoCommandException.class)
.isThrownBy(this::getCollStats)
.withMessageContaining("Command failed with error 26: 'No such collection");
collection.insertOne(json("{}"));
collection.insertOne(json("abc: 'foo'"));
Document stats = getCollStats();
assertThat(stats.getInteger("ok")).isEqualTo(1);
assertThat(stats.getInteger("count").longValue()).isEqualTo(2);
assertThat(stats.getLong("size").longValue()).isEqualTo(57);
assertThat(stats.getDouble("avgObjSize").doubleValue()).isEqualTo(28.5);
}
private Document getCollStats() {
String collectionName = collection.getNamespace().getCollectionName();
return getCollectionStatistics(db, collectionName);
}
@Test
public void testGetLogStartupWarnings() throws Exception {
Document startupWarnings = getAdminDb().runCommand(json("getLog: 'startupWarnings'"));
assertThat(startupWarnings.getInteger("ok")).isEqualTo(1);
assertThat(startupWarnings.get("totalLinesWritten")).isEqualTo(0);
assertThat(startupWarnings.get("log")).isEqualTo(Collections.emptyList());
}
@Test
public void testGetLogWhichDoesNotExist() throws Exception {
assertThatExceptionOfType(MongoCommandException.class)
.isThrownBy(() -> getAdminDb().runCommand(json("getLog: 'illegal'")))
.withMessageContaining("Command failed with error -1: 'no RamLog named: illegal'");
}
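    // upserts keyed by compound _id documents; the range query below only matches three of the
    // four upserted _ids because {n:'a', t:11} lies outside $lt {n:'a', t:10}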
@Test
public void testCompoundDateIdUpserts() {
Document query = json("{ _id : { $lt : { n: 'a' , t: 10} , $gte: { n: 'a', t: 1}}}");
List<Document> toUpsert = Arrays.asList(
json("_id: {n:'a', t: 1}"),
json("_id: {n:'a', t: 2}"),
json("_id: {n:'a', t: 3}"),
json("_id: {n:'a', t: 11}"));
for (Document dbo : toUpsert) {
collection.replaceOne(dbo, new Document(dbo).append("foo", "bar"), new ReplaceOptions().upsert(true));
}
List<Document> results = toArray(collection.find(query));
assertThat(results).containsOnly(
json("_id: {n:'a', t:1}, foo:'bar'"), //
json("_id: {n:'a', t:2}, foo:'bar'"), //
json("_id: {n:'a', t:3}, foo:'bar'"));
}
@Test
public void testCompoundSort() {
collection.insertOne(json("a:1, _id:1"));
collection.insertOne(json("a:2, _id:5"));
collection.insertOne(json("a:1, _id:2"));
collection.insertOne(json("a:2, _id:4"));
collection.insertOne(json("a:1, _id:3"));
List<Document> documents = toArray(collection.find().sort(json("a:1, _id:-1")));
assertThat(documents).containsExactly(json("a:1, _id:3"), json("a:1, _id:2"), json("a:1, _id:1"),
json("a:2, _id:5"), json("a:2, _id:4"));
}
@Test
@SuppressWarnings("deprecation")
public void testCountCommand() {
assertThat(collection.count()).isZero();
}
@Test
@SuppressWarnings("deprecation")
public void testCountCommandWithQuery() {
collection.insertOne(json("n:1"));
collection.insertOne(json("n:2"));
collection.insertOne(json("n:2"));
assertThat(collection.count(json("n:2"))).isEqualTo(2);
}
@Test
@SuppressWarnings("deprecation")
public void testCountCommandWithSkipAndLimit() {
collection.insertOne(json("x: 1"));
collection.insertOne(json("x: 1"));
collection.insertOne(json("x: 2"));
collection.insertOne(json("x: 1"));
collection.insertOne(json("x: 2"));
collection.insertOne(json("x: 1"));
assertThat(collection.count(json("x: 1"), new CountOptions().skip(4).limit(2))).isEqualTo(0);
assertThat(collection.count(json("x: 1"), new CountOptions().limit(3))).isEqualTo(3);
assertThat(collection.count(json("x: 1"), new CountOptions().limit(10))).isEqualTo(4);
assertThat(collection.count(json("x: 1"), new CountOptions().skip(1))).isEqualTo(3);
}
@Test
public void testCountDocuments() throws Exception {
assertThat(collection.countDocuments()).isZero();
}
@Test
public void testCountDocumentsWithQuery() {
collection.insertOne(json("n:1"));
collection.insertOne(json("n:2"));
collection.insertOne(json("n:2"));
assertThat(collection.countDocuments(json("n:2"))).isEqualTo(2);
}
@Test
public void testEstimatedDocumentCount() throws Exception {
assertThat(collection.estimatedDocumentCount()).isEqualTo(0);
collection.insertOne(json("n:1"));
collection.insertOne(json("n:2"));
collection.insertOne(json("n:2"));
assertThat(collection.estimatedDocumentCount()).isEqualTo(3);
assertThat(collection.estimatedDocumentCount(new EstimatedDocumentCountOptions().maxTime(1, TimeUnit.SECONDS))).isEqualTo(3);
}
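    // an empty aggregation pipeline is a pass-through: it returns all documents unchanged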
@Test
public void testAggregateWithEmptyPipeline() throws Exception {
assertThat(toArray(collection.aggregate(Collections.emptyList()))).isEmpty();
collection.insertOne(json("_id:1"));
collection.insertOne(json("_id:2"));
assertThat(toArray(collection.aggregate(Collections.emptyList())))
.containsExactly(json("_id:1"), json("_id:2"));
}
@Test
public void testCreateIndexes() {
collection.createIndex(new Document("n", 1));
collection.createIndex(new Document("b", 1));
List<Document> indexes = toArray(getCollection("system.indexes").find());
assertThat(indexes).containsOnly(
json("key:{_id:1}").append("ns", collection.getNamespace().getFullName()).append("name", "_id_"),
json("key:{n:1}").append("ns", collection.getNamespace().getFullName()).append("name", "n_1"),
json("key:{b:1}").append("ns", collection.getNamespace().getFullName()).append("name", "b_1"));
}
@Test
public void testCurrentOperations() throws Exception {
Document currentOperations = getAdminDb().getCollection("$cmd.sys.inprog").find().first();
assertThat(currentOperations).isNotNull();
assertThat(currentOperations.get("inprog")).isInstanceOf(List.class);
}
@Test
public void testListCollectionsEmpty() throws Exception {
Document result = db.runCommand(json("listCollections: 1"));
assertThat(result.getInteger("ok")).isEqualTo(1);
Document cursor = (Document) result.get("cursor");
assertThat(cursor.keySet()).containsOnly("id", "ns", "firstBatch");
assertThat(cursor.get("id")).isEqualTo(Long.valueOf(0));
assertThat(cursor.get("ns")).isEqualTo(db.getName() + ".$cmd.listCollections");
List<?> firstBatch = (List<?>) cursor.get("firstBatch");
assertThat(firstBatch).isEmpty();
}
@Test
public void testListCollections() throws Exception {
List<String> collections = Arrays.asList("coll1", "coll2", "coll3");
for (String collection : collections) {
getCollection(collection).insertOne(json("_id: 1"));
}
Document result = db.runCommand(json("listCollections: 1"));
assertThat(result.getInteger("ok")).isEqualTo(1);
Document cursor = (Document) result.get("cursor");
assertThat(cursor.keySet()).containsOnly("id", "ns", "firstBatch");
assertThat(cursor.get("id")).isEqualTo(Long.valueOf(0));
assertThat(cursor.get("ns")).isEqualTo(db.getName() + ".$cmd.listCollections");
assertThat(cursor.get("firstBatch")).isInstanceOf(List.class);
@SuppressWarnings("unchecked")
List<Document> firstBatch = (List<Document>) cursor.get("firstBatch");
Set<String> expectedCollections = new HashSet<>(collections);
expectedCollections.add("system.indexes");
assertThat(firstBatch).hasSize(expectedCollections.size());
Set<String> collectionNames = new HashSet<>();
for (Document collection : firstBatch) {
assertThat(collection.keySet()).containsOnly("name", "options");
assertThat(collection.get("options")).isEqualTo(json("{}"));
assertThat(collection.get("name")).isInstanceOf(String.class);
collectionNames.add((String) collection.get("name"));
}
assertThat(collectionNames).isEqualTo(expectedCollections);
}
@Test
public void testGetCollectionNames() throws Exception {
getCollection("foo").insertOne(json("{}"));
getCollection("bar").insertOne(json("{}"));
List<String> collectionNames = toArray(db.listCollectionNames());
assertThat(collectionNames).containsOnly("system.indexes", "foo", "bar");
}
@Test
public void testSystemIndexes() throws Exception {
getCollection("foo").insertOne(json("{}"));
getCollection("bar").insertOne(json("{}"));
MongoCollection<Document> systemIndexes = db.getCollection("system.indexes");
assertThat(toArray(systemIndexes.find())).containsOnly(json("name: '_id_', ns: 'testdb.foo', key: {_id: 1}"),
json("name: '_id_', ns: 'testdb.bar', key: {_id: 1}"));
}
@Test
public void testSystemNamespaces() throws Exception {
getCollection("foo").insertOne(json("{}"));
getCollection("bar").insertOne(json("{}"));
MongoCollection<Document> namespaces = db.getCollection("system.namespaces");
assertThat(toArray(namespaces.find())).containsOnly(json("name: 'testdb.system.indexes'"),
json("name: 'testdb.foo'"), json("name: 'testdb.bar'"));
}
@Test
public void testDatabaseStats() throws Exception {
Document stats = db.runCommand(new Document("dbStats", 1).append("scale", 1));
assertThat(stats.getInteger("ok")).isEqualTo(1);
assertThat(stats.getLong("objects")).isZero();
assertThat(stats.getInteger("collections")).isZero();
assertThat(stats.getInteger("indexes")).isZero();
assertThat(stats.getLong("dataSize")).isZero();
getCollection("foo").insertOne(json("{}"));
getCollection("foo").insertOne(json("{}"));
getCollection("bar").insertOne(json("{}"));
stats = db.runCommand(new Document("dbStats", 1).append("scale", 1));
assertThat(stats.getInteger("ok")).isEqualTo(1);
assertThat(stats.getLong("objects")).isEqualTo(8);
assertThat(stats.getInteger("collections")).isEqualTo(3);
assertThat(stats.getInteger("indexes")).isEqualTo(2);
assertThat(stats.getLong("dataSize")).isEqualTo(271);
}
@Test
public void testDeleteDecrementsCount() {
collection.insertOne(json("key: 'value'"));
assertThat(collection.countDocuments()).isEqualTo(1);
collection.deleteOne(json("{}"));
assertThat(collection.countDocuments()).isZero();
}
@Test
public void testDeleteInSystemNamespace() throws Exception {
assertThatExceptionOfType(MongoCommandException.class)
.isThrownBy(() -> getCollection("system.foobar").deleteOne(json("{}")))
.withMessageContaining("Command failed with error 12050: 'cannot delete from system namespace'");
assertThatExceptionOfType(MongoCommandException.class)
.isThrownBy(() -> getCollection("system.namespaces").deleteOne(json("{}")))
.withMessageContaining("Command failed with error 12050: 'cannot delete from system namespace'");
}
@Test
public void testUpdateInSystemNamespace() throws Exception {
for (String collectionName : Arrays.asList("system.foobar", "system.namespaces")) {
MongoCollection<Document> collection = getCollection(collectionName);
assertThatExceptionOfType(MongoCommandException.class)
.isThrownBy(() -> collection.updateMany(eq("some", "value"), set("field", "value")))
.withMessageContaining("Command failed with error 10156: 'cannot update system collection'");
}
}
@Test
public void testDistinctQuery() {
collection.insertOne(new Document("n", 3));
collection.insertOne(new Document("n", 1));
collection.insertOne(new Document("n", 2));
collection.insertOne(new Document("n", 1));
collection.insertOne(new Document("n", 1));
assertThat(toArray(collection.distinct("n", Integer.class))).containsExactly(1, 2, 3);
assertThat(toArray(collection.distinct("n", json("n: {$gt: 1}"), Integer.class))).containsExactly(2, 3);
assertThat(collection.distinct("foobar", String.class)).isEmpty();
assertThat(collection.distinct("_id", ObjectId.class)).hasSize((int) collection.countDocuments());
}
@Test
public void testDropCollection() throws Exception {
collection.insertOne(json("{}"));
assertThat(toArray(db.listCollectionNames())).contains(collection.getNamespace().getCollectionName());
collection.drop();
assertThat(toArray(db.listCollectionNames())).doesNotContain(collection.getNamespace().getCollectionName());
}
@Test
public void testDropCollectionAlsoDropsFromDB() throws Exception {
collection.insertOne(json("{}"));
collection.drop();
assertThat(collection.countDocuments()).isZero();
assertThat(toArray(db.listCollectionNames())).doesNotContain(collection.getNamespace().getCollectionName());
}
@Test
public void testDropDatabaseAlsoDropsCollectionData() throws Exception {
collection.insertOne(json("{}"));
db.drop();
assertThat(collection.countDocuments()).isZero();
}
@Test
public void testDropDatabaseDropsAllData() throws Exception {
collection.insertOne(json("{}"));
MongoCollection<Document> collection2 = getCollection("testcoll2");
collection2.insertOne(json("{}"));
syncClient.dropDatabase(db.getName());
assertThat(listDatabaseNames()).doesNotContain(db.getName());
assertThat(collection.countDocuments()).isZero();
assertThat(toArray(db.listCollectionNames())).doesNotContain(collection.getNamespace().getCollectionName(),
collection2.getNamespace().getCollectionName());
}
@Test
public void testEmbeddedSort() {
collection.insertOne(json("_id: 1"));
collection.insertOne(json("_id: 2"));
collection.insertOne(json("_id: 3"));
collection.insertOne(json("_id: 4, counts:{done:1}"));
collection.insertOne(json("_id: 5, counts:{done:2}"));
List<Document> objs = toArray(collection.find(ne("c", true)).sort(json("\"counts.done\": -1, _id: 1")));
assertThat(objs).containsExactly(
json("_id: 5, counts:{done:2}"),
json("_id: 4, counts:{done:1}"),
json("_id: 1"),
json("_id: 2"),
json("_id: 3"));
}
@Test
public void testFindAndModifyCommandEmpty() throws Exception {
Document cmd = new Document("findandmodify", collection.getNamespace().getCollectionName());
assertThatExceptionOfType(MongoCommandException.class)
.isThrownBy(() -> db.runCommand(cmd))
.withMessageContaining("Command failed with error -1: 'need remove or update'");
}
@Test
public void testFindAndModifyCommandIllegalOp() throws Exception {
collection.insertOne(json("_id: 1"));
Document cmd = new Document("findAndModify", collection.getNamespace().getCollectionName());
cmd.put("query", json("_id: 1"));
cmd.put("update", new Document("$inc", json("_id: 1")));
assertThat(collection.find().first()).isEqualTo(json("_id: 1"));
assertThatExceptionOfType(MongoCommandException.class)
.isThrownBy(() -> db.runCommand(cmd))
.withMessageContaining("Command failed with error 10148: 'Mod on _id not allowed'");
}
@Test
public void testFindAndModifyCommandUpdate() throws Exception {
collection.insertOne(json("_id: 1"));
Document cmd = new Document("findAndModify", collection.getNamespace().getCollectionName());
cmd.put("query", json("_id: 1"));
cmd.put("update", json("$inc: {a: 1}"));
Document result = db.runCommand(cmd);
assertThat(result.get("lastErrorObject")).isEqualTo(json("updatedExisting: true, n: 1"));
assertThat(result.getInteger("ok")).isEqualTo(1);
assertThat(collection.find().first()).isEqualTo(json("_id: 1, a: 1"));
}
@Test
public void testFindOneAndUpdateError() throws Exception {
collection.insertOne(json("_id: 1, a: 1"));
assertThatExceptionOfType(MongoCommandException.class)
.isThrownBy(() -> collection.findOneAndUpdate(json("_id: 1"), json("$inc: {_id: 1}")))
.withMessageContaining("Command failed with error 10148: 'Mod on _id not allowed'");
}
@Test
public void testFindOneAndUpdateFields() throws Exception {
collection.insertOne(json("_id: 1, a: 1"));
Document result = collection.findOneAndUpdate(json("_id: 1"), json("$inc: {a:1}"),
new FindOneAndUpdateOptions().returnDocument(ReturnDocument.AFTER));
assertThat(result).isEqualTo(json("_id: 1, a: 2"));
}
@Test
public void testFindOneAndUpdateNotFound() throws Exception {
collection.insertOne(json("_id: 1, a: 1"));
Document result = collection.findOneAndUpdate(json("_id: 2"), new Document("$inc", json("a: 1")));
assertThat(result).isNull();
assertThat(collection.countDocuments()).isEqualTo(1);
}
@Test
public void testFindOneAndUpdateRemove() {
collection.insertOne(json("_id: 1, a: 1"));
Document result = collection.findOneAndDelete(json("_id: 1"));
assertThat(result).isEqualTo(json("_id: 1, a: 1"));
assertThat(collection.countDocuments()).isZero();
}
// https://github.com/foursquare/fongo/issues/32
@Test
public void testFindOneAndUpdateReturnNew() {
collection.insertOne(json("_id: 1, a: 1, b: {c: 1}"));
Document query = json("_id: 1");
Document update = json("$inc: {a: 1, 'b.c': 1}");
Document result = collection.findOneAndUpdate(query, update,
new FindOneAndUpdateOptions().returnDocument(ReturnDocument.AFTER));
assertThat(result).isEqualTo(json("_id: 1, a: 2, b: {c: 2}"));
}
@Test
public void testFindOneAndUpdateMax() {
collection.insertOne(json("_id: 1, a: 1, b: {c: 1}"));
Document query = json("_id: 1");
Document update = json("$max: {a: 2, 'b.c': 2, d : 'd'}");
Document result = collection.findOneAndUpdate(query, update,
new FindOneAndUpdateOptions().returnDocument(ReturnDocument.AFTER));
assertThat(result).isEqualTo(json("_id: 1, a: 2, b: {c: 2}, d : 'd'"));
}
@Test
public void testFindOneAndUpdateMin() {
collection.insertOne(json("_id: 1, a: 2, b: {c: 1}"));
Document query = json("_id: 1");
Document update = json("$min: {a: 1, 'b.c': 2, d : 'd'}");
Document result = collection.findOneAndUpdate(query, update,
new FindOneAndUpdateOptions().returnDocument(ReturnDocument.AFTER));
assertThat(result).isEqualTo(json("_id: 1, a: 1, b: {c: 1}, d : 'd'"));
}
// https://github.com/foursquare/fongo/issues/32
@Test
public void testFindOneAndUpdateReturnOld() {
collection.insertOne(json("_id: 1, a: 1, b: {c: 1}"));
Document query = json("_id: 1");
Document update = json("$inc: {a: 1, 'b.c': 1}");
Document result = collection.findOneAndUpdate(query, update,
new FindOneAndUpdateOptions().returnDocument(ReturnDocument.BEFORE));
assertThat(result).isEqualTo(json("_id: 1, a: 1, b: {c: 1}"));
assertThat(collection.find(query).first()).isEqualTo(json("_id: 1, a: 2, b: {c: 2}"));
}
@Test
public void testFindOneAndUpdateSorted() throws Exception {
collection.insertOne(json("_id: 1, a:15"));
collection.insertOne(json("_id: 2, a:10"));
collection.insertOne(json("_id: 3, a:20"));
Document order = json("a:1");
Document result = collection.findOneAndUpdate(json("{}"), json("$inc: {a: 1}"),
new FindOneAndUpdateOptions().sort(order).returnDocument(ReturnDocument.AFTER));
assertThat(result).isEqualTo(json("_id: 2, a: 11"));
order = json("a: -1");
result = collection.findOneAndUpdate(json("{}"), json("$inc: {a: 1}"),
new FindOneAndUpdateOptions().sort(order).returnDocument(ReturnDocument.AFTER));
assertThat(result).isEqualTo(json("_id: 3, a: 21"));
}
@Test
public void testFindOneAndUpdateUpsert() {
Document result = collection.findOneAndUpdate(json("_id: 1"), json("$inc: {a:1}"),
new FindOneAndUpdateOptions().upsert(true).returnDocument(ReturnDocument.AFTER));
assertThat(result).isEqualTo(json("_id: 1, a: 1"));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, a: 1"));
}
@Test
public void testFindOneAndUpdateUpsertReturnBefore() {
Document result = collection.findOneAndUpdate(json("_id: 1"), json("$inc: {a:1}"),
new FindOneAndUpdateOptions().upsert(true).returnDocument(ReturnDocument.BEFORE));
assertThat(result).isEqualTo(json("{}"));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, a: 1"));
}
@Test
public void testFindAndRemoveFromEmbeddedList() {
collection.insertOne(json("_id: 1, a: [1]"));
Document result = collection.findOneAndDelete(json("_id: 1"));
assertThat(result).isEqualTo(json("_id: 1, a: [1]"));
assertThat(collection.countDocuments()).isZero();
}
@Test
public void testFindOne() {
collection.insertOne(json("key: 'value'"));
collection.insertOne(json("key: 'value'"));
Document result = collection.find().first();
assertThat(result).isNotNull();
assertThat(result.get("_id")).isNotNull();
}
@Test
public void testFindOneById() {
collection.insertOne(json("_id: 1"));
Document result = collection.find(json("_id: 1")).first();
assertThat(result).isEqualTo(json("_id: 1"));
assertThat(collection.find(json("_id: 2")).first()).isNull();
}
@Test
public void testFindOneIn() {
collection.insertOne(json("_id: 1"));
Document result = collection.find(json("_id: {$in: [1,2]}")).first();
assertThat(result).isEqualTo(json("_id: 1"));
}
@Test
public void testFindWithLimit() {
collection.insertOne(json("_id: 1"));
collection.insertOne(json("_id: 2"));
collection.insertOne(json("_id: 3"));
collection.insertOne(json("_id: 4"));
List<Document> actual = toArray(collection.find().sort(json("_id: 1")).limit(2));
assertThat(actual).containsExactly(json("_id: 1"), json("_id: 2"));
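        // MongoDB treats a negative limit as a single-batch request of that size,
        // so the results should match the positive limit above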
List<Document> actualNegativeLimit = toArray(collection.find().sort(json("_id: 1")).limit(-2));
assertThat(actualNegativeLimit).isEqualTo(actual);
}
@Test
public void testFindInReverseNaturalOrder() {
collection.insertOne(json("_id: 1"));
collection.insertOne(json("_id: 2"));
List<Document> actual = toArray(collection.find().sort(json("$natural: -1")));
assertThat(actual).containsOnly(json("_id: 1"), json("_id: 2"));
}
@Test
public void testFindWithPattern() {
collection.insertOne(json("_id: 'marta'"));
collection.insertOne(json("_id: 'john', foo: 'bar'"));
collection.insertOne(json("_id: 'jon', foo: 'ba'"));
collection.insertOne(json("_id: 'jo'"));
assertThat(toArray(collection.find(new Document("_id", Pattern.compile("mart")))))
.containsOnly(json("_id: 'marta'"));
assertThat(toArray(collection.find(new Document("foo", Pattern.compile("ba")))))
.containsOnly(json("_id: 'john', foo: 'bar'"), json("_id: 'jon', foo: 'ba'"));
assertThat(toArray(collection.find(new Document("foo", Pattern.compile("ba$")))))
.containsOnly(json("_id: 'jon', foo: 'ba'"));
}
@Test
public void testFindWithQuery() {
collection.insertOne(json("name: 'jon'"));
collection.insertOne(json("name: 'leo'"));
collection.insertOne(json("name: 'neil'"));
collection.insertOne(json("name: 'neil'"));
assertThat(toArray(collection.find(json("name: 'neil'")))).hasSize(2);
}
@Test
public void testFindWithSkipLimit() {
collection.insertOne(json("_id: 1"));
collection.insertOne(json("_id: 2"));
collection.insertOne(json("_id: 3"));
collection.insertOne(json("_id: 4"));
assertThat(toArray(collection.find().sort(json("_id: 1")).limit(2).skip(2)))
.containsExactly(json("_id: 3"), json("_id: 4"));
}
@Test
public void testFindWithSkipLimitInReverseOrder() {
collection.insertOne(json("_id: 1"));
collection.insertOne(json("_id: 2"));
collection.insertOne(json("_id: 3"));
collection.insertOne(json("_id: 4"));
assertThat(toArray(collection.find().sort(json("_id: -1")).limit(2).skip(2)))
.containsExactly(json("_id: 2"), json("_id: 1"));
}
@Test
public void testFindWithSkipLimitAfterDelete() {
collection.insertOne(json("_id: 1"));
collection.insertOne(json("_id: 2"));
collection.insertOne(json("_id: 3"));
collection.insertOne(json("_id: 4"));
collection.insertOne(json("_id: 5"));
collection.deleteOne(json("_id: 1"));
collection.deleteOne(json("_id: 3"));
assertThat(toArray(collection.find().sort(json("_id: 1")).limit(2).skip(2)))
.containsExactly(json("_id: 5"));
}
@Test
public void testFullUpdateWithSameId() throws Exception {
collection.insertOne(json("_id: 1"));
collection.insertOne(json("_id: 2, b: 5"));
collection.insertOne(json("_id: 3"));
collection.insertOne(json("_id: 4"));
collection.replaceOne(json("_id: 2, b:5"), json("_id: 2, a:5"));
assertThat(collection.find(json("_id: 2")).first()).isEqualTo(json("_id: 2, a:5"));
}
@Test
public void testGetCollection() {
MongoCollection<Document> collection = getCollection("coll");
getCollection("coll").insertOne(json("{}"));
assertThat(collection).isNotNull();
assertThat(toArray(db.listCollectionNames())).contains("coll");
}
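    // null is an allowed _id value: the document can be queried and deleted by it,
    // and the unique _id constraint still applies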
@Test
public void testNullId() throws Exception {
collection.insertOne(json("{_id: null, name: 'test'}"));
Document result = collection.find(json("name: 'test'")).first();
assertThat(result).isNotNull();
assertThat(result.getObjectId(Constants.ID_FIELD)).isNull();
assertThatExceptionOfType(MongoWriteException.class)
.isThrownBy(() -> collection.insertOne(json("_id: null")))
.withMessage("duplicate key error index: _id_ dup key: null");
assertThat(collection.countDocuments()).isEqualTo(1);
assertThat(collection.find(json("_id: null")).first()).isEqualTo(json("{_id: null, name: 'test'}"));
collection.deleteOne(json("_id: null"));
assertThat(collection.countDocuments()).isZero();
}
@Test
public void testIdInQueryResultsInIndexOrder() {
collection.insertOne(json("_id: 4"));
collection.insertOne(json("_id: 3"));
collection.insertOne(json("_id: 1"));
collection.insertOne(json("_id: 2"));
List<Document> docs = toArray(collection.find(json("_id: {$in: [3,2,1]}")));
assertThat(docs).containsExactlyInAnyOrder(json("_id: 1"), json("_id: 2"), json("_id: 3"));
}
@Test
public void testIdNotAllowedToBeUpdated() {
collection.insertOne(json("_id: 1"));
assertThatExceptionOfType(MongoCommandException.class)
.isThrownBy(() -> collection.replaceOne(json("_id: 1"), json("_id:2, a:4")))
.withMessageContaining("Command failed with error 13596: 'cannot change _id of a document old: 1, new: 2'");
// test with $set
assertThatExceptionOfType(MongoCommandException.class)
.isThrownBy(() -> collection.updateOne(json("_id: 1"), new Document("$set", json("_id: 2"))))
.withMessageContaining("Command failed with error 10148: 'Mod on _id not allowed'");
}
@Test
public void testIllegalCommand() throws Exception {
assertThatExceptionOfType(MongoCommandException.class)
.isThrownBy(() -> db.runCommand(json("foo: 1")))
.withMessageContaining("Command failed with error 59: 'no such cmd: foo'");
}
@Test
public void testInsert() throws Exception {
assertThat(collection.countDocuments()).isEqualTo(0);
for (int i = 0; i < 3; i++) {
collection.insertOne(new Document("_id", Integer.valueOf(i)));
}
assertThat(collection.countDocuments()).isEqualTo(3);
collection.insertOne(json("foo: [1,2,3]"));
collection.insertOne(new Document("foo", new byte[10]));
Document insertedObject = new Document("foo", UUID.randomUUID());
collection.insertOne(insertedObject);
Document document = collection.find(insertedObject).first();
assertThat(document).isEqualTo(insertedObject);
}
@Test
public void testInsertDuplicate() throws Exception {
assertThat(collection.countDocuments()).isEqualTo(0);
collection.insertOne(json("_id: 1"));
assertThat(collection.countDocuments()).isEqualTo(1);
assertThatExceptionOfType(MongoWriteException.class)
.isThrownBy(() -> collection.insertOne(json("_id: 1")))
.withMessageContaining("duplicate key error index: _id_ dup key: 1");
assertThat(collection.countDocuments()).isEqualTo(1);
}
@Test(expected = MongoException.class)
public void testInsertDuplicateThrows() {
collection.insertOne(json("_id: 1"));
collection.insertOne(json("_id: 1"));
}
@Test(expected = MongoException.class)
public void testInsertDuplicateWithConcernThrows() {
collection.insertOne(json("_id: 1"));
collection.withWriteConcern(WriteConcern.ACKNOWLEDGED).insertOne(json("_id: 1"));
}
@Test
public void testInsertIncrementsCount() {
assertThat(collection.countDocuments()).isZero();
collection.insertOne(json("key: 'value'"));
assertThat(collection.countDocuments()).isEqualTo(1);
}
@Test
public void testInsertQuery() throws Exception {
assertThat(collection.countDocuments()).isEqualTo(0);
Document insertedObject = json("_id: 1");
insertedObject.put("foo", "bar");
collection.insertOne(insertedObject);
assertThat(collection.find(insertedObject).first()).isEqualTo(insertedObject);
assertThat(collection.find(new Document("_id", Long.valueOf(1))).first()).isEqualTo(insertedObject);
assertThat(collection.find(new Document("_id", Double.valueOf(1.0))).first()).isEqualTo(insertedObject);
assertThat(collection.find(new Document("_id", Float.valueOf(1.0001f))).first()).isNull();
assertThat(collection.find(json("foo: 'bar'")).first()).isEqualTo(insertedObject);
}
@Test
public void testInsertRemove() throws Exception {
for (int i = 0; i < 10; i++) {
collection.insertOne(json("_id: 1"));
assertThat(collection.countDocuments()).isEqualTo(1);
collection.deleteOne(json("_id: 1"));
assertThat(collection.countDocuments()).isZero();
collection.insertOne(new Document("_id", i));
collection.deleteOne(new Document("_id", i));
}
assertThat(collection.countDocuments()).isZero();
collection.deleteOne(json("'doesnt exist': 1"));
assertThat(collection.countDocuments()).isZero();
}
@Test
public void testInsertInSystemNamespace() throws Exception {
assertThatExceptionOfType(MongoWriteException.class)
.isThrownBy(() -> getCollection("system.foobar").insertOne(json("{}")))
.withMessage("attempt to insert in system namespace");
assertThatExceptionOfType(MongoWriteException.class)
.isThrownBy(() -> getCollection("system.namespaces").insertOne(json("{}")))
.withMessage("attempt to insert in system namespace");
}
@Test
public void testListDatabaseNames() throws Exception {
assertThat(listDatabaseNames()).isEmpty();
collection.insertOne(json("{}"));
assertThat(listDatabaseNames()).containsExactly(db.getName());
getDatabase().getCollection("some-collection").insertOne(json("{}"));
assertThat(listDatabaseNames()).containsExactly("bar", db.getName());
}
private MongoDatabase getDatabase() {
return syncClient.getDatabase(OTHER_TEST_DATABASE_NAME);
}
private List<String> listDatabaseNames() {
List<String> databaseNames = new ArrayList<>();
for (String databaseName : syncClient.listDatabaseNames()) {
databaseNames.add(databaseName);
}
return databaseNames;
}
@Test
public void testMaxBsonSize() throws Exception {
int maxBsonObjectSize = syncClient.getMaxBsonObjectSize();
assertThat(maxBsonObjectSize).isEqualTo(16777216);
}
@Test
public void testQuery() throws Exception {
Document obj = collection.find(json("_id: 1")).first();
assertThat(obj).isNull();
assertThat(collection.countDocuments()).isEqualTo(0);
}
@Test
public void testQueryAll() throws Exception {
List<Object> inserted = new ArrayList<>();
for (int i = 0; i < 10; i++) {
Document obj = new Document("_id", i);
collection.insertOne(obj);
inserted.add(obj);
}
assertThat(collection.countDocuments()).isEqualTo(10);
assertThat(toArray(collection.find().sort(json("_id: 1")))).isEqualTo(inserted);
}
@Test
public void testQueryCount() throws Exception {
for (int i = 0; i < 100; i++) {
collection.insertOne(json("{}"));
}
assertThat(collection.countDocuments()).isEqualTo(100);
Document obj = json("_id: 1");
assertThat(collection.countDocuments(obj)).isEqualTo(0);
collection.insertOne(obj);
assertThat(collection.countDocuments(obj)).isEqualTo(1);
}
@Test
public void testQueryLimitEmptyQuery() throws Exception {
for (int i = 0; i < 5; i++) {
collection.insertOne(json("{}"));
}
assertThat(collection.countDocuments(json("{}"), new CountOptions().limit(1))).isEqualTo(1);
assertThat(collection.countDocuments(json("{}"), new CountOptions().limit(-1))).isEqualTo(5);
assertThat(collection.countDocuments(json("{}"))).isEqualTo(5);
}
@Test
public void testQueryLimitSimpleQuery() throws Exception {
for (int i = 0; i < 5; i++) {
collection.insertOne(json("a:1"));
}
assertThat(collection.countDocuments(json("a:1"), new CountOptions().limit(1))).isEqualTo(1);
assertThat(collection.countDocuments(json("a:1"), new CountOptions().limit(-1))).isEqualTo(5);
assertThat(collection.countDocuments(json("a:1"))).isEqualTo(5);
}
@Test
public void testQueryNull() throws Exception {
Document object = json("_id: 1");
collection.insertOne(object);
assertThat(collection.find(json("foo: null")).first()).isEqualTo(object);
}
@Test
public void testQuerySkipLimitEmptyQuery() throws Exception {
assertThat(collection.countDocuments(json("{}"), new CountOptions().skip(3))).isEqualTo(0);
for (int i = 0; i < 10; i++) {
collection.insertOne(json("{}"));
}
assertThat(collection.countDocuments(json("{}"), new CountOptions().skip(3))).isEqualTo(7);
assertThat(collection.countDocuments(json("{}"), new CountOptions().skip(15))).isEqualTo(0);
assertThat(collection.countDocuments(json("{}"), new CountOptions().skip(3).limit(5))).isEqualTo(5);
}
@Test
public void testQuerySkipLimitSimpleQuery() throws Exception {
assertThat(collection.countDocuments(json("a:1"), new CountOptions().skip(3))).isEqualTo(0);
for (int i = 0; i < 10; i++) {
collection.insertOne(json("a:1"));
}
assertThat(collection.countDocuments(json("a:1"), new CountOptions().skip(3))).isEqualTo(7);
assertThat(collection.countDocuments(json("a:1"), new CountOptions().skip(3).limit(5))).isEqualTo(5);
assertThat(collection.countDocuments(json("a:1"), new CountOptions().skip(15).limit(5))).isEqualTo(0);
}
@Test
public void testQuerySort() throws Exception {
Random random = new Random(4711);
for (int i = 0; i < 10; i++) {
collection.insertOne(new Document("_id", Double.valueOf(random.nextDouble())));
}
List<Document> objects = toArray(collection.find().sort(json("_id: 1")));
double before = Double.MIN_VALUE;
for (Document obj : objects) {
double value = obj.getDouble("_id").doubleValue();
assertThat(value).isGreaterThanOrEqualTo(before);
before = value;
}
// reverse sort
objects = toArray(collection.find().sort(json("_id: -1")));
before = Double.MAX_VALUE;
for (Document obj : objects) {
double value = obj.getDouble("_id").doubleValue();
assertThat(value).isLessThanOrEqualTo(before);
before = value;
}
}
@Test
public void testQueryWithFieldSelector() throws Exception {
collection.insertOne(json("foo: 'bar'"));
Document obj = collection.find(json("{}")).projection(json("foo: 1")).first();
assertThat(obj.keySet()).containsOnly("_id", "foo");
obj = collection.find(json("foo:'bar'")).projection(json("_id: 1")).first();
assertThat(obj.keySet()).containsOnly("_id");
obj = collection.find(json("foo: 'bar'")).projection(json("_id: 0, foo:1")).first();
assertThat(obj.keySet()).containsOnly("foo");
}
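    // dot-notation projections select individual subdocument fields; projecting a
    // non-existent nested path ('foo.a.b.c.d') leaves an empty subdocument for the existing prefix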
@Test
public void testQueryWithDotNotationFieldSelector() throws Exception {
collection.insertOne(json("_id: 123, index: false, foo: { a: 'a1', b: 0}"));
Document obj = collection.find(json("{}")).projection(json("'foo.a': 1, 'foo.b': 1")).first();
assertThat(obj).isEqualTo(json("_id: 123, foo: {a: 'a1', b: 0}"));
obj = collection.find(json("{}")).projection(json("'foo.a': 1")).first();
assertThat(obj).isEqualTo(json("_id: 123, foo: {a: 'a1'}"));
obj = collection.find(json("{}")).projection(json("'foo.a': 1, index: 1, _id: 0")).first();
assertThat(obj).isEqualTo(json("foo: {a: 'a1'}, index: false"));
obj = collection.find(json("{}")).projection(json("foo: 1, _id: 0")).first();
assertThat(obj).isEqualTo(json("foo: {a: 'a1', b: 0}"));
obj = collection.find(json("{}")).projection(json("'foo.a.b.c.d': 1")).first();
assertThat(obj).isEqualTo(json("_id: 123, foo: {}"));
}
@Test
public void testQuerySystemNamespace() throws Exception {
assertThat(getCollection("system.foobar").find().first()).isNull();
assertThat(db.listCollectionNames()).isEmpty();
collection.insertOne(json("{}"));
Document expectedObj = new Document("name", collection.getNamespace().getFullName());
Document coll = getCollection("system.namespaces").find(expectedObj).first();
assertThat(coll).isEqualTo(expectedObj);
}
@Test
public void testQueryAllExpression() throws Exception {
collection.insertOne(json(" _id : [ { x : 1 } , { x : 2 } ]"));
collection.insertOne(json(" _id : [ { x : 2 } , { x : 3 } ]"));
assertThat(collection.countDocuments(json("'_id.x':{$all:[1,2]}"))).isEqualTo(1);
assertThat(collection.countDocuments(json("'_id.x':{$all:[2,3]}"))).isEqualTo(1);
}
@Test
public void testQueryWithSubdocumentIndex() throws Exception {
collection.createIndex(json("action:{actionId:1}"), new IndexOptions().unique(true));
collection.insertOne(json("action: { actionId: 1 }, value: 'a'"));
collection.insertOne(json("action: { actionId: 2 }, value: 'b'"));
collection.insertOne(json("action: { actionId: 3 }, value: 'c'"));
Document foundWithNestedDocument = collection.find(json("action: { actionId: 2 }")).first();
assertThat(foundWithNestedDocument.get("value")).isEqualTo("b");
Document foundWithDotNotation = collection.find(json("'action.actionId': 2")).first();
assertThat(foundWithDotNotation.get("value")).isEqualTo("b");
}
// see https://github.com/bwaldvogel/mongo-java-server/issues/12
@Test
public void testQueryBinaryData() throws Exception {
byte[] firstBytes = new byte[] { 0x01, 0x02, 0x03 };
byte[] secondBytes = new byte[] { 0x03, 0x02, 0x01 };
collection.insertOne(new Document("_id", 1).append("test", firstBytes));
collection.insertOne(new Document("_id", 2).append("test", secondBytes));
Document first = collection.find(new Document("test", firstBytes)).first();
assertThat(first).isNotNull();
assertThat(first.get("_id")).isEqualTo(1);
Document second = collection.find(new Document("test", secondBytes)).first();
assertThat(second).isNotNull();
assertThat(second.get("_id")).isEqualTo(2);
}
@Test
public void testRemove() {
collection.insertOne(json("_id: 1"));
collection.insertOne(json("_id: 2"));
collection.insertOne(json("_id: 3"));
collection.insertOne(json("_id: 4"));
collection.deleteOne(json("_id: 2"));
assertThat(collection.find(json("_id: 2")).first()).isNull();
assertThat(collection.countDocuments()).isEqualTo(3);
collection.deleteMany(json("_id: {$gte: 3}"));
assertThat(collection.countDocuments()).isEqualTo(1);
assertThat(collection.find().first()).isEqualTo(json("_id: 1"));
}
@Test
public void testRemoveSingle() throws Exception {
Document obj = new Document("_id", ObjectId.get());
collection.insertOne(obj);
collection.deleteOne(obj);
}
@Test
public void testRemoveReturnsModifiedDocumentCount() {
collection.insertOne(json("{}"));
collection.insertOne(json("{}"));
DeleteResult result = collection.deleteMany(json("{}"));
assertThat(result.getDeletedCount()).isEqualTo(2);
result = collection.deleteMany(json("{}"));
assertThat(result.getDeletedCount()).isEqualTo(0);
}
@Test
public void testReservedCollectionNames() throws Exception {
assertThatExceptionOfType(MongoWriteException.class)
.isThrownBy(() -> getCollection("foo$bar").insertOne(json("{}")))
.withMessage("cannot insert into reserved $ collection");
String veryLongString = repeat("verylongstring", 5);
assertThatExceptionOfType(MongoWriteException.class)
.isThrownBy(() -> getCollection(veryLongString).insertOne(json("{}")))
.withMessage("ns name too long, max size is 128");
}
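    // doubles the string on each iteration, so the result length is str.length() * 2^num
    // (14 * 2^5 = 448 characters above, well over the 128-character namespace limit)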
private static String repeat(String str, int num) {
String repeated = str;
for (int i = 0; i < num; i++) {
repeated += repeated;
}
return repeated;
}
@Test
public void testServerStatus() throws Exception {
Date before = new Date();
Document serverStatus = runCommand("serverStatus");
assertThat(serverStatus.getInteger("ok")).isEqualTo(1);
assertThat(serverStatus.get("uptime")).isInstanceOf(Number.class);
assertThat(serverStatus.get("uptimeMillis")).isInstanceOf(Long.class);
Date serverTime = (Date) serverStatus.get("localTime");
assertThat(serverTime).isNotNull();
assertThat(serverTime.after(new Date())).isFalse();
assertThat(before.after(serverTime)).isFalse();
Document connections = (Document) serverStatus.get("connections");
assertThat(connections.get("current")).isNotNull();
}
@Test
public void testPing() throws Exception {
Document response = runCommand("ping");
assertThat(response.getInteger("ok")).isEqualTo(1);
}
@Test
public void testPingTrue() throws Exception {
Document command = new Document("ping", Boolean.TRUE);
Document response = runCommand(command);
assertThat(response.getInteger("ok")).isEqualTo(1);
}
@Test
public void testReplSetGetStatus() throws Exception {
assertThatExceptionOfType(MongoCommandException.class)
.isThrownBy(() -> runCommand("replSetGetStatus"))
.withMessageContaining("Command failed with error -1: 'not running with --replSet'");
}
@Test
public void testWhatsMyUri() throws Exception {
for (String dbName : new String[] { "admin", "local", "test" }) {
Document result = syncClient.getDatabase(dbName).runCommand(new Document("whatsmyuri", 1));
assertThat(result.get("you")).isNotNull();
assertThat(result.get("you").toString()).startsWith("127.0.0.1:");
}
}
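    // documents that do not contain the sort key are treated as if the value were null,
    // so they sort last in a descending sort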
@Test
public void testSort() {
collection.insertOne(json("a:1, _id:1"));
collection.insertOne(json("a:2, _id:2"));
collection.insertOne(json("_id: 5"));
collection.insertOne(json("a:3, _id:3"));
collection.insertOne(json("a:4, _id:4"));
List<Document> objs = toArray(collection.find().sort(json("a: -1")));
assertThat(objs).containsExactly(
json("a:4, _id:4"),
json("a:3, _id:3"),
json("a:2, _id:2"),
json("a:1, _id:1"),
json("_id: 5")
);
}
@Test
public void testSortByEmbeddedKey() {
collection.insertOne(json("_id: 1, a: { b:1 }"));
collection.insertOne(json("_id: 2, a: { b:2 }"));
collection.insertOne(json("_id: 3, a: { b:3 }"));
List<Document> results = toArray(collection.find().sort(json("'a.b': -1")));
assertThat(results).containsExactly(
json("_id: 3, a: { b:3 }"),
json("_id: 2, a: { b:2 }"),
json("_id: 1, a: { b:1 }")
);
}
@Test
public void testUpdate() throws Exception {
Document object = json("_id: 1");
Document newObject = json("{_id: 1, foo: 'bar'}");
collection.insertOne(object);
UpdateResult result = collection.replaceOne(object, newObject);
assertThat(result.getModifiedCount()).isEqualTo(1);
assertThat(result.getUpsertedId()).isNull();
assertThat(collection.find(object).first()).isEqualTo(newObject);
}
@Test
public void testUpdateNothing() throws Exception {
Document object = json("_id: 1");
UpdateResult result = collection.replaceOne(object, object);
assertThat(result.getModifiedCount()).isEqualTo(0);
assertThat(result.getMatchedCount()).isEqualTo(0);
assertThat(result.getUpsertedId()).isNull();
}
@Test
public void testUpdateBlank() throws Exception {
Document document = json("'': 1, _id: 2, a: 3, b: 4");
collection.insertOne(document);
collection.updateOne(json("{}"), json("$set: {c:5}"));
assertThat(collection.find().first()).isEqualTo(json("'': 1, _id: 2, a: 3, b: 4, c:5"));
}
@Test
public void testUpdateEmptyPositional() throws Exception {
collection.insertOne(json("{}"));
assertThatExceptionOfType(MongoCommandException.class)
.isThrownBy(() -> collection.updateOne(json("{}"), json("$set:{'a.$.b': 1}")))
.withMessageContaining("Command failed with error 16650: 'Cannot apply the positional operator without a corresponding query field containing an array.'");
}
@Test
public void testUpdateMultiplePositional() throws Exception {
collection.insertOne(json("{a: {b: {c: 1}}}"));
assertThatExceptionOfType(MongoCommandException.class)
.isThrownBy(() -> collection.updateOne(json("{'a.b.c':1}"), json("$set:{'a.$.b.$.c': 1}")))
.withMessageContaining("Command failed with error 16650: 'Cannot apply the positional operator without a corresponding query field containing an array.'");
}
@Test
public void testUpdateIllegalFieldName() throws Exception {
// Disallow $ in field names - SERVER-3730
collection.insertOne(json("{x:1}"));
collection.updateOne(json("{x:1}"), json("$set: {y:1}")); // ok
assertThatExceptionOfType(MongoCommandException.class)
.isThrownBy(() -> collection.updateOne(json("{x:1}"), json("$set: {$z:1}")))
.withMessageContaining("Command failed with error 15896: 'Modified field name may not start with $'");
// $unset is still allowed, so fields with illegal names can be removed
collection.updateOne(json("{x:1}"), json("$unset: {$z:1}"));
assertThatExceptionOfType(MongoCommandException.class)
.isThrownBy(() -> collection.updateOne(json("{x:1}"), json("$inc: {$z:1}")))
.withMessageContaining("Command failed with error 15896: 'Modified field name may not start with $'");
assertThatExceptionOfType(MongoCommandException.class)
.isThrownBy(() -> collection.updateOne(json("{x:1}"), json("$pushAll: {$z:[1,2,3]}")))
.withMessageContaining("Command failed with error 15896: 'Modified field name may not start with $'");
}
@Test
public void testUpdateSubdocument() throws Exception {
assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> collection.updateOne(json("{}"), json("'a.b.c': 123")))
.withMessage("Invalid BSON field name a.b.c");
}
@Test
public void testUpdateIdNoChange() {
collection.insertOne(json("_id: 1"));
collection.replaceOne(json("_id: 1"), json("_id: 1, a: 5"));
assertThat(collection.find(json("_id: 1")).first()).isEqualTo(json("_id: 1, a: 5"));
collection.updateOne(json("_id: 1"), json("$set: {_id: 1, b: 3}"));
assertThat(collection.find(json("_id: 1")).first()).isEqualTo(json("_id: 1, a: 5, b: 3"));
// repeating the unchanged _id in $set together with another field is also accepted
collection.updateOne(json("_id: 1"), json("$set: {_id: 1, a: 7}"));
assertThat(collection.find(json("_id: 1")).first()).isEqualTo(json("_id: 1, a: 7, b: 3"));
}
@Test
public void testUpdatePush() throws Exception {
Document idObj = json("_id: 1");
collection.insertOne(idObj);
collection.updateOne(idObj, json("$push: {'field.subfield.subsubfield': 'value'}"));
Document expected = json("_id: 1, field:{subfield:{subsubfield: ['value']}}");
assertThat(collection.find(idObj).first()).isEqualTo(expected);
// push to non-array
collection.updateOne(idObj, json("$set: {field: 'value'}"));
assertThatExceptionOfType(MongoCommandException.class)
.isThrownBy(() -> collection.updateOne(idObj, json("$push: {field: 'value'}")))
.withMessageContaining("Command failed with error 10141: 'Cannot apply $push modifier to non-array'");
// push with multiple fields
Document pushObj = json("$push: {field1: 'value', field2: 'value2'}");
collection.updateOne(idObj, pushObj);
expected = json("_id: 1, field: 'value', field1: ['value'], field2: ['value2']");
assertThat(collection.find(idObj).first()).isEqualTo(expected);
// push duplicate
pushObj = json("$push: {field1: 'value'}");
collection.updateOne(idObj, pushObj);
expected.put("field1", Arrays.asList("value", "value"));
assertThat(collection.find(idObj).first()).isEqualTo(expected);
}
@Test
public void testUpdatePushAll() throws Exception {
Document idObj = json("_id: 1");
collection.insertOne(idObj);
assertThatExceptionOfType(MongoCommandException.class)
.isThrownBy(() -> collection.updateOne(idObj, json("$pushAll: {field: 'value'}")))
.withMessageContaining("Command failed with error 10153: 'Modifier $pushAll allowed for arrays only'");
collection.updateOne(idObj, json("$pushAll: {field: ['value', 'value2']}"));
assertThat(collection.find(idObj).first()).isEqualTo(json("_id: 1, field: ['value', 'value2']"));
}
@Test
public void testUpdateAddToSet() throws Exception {
Document idObj = json("_id: 1");
collection.insertOne(idObj);
collection.updateOne(idObj, json("$addToSet: {'field.subfield.subsubfield': 'value'}"));
assertThat(collection.find(idObj).first()).isEqualTo(json("_id: 1, field:{subfield:{subsubfield:['value']}}"));
// addToSet to non-array
collection.updateOne(idObj, json("$set: {field: 'value'}"));
assertThatExceptionOfType(MongoCommandException.class)
.isThrownBy(() -> collection.updateOne(idObj, json("$addToSet: {field: 'value'}")))
.withMessageContaining("Command failed with error 10141: 'Cannot apply $addToSet modifier to non-array'");
// addToSet with multiple fields
collection.updateOne(idObj, json("$addToSet: {field1: 'value', field2: 'value2'}"));
assertThat(collection.find(idObj).first())
.isEqualTo(json("_id: 1, field: 'value', field1: ['value'], field2: ['value2']"));
// addToSet duplicate
collection.updateOne(idObj, json("$addToSet: {field1: 'value'}"));
assertThat(collection.find(idObj).first())
.isEqualTo(json("_id: 1, field: 'value', field1: ['value'], field2: ['value2']"));
}
@Test
public void testUpdateAddToSetEach() throws Exception {
collection.insertOne(json("_id: 1"));
collection.updateOne(json("_id: 1"), addEachToSet("a", Arrays.asList(6, 5, 4)));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, a: [6,5,4]"));
collection.updateOne(json("_id: 1"), addEachToSet("a", Arrays.asList(3, 2, 1)));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, a: [6,5,4,3,2,1]"));
collection.updateOne(json("_id: 1"), addEachToSet("a", Arrays.asList(7, 7, 9, 2)));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, a: [6,5,4,3,2,1,7,9]"));
collection.updateOne(json("_id: 1"), addEachToSet("a", Arrays.asList(12, 13, 12)));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, a: [6,5,4,3,2,1,7,9,12,13]"));
}
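    // collStats 'size' stays the same when an array element is replaced by a value of equal
    // BSON size, and grows when the replacement is larger (the int32 replaced by 'abc' adds 4 bytes here)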
@Test
public void testUpdateDatasize() throws Exception {
Document obj = json("{_id:1, a:{x:[1, 2, 3]}}");
collection.insertOne(obj);
Number oldSize = getCollStats().getLong("size");
collection.updateOne(json("_id:1"), set("a.x.0", 3));
assertThat(collection.find().first().get("a")).isEqualTo(json("x:[3,2,3]"));
Number newSize = getCollStats().getLong("size");
assertThat(newSize).isEqualTo(oldSize);
// now replace the element with a longer value so the collection size grows
collection.updateOne(json("_id:1"), set("a.x.0", "abc"));
Number yetNewSize = getCollStats().getLong("size");
assertThat(yetNewSize.longValue() - oldSize.longValue()).isEqualTo(4);
}
@Test
public void testUpdatePull() throws Exception {
Document obj = json("_id: 1");
collection.insertOne(obj);
// pull from non-existing field
assertThat(collection.find(obj).first()).isEqualTo(obj);
// pull from non-array
collection.updateOne(obj, set("field", "value"));
assertThatExceptionOfType(MongoCommandException.class)
.isThrownBy(() -> collection.updateOne(obj, pull("field", "value")))
.withMessageContaining("Command failed with error 10142: 'Cannot apply $pull modifier to non-array'");
// pull standard
collection.updateOne(obj, json("$set: {field: ['value1', 'value2', 'value1']}"));
collection.updateOne(obj, pull("field", "value1"));
assertThat(collection.find(obj).first().get("field")).isEqualTo(Collections.singletonList("value2"));
// pull with multiple fields
collection.updateOne(obj, json("{$set: {field1: ['value1', 'value2', 'value1']}}"));
collection.updateOne(obj, json("$set: {field2: ['value3', 'value3', 'value1']}"));
collection.updateOne(obj, json("$pull: {field1: 'value2', field2: 'value3'}"));
assertThat(collection.find(obj).first().get("field1")).isEqualTo(Arrays.asList("value1", "value1"));
assertThat(collection.find(obj).first().get("field2")).isEqualTo(Collections.singletonList("value1"));
}
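    // $pull with a condition document removes every array element matching that condition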
@Test
public void testUpdatePullValueWithCondition() {
collection.insertOne(json("_id: 1, votes: [ 3, 5, 6, 7, 7, 8 ]"));
collection.updateOne(json("_id: 1"), json("$pull: { votes: { $gte: 6 } }"));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, votes: [ 3, 5 ]"));
}
@Test
public void testUpdatePullDocuments() {
collection.insertOne(json("_id: 1, results: [{item: 'A', score: 5}, {item: 'B', score: 8, comment: 'foobar'}]"));
collection.insertOne(json("_id: 2, results: [{item: 'C', score: 8, comment: 'foobar'}, {item: 'B', score: 4}]"));
collection.updateOne(json("{}"), json("$pull: { results: { score: 8 , item: 'B' } }"));
assertThat(collection.find(json("_id: 1")).first()).isEqualTo(json("_id: 1, results: [{item: 'A', score: 5}]"));
assertThat(collection.find(json("_id: 2")).first()).isEqualTo(json("_id: 2, results: [{item: 'C', score: 8, comment: 'foobar'}, {item: 'B', score: 4}]"));
}
// https://github.com/bwaldvogel/mongo-java-server/issues/20
@Test
public void testUpdatePullLeavesEmptyArray() {
Document obj = json("_id: 1");
collection.insertOne(obj);
collection.updateOne(obj, json("$set: {field: [{'key1': 'value1', 'key2': 'value2'}]}"));
collection.updateOne(obj, json("$pull: {field: {'key1': 'value1'}}"));
assertThat(collection.find(obj).first()).isEqualTo(json("_id: 1, field: []"));
}
@Test
public void testUpdatePullAll() throws Exception {
Document obj = json("_id: 1");
collection.insertOne(obj);
collection.updateOne(obj, json("$set: {field: 'value'}"));
assertThatExceptionOfType(MongoCommandException.class)
.isThrownBy(() -> collection.updateOne(obj, json("$pullAll: {field: 'value'}")))
.withMessageContaining("Command failed with error 10142: 'Cannot apply $pullAll modifier to non-array'");
collection.updateOne(obj, json("$set: {field1: ['value1', 'value2', 'value1', 'value3', 'value4', 'value3']}"));
collection.updateOne(obj, json("$pullAll: {field1: ['value1', 'value3']}"));
assertThat(collection.find(obj).first().get("field1")).isEqualTo(Arrays.asList("value2", "value4"));
assertThatExceptionOfType(MongoCommandException.class)
.isThrownBy(() -> collection.updateOne(obj, json("$pullAll: {field1: 'bar'}")))
.withMessageContaining("Command failed with error 10153: 'Modifier $pullAll allowed for arrays only'");
}
@Test
public void testUpdateSet() throws Exception {
Document object = json("_id: 1");
collection.insertOne(object);
assertThat(collection.find(object).first()).isEqualTo(object);
collection.updateOne(object, json("$set: {foo: 'bar'}"));
Document expected = json("{}");
expected.putAll(object);
expected.put("foo", "bar");
collection.updateOne(object, json("$set: {bar: 'bla'}"));
expected.put("bar", "bla");
assertThat(collection.find(object).first()).isEqualTo(expected);
collection.updateOne(object, json("$set: {'foo.bar': 'bla'}"));
expected.put("foo", json("bar: 'bla'"));
assertThat(collection.find(object).first()).isEqualTo(expected);
collection.updateOne(object, json("$set: {'foo.foo': '123'}"));
((Document) expected.get("foo")).put("foo", "123");
assertThat(collection.find(object).first()).isEqualTo(expected);
}
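    // $setOnInsert is only applied when the upsert actually inserts a new document;
    // later updates of the existing document leave those fields untouched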
@Test
public void testUpdateSetOnInsert() throws Exception {
Document object = json("_id: 1");
collection.updateOne(object, json("$set: {b: 3}, $setOnInsert: {a: 3}"), new UpdateOptions().upsert(true));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, b: 3, a: 3"));
collection.updateOne(object, json("$set: {b: 4}, $setOnInsert: {a: 5}"), new UpdateOptions().upsert(true));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, b: 4, a: 3")); // 'a' is unchanged
}
@Test
public void testUpdateSetWithArrayIndices() throws Exception {
// SERVER-181
collection.insertOne(json("_id: 1, a: [{x:0}]"));
collection.updateOne(json("{}"), json("$set: {'a.0.x': 3}"));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, a: [{x:3}]"));
collection.updateOne(json("{}"), json("$set: {'a.1.z': 17}"));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, a: [{x:3}, {z:17}]"));
collection.updateOne(json("{}"), json("$set: {'a.0.y': 7}"));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, a: [{x:3, y:7}, {z:17}]"));
collection.updateOne(json("{}"), json("$set: {'a.1': 'test'}"));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, a: [{x:3, y:7}, 'test']"));
}
@Test
public void testUpdateUnsetWithArrayIndices() throws Exception {
// SERVER-273
collection.insertOne(json("_id: 1, a:[{x:0}]"));
collection.updateOne(json("{}"), json("$unset: {'a.0.x': 1}"));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, a:[{}]"));
collection.updateOne(json("{}"), json("$unset: {'a.0': 1}"));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, a:[null]"));
collection.updateOne(json("{}"), json("$unset: {'a.10': 1}"));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, a:[null]"));
}
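    // $max and $min compare values of different BSON types using the BSON comparison order
    // (null < numbers < strings), which explains the string and null cases below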
@Test
public void testUpdateMax() throws Exception {
Document object = json("_id: 1");
collection.insertOne(object);
collection.updateOne(object, json("$max: {'foo.bar': 1}"));
assertThat(collection.find(object).first()).isEqualTo(json("_id: 1, foo : {bar : 1}"));
collection.updateOne(object, json("$max: {'foo.bar': 1}"));
assertThat(collection.find(object).first()).isEqualTo(json("_id: 1, foo : {bar : 1}"));
collection.updateOne(object, json("$max: {'foo.bar': 10}"));
assertThat(collection.find(object).first()).isEqualTo(json("_id: 1, foo : {bar : 10}"));
collection.updateOne(object, json("$max: {'foo.bar': -100}"));
assertThat(collection.find(object).first()).isEqualTo(json("_id: 1, foo : {bar : 10}"));
collection.updateOne(object, json("$max: {'foo.bar': '1'}"));
assertThat(collection.find(object).first()).isEqualTo(json("_id: 1, foo : {bar : '1'}"));
collection.updateOne(object, json("$max: {'foo.bar': null}"));
assertThat(collection.find(object).first()).isEqualTo(json("_id: 1, foo : {bar : '1'}"));
collection.updateOne(object, json("$max: {'foo.bar': '2', 'buz' : 1}"));
assertThat(collection.find(object).first()).isEqualTo(json("_id: 1, foo : {bar : '2'}, buz : 1"));
}
@Test
public void testUpdateMin() throws Exception {
Document object = json("_id: 1");
collection.insertOne(object);
collection.updateOne(object, json("$min: {'foo.bar': 'b'}"));
assertThat(collection.find(object).first()).isEqualTo(json("_id: 1, foo : {bar : 'b'}"));
collection.updateOne(object, json("$min: {'foo.bar': 'a'}"));
assertThat(collection.find(object).first()).isEqualTo(json("_id: 1, foo : {bar : 'a'}"));
collection.updateOne(object, json("$min: {'foo.bar': 10}"));
assertThat(collection.find(object).first()).isEqualTo(json("_id: 1, foo : {bar : 10}"));
collection.updateOne(object, json("$min: {'foo.bar': 10}"));
assertThat(collection.find(object).first()).isEqualTo(json("_id: 1, foo : {bar : 10}"));
collection.updateOne(object, json("$min: {'foo.bar': 1}"));
assertThat(collection.find(object).first()).isEqualTo(json("_id: 1, foo : {bar : 1}"));
collection.updateOne(object, json("$min: {'foo.bar': 100}"));
assertThat(collection.find(object).first()).isEqualTo(json("_id: 1, foo : {bar : 1}"));
collection.updateOne(object, json("$min: {'foo.bar': null}"));
assertThat(collection.find(object).first()).isEqualTo(json("_id: 1, foo : {bar : null}"));
collection.updateOne(object, json("$min: {'foo.bar': 'a'}"));
assertThat(collection.find(object).first()).isEqualTo(json("_id: 1, foo : {bar : null}"));
}
// see http://docs.mongodb.org/manual/reference/operator/update/max
@Test
public void testUpdateMaxCompareNumbers() throws Exception {
Document object = json("_id: 1, highScore: 800, lowScore: 200");
collection.insertOne(object);
collection.updateOne(json("_id: 1"), json("$max: { highScore: 950 }"));
assertThat(collection.find(json("_id: 1")).first()).isEqualTo(json("_id: 1, highScore: 950, lowScore: 200"));
collection.updateOne(json("_id: 1"), json("$max: { highScore: 870 }"));
assertThat(collection.find(json("_id: 1")).first()).isEqualTo(json("_id: 1, highScore: 950, lowScore: 200"));
}
// see http://docs.mongodb.org/manual/reference/operator/update/max
@Test
public void testUpdateMaxCompareDates() throws Exception {
DateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss", Locale.US);
Document object = new Document("_id", 1).append("desc", "crafts")
.append("dateEntered", df.parse("2013-10-01T05:00:00"))
.append("dateExpired", df.parse("2013-10-01T16:38:16"));
collection.insertOne(object);
collection.updateOne(json("_id: 1"),
new Document("$max", new Document("dateExpired", df.parse("2013-09-30T00:00:00"))));
assertThat(collection.find(json("_id: 1")).first())
.isEqualTo(json("_id: 1, desc: 'crafts'")
.append("dateEntered", df.parse("2013-10-01T05:00:00"))
.append("dateExpired", df.parse("2013-10-01T16:38:16")));
collection.updateOne(json("_id: 1"),
new Document("$max", new Document("dateExpired", df.parse("2014-01-07T00:00:00"))));
assertThat(collection.find(json("_id: 1")).first()).isEqualTo(
json("_id: 1, desc: 'crafts'")
.append("dateEntered", df.parse("2013-10-01T05:00:00"))
.append("dateExpired", df.parse("2014-01-07T00:00:00")));
}
// see http://docs.mongodb.org/manual/reference/operator/update/min
@Test
public void testUpdateMinCompareNumbers() throws Exception {
Document object = json("_id: 1, highScore: 800, lowScore: 200");
collection.insertOne(object);
collection.updateOne(json("_id: 1"), json("$min: { lowScore: 150 }"));
assertThat(collection.find(json("_id: 1")).first()).isEqualTo(json("_id: 1, highScore: 800, lowScore: 150"));
collection.updateOne(json("_id: 1"), json("$min: { lowScore: 250 }"));
assertThat(collection.find(json("_id: 1")).first()).isEqualTo(json("_id: 1, highScore: 800, lowScore: 150"));
}
// see http://docs.mongodb.org/manual/reference/operator/update/min
@Test
public void testUpdateMinCompareDates() throws Exception {
DateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss", Locale.US);
Document object = new Document("_id", 1).append("desc", "crafts")
.append("dateEntered", df.parse("2013-10-01T05:00:00"))
.append("dateExpired", df.parse("2013-10-01T16:38:16"));
collection.insertOne(object);
collection.updateOne(json("_id: 1"),
new Document("$min", new Document("dateEntered", df.parse("2013-09-25T00:00:00"))));
assertThat(collection.find(json("_id: 1")).first()) //
.isEqualTo(json("_id: 1, desc: 'crafts'") //
.append("dateEntered", df.parse("2013-09-25T00:00:00")) //
.append("dateExpired", df.parse("2013-10-01T16:38:16")));
collection.updateOne(json("_id: 1"),
new Document("$min", new Document("dateEntered", df.parse("2014-01-07T00:00:00"))));
assertThat(collection.find(json("_id: 1")).first()) //
.isEqualTo(json("_id: 1, desc: 'crafts'") //
.append("dateEntered", df.parse("2013-09-25T00:00:00")) //
.append("dateExpired", df.parse("2013-10-01T16:38:16")));
}
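// $pop removes the last array element for 1 and the first element for -1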
@Test
public void testUpdatePop() throws Exception {
Document object = json("_id: 1");
collection.insertOne(object);
collection.updateOne(object, json("$pop: {'foo.bar': 1}"));
assertThat(collection.find(object).first()).isEqualTo(object);
collection.updateOne(object, json("$set: {'foo.bar': [1,2,3]}"));
assertThat(collection.find(object).first()).isEqualTo(json("_id:1, foo:{bar:[1,2,3]}"));
collection.updateOne(object, json("$pop: {'foo.bar': 1}"));
assertThat(collection.find(object).first()).isEqualTo(json("_id:1, foo:{bar:[1,2]}"));
collection.updateOne(object, json("$pop: {'foo.bar': -1}"));
assertThat(collection.find(object).first()).isEqualTo(json("_id:1, foo:{bar:[2]}"));
collection.updateOne(object, json("$pop: {'foo.bar': null}"));
assertThat(collection.find(object).first()).isEqualTo(json("_id:1, foo:{bar:[]}"));
}
@Test
public void testUpdateUnset() throws Exception {
Document obj = json("_id: 1, a: 1, b: null, c: 'value'");
collection.insertOne(obj);
assertThatExceptionOfType(MongoCommandException.class)
.isThrownBy(() -> collection.updateOne(obj, json("$unset: {_id: ''}")))
.withMessageContaining("Command failed with error 10148: 'Mod on _id not allowed'");
collection.updateOne(obj, json("$unset: {a:'', b:''}"));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, c: 'value'"));
collection.updateOne(obj, Updates.unset("c.y"));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, c: 'value'"));
collection.replaceOne(json("_id: 1"), json("a: {b: 'foo', c: 'bar'}"));
collection.updateOne(json("_id: 1"), json("$unset: {'a.b':1}"));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, a: {c: 'bar'}"));
}
@Test
public void testUpdateWithIdIn() {
collection.insertOne(json("_id: 1"));
Document update = json("$push: {n: {_id: 2, u:3}}, $inc: {c:4}");
Document expected = json("_id: 1, n: [{_id: 2, u:3}], c:4");
collection.updateOne(json("_id: {$in: [1]}"), update);
assertThat(collection.find().first()).isEqualTo(expected);
}
@Test
public void testUpdateMulti() throws Exception {
collection.insertOne(json("a: 1"));
collection.insertOne(json("a: 1"));
UpdateResult result = collection.updateOne(json("a: 1"), json("$set: {b: 2}"));
assertThat(result.getModifiedCount()).isEqualTo(1);
assertThat(collection.countDocuments(new Document("b", 2))).isEqualTo(1);
result = collection.updateMany(json("a: 1"), json("$set: {b: 3}"));
assertThat(result.getModifiedCount()).isEqualTo(2);
assertThat(collection.countDocuments(new Document("b", 2))).isEqualTo(0);
assertThat(collection.countDocuments(new Document("b", 3))).isEqualTo(2);
}
@Test
public void testUpdateIllegalInt() throws Exception {
collection.insertOne(json("_id: 1, a: {x:1}"));
assertThatExceptionOfType(MongoCommandException.class)
.isThrownBy(() -> collection.updateOne(json("_id: 1"), json("$inc: {a: 1}")))
.withMessageContaining("Command failed with error -1: 'cannot increment value");
assertThatExceptionOfType(MongoCommandException.class)
.isThrownBy(() -> collection.updateOne(json("_id: 1"), json("$inc: {'a.x': 'b'}")))
.withMessageContaining("Command failed with error -1: 'cannot increment with non-numeric value");
}
@Test
public void testUpdateWithIdInMulti() {
collection.insertMany(Arrays.asList(json("_id: 1"), json("_id: 2")));
collection.updateMany(json("_id: {$in:[1,2]}"), json("$set: {n:1}"));
List<Document> results = toArray(collection.find());
assertThat(results).containsOnly(json("_id: 1, n:1"), json("_id: 2, n: 1"));
}
@Test
public void testUpdateWithIdInMultiReturnModifiedDocumentCount() {
collection.insertMany(Arrays.asList(json("_id: 1"), json("_id: 2")));
UpdateResult result = collection.updateMany(json("_id: {$in:[1,2]}"), json("$set:{n:1}"));
assertThat(result.getModifiedCount()).isEqualTo(2);
}
@Test
public void testUpdateWithIdQuery() {
collection.insertMany(Arrays.asList(json("_id: 1"), json("_id: 2")));
collection.updateMany(json("_id: {$gt:1}"), json("$set: {n:1}"));
List<Document> results = toArray(collection.find());
assertThat(results).containsOnly(json("_id: 1"), json("_id: 2, n:1"));
}
@Test
public void testUpdateWithObjectId() {
collection.insertOne(json("_id: {n:1}"));
UpdateResult result = collection.updateOne(json("_id: {n:1}"), json("$set: {a:1}"));
assertThat(result.getModifiedCount()).isEqualTo(1);
assertThat(collection.find().first()).isEqualTo(json("_id: {n:1}, a:1"));
}
@Test
public void testUpdateArrayMatch() throws Exception {
collection.insertOne(json("_id:1, a:[{x:1,y:1}, {x:2,y:2}, {x:3,y:3}]"));
collection.updateOne(json("'a.x': 2"), json("$inc: {'a.$.y': 1}"));
assertThat(collection.find(json("'a.x': 2")).first()).isEqualTo(json("_id:1, a:[{x:1,y:1}, {x:2,y:3}, {x:3,y:3}]"));
collection.insertOne(json("{'array': [{'123a':{'name': 'old'}}]}"));
assertThat(collection.find(json("{'array.123a.name': 'old'}")).first()).isNotNull();
collection.updateOne(json("{'array.123a.name': 'old'}"), json("{$set: {'array.$.123a.name': 'new'}}"));
assertThat(collection.find(json("{'array.123a.name': 'new'}")).first()).isNotNull();
assertThat(collection.find(json("{'array.123a.name': 'old'}")).first()).isNull();
}
@Test
public void testMultiUpdateArrayMatch() throws Exception {
collection.insertOne(json("{}"));
collection.insertOne(json("x:[1,2,3]"));
collection.insertOne(json("x:99"));
collection.updateMany(json("x:2"), json("$inc:{'x.$': 1}"));
assertThat(collection.find(json("x:1")).first().get("x")).isEqualTo(Arrays.asList(1, 3, 3));
}
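// an upsert inserts a new document when the filter matches nothing; the equality
// fields of the filter become part of the inserted document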
@Test
public void testUpsert() {
UpdateResult result = collection.updateMany(json("n:'jon'"), json("$inc:{a:1}"), new UpdateOptions().upsert(true));
assertThat(result.getModifiedCount()).isEqualTo(0);
Document object = collection.find().first();
assertThat(result.getUpsertedId()).isEqualTo(new BsonObjectId(object.getObjectId("_id")));
object.remove("_id");
assertThat(object).isEqualTo(json("n:'jon', a:1"));
result = collection.updateOne(json("_id: 17, n:'jon'"), json("$inc:{a:1}"), new UpdateOptions().upsert(true));
assertThat(result.getUpsertedId()).isNull();
assertThat(collection.find(json("_id:17")).first()).isEqualTo(json("_id: 17, n:'jon', a:1"));
}
@Test
public void testUpsertFieldOrder() throws Exception {
collection.updateOne(json("'x.y': 2"), json("$inc: {a:7}"), new UpdateOptions().upsert(true));
Document obj = collection.find().first();
obj.remove("_id");
// this actually differs from the official MongoDB implementation
assertThat(obj).isEqualTo(json("x:{y:2}, a:7"));
}
@Test
public void testUpsertWithoutId() {
UpdateResult result = collection.updateOne(eq("a", 1), set("a", 2), new UpdateOptions().upsert(true));
assertThat(result.getModifiedCount()).isEqualTo(0);
assertThat(result.getUpsertedId()).isNotNull();
assertThat(collection.find().first().get("_id")).isInstanceOf(ObjectId.class);
assertThat(collection.find().first().get("a")).isEqualTo(2);
}
@Test
public void testUpsertOnIdWithPush() {
Document update1 = json("$push: {c: {a:1, b:2} }");
Document update2 = json("$push: {c: {a:3, b:4} }");
collection.updateOne(json("_id: 1"), update1, new UpdateOptions().upsert(true));
collection.updateOne(json("_id: 1"), update2, new UpdateOptions().upsert(true));
Document expected = json("_id: 1, c: [{a:1, b:2}, {a:3, b:4}]");
assertThat(collection.find(json("'c.a':3, 'c.b':4")).first()).isEqualTo(expected);
}
@Test
public void testUpsertWithConditional() {
Document query = json("_id: 1, b: {$gt: 5}");
Document update = json("$inc: {a: 1}");
UpdateResult updateResult = collection.updateOne(query, update, new UpdateOptions().upsert(true));
assertThat(updateResult.getModifiedCount()).isZero();
assertThat(updateResult.getMatchedCount()).isZero();
assertThat(collection.find().first()).isEqualTo(json("_id: 1, a: 1"));
}
// https://github.com/bwaldvogel/mongo-java-server/issues/29
@Test
public void testUpsertWithoutChange() {
collection.insertOne(json("_id: 1, a: 2, b: 3"));
Document query = json("_id: 1");
Document update = json("$set: {a: 2}");
UpdateResult updateResult = collection.updateOne(query, update, new UpdateOptions().upsert(true));
assertThat(updateResult.getModifiedCount()).isZero();
assertThat(updateResult.getMatchedCount()).isOne();
assertThat(collection.find().first()).isEqualTo(json("_id: 1, a: 2, b: 3"));
}
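// dotted equality fields in the upsert filter are materialized as embedded documents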
@Test
public void testUpsertWithEmbeddedQuery() {
collection.updateOne(json("_id: 1, 'e.i': 1"), json("$set: {a:1}"), new UpdateOptions().upsert(true));
assertThat(collection.find(json("_id: 1")).first()).isEqualTo(json("_id:1, e: {i:1}, a:1"));
}
@Test
public void testUpsertWithIdIn() throws Exception {
Document query = json("_id: {$in: [1]}");
Document update = json("$push: {n: {_id: 2 ,u : 3}}, $inc: {c: 4}");
Document expected = json("_id: 1, n: [{_id: 2 ,u : 3}], c: 4");
UpdateResult updateResult = collection.updateOne(query, update, new UpdateOptions().upsert(true));
assertThat(updateResult.getModifiedCount()).isZero();
assertThat(updateResult.getMatchedCount()).isZero();
// the ID generation actually differs from official MongoDB, which just
// creates a random ObjectId
Document actual = collection.find().first();
assertThat(actual).isEqualTo(expected);
}
@Test
public void testUpdateWithMultiplyOperator() throws Exception {
Document object = json("_id: 1");
collection.insertOne(object);
collection.updateOne(object, json("$mul: {a: 2}, $set: {b: 2}"));
assertThat(collection.find(object).first()).isEqualTo(json("_id: 1, a: 0, b: 2"));
collection.updateOne(object, json("$mul: {b: 2.5}, $inc: {a: 0.5}"));
assertThat(collection.find(object).first()).isEqualTo(json("_id: 1, a: 0.5, b: 5.0"));
}
@Test
public void testUpdateWithIllegalMultiplyFails() throws Exception {
Document object = json("_id: 1, foo: 'x', bar: 1");
collection.insertOne(object);
assertThatExceptionOfType(MongoCommandException.class)
.isThrownBy(() -> collection.updateOne(object, json("$mul: {_id: 2}")))
.withMessageContaining("Command failed with error 10148: 'Mod on _id not allowed'");
assertThatExceptionOfType(MongoCommandException.class)
.isThrownBy(() -> collection.updateOne(object, json("$mul: {foo: 2}")))
.withMessageContaining("Command failed with error -1: 'cannot multiply value 'x''");
assertThatExceptionOfType(MongoCommandException.class)
.isThrownBy(() -> collection.updateOne(object, json("$mul: {bar: 'x'}")))
.withMessageContaining("Command failed with error -1: 'cannot multiply with non-numeric value: {bar=x}'");
}
@Test
public void testIsMaster() throws Exception {
Document isMaster = db.runCommand(new Document("isMaster", Integer.valueOf(1)));
assertThat(isMaster.getBoolean("ismaster")).isTrue();
assertThat(isMaster.getDate("localTime")).isInstanceOf(Date.class);
assertThat(isMaster.getInteger("maxBsonObjectSize")).isGreaterThan(1000);
assertThat(isMaster.getInteger("maxMessageSizeBytes")).isGreaterThan(isMaster.getInteger("maxBsonObjectSize"));
}
// https://github.com/foursquare/fongo/pull/26
// http://stackoverflow.com/questions/12403240/storing-null-vs-not-storing-the-key-at-all-in-mongodb
@Test
public void testFindWithNullOrNoFieldFilter() {
collection.insertOne(json("name: 'jon', group: 'group1'"));
collection.insertOne(json("name: 'leo', group: 'group1'"));
collection.insertOne(json("name: 'neil1', group: 'group2'"));
collection.insertOne(json("name: 'neil2', group: null"));
collection.insertOne(json("name: 'neil3'"));
// check {group: null} vs {group: {$exists: false}} filter
List<Document> objs = toArray(collection.find(json("group: null")));
assertThat(objs).as("should have two neils (neil2, neil3)").hasSize(2);
objs = toArray(collection.find(exists("group", false)));
assertThat(objs).as("should have one neils (neil3)").hasSize(1);
// same check, but for a field that does not exist in the DB
objs = toArray(collection.find(json("other: null")));
assertThat(objs).as("should return all documents").hasSize(5);
objs = toArray(collection.find(exists("other", false)));
assertThat(objs).as("should return all documents").hasSize(5);
}
@Test
public void testInsertsWithUniqueIndex() {
collection.createIndex(new Document("uniqueKeyField", 1), new IndexOptions().unique(true));
collection.insertOne(json("uniqueKeyField: 'abc1', afield: 'avalue'"));
collection.insertOne(json("uniqueKeyField: 'abc2', afield: 'avalue'"));
collection.insertOne(json("uniqueKeyField: 'abc3', afield: 'avalue'"));
assertThatExceptionOfType(MongoWriteException.class)
.isThrownBy(() -> collection.insertOne(json("uniqueKeyField: 'abc2', afield: 'avalue'")))
.withMessage("duplicate key error index: uniqueKeyField_1 dup key: abc2");
}
@Test
public void testInsertBinaryData() throws Exception {
collection.insertOne(new Document("test", new byte[] { 0x01, 0x02, 0x03 }));
}
// see https://github.com/bwaldvogel/mongo-java-server/issues/9
@Test
public void testUniqueIndexWithSubdocument() {
collection.createIndex(new Document("action.actionId", 1), new IndexOptions().unique(true));
collection.insertOne(json("action: 'abc1'"));
collection.insertOne(json("action: { actionId: 1 }"));
collection.insertOne(json("action: { actionId: 2 }"));
collection.insertOne(json("action: { actionId: 3 }"));
assertThatExceptionOfType(MongoWriteException.class)
.isThrownBy(() -> collection.insertOne(json("action: { actionId: 1 }")))
.withMessageContaining("duplicate key error index: action.actionId_1 dup key: 1");
}
@Test
public void testAddNonUniqueIndexOnNonIdField() {
collection.createIndex(new Document("someField", 1), new IndexOptions().unique(false));
collection.insertOne(json("someField: 'abc'"));
collection.insertOne(json("someField: 'abc'"));
}
@Test
public void testCompoundUniqueIndicesNotSupportedAndThrowsException() {
assertThatExceptionOfType(MongoCommandException.class)
.isThrownBy(() -> collection.createIndex(new Document("a", 1).append("b", 1), new IndexOptions().unique(true)))
.withMessageContaining("Command failed with error -1: 'Compound unique indices are not yet implemented'");
}
@Test
public void testCursorOptionNoTimeout() throws Exception {
try (MongoCursor<Document> cursor = collection.find().noCursorTimeout(true).iterator()) {
assertThat(cursor.hasNext()).isFalse();
}
}
@Test
public void testBulkInsert() throws Exception {
List<WriteModel<Document>> inserts = new ArrayList<>();
inserts.add(new InsertOneModel<>(json("_id: 1")));
inserts.add(new InsertOneModel<>(json("_id: 2")));
inserts.add(new InsertOneModel<>(json("_id: 3")));
BulkWriteResult result = collection.bulkWrite(inserts);
assertThat(result.getInsertedCount()).isEqualTo(3);
}
@Test
public void testBulkUpdateOrdered() throws Exception {
testBulkUpdate(true);
}
@Test
public void testBulkUpdateUnordered() throws Exception {
testBulkUpdate(false);
}
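// runs the bulk scenarios: insert plus update, delete, and an update without any match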
private void testBulkUpdate(boolean ordered) {
insertUpdateInBulk(ordered);
removeInBulk(ordered);
insertUpdateInBulkNoMatch(ordered);
}
@Test
public void testUpdateCurrentDateIllegalTypeSpecification() throws Exception {
Document object = json("_id: 1");
collection.insertOne(object);
assertThatExceptionOfType(MongoCommandException.class)
.isThrownBy(() -> collection.updateOne(object, json("$currentDate: {lastModified: null}")))
.withMessageContaining("Command failed with error 2 (BadValue): 'NULL is not a valid type for $currentDate");
assertThatExceptionOfType(MongoCommandException.class)
.isThrownBy(() -> collection.updateOne(object, json("$currentDate: {lastModified: 123.456}")))
.withMessageContaining("Command failed with error 2 (BadValue): 'Double is not a valid type for $currentDate");
assertThatExceptionOfType(MongoCommandException.class)
.isThrownBy(() -> collection.updateOne(object, json("$currentDate: {lastModified: 'foo'}")))
.withMessageContaining("Command failed with error 2 (BadValue): 'String is not a valid type for $currentDate");
assertThatExceptionOfType(MongoCommandException.class)
.isThrownBy(() -> collection.updateOne(object, json("$currentDate: {lastModified: {$type: 'foo'}}")))
.withMessageContaining("Command failed with error 2 (BadValue): 'The '$type' string field is required to be 'date' or 'timestamp'");
assertThat(collection.find(object).first()).isEqualTo(object);
}
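// $currentDate with true or {$type: 'date'} stores a Date, {$type: 'timestamp'} stores a BsonTimestamp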
@Test
public void testUpdateCurrentDate() throws Exception {
Document object = json("_id: 1");
collection.insertOne(object);
collection.updateOne(object, json("$currentDate: {'x.lastModified': true}"));
assertThat(((Document) collection.find(object).first().get("x")).get("lastModified"))
.isInstanceOf(Date.class);
collection.updateOne(object, json("$currentDate: {'x.lastModified': {$type: 'date'}}"));
assertThat(((Document) collection.find(object).first().get("x")).get("lastModified"))
.isInstanceOf(Date.class);
collection.updateOne(object, json("$currentDate: {'x.lastModified': {$type: 'timestamp'}}"));
assertThat(((Document) collection.find(object).first().get("x")).get("lastModified"))
.isInstanceOf(BsonTimestamp.class);
}
@Test
public void testRenameField() throws Exception {
Document object = json("_id: 1, foo: 'x', bar: 'y'");
collection.insertOne(object);
collection.updateOne(json("_id: 1"), json("$rename: {foo: 'foo2', bar: 'bar2'}"));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, foo2: 'x', bar2: 'y'"));
collection.updateOne(json("_id: 1"), json("$rename: {'bar2': 'foo', foo2: 'bar'}"));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, bar: 'x', foo: 'y'"));
}
@Test
public void testRenameFieldIllegalValue() throws Exception {
Document object = json("_id: 1, foo: 'x', bar: 'y'");
collection.insertOne(object);
assertThatExceptionOfType(MongoCommandException.class)
.isThrownBy(() -> collection.updateOne(json("_id: 1"), json("$rename: {foo: 12345}")))
.withMessageContaining("Command failed with error 2 (BadValue): 'The 'to' field for $rename must be a string");
assertThatExceptionOfType(MongoCommandException.class)
.isThrownBy(() -> collection.updateOne(json("_id: 1"), json("$rename: {'_id': 'id'}")))
.withMessageContaining("Command failed with error 10148: 'Mod on _id not allowed");
assertThatExceptionOfType(MongoCommandException.class)
.isThrownBy(() -> collection.updateOne(json("_id: 1"), json("$rename: {foo: '_id'}")))
.withMessageContaining("Command failed with error 10148: 'Mod on _id not allowed");
assertThatExceptionOfType(MongoCommandException.class)
.isThrownBy(() -> collection.updateOne(json("_id: 1"), json("$rename: {foo: 'bar', 'bar': 'bar2'}")))
.withMessageContaining("Command failed with error 16837: 'Cannot update 'bar' and 'bar' at the same time");
assertThatExceptionOfType(MongoCommandException.class)
.isThrownBy(() -> collection.updateOne(json("_id: 1"), json("$rename: {bar: 'foo', bar2: 'foo'}")))
.withMessageContaining("Command failed with error 16837: 'Cannot update 'foo' and 'foo' at the same time");
}
@Test
public void testRenameCollection() throws Exception {
collection.insertOne(json("_id: 1"));
collection.insertOne(json("_id: 2"));
collection.insertOne(json("_id: 3"));
collection.renameCollection(new MongoNamespace(collection.getNamespace().getDatabaseName(), "other-collection-name"));
Collection<String> collectionNames = toArray(db.listCollectionNames());
assertThat(collectionNames).containsOnly("system.indexes", "other-collection-name");
assertThat(getCollection("other-collection-name").countDocuments()).isEqualTo(3);
}
@Test
public void testRenameCollection_targetAlreadyExists() throws Exception {
collection.insertOne(json("_id: 1"));
collection.insertOne(json("_id: 2"));
collection.insertOne(json("_id: 3"));
MongoCollection<Document> otherCollection = db.getCollection("other-collection-name");
otherCollection.insertOne(json("_id: 1"));
assertThatExceptionOfType(MongoCommandException.class)
.isThrownBy(() -> collection.renameCollection(new MongoNamespace(db.getName(), "other-collection-name")))
.withMessageContaining("Command failed with error -1: 'target namespace already exists'");
List<String> collectionNames = toArray(db.listCollectionNames());
assertThat(collectionNames).containsOnly("system.indexes", collection.getNamespace().getCollectionName(),
"other-collection-name");
assertThat(collection.countDocuments()).isEqualTo(3);
assertThat(getCollection("other-collection-name").countDocuments()).isEqualTo(1);
}
@Test
public void testRenameCollection_dropTarget() throws Exception {
collection.insertOne(json("_id: 1"));
collection.insertOne(json("_id: 2"));
collection.insertOne(json("_id: 3"));
MongoCollection<Document> otherCollection = db.getCollection("other-collection-name");
otherCollection.insertOne(json("_id: 1"));
collection.renameCollection(new MongoNamespace(db.getName(), "other-collection-name"),
new RenameCollectionOptions().dropTarget(true));
List<String> collectionNames = toArray(db.listCollectionNames());
assertThat(collectionNames).containsOnly("system.indexes", "other-collection-name");
assertThat(getCollection("other-collection-name").countDocuments()).isEqualTo(3);
}
@Test
public void testListIndexes_empty() throws Exception {
assertThat(collection.listIndexes()).isEmpty();
}
@Test
public void testListIndexes() throws Exception {
collection.insertOne(json("_id: 1"));
db.getCollection("other").insertOne(json("_id: 1"));
collection.createIndex(json("bla: 1"));
List<Document> indexInfo = toArray(collection.listIndexes());
assertThat(indexInfo).containsOnly( //
json("name:'_id_', ns:'testdb.testcoll', key:{_id:1}"), //
json("name:'_id_', ns:'testdb.other', key:{_id:1}"), //
json("name:'bla_1', ns:'testdb.testcoll', key:{bla:1}"));
}
@Test
public void testFieldSelection_deselectId() {
collection.insertOne(json("_id: 1, order:1, visits: 2"));
Document document = collection.find(json("{}")).projection(json("_id: 0")).first();
assertThat(document).isEqualTo(json("order:1, visits:2"));
}
@Test
public void testFieldSelection_deselectOneField() {
Document obj = json("_id: 1, order:1, visits: 2, eid: 12345");
collection.insertOne(obj);
Document document = collection.find(new Document()).projection(json("visits: 0")).first();
assertThat(document).isEqualTo(json("_id:1, order:1, eid: 12345"));
}
@Test
public void testFieldSelection_deselectTwoFields() {
Document obj = json("_id: 1, order:1, visits: 2, eid: 12345");
collection.insertOne(obj);
Document document = collection.find(new Document()).projection(json("visits: 0, eid: 0")).first();
assertThat(document).isEqualTo(json("_id:1, order:1"));
}
@Test
public void testFieldSelection_selectAndDeselectFields() {
Document obj = json("_id: 1, order:1, visits: 2, eid: 12345");
collection.insertOne(obj);
Document document = collection.find(new Document()).projection(json("visits: 0, eid: 1")).first();
assertThat(document).isEqualTo(json("_id:1, eid: 12345"));
}
@Test
public void testPullWithInPattern() {
collection.insertOne(json("_id: 1, tags: ['aa', 'bb', 'ab', 'cc']"));
collection.updateOne(json("_id: 1"), pullByFilter(in("tags", Pattern.compile("a+"))));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, tags: ['bb', 'cc']"));
}
@Test
public void testPullWithInPatternAnchored() {
collection.insertOne(json("_id: 1, tags: ['aa', 'bb', 'ab', 'cc']"));
collection.updateOne(json("_id: 1"), pullByFilter(in("tags", Pattern.compile("^a+$"))));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, tags: ['bb', 'ab', 'cc']"));
}
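// $pull with $in matches numeric array values by value, independently of their concrete type (int, long or double)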
@Test
public void testPullWithInNumbers() {
collection.insertOne(json("_id: 1, values: [1, 2, 2.5, 3.0, 4]"));
collection.updateOne(json("_id: 1"), pullByFilter(in("values", Arrays.asList(2.0, 3, 4L))));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, values: [1, 2.5]"));
}
@Test
public void testDocumentWithHashMap() {
Map<String, String> value = new HashMap<>();
value.put("foo", "bar");
collection.insertOne(new Document("_id", 1).append("map", value));
Document document = collection.find().first();
assertThat(document).isEqualTo(json("{_id: 1, map: {foo: 'bar'}}"));
}
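// combines $or clauses under an implicit $and to select published documents whose
// startDate/expiration window contains the given point in time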
@Test
public void testFindAndOfOrs() throws Exception {
DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
collection.insertOne(new Document("_id", 1).append("published", true).append("startDate", dateFormat.parse("2015-03-01 13:20:05")));
collection.insertOne(new Document("_id", 2).append("published", true).append("expiration", dateFormat.parse("2020-12-31 18:00:00")));
collection.insertOne(new Document("_id", 3).append("published", true));
collection.insertOne(new Document("_id", 4).append("published", false));
collection.insertOne(new Document("_id", 5).append("published", true).append("startDate", dateFormat.parse("2017-01-01 00:00:00")));
collection.insertOne(new Document("_id", 6).append("published", true).append("expiration", dateFormat.parse("2016-01-01 00:00:00")));
Date now = dateFormat.parse("2016-01-01 00:00:00");
Bson query = and(
ne("published", false),
or(exists("startDate", false), lt("startDate", now)),
or(exists("expiration", false), gt("expiration", now))
);
List<Document> documents = toArray(collection.find(query).projection(json("_id: 1")));
assertThat(documents).containsOnly(json("_id: 1"), json("_id: 2"), json("_id: 3"));
}
@Test
public void testInOperatorWithNullValue() {
collection.insertMany(Arrays.asList(
json("_id: 1, a: 1"),
json("_id: 2, a: 2"),
json("_id: 3, a: 3"),
json("_id: 4, a: 4"),
json("_id: 5"))
);
Bson inQueryWithNull = in("a", 2, null, 3);
List<Document> results = toArray(collection.find(inQueryWithNull).projection(json("_id: 1")));
assertThat(results).containsExactly(
json("_id: 2"),
json("_id: 3"),
json("_id: 5")
);
}
@Test
public void testQueryWithReference() throws Exception {
collection.insertOne(json("_id: 1"));
String collectionName = collection.getNamespace().getCollectionName();
collection.insertOne(new Document("_id", 2).append("ref", new DBRef(collectionName, 1)));
collection.insertOne(new Document("_id", 3).append("ref", new DBRef(collectionName, 2)));
Document doc = collection.find(new Document("ref", new DBRef(collectionName, 1))).projection(json("_id: 1")).first();
assertThat(doc).isEqualTo(json("_id: 2"));
}
@Test
public void testQueryWithIllegalReference() throws Exception {
collection.insertOne(json("_id: 1"));
String collectionName = collection.getNamespace().getCollectionName();
collection.insertOne(new Document("_id", 2).append("ref", new DBRef(collectionName, 1)));
collection.insertOne(new Document("_id", 3).append("ref", new DBRef(collectionName, 2)));
assertThatExceptionOfType(MongoQueryException.class)
.isThrownBy(() -> collection.find(json("ref: {$ref: 'coll'}")).first())
.withMessageContaining("Query failed with error code 10068 and error message 'invalid operator: $ref'");
}
@Test
public void testAndOrNorWithEmptyArray() throws Exception {
collection.insertOne(json("{}"));
assertThatExceptionOfType(MongoQueryException.class)
.isThrownBy(() -> collection.find(and()).first())
.withMessageContaining("Query failed with error code 14816 and error message '$and expression must be a nonempty array'");
assertThatExceptionOfType(MongoQueryException.class)
.isThrownBy(() -> collection.find(nor()).first())
.withMessageContaining("Query failed with error code 14816 and error message '$nor expression must be a nonempty array'");
assertThatExceptionOfType(MongoQueryException.class)
.isThrownBy(() -> collection.find(or()).first())
.withMessageContaining("Query failed with error code 14816 and error message '$or expression must be a nonempty array'");
}
@Test
public void testInsertLargeDocument() throws Exception {
insertAndFindLargeDocument(100, 1);
insertAndFindLargeDocument(1000, 2);
insertAndFindLargeDocument(10000, 3);
}
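// inserts and updates documents through the async client, limiting the number of
// in-flight operations with a semaphore, and verifies the results with the sync client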
@Test
public void testInsertAndUpdateAsynchronously() throws Exception {
int numDocuments = 1000;
final CountDownLatch latch = new CountDownLatch(numDocuments);
final Queue<RuntimeException> errors = new LinkedBlockingQueue<>();
final Semaphore concurrentOperationsOnTheFly = new Semaphore(50); // prevent MongoWaitQueueFullException
for (int i = 1; i <= numDocuments; i++) {
final Document document = new Document("_id", i);
for (int j = 0; j < 10; j++) {
document.append("key-" + i + "-" + j, "value-" + i + "-" + j);
}
concurrentOperationsOnTheFly.acquire();
asyncCollection.insertOne(document, new SingleResultCallback<Void>() {
@Override
public void onResult(Void result, Throwable t) {
checkError("insert", t);
log.info("inserted {}", document);
final Document query = new Document("_id", document.getInteger("_id"));
asyncCollection.updateOne(query, Updates.set("updated", true), new SingleResultCallback<UpdateResult>() {
@Override
public void onResult(UpdateResult result, Throwable t) {
checkError("update", t);
log.info("updated {}: {}", query, result);
release();
}
});
}
private void checkError(String operation, Throwable t) {
if (t != null) {
log.error(operation + " of {} failed", document, t);
RuntimeException exception = new RuntimeException("Failed to " + operation + " " + document, t);
errors.add(exception);
release();
throw exception;
}
}
private void release() {
latch.countDown();
concurrentOperationsOnTheFly.release();
}
});
}
boolean success = latch.await(30, TimeUnit.SECONDS);
assertThat(success).isTrue();
if (!errors.isEmpty()) {
throw errors.poll();
}
log.info("finished");
for (int i = 1; i <= numDocuments; i++) {
Document query = new Document("_id", i);
Document document = collection.find(query).first();
assertThat(document).describedAs(query.toJson()).isNotNull();
assertThat(document.getBoolean("updated")).describedAs(document.toJson()).isTrue();
}
long count = collection.countDocuments();
assertThat(count).isEqualTo(numDocuments);
}
@Test
public void testAllQuery() throws Exception {
// see https://docs.mongodb.com/manual/reference/operator/query/all/
collection.insertOne(new Document("_id", new ObjectId("5234cc89687ea597eabee675"))
.append("code", "xyz")
.append("tags", Arrays.asList("school", "book", "bag", "headphone", "appliance"))
.append("qty", Arrays.asList(
new Document().append("size", "S").append("num", 10).append("color", "blue"),
new Document().append("size", "M").append("num", 45).append("color", "blue"),
new Document().append("size", "L").append("num", 100).append("color", "green")
)));
collection.insertOne(new Document("_id", new ObjectId("5234cc8a687ea597eabee676"))
.append("code", "abc")
.append("tags", Arrays.asList("appliance", "school", "book"))
.append("qty", Arrays.asList(
new Document().append("size", "6").append("num", 100).append("color", "green"),
new Document().append("size", "6").append("num", 50).append("color", "blue"),
new Document().append("size", "8").append("num", 100).append("color", "brown")
)));
collection.insertOne(new Document("_id", new ObjectId("5234ccb7687ea597eabee677"))
.append("code", "efg")
.append("tags", Arrays.asList("school", "book"))
.append("qty", Arrays.asList(
new Document().append("size", "S").append("num", 10).append("color", "blue"),
new Document().append("size", "M").append("num", 100).append("color", "blue"),
new Document().append("size", "L").append("num", 100).append("color", "green")
)));
collection.insertOne(new Document("_id", new ObjectId("52350353b2eff1353b349de9"))
.append("code", "ijk")
.append("tags", Arrays.asList("electronics", "school"))
.append("qty", Collections.singletonList(
new Document().append("size", "M").append("num", 100).append("color", "green")
)));
List<Document> documents = toArray(collection.find(json("{ tags: { $all: [ \"appliance\", \"school\", \"book\" ] } }")));
assertThat(documents).hasSize(2);
assertThat(documents.get(0).get("_id")).isEqualTo(new ObjectId("5234cc89687ea597eabee675"));
assertThat(documents.get(1).get("_id")).isEqualTo(new ObjectId("5234cc8a687ea597eabee676"));
}
@Test
public void testMatchesElementQuery() throws Exception {
collection.insertOne(json("_id: 1, results: [ 82, 85, 88 ]"));
collection.insertOne(json("_id: 2, results: [ 75, 88, 89 ]"));
List<Document> results = toArray(collection.find(json("results: { $elemMatch: { $gte: 80, $lt: 85 } }")));
assertThat(results).hasSize(1);
assertThat(results.get(0)).isEqualTo(json("\"_id\" : 1, \"results\" : [ 82, 85, 88 ]"));
}
@Test
public void testIllegalElementMatchQuery() throws Exception {
collection.insertOne(json("_id: 1, results: [ 82, 85, 88 ]"));
assertThatExceptionOfType(MongoQueryException.class)
.isThrownBy(() -> collection.find(json("results: { $elemMatch: [ 85 ] }")).first())
.withMessageContaining("Query failed with error code 2 and error message '$elemMatch needs an Object'");
assertThatExceptionOfType(MongoQueryException.class)
.isThrownBy(() -> collection.find(json("results: { $elemMatch: 1 }")).first())
.withMessageContaining("Query failed with error code 2 and error message '$elemMatch needs an Object'");
}
@Test
public void testQueryWithComment() throws Exception {
collection.insertOne(json("_id: 1, x: 2"));
collection.insertOne(json("_id: 2, x: 3"));
collection.insertOne(json("_id: 3, x: 4"));
List<Document> documents = toArray(collection.find(json("x: { $mod: [ 2, 0 ] }, $comment: \"Find even values.\"")));
assertThat(documents).hasSize(2);
assertThat(documents.get(0).get("_id")).isEqualTo(1);
assertThat(documents.get(1).get("_id")).isEqualTo(3);
}
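// inserts a document with the given number of key/value pairs and verifies that it can be read back completely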
private void insertAndFindLargeDocument(int numKeyValues, int id) {
Document document = new Document("_id", id);
for (int i = 0; i < numKeyValues; i++) {
document.put("key-" + i, "value-" + i);
}
collection.insertOne(document);
Document persistentDocument = collection.find(new Document("_id", id)).first();
assertThat(persistentDocument.keySet()).hasSize(numKeyValues + 1);
}
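// bulk-inserts three documents and updates all of them in the same bulk operation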
private void insertUpdateInBulk(boolean ordered) {
List<WriteModel<Document>> ops = new ArrayList<>();
ops.add(new InsertOneModel<>(json("_id: 1, field: 'x'")));
ops.add(new InsertOneModel<>(json("_id: 2, field: 'x'")));
ops.add(new InsertOneModel<>(json("_id: 3, field: 'x'")));
ops.add(new UpdateManyModel<Document>(json("field: 'x'"), set("field", "y")));
BulkWriteResult result = collection.bulkWrite(ops, new BulkWriteOptions().ordered(ordered));
assertThat(result.getInsertedCount()).isEqualTo(3);
assertThat(result.getDeletedCount()).isEqualTo(0);
assertThat(result.getModifiedCount()).isEqualTo(3);
assertThat(result.getMatchedCount()).isEqualTo(3);
long totalDocuments = collection.countDocuments();
assertThat(totalDocuments).isEqualTo(3);
long documentsWithY = collection.countDocuments(json("field: 'y'"));
assertThat(documentsWithY).isEqualTo(3);
}
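// a bulk update whose filter matches nothing must report zero matched and modified documents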
private void insertUpdateInBulkNoMatch(boolean ordered) {
collection.insertOne(json("foo: 'bar'"));
List<WriteModel<Document>> ops = new ArrayList<>();
ops.add(new UpdateOneModel<Document>(ne("foo", "bar"), set("field", "y")));
BulkWriteResult result = collection.bulkWrite(ops, new BulkWriteOptions().ordered(ordered));
assertThat(result.getInsertedCount()).isEqualTo(0);
assertThat(result.getDeletedCount()).isEqualTo(0);
assertThat(result.getModifiedCount()).isEqualTo(0);
assertThat(result.getMatchedCount()).isEqualTo(0);
}
private void removeInBulk(boolean ordered) {
DeleteManyModel<Document> deleteOp = new DeleteManyModel<>(json("field: 'y'"));
BulkWriteResult result = collection.bulkWrite(Collections.singletonList(deleteOp),
new BulkWriteOptions().ordered(ordered));
assertThat(result.getDeletedCount()).isEqualTo(3);
assertThat(collection.countDocuments()).isZero();
}
}
|
test-common/src/main/java/de/bwaldvogel/mongo/backend/AbstractBackendTest.java
|
package de.bwaldvogel.mongo.backend;
import static com.mongodb.client.model.Filters.and;
import static com.mongodb.client.model.Filters.eq;
import static com.mongodb.client.model.Filters.exists;
import static com.mongodb.client.model.Filters.gt;
import static com.mongodb.client.model.Filters.in;
import static com.mongodb.client.model.Filters.lt;
import static com.mongodb.client.model.Filters.ne;
import static com.mongodb.client.model.Filters.nor;
import static com.mongodb.client.model.Filters.or;
import static com.mongodb.client.model.Updates.addEachToSet;
import static com.mongodb.client.model.Updates.pull;
import static com.mongodb.client.model.Updates.pullByFilter;
import static com.mongodb.client.model.Updates.set;
import static de.bwaldvogel.mongo.backend.TestUtils.getCollectionStatistics;
import static de.bwaldvogel.mongo.backend.TestUtils.json;
import static de.bwaldvogel.mongo.backend.TestUtils.toArray;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.net.InetSocketAddress;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Queue;
import java.util.Random;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern;
import org.bson.BsonObjectId;
import org.bson.BsonTimestamp;
import org.bson.Document;
import org.bson.conversions.Bson;
import org.bson.types.ObjectId;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.mongodb.DBRef;
import com.mongodb.MongoCommandException;
import com.mongodb.MongoException;
import com.mongodb.MongoNamespace;
import com.mongodb.MongoQueryException;
import com.mongodb.MongoWriteException;
import com.mongodb.ServerAddress;
import com.mongodb.WriteConcern;
import com.mongodb.async.SingleResultCallback;
import com.mongodb.async.client.MongoClients;
import com.mongodb.bulk.BulkWriteResult;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoCursor;
import com.mongodb.client.MongoDatabase;
import com.mongodb.client.model.BulkWriteOptions;
import com.mongodb.client.model.CountOptions;
import com.mongodb.client.model.CreateCollectionOptions;
import com.mongodb.client.model.DeleteManyModel;
import com.mongodb.client.model.EstimatedDocumentCountOptions;
import com.mongodb.client.model.FindOneAndUpdateOptions;
import com.mongodb.client.model.IndexOptions;
import com.mongodb.client.model.InsertOneModel;
import com.mongodb.client.model.RenameCollectionOptions;
import com.mongodb.client.model.ReplaceOptions;
import com.mongodb.client.model.ReturnDocument;
import com.mongodb.client.model.UpdateManyModel;
import com.mongodb.client.model.UpdateOneModel;
import com.mongodb.client.model.UpdateOptions;
import com.mongodb.client.model.Updates;
import com.mongodb.client.model.WriteModel;
import com.mongodb.client.result.DeleteResult;
import com.mongodb.client.result.UpdateResult;
import de.bwaldvogel.mongo.MongoBackend;
import de.bwaldvogel.mongo.MongoServer;
public abstract class AbstractBackendTest {
private static final Logger log = LoggerFactory.getLogger(AbstractBackendTest.class);
protected static final String TEST_DATABASE_NAME = "testdb";
protected static final String OTHER_TEST_DATABASE_NAME = "bar";
private MongoServer mongoServer;
protected com.mongodb.MongoClient syncClient;
private com.mongodb.async.client.MongoClient asyncClient;
protected MongoDatabase db;
protected MongoCollection<Document> collection;
private com.mongodb.async.client.MongoCollection<Document> asyncCollection;
private Document runCommand(String commandName) {
return runCommand(new Document(commandName, Integer.valueOf(1)));
}
private Document runCommand(Document command) {
return getAdminDb().runCommand(command);
}
protected MongoCollection<Document> getCollection(String collectionName) {
return db.getCollection(collectionName);
}
protected MongoDatabase getAdminDb() {
return syncClient.getDatabase("admin");
}
protected abstract MongoBackend createBackend() throws Exception;
@Before
public void setUp() throws Exception {
spinUpServer();
}
@After
public void tearDown() {
shutdownServer();
}
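// starts the MongoServer with the backend under test on an ephemeral local port and
// connects both a synchronous and an asynchronous client to it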
protected void spinUpServer() throws Exception {
MongoBackend backend = createBackend();
mongoServer = new MongoServer(backend);
InetSocketAddress serverAddress = mongoServer.bind();
syncClient = new com.mongodb.MongoClient(new ServerAddress(serverAddress));
asyncClient = MongoClients.create("mongodb://" + serverAddress.getHostName() + ":" + serverAddress.getPort());
db = syncClient.getDatabase(TEST_DATABASE_NAME);
collection = db.getCollection("testcoll");
MongoNamespace namespace = collection.getNamespace();
com.mongodb.async.client.MongoDatabase asyncDb = asyncClient.getDatabase(namespace.getDatabaseName());
asyncCollection = asyncDb.getCollection(namespace.getCollectionName());
}
protected void shutdownServer() {
syncClient.close();
asyncClient.close();
mongoServer.shutdownNow();
}
@Test
public void testSimpleInsert() throws Exception {
collection.insertOne(json("_id: 1"));
}
@Test
public void testSimpleInsertDelete() throws Exception {
collection.insertOne(json("_id: 1"));
collection.deleteOne(json("_id: 1"));
}
@Test
public void testCreateCollection() throws Exception {
String newCollectionName = "some-collection";
assertThat(toArray(db.listCollectionNames())).doesNotContain(newCollectionName);
db.createCollection(newCollectionName, new CreateCollectionOptions());
assertThat(toArray(db.listCollectionNames())).contains(newCollectionName);
}
@Test
public void testCreateCollectionAlreadyExists() throws Exception {
db.createCollection("some-collection", new CreateCollectionOptions());
try {
db.createCollection("some-collection", new CreateCollectionOptions());
fail("MongoCommandException expected");
} catch (MongoCommandException e) {
assertThat(e.getCode()).isEqualTo(48);
assertThat(e.getMessage()).contains("collection already exists");
}
}
@Test
public void testUnsupportedModifier() throws Exception {
collection.insertOne(json("{}"));
try {
collection.updateOne(json("{}"), json("$foo: {}"));
fail("MongoException expected");
} catch (MongoException e) {
assertThat(e.getCode()).isEqualTo(10147);
assertThat(e.getMessage()).contains("Invalid modifier specified: $foo");
}
}
@Test
public void testUpsertWithInc() {
Document query = json("_id:{ f: 'ca', '1': { l: 2 }, t: { t: 11 } }");
Document update = json("'$inc': { 'n.!' : 1 , 'n.a.b:false' : 1}");
collection.updateOne(query, update, new UpdateOptions().upsert(true));
query.putAll(json("n: {'!': 1, a: {'b:false': 1}}"));
assertThat(collection.find().first()).isEqualTo(query);
}
@Test
public void testBasicUpdate() {
collection.insertOne(json("_id:1"));
collection.insertOne(json("_id:2, b:5"));
collection.insertOne(json("_id:3"));
collection.insertOne(json("_id:4"));
collection.replaceOne(json("_id:2"), json("_id:2, a:5"));
assertThat(collection.find(json("_id:2")).first()).isEqualTo(json("_id:2, a:5"));
}
@Test
public void testCollectionStats() throws Exception {
try {
getCollStats();
fail("MongoCommandException expected");
} catch (MongoCommandException e) {
assertThat(e.getCode()).isEqualTo(26);
assertThat(e.getMessage()).contains("No such collection");
}
collection.insertOne(json("{}"));
collection.insertOne(json("abc: 'foo'"));
Document stats = getCollStats();
assertThat(stats.getInteger("ok")).isEqualTo(1);
assertThat(stats.getInteger("count").longValue()).isEqualTo(2);
assertThat(stats.getLong("size").longValue()).isEqualTo(57);
assertThat(stats.getDouble("avgObjSize").doubleValue()).isEqualTo(28.5);
}
private Document getCollStats() {
String collectionName = collection.getNamespace().getCollectionName();
return getCollectionStatistics(db, collectionName);
}
@Test
public void testGetLogStartupWarnings() throws Exception {
Document startupWarnings = getAdminDb().runCommand(json("getLog: 'startupWarnings'"));
assertThat(startupWarnings.getInteger("ok")).isEqualTo(1);
assertThat(startupWarnings.get("totalLinesWritten")).isEqualTo(0);
assertThat(startupWarnings.get("log")).isEqualTo(Collections.emptyList());
}
@Test
public void testGetLogWhichDoesNotExist() throws Exception {
try {
getAdminDb().runCommand(json("getLog: 'illegal'"));
fail("MongoCommandException expected");
} catch (MongoCommandException e) {
assertThat(e.getMessage()).contains("no RamLog");
}
}
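// upserts documents whose _id is a compound document and queries them with a range condition on _id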
@Test
public void testCompoundDateIdUpserts() {
Document query = json("{ _id : { $lt : { n: 'a' , t: 10} , $gte: { n: 'a', t: 1}}}");
List<Document> toUpsert = Arrays.asList(
json("_id: {n:'a', t: 1}"),
json("_id: {n:'a', t: 2}"),
json("_id: {n:'a', t: 3}"),
json("_id: {n:'a', t: 11}"));
for (Document dbo : toUpsert) {
collection.replaceOne(dbo, new Document(dbo).append("foo", "bar"), new ReplaceOptions().upsert(true));
}
List<Document> results = toArray(collection.find(query));
assertThat(results).containsOnly(
json("_id: {n:'a', t:1}, foo:'bar'"), //
json("_id: {n:'a', t:2}, foo:'bar'"), //
json("_id: {n:'a', t:3}, foo:'bar'"));
}
@Test
public void testCompoundSort() {
collection.insertOne(json("a:1, _id:1"));
collection.insertOne(json("a:2, _id:5"));
collection.insertOne(json("a:1, _id:2"));
collection.insertOne(json("a:2, _id:4"));
collection.insertOne(json("a:1, _id:3"));
List<Document> documents = toArray(collection.find().sort(json("a:1, _id:-1")));
assertThat(documents).containsExactly(json("a:1, _id:3"), json("a:1, _id:2"), json("a:1, _id:1"),
json("a:2, _id:5"), json("a:2, _id:4"));
}
@Test
@SuppressWarnings("deprecation")
public void testCountCommand() {
assertThat(collection.count()).isZero();
}
@Test
@SuppressWarnings("deprecation")
public void testCountCommandWithQuery() {
collection.insertOne(json("n:1"));
collection.insertOne(json("n:2"));
collection.insertOne(json("n:2"));
assertThat(collection.count(json("n:2"))).isEqualTo(2);
}
@Test
@SuppressWarnings("deprecation")
public void testCountCommandWithSkipAndLimit() {
collection.insertOne(json("x: 1"));
collection.insertOne(json("x: 1"));
collection.insertOne(json("x: 2"));
collection.insertOne(json("x: 1"));
collection.insertOne(json("x: 2"));
collection.insertOne(json("x: 1"));
assertThat(collection.count(json("x: 1"), new CountOptions().skip(4).limit(2))).isEqualTo(0);
assertThat(collection.count(json("x: 1"), new CountOptions().limit(3))).isEqualTo(3);
assertThat(collection.count(json("x: 1"), new CountOptions().limit(10))).isEqualTo(4);
assertThat(collection.count(json("x: 1"), new CountOptions().skip(1))).isEqualTo(3);
}
@Test
public void testCountDocuments() throws Exception {
assertThat(collection.countDocuments()).isZero();
}
@Test
public void testCountDocumentsWithQuery() {
collection.insertOne(json("n:1"));
collection.insertOne(json("n:2"));
collection.insertOne(json("n:2"));
assertThat(collection.countDocuments(json("n:2"))).isEqualTo(2);
}
@Test
public void testEstimatedDocumentCount() throws Exception {
assertThat(collection.estimatedDocumentCount()).isEqualTo(0);
collection.insertOne(json("n:1"));
collection.insertOne(json("n:2"));
collection.insertOne(json("n:2"));
assertThat(collection.estimatedDocumentCount()).isEqualTo(3);
assertThat(collection.estimatedDocumentCount(new EstimatedDocumentCountOptions().maxTime(1, TimeUnit.SECONDS))).isEqualTo(3);
}
@Test
public void testAggregateWithEmptyPipeline() throws Exception {
assertThat(toArray(collection.aggregate(Collections.emptyList()))).isEmpty();
collection.insertOne(json("_id:1"));
collection.insertOne(json("_id:2"));
assertThat(toArray(collection.aggregate(Collections.emptyList())))
.containsExactly(json("_id:1"), json("_id:2"));
}
@Test
public void testCreateIndexes() {
collection.createIndex(new Document("n", 1));
collection.createIndex(new Document("b", 1));
List<Document> indexes = toArray(getCollection("system.indexes").find());
assertThat(indexes).containsOnly(
json("key:{_id:1}").append("ns", collection.getNamespace().getFullName()).append("name", "_id_"),
json("key:{n:1}").append("ns", collection.getNamespace().getFullName()).append("name", "n_1"),
json("key:{b:1}").append("ns", collection.getNamespace().getFullName()).append("name", "b_1"));
}
@Test
public void testCurrentOperations() throws Exception {
Document currentOperations = getAdminDb().getCollection("$cmd.sys.inprog").find().first();
assertThat(currentOperations).isNotNull();
assertThat(currentOperations.get("inprog")).isInstanceOf(List.class);
}
@Test
public void testListCollectionsEmpty() throws Exception {
Document result = db.runCommand(json("listCollections: 1"));
assertThat(result.getInteger("ok")).isEqualTo(1);
Document cursor = (Document) result.get("cursor");
assertThat(cursor.keySet()).containsOnly("id", "ns", "firstBatch");
assertThat(cursor.get("id")).isEqualTo(Long.valueOf(0));
assertThat(cursor.get("ns")).isEqualTo(db.getName() + ".$cmd.listCollections");
List<?> firstBatch = (List<?>) cursor.get("firstBatch");
assertThat(firstBatch).isEmpty();
}
@Test
public void testListCollections() throws Exception {
List<String> collections = Arrays.asList("coll1", "coll2", "coll3");
for (String collection : collections) {
getCollection(collection).insertOne(json("_id: 1"));
}
Document result = db.runCommand(json("listCollections: 1"));
assertThat(result.getInteger("ok")).isEqualTo(1);
Document cursor = (Document) result.get("cursor");
assertThat(cursor.keySet()).containsOnly("id", "ns", "firstBatch");
assertThat(cursor.get("id")).isEqualTo(Long.valueOf(0));
assertThat(cursor.get("ns")).isEqualTo(db.getName() + ".$cmd.listCollections");
assertThat(cursor.get("firstBatch")).isInstanceOf(List.class);
@SuppressWarnings("unchecked")
List<Document> firstBatch = (List<Document>) cursor.get("firstBatch");
Set<String> expectedCollections = new HashSet<>();
expectedCollections.addAll(collections);
expectedCollections.add("system.indexes");
assertThat(firstBatch).hasSize(expectedCollections.size());
Set<String> collectionNames = new HashSet<>();
for (Document collection : firstBatch) {
assertThat(collection.keySet()).containsOnly("name", "options");
assertThat(collection.get("options")).isEqualTo(json("{}"));
assertThat(collection.get("name")).isInstanceOf(String.class);
collectionNames.add((String) collection.get("name"));
}
assertThat(collectionNames).isEqualTo(expectedCollections);
}
@Test
public void testGetCollectionNames() throws Exception {
getCollection("foo").insertOne(json("{}"));
getCollection("bar").insertOne(json("{}"));
List<String> collectionNames = toArray(db.listCollectionNames());
assertThat(collectionNames).containsOnly("system.indexes", "foo", "bar");
}
@Test
public void testSystemIndexes() throws Exception {
getCollection("foo").insertOne(json("{}"));
getCollection("bar").insertOne(json("{}"));
MongoCollection<Document> systemIndexes = db.getCollection("system.indexes");
assertThat(toArray(systemIndexes.find())).containsOnly(json("name: '_id_', ns: 'testdb.foo', key: {_id: 1}"),
json("name: '_id_', ns: 'testdb.bar', key: {_id: 1}"));
}
@Test
public void testSystemNamespaces() throws Exception {
getCollection("foo").insertOne(json("{}"));
getCollection("bar").insertOne(json("{}"));
MongoCollection<Document> namespaces = db.getCollection("system.namespaces");
assertThat(toArray(namespaces.find())).containsOnly(json("name: 'testdb.system.indexes'"),
json("name: 'testdb.foo'"), json("name: 'testdb.bar'"));
}
@Test
public void testDatabaseStats() throws Exception {
Document stats = db.runCommand(new Document("dbStats", 1).append("scale", 1));
assertThat(stats.getInteger("ok")).isEqualTo(1);
assertThat(stats.getLong("objects")).isZero();
assertThat(stats.getInteger("collections")).isZero();
assertThat(stats.getInteger("indexes")).isZero();
assertThat(stats.getLong("dataSize")).isZero();
getCollection("foo").insertOne(json("{}"));
getCollection("foo").insertOne(json("{}"));
getCollection("bar").insertOne(json("{}"));
stats = db.runCommand(new Document("dbStats", 1).append("scale", 1));
assertThat(stats.getInteger("ok")).isEqualTo(1);
assertThat(stats.getLong("objects")).isEqualTo(8);
assertThat(stats.getInteger("collections")).isEqualTo(3);
assertThat(stats.getInteger("indexes")).isEqualTo(2);
assertThat(stats.getLong("dataSize")).isEqualTo(271);
}
@Test
public void testDeleteDecrementsCount() {
collection.insertOne(json("key: 'value'"));
assertThat(collection.countDocuments()).isEqualTo(1);
collection.deleteOne(json("{}"));
assertThat(collection.countDocuments()).isZero();
}
@Test
public void testDeleteInSystemNamespace() throws Exception {
try {
getCollection("system.foobar").deleteOne(json("{}"));
fail("MongoException expected");
} catch (MongoException e) {
assertThat(e.getCode()).isEqualTo(12050);
assertThat(e.getMessage()).contains("cannot delete from system namespace");
}
try {
getCollection("system.namespaces").deleteOne(json("{}"));
fail("MongoException expected");
} catch (MongoException e) {
assertThat(e.getCode()).isEqualTo(12050);
assertThat(e.getMessage()).contains("cannot delete from system namespace");
}
}
@Test
public void testUpdateInSystemNamespace() throws Exception {
for (String collectionName : Arrays.asList("system.foobar", "system.namespaces")) {
MongoCollection<Document> collection = getCollection(collectionName);
try {
collection.updateMany(eq("some", "value"), set("field", "value"));
fail("MongoException expected");
} catch (MongoException e) {
assertThat(e.getCode()).isEqualTo(10156);
assertThat(e.getMessage()).contains("cannot update system collection");
}
}
}
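// distinct should return each value only once and may be restricted by an additional query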
@Test
public void testDistinctQuery() {
collection.insertOne(new Document("n", 3));
collection.insertOne(new Document("n", 1));
collection.insertOne(new Document("n", 2));
collection.insertOne(new Document("n", 1));
collection.insertOne(new Document("n", 1));
assertThat(toArray(collection.distinct("n", Integer.class))).containsExactly(1, 2, 3);
assertThat(toArray(collection.distinct("n", json("n: {$gt: 1}"), Integer.class))).containsExactly(2, 3);
assertThat(collection.distinct("foobar", String.class)).isEmpty();
assertThat(collection.distinct("_id", ObjectId.class)).hasSize((int) collection.countDocuments());
}
@Test
public void testDropCollection() throws Exception {
collection.insertOne(json("{}"));
assertThat(toArray(db.listCollectionNames())).contains(collection.getNamespace().getCollectionName());
collection.drop();
assertThat(toArray(db.listCollectionNames())).doesNotContain(collection.getNamespace().getCollectionName());
}
@Test
public void testDropCollectionAlsoDropsFromDB() throws Exception {
collection.insertOne(json("{}"));
collection.drop();
assertThat(collection.countDocuments()).isZero();
assertThat(toArray(db.listCollectionNames())).doesNotContain(collection.getNamespace().getCollectionName());
}
@Test
public void testDropDatabaseAlsoDropsCollectionData() throws Exception {
collection.insertOne(json("{}"));
db.drop();
assertThat(collection.countDocuments()).isZero();
}
@Test
public void testDropDatabaseDropsAllData() throws Exception {
collection.insertOne(json("{}"));
MongoCollection<Document> collection2 = getCollection("testcoll2");
collection2.insertOne(json("{}"));
syncClient.dropDatabase(db.getName());
assertThat(listDatabaseNames()).doesNotContain(db.getName());
assertThat(collection.countDocuments()).isZero();
assertThat(toArray(db.listCollectionNames())).doesNotContain(collection.getNamespace().getCollectionName(),
collection2.getNamespace().getCollectionName());
}
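// sort by an embedded key ('counts.done'); documents missing that key are expected to sort last in descending order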
@Test
public void testEmbeddedSort() {
collection.insertOne(json("_id: 1"));
collection.insertOne(json("_id: 2"));
collection.insertOne(json("_id: 3"));
collection.insertOne(json("_id: 4, counts:{done:1}"));
collection.insertOne(json("_id: 5, counts:{done:2}"));
List<Document> objs = toArray(collection.find(ne("c", true)).sort(json("\"counts.done\": -1, _id: 1")));
assertThat(objs).containsExactly(
json("_id: 5, counts:{done:2}"),
json("_id: 4, counts:{done:1}"),
json("_id: 1"),
json("_id: 2"),
json("_id: 3"));
}
@Test
public void testFindAndModifyCommandEmpty() throws Exception {
Document cmd = new Document("findandmodify", collection.getNamespace().getCollectionName());
try {
db.runCommand(cmd);
fail("MongoCommandException expected");
} catch (MongoCommandException e) {
assertThat(e.getMessage()).contains("need remove or update");
}
}
@Test
public void testFindAndModifyCommandIllegalOp() throws Exception {
collection.insertOne(json("_id: 1"));
Document cmd = new Document("findAndModify", collection.getNamespace().getCollectionName());
cmd.put("query", json("_id: 1"));
cmd.put("update", new Document("$inc", json("_id: 1")));
assertThat(collection.find().first()).isEqualTo(json("_id: 1"));
try {
db.runCommand(cmd);
fail("MongoCommandException expected");
} catch (MongoCommandException e) {
assertThat(e.getCode()).isEqualTo(10148);
assertThat(e.getMessage()).contains("Mod on _id not allowed");
}
}
@Test
public void testFindAndModifyCommandUpdate() throws Exception {
collection.insertOne(json("_id: 1"));
Document cmd = new Document("findAndModify", collection.getNamespace().getCollectionName());
cmd.put("query", json("_id: 1"));
cmd.put("update", json("$inc: {a: 1}"));
Document result = db.runCommand(cmd);
assertThat(result.get("lastErrorObject")).isEqualTo(json("updatedExisting: true, n: 1"));
assertThat(result.getInteger("ok")).isEqualTo(1);
assertThat(collection.find().first()).isEqualTo(json("_id: 1, a: 1"));
}
@Test
public void testFindOneAndUpdateError() throws Exception {
collection.insertOne(json("_id: 1, a: 1"));
try {
collection.findOneAndUpdate(json("_id: 1"), json("$inc: {_id: 1}"));
fail("MongoException expected");
} catch (MongoException e) {
assertThat(e.getCode()).isEqualTo(10148);
assertThat(e.getMessage()).contains("Mod on _id not allowed");
}
}
@Test
public void testFindOneAndUpdateFields() throws Exception {
collection.insertOne(json("_id: 1, a: 1"));
Document result = collection.findOneAndUpdate(json("_id: 1"), json("$inc: {a:1}"),
new FindOneAndUpdateOptions().returnDocument(ReturnDocument.AFTER));
assertThat(result).isEqualTo(json("_id: 1, a: 2"));
}
@Test
public void testFindOneAndUpdateNotFound() throws Exception {
collection.insertOne(json("_id: 1, a: 1"));
Document result = collection.findOneAndUpdate(json("_id: 2"), new Document("$inc", json("a: 1")));
assertThat(result).isNull();
assertThat(collection.countDocuments()).isEqualTo(1);
}
@Test
public void testFindOneAndUpdateRemove() {
collection.insertOne(json("_id: 1, a: 1"));
Document result = collection.findOneAndDelete(json("_id: 1"));
assertThat(result).isEqualTo(json("_id: 1, a: 1"));
assertThat(collection.countDocuments()).isZero();
}
// https://github.com/foursquare/fongo/issues/32
@Test
public void testFindOneAndUpdateReturnNew() {
collection.insertOne(json("_id: 1, a: 1, b: {c: 1}"));
Document query = json("_id: 1");
Document update = json("$inc: {a: 1, 'b.c': 1}");
Document result = collection.findOneAndUpdate(query, update,
new FindOneAndUpdateOptions().returnDocument(ReturnDocument.AFTER));
assertThat(result).isEqualTo(json("_id: 1, a: 2, b: {c: 2}"));
}
@Test
public void testFindOneAndUpdateMax() {
collection.insertOne(json("_id: 1, a: 1, b: {c: 1}"));
Document query = json("_id: 1");
Document update = json("$max: {a: 2, 'b.c': 2, d : 'd'}");
Document result = collection.findOneAndUpdate(query, update,
new FindOneAndUpdateOptions().returnDocument(ReturnDocument.AFTER));
assertThat(result).isEqualTo(json("_id: 1, a: 2, b: {c: 2}, d : 'd'"));
}
@Test
public void testFindOneAndUpdateMin() {
collection.insertOne(json("_id: 1, a: 2, b: {c: 1}"));
Document query = json("_id: 1");
Document update = json("$min: {a: 1, 'b.c': 2, d : 'd'}");
Document result = collection.findOneAndUpdate(query, update,
new FindOneAndUpdateOptions().returnDocument(ReturnDocument.AFTER));
assertThat(result).isEqualTo(json("_id: 1, a: 1, b: {c: 1}, d : 'd'"));
}
// https://github.com/foursquare/fongo/issues/32
@Test
public void testFindOneAndUpdateReturnOld() {
collection.insertOne(json("_id: 1, a: 1, b: {c: 1}"));
Document query = json("_id: 1");
Document update = json("$inc: {a: 1, 'b.c': 1}");
Document result = collection.findOneAndUpdate(query, update,
new FindOneAndUpdateOptions().returnDocument(ReturnDocument.BEFORE));
assertThat(result).isEqualTo(json("_id: 1, a: 1, b: {c: 1}"));
assertThat(collection.find(query).first()).isEqualTo(json("_id: 1, a: 2, b: {c: 2}"));
}
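// the sort option determines which of the matching documents gets modified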
@Test
public void testFindOneAndUpdateSorted() throws Exception {
collection.insertOne(json("_id: 1, a:15"));
collection.insertOne(json("_id: 2, a:10"));
collection.insertOne(json("_id: 3, a:20"));
Document order = json("a:1");
Document result = collection.findOneAndUpdate(json("{}"), json("$inc: {a: 1}"),
new FindOneAndUpdateOptions().sort(order).returnDocument(ReturnDocument.AFTER));
assertThat(result).isEqualTo(json("_id: 2, a: 11"));
order = json("a: -1");
result = collection.findOneAndUpdate(json("{}"), json("$inc: {a: 1}"),
new FindOneAndUpdateOptions().sort(order).returnDocument(ReturnDocument.AFTER));
assertThat(result).isEqualTo(json("_id: 3, a: 21"));
}
@Test
public void testFindOneAndUpdateUpsert() {
Document result = collection.findOneAndUpdate(json("_id: 1"), json("$inc: {a:1}"),
new FindOneAndUpdateOptions().upsert(true).returnDocument(ReturnDocument.AFTER));
assertThat(result).isEqualTo(json("_id: 1, a: 1"));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, a: 1"));
}
@Test
public void testFindOneAndUpdateUpsertReturnBefore() {
Document result = collection.findOneAndUpdate(json("_id: 1"), json("$inc: {a:1}"),
new FindOneAndUpdateOptions().upsert(true).returnDocument(ReturnDocument.BEFORE));
assertThat(result).isEqualTo(json("{}"));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, a: 1"));
}
@Test
public void testFindAndRemoveFromEmbeddedList() {
collection.insertOne(json("_id: 1, a: [1]"));
Document result = collection.findOneAndDelete(json("_id: 1"));
assertThat(result).isEqualTo(json("_id: 1, a: [1]"));
assertThat(collection.countDocuments()).isZero();
}
@Test
public void testFindOne() {
collection.insertOne(json("key: 'value'"));
collection.insertOne(json("key: 'value'"));
Document result = collection.find().first();
assertThat(result).isNotNull();
assertThat(result.get("_id")).isNotNull();
}
@Test
public void testFindOneById() {
collection.insertOne(json("_id: 1"));
Document result = collection.find(json("_id: 1")).first();
assertThat(result).isEqualTo(json("_id: 1"));
assertThat(collection.find(json("_id: 2")).first()).isNull();
}
@Test
public void testFindOneIn() {
collection.insertOne(json("_id: 1"));
Document result = collection.find(json("_id: {$in: [1,2]}")).first();
assertThat(result).isEqualTo(json("_id: 1"));
}
@Test
public void testFindWithLimit() {
collection.insertOne(json("_id: 1"));
collection.insertOne(json("_id: 2"));
collection.insertOne(json("_id: 3"));
collection.insertOne(json("_id: 4"));
List<Document> actual = toArray(collection.find().sort(json("_id: 1")).limit(2));
assertThat(actual).containsExactly(json("_id: 1"), json("_id: 2"));
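// a negative limit is expected to return the same documents as the corresponding positive limit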
List<Document> actualNegativeLimit = toArray(collection.find().sort(json("_id: 1")).limit(-2));
assertThat(actualNegativeLimit).isEqualTo(actual);
}
@Test
public void testFindInReverseNaturalOrder() {
collection.insertOne(json("_id: 1"));
collection.insertOne(json("_id: 2"));
List<Document> actual = toArray(collection.find().sort(json("$natural: -1")));
assertThat(actual).containsOnly(json("_id: 1"), json("_id: 2"));
}
@Test
public void testFindWithPattern() {
collection.insertOne(json("_id: 'marta'"));
collection.insertOne(json("_id: 'john', foo: 'bar'"));
collection.insertOne(json("_id: 'jon', foo: 'ba'"));
collection.insertOne(json("_id: 'jo'"));
assertThat(toArray(collection.find(new Document("_id", Pattern.compile("mart")))))
.containsOnly(json("_id: 'marta'"));
assertThat(toArray(collection.find(new Document("foo", Pattern.compile("ba")))))
.containsOnly(json("_id: 'john', foo: 'bar'"), json("_id: 'jon', foo: 'ba'"));
assertThat(toArray(collection.find(new Document("foo", Pattern.compile("ba$")))))
.containsOnly(json("_id: 'jon', foo: 'ba'"));
}
@Test
public void testFindWithQuery() {
collection.insertOne(json("name: 'jon'"));
collection.insertOne(json("name: 'leo'"));
collection.insertOne(json("name: 'neil'"));
collection.insertOne(json("name: 'neil'"));
assertThat(toArray(collection.find(json("name: 'neil'")))).hasSize(2);
}
@Test
public void testFindWithSkipLimit() {
collection.insertOne(json("_id: 1"));
collection.insertOne(json("_id: 2"));
collection.insertOne(json("_id: 3"));
collection.insertOne(json("_id: 4"));
assertThat(toArray(collection.find().sort(json("_id: 1")).limit(2).skip(2)))
.containsExactly(json("_id: 3"), json("_id: 4"));
}
@Test
public void testFindWithSkipLimitInReverseOrder() {
collection.insertOne(json("_id: 1"));
collection.insertOne(json("_id: 2"));
collection.insertOne(json("_id: 3"));
collection.insertOne(json("_id: 4"));
assertThat(toArray(collection.find().sort(json("_id: -1")).limit(2).skip(2)))
.containsExactly(json("_id: 2"), json("_id: 1"));
}
@Test
public void testFindWithSkipLimitAfterDelete() {
collection.insertOne(json("_id: 1"));
collection.insertOne(json("_id: 2"));
collection.insertOne(json("_id: 3"));
collection.insertOne(json("_id: 4"));
collection.insertOne(json("_id: 5"));
collection.deleteOne(json("_id: 1"));
collection.deleteOne(json("_id: 3"));
assertThat(toArray(collection.find().sort(json("_id: 1")).limit(2).skip(2)))
.containsExactly(json("_id: 5"));
}
@Test
public void testFullUpdateWithSameId() throws Exception {
collection.insertOne(json("_id: 1"));
collection.insertOne(json("_id: 2, b: 5"));
collection.insertOne(json("_id: 3"));
collection.insertOne(json("_id: 4"));
collection.replaceOne(json("_id: 2, b:5"), json("_id: 2, a:5"));
assertThat(collection.find(json("_id: 2")).first()).isEqualTo(json("_id: 2, a:5"));
}
@Test
public void testGetCollection() {
MongoCollection<Document> collection = getCollection("coll");
getCollection("coll").insertOne(json("{}"));
assertThat(collection).isNotNull();
assertThat(toArray(db.listCollectionNames())).contains("coll");
}
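// null is a legal _id value: it must be queryable, deletable and still subject to the unique _id constraint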
@Test
public void testNullId() throws Exception {
collection.insertOne(json("{_id: null, name: 'test'}"));
Document result = collection.find(json("name: 'test'")).first();
assertThat(result).isNotNull();
assertThat(result.getObjectId(Constants.ID_FIELD)).isNull();
try {
collection.insertOne(json("_id: null"));
fail("MongoWriteException expected");
} catch (MongoWriteException e) {
assertThat(e.getMessage()).contains("duplicate key error");
}
assertThat(collection.countDocuments()).isEqualTo(1);
assertThat(collection.find(json("_id: null")).first()).isEqualTo(json("{_id: null, name: 'test'}"));
collection.deleteOne(json("_id: null"));
assertThat(collection.countDocuments()).isZero();
}
@Test
public void testIdInQueryResultsInIndexOrder() {
collection.insertOne(json("_id: 4"));
collection.insertOne(json("_id: 3"));
collection.insertOne(json("_id: 1"));
collection.insertOne(json("_id: 2"));
List<Document> docs = toArray(collection.find(json("_id: {$in: [3,2,1]}")));
assertThat(docs).containsExactlyInAnyOrder(json("_id: 1"), json("_id: 2"), json("_id: 3"));
}
@Test
public void testIdNotAllowedToBeUpdated() {
collection.insertOne(json("_id: 1"));
try {
collection.replaceOne(json("_id: 1"), json("_id:2, a:4"));
fail("MongoException expected");
} catch (MongoException e) {
assertThat(e.getMessage()).contains("cannot change _id of a document old: 1, new: 2");
}
// test with $set
try {
collection.updateOne(json("_id: 1"), new Document("$set", json("_id: 2")));
fail("should throw exception");
} catch (MongoException e) {
assertThat(e.getMessage()).contains("Mod on _id not allowed");
}
}
@Test
public void testIllegalCommand() throws Exception {
try {
db.runCommand(json("foo: 1"));
fail("MongoCommandException expected");
} catch (MongoCommandException e) {
assertThat(e.getMessage()).contains("no such cmd: foo");
}
}
@Test
public void testInsert() throws Exception {
assertThat(collection.countDocuments()).isEqualTo(0);
for (int i = 0; i < 3; i++) {
collection.insertOne(new Document("_id", Integer.valueOf(i)));
}
assertThat(collection.countDocuments()).isEqualTo(3);
collection.insertOne(json("foo: [1,2,3]"));
collection.insertOne(new Document("foo", new byte[10]));
Document insertedObject = new Document("foo", UUID.randomUUID());
collection.insertOne(insertedObject);
Document document = collection.find(insertedObject).first();
assertThat(document).isEqualTo(insertedObject);
}
@Test
public void testInsertDuplicate() throws Exception {
assertThat(collection.countDocuments()).isEqualTo(0);
collection.insertOne(json("_id: 1"));
assertThat(collection.countDocuments()).isEqualTo(1);
try {
collection.insertOne(json("_id: 1"));
fail("MongoException expected");
} catch (MongoException e) {
assertThat(e.getMessage()).contains("duplicate key error");
}
assertThat(collection.countDocuments()).isEqualTo(1);
}
@Test(expected = MongoException.class)
public void testInsertDuplicateThrows() {
collection.insertOne(json("_id: 1"));
collection.insertOne(json("_id: 1"));
}
@Test(expected = MongoException.class)
public void testInsertDuplicateWithConcernThrows() {
collection.insertOne(json("_id: 1"));
collection.withWriteConcern(WriteConcern.ACKNOWLEDGED).insertOne(json("_id: 1"));
}
@Test
public void testInsertIncrementsCount() {
assertThat(collection.countDocuments()).isZero();
collection.insertOne(json("key: 'value'"));
assertThat(collection.countDocuments()).isEqualTo(1);
}
@Test
public void testInsertQuery() throws Exception {
assertThat(collection.countDocuments()).isEqualTo(0);
Document insertedObject = json("_id: 1");
insertedObject.put("foo", "bar");
collection.insertOne(insertedObject);
assertThat(collection.find(insertedObject).first()).isEqualTo(insertedObject);
assertThat(collection.find(new Document("_id", Long.valueOf(1))).first()).isEqualTo(insertedObject);
assertThat(collection.find(new Document("_id", Double.valueOf(1.0))).first()).isEqualTo(insertedObject);
assertThat(collection.find(new Document("_id", Float.valueOf(1.0001f))).first()).isNull();
assertThat(collection.find(json("foo: 'bar'")).first()).isEqualTo(insertedObject);
}
@Test
public void testInsertRemove() throws Exception {
for (int i = 0; i < 10; i++) {
collection.insertOne(json("_id: 1"));
assertThat(collection.countDocuments()).isEqualTo(1);
collection.deleteOne(json("_id: 1"));
assertThat(collection.countDocuments()).isZero();
collection.insertOne(new Document("_id", i));
collection.deleteOne(new Document("_id", i));
}
assertThat(collection.countDocuments()).isZero();
collection.deleteOne(json("'doesnt exist': 1"));
assertThat(collection.countDocuments()).isZero();
}
@Test
public void testInsertInSystemNamespace() throws Exception {
try {
getCollection("system.foobar").insertOne(json("{}"));
fail("MongoException expected");
} catch (MongoException e) {
assertThat(e.getCode()).isEqualTo(16459);
assertThat(e.getMessage()).contains("attempt to insert in system namespace");
}
try {
getCollection("system.namespaces").insertOne(json("{}"));
fail("MongoException expected");
} catch (MongoException e) {
assertThat(e.getCode()).isEqualTo(16459);
assertThat(e.getMessage()).contains("attempt to insert in system namespace");
}
}
@Test
public void testListDatabaseNames() throws Exception {
assertThat(listDatabaseNames()).isEmpty();
collection.insertOne(json("{}"));
assertThat(listDatabaseNames()).containsExactly(db.getName());
getDatabase().getCollection("some-collection").insertOne(json("{}"));
assertThat(listDatabaseNames()).containsExactly("bar", db.getName());
}
private MongoDatabase getDatabase() {
return syncClient.getDatabase(OTHER_TEST_DATABASE_NAME);
}
private List<String> listDatabaseNames() {
List<String> databaseNames = new ArrayList<>();
for (String databaseName : syncClient.listDatabaseNames()) {
databaseNames.add(databaseName);
}
return databaseNames;
}
@Test
public void testMaxBsonSize() throws Exception {
int maxBsonObjectSize = syncClient.getMaxBsonObjectSize();
assertThat(maxBsonObjectSize).isEqualTo(16777216);
}
@Test
public void testQuery() throws Exception {
Document obj = collection.find(json("_id: 1")).first();
assertThat(obj).isNull();
assertThat(collection.countDocuments()).isEqualTo(0);
}
@Test
public void testQueryAll() throws Exception {
List<Object> inserted = new ArrayList<>();
for (int i = 0; i < 10; i++) {
Document obj = new Document("_id", i);
collection.insertOne(obj);
inserted.add(obj);
}
assertThat(collection.countDocuments()).isEqualTo(10);
assertThat(toArray(collection.find().sort(json("_id: 1")))).isEqualTo(inserted);
}
@Test
public void testQueryCount() throws Exception {
for (int i = 0; i < 100; i++) {
collection.insertOne(json("{}"));
}
assertThat(collection.countDocuments()).isEqualTo(100);
Document obj = json("_id: 1");
assertThat(collection.countDocuments(obj)).isEqualTo(0);
collection.insertOne(obj);
assertThat(collection.countDocuments(obj)).isEqualTo(1);
}
@Test
public void testQueryLimitEmptyQuery() throws Exception {
for (int i = 0; i < 5; i++) {
collection.insertOne(json("{}"));
}
assertThat(collection.countDocuments(json("{}"), new CountOptions().limit(1))).isEqualTo(1);
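// a negative limit is expected to leave the count unrestricted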
assertThat(collection.countDocuments(json("{}"), new CountOptions().limit(-1))).isEqualTo(5);
assertThat(collection.countDocuments(json("{}"))).isEqualTo(5);
}
@Test
public void testQueryLimitSimpleQuery() throws Exception {
for (int i = 0; i < 5; i++) {
collection.insertOne(json("a:1"));
}
assertThat(collection.countDocuments(json("a:1"), new CountOptions().limit(1))).isEqualTo(1);
assertThat(collection.countDocuments(json("a:1"), new CountOptions().limit(-1))).isEqualTo(5);
assertThat(collection.countDocuments(json("a:1"))).isEqualTo(5);
}
@Test
public void testQueryNull() throws Exception {
Document object = json("_id: 1");
collection.insertOne(object);
assertThat(collection.find(json("foo: null")).first()).isEqualTo(object);
}
@Test
public void testQuerySkipLimitEmptyQuery() throws Exception {
assertThat(collection.countDocuments(json("{}"), new CountOptions().skip(3))).isEqualTo(0);
for (int i = 0; i < 10; i++) {
collection.insertOne(json("{}"));
}
assertThat(collection.countDocuments(json("{}"), new CountOptions().skip(3))).isEqualTo(7);
assertThat(collection.countDocuments(json("{}"), new CountOptions().skip(15))).isEqualTo(0);
assertThat(collection.countDocuments(json("{}"), new CountOptions().skip(3).limit(5))).isEqualTo(5);
}
@Test
public void testQuerySkipLimitSimpleQuery() throws Exception {
assertThat(collection.countDocuments(json("a:1"), new CountOptions().skip(3))).isEqualTo(0);
for (int i = 0; i < 10; i++) {
collection.insertOne(json("a:1"));
}
assertThat(collection.countDocuments(json("a:1"), new CountOptions().skip(3))).isEqualTo(7);
assertThat(collection.countDocuments(json("a:1"), new CountOptions().skip(3).limit(5))).isEqualTo(5);
assertThat(collection.countDocuments(json("a:1"), new CountOptions().skip(15).limit(5))).isEqualTo(0);
}
@Test
public void testQuerySort() throws Exception {
Random random = new Random(4711);
for (int i = 0; i < 10; i++) {
collection.insertOne(new Document("_id", Double.valueOf(random.nextDouble())));
}
List<Document> objects = toArray(collection.find().sort(json("_id: 1")));
double before = Double.MIN_VALUE;
for (Document obj : objects) {
double value = obj.getDouble("_id").doubleValue();
assertThat(value).isGreaterThanOrEqualTo(before);
before = value;
}
// reverse sort
objects = toArray(collection.find().sort(json("_id: -1")));
before = Double.MAX_VALUE;
for (Document obj : objects) {
double value = obj.getDouble("_id").doubleValue();
assertThat(value).isLessThanOrEqualTo(before);
before = value;
}
}
@Test
public void testQueryWithFieldSelector() throws Exception {
collection.insertOne(json("foo: 'bar'"));
Document obj = collection.find(json("{}")).projection(json("foo: 1")).first();
assertThat(obj.keySet()).containsOnly("_id", "foo");
obj = collection.find(json("foo:'bar'")).projection(json("_id: 1")).first();
assertThat(obj.keySet()).containsOnly("_id");
obj = collection.find(json("foo: 'bar'")).projection(json("_id: 0, foo:1")).first();
assertThat(obj.keySet()).containsOnly("foo");
}
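// projections in dot notation select individual fields of embedded documents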
@Test
public void testQueryWithDotNotationFieldSelector() throws Exception {
collection.insertOne(json("_id: 123, index: false, foo: { a: 'a1', b: 0}"));
Document obj = collection.find(json("{}")).projection(json("'foo.a': 1, 'foo.b': 1")).first();
assertThat(obj).isEqualTo(json("_id: 123, foo: {a: 'a1', b: 0}"));
obj = collection.find(json("{}")).projection(json("'foo.a': 1")).first();
assertThat(obj).isEqualTo(json("_id: 123, foo: {a: 'a1'}"));
obj = collection.find(json("{}")).projection(json("'foo.a': 1, index: 1, _id: 0")).first();
assertThat(obj).isEqualTo(json("foo: {a: 'a1'}, index: false"));
obj = collection.find(json("{}")).projection(json("foo: 1, _id: 0")).first();
assertThat(obj).isEqualTo(json("foo: {a: 'a1', b: 0}"));
obj = collection.find(json("{}")).projection(json("'foo.a.b.c.d': 1")).first();
assertThat(obj).isEqualTo(json("_id: 123, foo: {}"));
}
@Test
public void testQuerySystemNamespace() throws Exception {
assertThat(getCollection("system.foobar").find().first()).isNull();
assertThat(db.listCollectionNames()).isEmpty();
collection.insertOne(json("{}"));
Document expectedObj = new Document("name", collection.getNamespace().getFullName());
Document coll = getCollection("system.namespaces").find(expectedObj).first();
assertThat(coll).isEqualTo(expectedObj);
}
@Test
public void testQueryAllExpression() throws Exception {
collection.insertOne(json(" _id : [ { x : 1 } , { x : 2 } ]"));
collection.insertOne(json(" _id : [ { x : 2 } , { x : 3 } ]"));
assertThat(collection.countDocuments(json("'_id.x':{$all:[1,2]}"))).isEqualTo(1);
assertThat(collection.countDocuments(json("'_id.x':{$all:[2,3]}"))).isEqualTo(1);
}
@Test
public void testQueryWithSubdocumentIndex() throws Exception {
collection.createIndex(json("action:{actionId:1}"), new IndexOptions().unique(true));
collection.insertOne(json("action: { actionId: 1 }, value: 'a'"));
collection.insertOne(json("action: { actionId: 2 }, value: 'b'"));
collection.insertOne(json("action: { actionId: 3 }, value: 'c'"));
Document foundWithNestedDocument = collection.find(json("action: { actionId: 2 }")).first();
assertThat(foundWithNestedDocument.get("value")).isEqualTo("b");
Document foundWithDotNotation = collection.find(json("'action.actionId': 2")).first();
assertThat(foundWithDotNotation.get("value")).isEqualTo("b");
}
// see https://github.com/bwaldvogel/mongo-java-server/issues/12
@Test
public void testQueryBinaryData() throws Exception {
byte[] firstBytes = new byte[] { 0x01, 0x02, 0x03 };
byte[] secondBytes = new byte[] { 0x03, 0x02, 0x01 };
collection.insertOne(new Document("_id", 1).append("test", firstBytes));
collection.insertOne(new Document("_id", 2).append("test", secondBytes));
Document first = collection.find(new Document("test", firstBytes)).first();
assertThat(first).isNotNull();
assertThat(first.get("_id")).isEqualTo(1);
Document second = collection.find(new Document("test", secondBytes)).first();
assertThat(second).isNotNull();
assertThat(second.get("_id")).isEqualTo(2);
}
@Test
public void testRemove() {
collection.insertOne(json("_id: 1"));
collection.insertOne(json("_id: 2"));
collection.insertOne(json("_id: 3"));
collection.insertOne(json("_id: 4"));
collection.deleteOne(json("_id: 2"));
assertThat(collection.find(json("_id: 2")).first()).isNull();
assertThat(collection.countDocuments()).isEqualTo(3);
collection.deleteMany(json("_id: {$gte: 3}"));
assertThat(collection.countDocuments()).isEqualTo(1);
assertThat(collection.find().first()).isEqualTo(json("_id: 1"));
}
@Test
public void testRemoveSingle() throws Exception {
Document obj = new Document("_id", ObjectId.get());
collection.insertOne(obj);
collection.deleteOne(obj);
}
@Test
public void testRemoveReturnsModifiedDocumentCount() {
collection.insertOne(json("{}"));
collection.insertOne(json("{}"));
DeleteResult result = collection.deleteMany(json("{}"));
assertThat(result.getDeletedCount()).isEqualTo(2);
result = collection.deleteMany(json("{}"));
assertThat(result.getDeletedCount()).isEqualTo(0);
}
@Test
public void testReservedCollectionNames() throws Exception {
try {
getCollection("foo$bar").insertOne(json("{}"));
fail("MongoException expected");
} catch (MongoException e) {
assertThat(e.getMessage()).contains("cannot insert into reserved $ collection");
}
String veryLongString = "verylongstring";
for (int i = 0; i < 5; i++) {
veryLongString += veryLongString;
}
try {
getCollection(veryLongString).insertOne(json("{}"));
fail("MongoException expected");
} catch (MongoException e) {
assertThat(e.getMessage()).contains("name too long");
}
}
@Test
public void testServerStatus() throws Exception {
Date before = new Date();
Document serverStatus = runCommand("serverStatus");
assertThat(serverStatus.getInteger("ok")).isEqualTo(1);
assertThat(serverStatus.get("uptime")).isInstanceOf(Number.class);
assertThat(serverStatus.get("uptimeMillis")).isInstanceOf(Long.class);
Date serverTime = (Date) serverStatus.get("localTime");
assertThat(serverTime).isNotNull();
assertThat(serverTime.after(new Date())).isFalse();
assertThat(before.after(serverTime)).isFalse();
Document connections = (Document) serverStatus.get("connections");
assertThat(connections.get("current")).isNotNull();
}
@Test
public void testPing() throws Exception {
Document response = runCommand("ping");
assertThat(response.getInteger("ok")).isEqualTo(1);
}
@Test
public void testPingTrue() throws Exception {
Document command = new Document("ping", Boolean.TRUE);
Document response = runCommand(command);
assertThat(response.getInteger("ok")).isEqualTo(1);
}
@Test
public void testReplSetGetStatus() throws Exception {
try {
runCommand("replSetGetStatus");
fail("MongoCommandException expected");
} catch (MongoCommandException e) {
assertThat(e.getErrorMessage()).contains("not running with --replSet");
}
}
@Test
public void testWhatsMyUri() throws Exception {
for (String dbName : new String[] { "admin", "local", "test" }) {
Document result = syncClient.getDatabase(dbName).runCommand(new Document("whatsmyuri", 1));
assertThat(result.get("you")).isNotNull();
assertThat(result.get("you").toString()).startsWith("127.0.0.1:");
}
}
@Test
public void testSort() {
collection.insertOne(json("a:1, _id:1"));
collection.insertOne(json("a:2, _id:2"));
collection.insertOne(json("_id: 5"));
collection.insertOne(json("a:3, _id:3"));
collection.insertOne(json("a:4, _id:4"));
List<Document> objs = toArray(collection.find().sort(json("a: -1")));
assertThat(objs).containsExactly(
json("a:4, _id:4"),
json("a:3, _id:3"),
json("a:2, _id:2"),
json("a:1, _id:1"),
json("_id: 5")
);
}
@Test
public void testSortByEmbeddedKey() {
collection.insertOne(json("_id: 1, a: { b:1 }"));
collection.insertOne(json("_id: 2, a: { b:2 }"));
collection.insertOne(json("_id: 3, a: { b:3 }"));
List<Document> results = toArray(collection.find().sort(json("'a.b': -1")));
assertThat(results).containsExactly(
json("_id: 3, a: { b:3 }"),
json("_id: 2, a: { b:2 }"),
json("_id: 1, a: { b:1 }")
);
}
@Test
public void testUpdate() throws Exception {
Document object = json("_id: 1");
Document newObject = json("{_id: 1, foo: 'bar'}");
collection.insertOne(object);
UpdateResult result = collection.replaceOne(object, newObject);
assertThat(result.getModifiedCount()).isEqualTo(1);
assertThat(result.getUpsertedId()).isNull();
assertThat(collection.find(object).first()).isEqualTo(newObject);
}
@Test
public void testUpdateNothing() throws Exception {
Document object = json("_id: 1");
UpdateResult result = collection.replaceOne(object, object);
assertThat(result.getModifiedCount()).isEqualTo(0);
assertThat(result.getMatchedCount()).isEqualTo(0);
assertThat(result.getUpsertedId()).isNull();
}
@Test
public void testUpdateBlank() throws Exception {
Document document = json("'': 1, _id: 2, a: 3, b: 4");
collection.insertOne(document);
collection.updateOne(json("{}"), json("$set: {c:5}"));
assertThat(collection.find().first()).isEqualTo(json("'': 1, _id: 2, a: 3, b: 4, c:5"));
}
@Test
public void testUpdateEmptyPositional() throws Exception {
collection.insertOne(json("{}"));
try {
collection.updateOne(json("{}"), json("$set:{'a.$.b': 1}"));
fail("MongoException expected");
} catch (MongoException e) {
assertThat(e.getCode()).isEqualTo(16650);
assertThat(e.getMessage()).contains("Cannot apply the positional operator without a corresponding query field containing an array.");
}
}
@Test
public void testUpdateMultiplePositional() throws Exception {
collection.insertOne(json("{a: {b: {c: 1}}}"));
try {
collection.updateOne(json("{'a.b.c':1}"), json("$set:{'a.$.b.$.c': 1}"));
fail("MongoException expected");
} catch (MongoException e) {
assertThat(e.getCode()).isEqualTo(16650);
assertThat(e.getMessage()).contains("Cannot apply the positional operator without a corresponding query field containing an array.");
}
}
@Test
public void testUpdateIllegalFieldName() throws Exception {
// Disallow $ in field names - SERVER-3730
collection.insertOne(json("{x:1}"));
collection.updateOne(json("{x:1}"), json("$set: {y:1}")); // ok
try {
collection.updateOne(json("{x:1}"), json("$set: {$z:1}"));
fail("MongoException expected");
} catch (MongoException e) {
assertThat(e.getCode()).isEqualTo(15896);
assertThat(e.getMessage()).contains("Modified field name may not start with $");
}
// $unset is allowed here, so it can be used to remove bad field names
collection.updateOne(json("{x:1}"), json("$unset: {$z:1}"));
try {
collection.updateOne(json("{x:1}"), json("$inc: {$z:1}"));
fail("MongoException expected");
} catch (MongoException e) {
assertThat(e.getCode()).isEqualTo(15896);
assertThat(e.getMessage()).contains("Modified field name may not start with $");
}
try {
collection.updateOne(json("{x:1}"), json("$pushAll: {$z:[1,2,3]}"));
fail("MongoException expected");
} catch (MongoException e) {
assertThat(e.getCode()).isEqualTo(15896);
assertThat(e.getMessage()).contains("Modified field name may not start with $");
}
}
@Test
public void testUpdateSubdocument() throws Exception {
try {
collection.updateOne(json("{}"), json("'a.b.c': 123"));
fail("IllegalArgumentException expected");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage()).contains("Invalid BSON field name a.b.c");
}
}
@Test
public void testUpdateIdNoChange() {
collection.insertOne(json("_id: 1"));
collection.replaceOne(json("_id: 1"), json("_id: 1, a: 5"));
assertThat(collection.find(json("_id: 1")).first()).isEqualTo(json("_id: 1, a: 5"));
collection.updateOne(json("_id: 1"), json("$set: {_id: 1, b: 3}"));
assertThat(collection.find(json("_id: 1")).first()).isEqualTo(json("_id: 1, a: 5, b: 3"));
// setting _id to its unchanged value via $set is allowed
collection.updateOne(json("_id: 1"), json("$set: {_id: 1, a: 7}"));
assertThat(collection.find(json("_id: 1")).first()).isEqualTo(json("_id: 1, a: 7, b: 3"));
}
@Test
public void testUpdatePush() throws Exception {
Document idObj = json("_id: 1");
collection.insertOne(idObj);
collection.updateOne(idObj, json("$push: {'field.subfield.subsubfield': 'value'}"));
Document expected = json("_id: 1, field:{subfield:{subsubfield: ['value']}}");
assertThat(collection.find(idObj).first()).isEqualTo(expected);
// push to non-array
collection.updateOne(idObj, json("$set: {field: 'value'}"));
try {
collection.updateOne(idObj, json("$push: {field: 'value'}"));
fail("MongoException expected");
} catch (MongoException e) {
assertThat(e.getCode()).isEqualTo(10141);
assertThat(e.getMessage()).contains("Cannot apply $push modifier to non-array");
}
// push with multiple fields
Document pushObj = json("$push: {field1: 'value', field2: 'value2'}");
collection.updateOne(idObj, pushObj);
expected = json("_id: 1, field: 'value', field1: ['value'], field2: ['value2']");
assertThat(collection.find(idObj).first()).isEqualTo(expected);
// push duplicate
pushObj = json("$push: {field1: 'value'}");
collection.updateOne(idObj, pushObj);
expected.put("field1", Arrays.asList("value", "value"));
assertThat(collection.find(idObj).first()).isEqualTo(expected);
}
@Test
public void testUpdatePushAll() throws Exception {
Document idObj = json("_id: 1");
collection.insertOne(idObj);
try {
collection.updateOne(idObj, json("$pushAll: {field: 'value'}"));
fail("MongoException expected");
} catch (MongoException e) {
assertThat(e.getCode()).isEqualTo(10153);
assertThat(e.getMessage()).contains("Modifier $pushAll allowed for arrays only");
}
collection.updateOne(idObj, json("$pushAll: {field: ['value', 'value2']}"));
assertThat(collection.find(idObj).first()).isEqualTo(json("_id: 1, field: ['value', 'value2']"));
}
@Test
public void testUpdateAddToSet() throws Exception {
Document idObj = json("_id: 1");
collection.insertOne(idObj);
collection.updateOne(idObj, json("$addToSet: {'field.subfield.subsubfield': 'value'}"));
assertThat(collection.find(idObj).first()).isEqualTo(json("_id: 1, field:{subfield:{subsubfield:['value']}}"));
// addToSet to non-array
collection.updateOne(idObj, json("$set: {field: 'value'}"));
try {
collection.updateOne(idObj, json("$addToSet: {field: 'value'}"));
fail("MongoException expected");
} catch (MongoException e) {
assertThat(e.getCode()).isEqualTo(10141);
assertThat(e.getMessage()).contains("Cannot apply $addToSet modifier to non-array");
}
// addToSet with multiple fields
collection.updateOne(idObj, json("$addToSet: {field1: 'value', field2: 'value2'}"));
assertThat(collection.find(idObj).first())
.isEqualTo(json("_id: 1, field: 'value', field1: ['value'], field2: ['value2']"));
// addToSet duplicate
collection.updateOne(idObj, json("$addToSet: {field1: 'value'}"));
assertThat(collection.find(idObj).first())
.isEqualTo(json("_id: 1, field: 'value', field1: ['value'], field2: ['value2']"));
}
@Test
public void testUpdateAddToSetEach() throws Exception {
collection.insertOne(json("_id: 1"));
collection.updateOne(json("_id: 1"), addEachToSet("a", Arrays.asList(6, 5, 4)));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, a: [6,5,4]"));
collection.updateOne(json("_id: 1"), addEachToSet("a", Arrays.asList(3, 2, 1)));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, a: [6,5,4,3,2,1]"));
collection.updateOne(json("_id: 1"), addEachToSet("a", Arrays.asList(7, 7, 9, 2)));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, a: [6,5,4,3,2,1,7,9]"));
collection.updateOne(json("_id: 1"), addEachToSet("a", Arrays.asList(12, 13, 12)));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, a: [6,5,4,3,2,1,7,9,12,13]"));
}
@Test
public void testUpdateDatasize() throws Exception {
Document obj = json("{_id:1, a:{x:[1, 2, 3]}}");
collection.insertOne(obj);
Number oldSize = getCollStats().getLong("size");
collection.updateOne(json("_id:1"), set("a.x.0", 3));
assertThat(collection.find().first().get("a")).isEqualTo(json("x:[3,2,3]"));
Number newSize = getCollStats().getLong("size");
assertThat(newSize).isEqualTo(oldSize);
// now increase the data size: replacing the int32 (4 bytes in BSON) with the string 'abc' (8 bytes in BSON) should grow it by 4 bytes
collection.updateOne(json("_id:1"), set("a.x.0", "abc"));
Number yetNewSize = getCollStats().getLong("size");
assertThat(yetNewSize.longValue() - oldSize.longValue()).isEqualTo(4);
}
@Test
public void testUpdatePull() throws Exception {
Document obj = json("_id: 1");
collection.insertOne(obj);
// pull from non-existing field
assertThat(collection.find(obj).first()).isEqualTo(obj);
// pull from non-array
collection.updateOne(obj, set("field", "value"));
try {
collection.updateOne(obj, pull("field", "value"));
fail("MongoException expected");
} catch (MongoException e) {
assertThat(e.getCode()).isEqualTo(10142);
assertThat(e.getMessage()).contains("Cannot apply $pull modifier to non-array");
}
// pull standard
collection.updateOne(obj, json("$set: {field: ['value1', 'value2', 'value1']}"));
collection.updateOne(obj, pull("field", "value1"));
assertThat(collection.find(obj).first().get("field")).isEqualTo(Collections.singletonList("value2"));
// pull with multiple fields
collection.updateOne(obj, json("{$set: {field1: ['value1', 'value2', 'value1']}}"));
collection.updateOne(obj, json("$set: {field2: ['value3', 'value3', 'value1']}"));
collection.updateOne(obj, json("$pull: {field1: 'value2', field2: 'value3'}"));
assertThat(collection.find(obj).first().get("field1")).isEqualTo(Arrays.asList("value1", "value1"));
assertThat(collection.find(obj).first().get("field2")).isEqualTo(Collections.singletonList("value1"));
}
@Test
public void testUpdatePullValueWithCondition() {
collection.insertOne(json("_id: 1, votes: [ 3, 5, 6, 7, 7, 8 ]"));
collection.updateOne(json("_id: 1"), json("$pull: { votes: { $gte: 6 } }"));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, votes: [ 3, 5 ]"));
}
@Test
public void testUpdatePullDocuments() {
collection.insertOne(json("_id: 1, results: [{item: 'A', score: 5}, {item: 'B', score: 8, comment: 'foobar'}]"));
collection.insertOne(json("_id: 2, results: [{item: 'C', score: 8, comment: 'foobar'}, {item: 'B', score: 4}]"));
collection.updateOne(json("{}"), json("$pull: { results: { score: 8 , item: 'B' } }"));
assertThat(collection.find(json("_id: 1")).first()).isEqualTo(json("_id: 1, results: [{item: 'A', score: 5}]"));
assertThat(collection.find(json("_id: 2")).first()).isEqualTo(json("_id: 2, results: [{item: 'C', score: 8, comment: 'foobar'}, {item: 'B', score: 4}]"));
}
// https://github.com/bwaldvogel/mongo-java-server/issues/20
@Test
public void testUpdatePullLeavesEmptyArray() {
Document obj = json("_id: 1");
collection.insertOne(obj);
collection.updateOne(obj, json("$set: {field: [{'key1': 'value1', 'key2': 'value2'}]}"));
collection.updateOne(obj, json("$pull: {field: {'key1': 'value1'}}"));
assertThat(collection.find(obj).first()).isEqualTo(json("_id: 1, field: []"));
}
@Test
public void testUpdatePullAll() throws Exception {
Document obj = json("_id: 1");
collection.insertOne(obj);
collection.updateOne(obj, json("$set: {field: 'value'}"));
try {
collection.updateOne(obj, json("$pullAll: {field: 'value'}"));
fail("MongoException expected");
} catch (MongoException e) {
assertThat(e.getCode()).isEqualTo(10142);
assertThat(e.getMessage()).contains("Cannot apply $pullAll modifier to non-array");
}
collection.updateOne(obj, json("$set: {field1: ['value1', 'value2', 'value1', 'value3', 'value4', 'value3']}"));
collection.updateOne(obj, json("$pullAll: {field1: ['value1', 'value3']}"));
assertThat(collection.find(obj).first().get("field1")).isEqualTo(Arrays.asList("value2", "value4"));
try {
collection.updateOne(obj, json("$pullAll: {field1: 'bar'}"));
fail("MongoException expected");
} catch (MongoException e) {
assertThat(e.getCode()).isEqualTo(10153);
assertThat(e.getMessage()).contains("Modifier $pullAll allowed for arrays only");
}
}
@Test
public void testUpdateSet() throws Exception {
Document object = json("_id: 1");
collection.insertOne(object);
assertThat(collection.find(object).first()).isEqualTo(object);
collection.updateOne(object, json("$set: {foo: 'bar'}"));
Document expected = json("{}");
expected.putAll(object);
expected.put("foo", "bar");
collection.updateOne(object, json("$set: {bar: 'bla'}"));
expected.put("bar", "bla");
assertThat(collection.find(object).first()).isEqualTo(expected);
collection.updateOne(object, json("$set: {'foo.bar': 'bla'}"));
expected.put("foo", json("bar: 'bla'"));
assertThat(collection.find(object).first()).isEqualTo(expected);
collection.updateOne(object, json("$set: {'foo.foo': '123'}"));
((Document) expected.get("foo")).put("foo", "123");
assertThat(collection.find(object).first()).isEqualTo(expected);
}
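// $setOnInsert fields are only applied when the upsert actually inserts a new document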
@Test
public void testUpdateSetOnInsert() throws Exception {
Document object = json("_id: 1");
collection.updateOne(object, json("$set: {b: 3}, $setOnInsert: {a: 3}"), new UpdateOptions().upsert(true));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, b: 3, a: 3"));
collection.updateOne(object, json("$set: {b: 4}, $setOnInsert: {a: 5}"), new UpdateOptions().upsert(true));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, b: 4, a: 3")); // 'a' is unchanged
}
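// numeric path components address array elements; writing one past the end appends a new element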
@Test
public void testUpdateSetWithArrayIndices() throws Exception {
// SERVER-181
collection.insertOne(json("_id: 1, a: [{x:0}]"));
collection.updateOne(json("{}"), json("$set: {'a.0.x': 3}"));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, a: [{x:3}]"));
collection.updateOne(json("{}"), json("$set: {'a.1.z': 17}"));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, a: [{x:3}, {z:17}]"));
collection.updateOne(json("{}"), json("$set: {'a.0.y': 7}"));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, a: [{x:3, y:7}, {z:17}]"));
collection.updateOne(json("{}"), json("$set: {'a.1': 'test'}"));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, a: [{x:3, y:7}, 'test']"));
}
@Test
public void testUpdateUnsetWithArrayIndices() throws Exception {
// SERVER-273
collection.insertOne(json("_id: 1, a:[{x:0}]"));
collection.updateOne(json("{}"), json("$unset: {'a.0.x': 1}"));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, a:[{}]"));
collection.updateOne(json("{}"), json("$unset: {'a.0': 1}"));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, a:[null]"));
collection.updateOne(json("{}"), json("$unset: {'a.10': 1}"));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, a:[null]"));
}
@Test
public void testUpdateMax() throws Exception {
Document object = json("_id: 1");
collection.insertOne(object);
collection.updateOne(object, json("$max: {'foo.bar': 1}"));
assertThat(collection.find(object).first()).isEqualTo(json("_id: 1, foo : {bar : 1}"));
collection.updateOne(object, json("$max: {'foo.bar': 1}"));
assertThat(collection.find(object).first()).isEqualTo(json("_id: 1, foo : {bar : 1}"));
collection.updateOne(object, json("$max: {'foo.bar': 10}"));
assertThat(collection.find(object).first()).isEqualTo(json("_id: 1, foo : {bar : 10}"));
collection.updateOne(object, json("$max: {'foo.bar': -100}"));
assertThat(collection.find(object).first()).isEqualTo(json("_id: 1, foo : {bar : 10}"));
collection.updateOne(object, json("$max: {'foo.bar': '1'}"));
assertThat(collection.find(object).first()).isEqualTo(json("_id: 1, foo : {bar : '1'}"));
collection.updateOne(object, json("$max: {'foo.bar': null}"));
assertThat(collection.find(object).first()).isEqualTo(json("_id: 1, foo : {bar : '1'}"));
collection.updateOne(object, json("$max: {'foo.bar': '2', 'buz' : 1}"));
assertThat(collection.find(object).first()).isEqualTo(json("_id: 1, foo : {bar : '2'}, buz : 1"));
}
@Test
public void testUpdateMin() throws Exception {
Document object = json("_id: 1");
collection.insertOne(object);
collection.updateOne(object, json("$min: {'foo.bar': 'b'}"));
assertThat(collection.find(object).first()).isEqualTo(json("_id: 1, foo : {bar : 'b'}"));
collection.updateOne(object, json("$min: {'foo.bar': 'a'}"));
assertThat(collection.find(object).first()).isEqualTo(json("_id: 1, foo : {bar : 'a'}"));
collection.updateOne(object, json("$min: {'foo.bar': 10}"));
assertThat(collection.find(object).first()).isEqualTo(json("_id: 1, foo : {bar : 10}"));
collection.updateOne(object, json("$min: {'foo.bar': 10}"));
assertThat(collection.find(object).first()).isEqualTo(json("_id: 1, foo : {bar : 10}"));
collection.updateOne(object, json("$min: {'foo.bar': 1}"));
assertThat(collection.find(object).first()).isEqualTo(json("_id: 1, foo : {bar : 1}"));
collection.updateOne(object, json("$min: {'foo.bar': 100}"));
assertThat(collection.find(object).first()).isEqualTo(json("_id: 1, foo : {bar : 1}"));
collection.updateOne(object, json("$min: {'foo.bar': null}"));
assertThat(collection.find(object).first()).isEqualTo(json("_id: 1, foo : {bar : null}"));
collection.updateOne(object, json("$min: {'foo.bar': 'a'}"));
assertThat(collection.find(object).first()).isEqualTo(json("_id: 1, foo : {bar : null}"));
}
// see http://docs.mongodb.org/manual/reference/operator/update/max
@Test
public void testUpdateMaxCompareNumbers() throws Exception {
Document object = json("_id: 1, highScore: 800, lowScore: 200");
collection.insertOne(object);
collection.updateOne(json("_id: 1"), json("$max: { highScore: 950 }"));
assertThat(collection.find(json("_id: 1")).first()).isEqualTo(json("_id: 1, highScore: 950, lowScore: 200"));
collection.updateOne(json("_id: 1"), json("$max: { highScore: 870 }"));
assertThat(collection.find(json("_id: 1")).first()).isEqualTo(json("_id: 1, highScore: 950, lowScore: 200"));
}
// see http://docs.mongodb.org/manual/reference/operator/update/max
@Test
public void testUpdateMaxCompareDates() throws Exception {
DateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss", Locale.US);
Document object = new Document("_id", 1).append("desc", "crafts")
.append("dateEntered", df.parse("2013-10-01T05:00:00"))
.append("dateExpired", df.parse("2013-10-01T16:38:16"));
collection.insertOne(object);
collection.updateOne(json("_id: 1"),
new Document("$max", new Document("dateExpired", df.parse("2013-09-30T00:00:00"))));
assertThat(collection.find(json("_id: 1")).first())
.isEqualTo(json("_id: 1, desc: 'crafts'")
.append("dateEntered", df.parse("2013-10-01T05:00:00"))
.append("dateExpired", df.parse("2013-10-01T16:38:16")));
collection.updateOne(json("_id: 1"),
new Document("$max", new Document("dateExpired", df.parse("2014-01-07T00:00:00"))));
assertThat(collection.find(json("_id: 1")).first()).isEqualTo(
json("_id: 1, desc: 'crafts'")
.append("dateEntered", df.parse("2013-10-01T05:00:00"))
.append("dateExpired", df.parse("2014-01-07T00:00:00")));
}
// see http://docs.mongodb.org/manual/reference/operator/update/min
@Test
public void testUpdateMinCompareNumbers() throws Exception {
Document object = json("_id: 1, highScore: 800, lowScore: 200");
collection.insertOne(object);
collection.updateOne(json("_id: 1"), json("$min: { lowScore: 150 }"));
assertThat(collection.find(json("_id: 1")).first()).isEqualTo(json("_id: 1, highScore: 800, lowScore: 150"));
collection.updateOne(json("_id: 1"), json("$min: { lowScore: 250 }"));
assertThat(collection.find(json("_id: 1")).first()).isEqualTo(json("_id: 1, highScore: 800, lowScore: 150"));
}
// see http://docs.mongodb.org/manual/reference/operator/update/min
@Test
public void testUpdateMinCompareDates() throws Exception {
DateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss", Locale.US);
Document object = new Document("_id", 1).append("desc", "crafts")
.append("dateEntered", df.parse("2013-10-01T05:00:00"))
.append("dateExpired", df.parse("2013-10-01T16:38:16"));
collection.insertOne(object);
collection.updateOne(json("_id: 1"),
new Document("$min", new Document("dateEntered", df.parse("2013-09-25T00:00:00"))));
assertThat(collection.find(json("_id: 1")).first()) //
.isEqualTo(json("_id: 1, desc: 'crafts'") //
.append("dateEntered", df.parse("2013-09-25T00:00:00")) //
.append("dateExpired", df.parse("2013-10-01T16:38:16")));
collection.updateOne(json("_id: 1"),
new Document("$min", new Document("dateEntered", df.parse("2014-01-07T00:00:00"))));
assertThat(collection.find(json("_id: 1")).first()) //
.isEqualTo(json("_id: 1, desc: 'crafts'") //
.append("dateEntered", df.parse("2013-09-25T00:00:00")) //
.append("dateExpired", df.parse("2013-10-01T16:38:16")));
}
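// $pop with 1 removes the last array element, with -1 the first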
@Test
public void testUpdatePop() throws Exception {
Document object = json("_id: 1");
collection.insertOne(object);
collection.updateOne(object, json("$pop: {'foo.bar': 1}"));
assertThat(collection.find(object).first()).isEqualTo(object);
collection.updateOne(object, json("$set: {'foo.bar': [1,2,3]}"));
assertThat(collection.find(object).first()).isEqualTo(json("_id:1, foo:{bar:[1,2,3]}"));
collection.updateOne(object, json("$pop: {'foo.bar': 1}"));
assertThat(collection.find(object).first()).isEqualTo(json("_id:1, foo:{bar:[1,2]}"));
collection.updateOne(object, json("$pop: {'foo.bar': -1}"));
assertThat(collection.find(object).first()).isEqualTo(json("_id:1, foo:{bar:[2]}"));
collection.updateOne(object, json("$pop: {'foo.bar': null}"));
assertThat(collection.find(object).first()).isEqualTo(json("_id:1, foo:{bar:[]}"));
}
@Test
public void testUpdateUnset() throws Exception {
Document obj = json("_id: 1, a: 1, b: null, c: 'value'");
collection.insertOne(obj);
try {
collection.updateOne(obj, json("$unset: {_id: ''}"));
fail("MongoException expected");
} catch (MongoException e) {
assertThat(e.getCode()).isEqualTo(10148);
assertThat(e.getMessage()).contains("Mod on _id not allowed");
}
collection.updateOne(obj, json("$unset: {a:'', b:''}"));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, c: 'value'"));
collection.updateOne(obj, Updates.unset("c.y"));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, c: 'value'"));
collection.replaceOne(json("_id: 1"), json("a: {b: 'foo', c: 'bar'}"));
collection.updateOne(json("_id: 1"), json("$unset: {'a.b':1}"));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, a: {c: 'bar'}"));
}
@Test
public void testUpdateWithIdIn() {
collection.insertOne(json("_id: 1"));
Document update = json("$push: {n: {_id: 2, u:3}}, $inc: {c:4}");
Document expected = json("_id: 1, n: [{_id: 2, u:3}], c:4");
collection.updateOne(json("_id: {$in: [1]}"), update);
assertThat(collection.find().first()).isEqualTo(expected);
}
@Test
public void testUpdateMulti() throws Exception {
collection.insertOne(json("a: 1"));
collection.insertOne(json("a: 1"));
UpdateResult result = collection.updateOne(json("a: 1"), json("$set: {b: 2}"));
assertThat(result.getModifiedCount()).isEqualTo(1);
assertThat(collection.countDocuments(new Document("b", 2))).isEqualTo(1);
result = collection.updateMany(json("a: 1"), json("$set: {b: 3}"));
assertThat(result.getModifiedCount()).isEqualTo(2);
assertThat(collection.countDocuments(new Document("b", 2))).isEqualTo(0);
assertThat(collection.countDocuments(new Document("b", 3))).isEqualTo(2);
}
@Test
public void testUpdateIllegalInt() throws Exception {
collection.insertOne(json("_id: 1, a: {x:1}"));
try {
collection.updateOne(json("_id: 1"), json("$inc: {a: 1}"));
fail("MongoException expected");
} catch (MongoException e) {
assertThat(e.getMessage()).contains("cannot increment value");
}
try {
collection.updateOne(json("_id: 1"), json("$inc: {'a.x': 'b'}"));
fail("MongoException expected");
} catch (MongoException e) {
assertThat(e.getMessage()).contains("cannot increment with non-numeric value");
}
}
@Test
public void testUpdateWithIdInMulti() {
collection.insertMany(Arrays.asList(json("_id: 1"), json("_id: 2")));
collection.updateMany(json("_id: {$in:[1,2]}"), json("$set: {n:1}"));
List<Document> results = toArray(collection.find());
assertThat(results).containsOnly(json("_id: 1, n:1"), json("_id: 2, n: 1"));
}
@Test
public void testUpdateWithIdInMultiReturnModifiedDocumentCount() {
collection.insertMany(Arrays.asList(json("_id: 1"), json("_id: 2")));
UpdateResult result = collection.updateMany(json("_id: {$in:[1,2]}"), json("$set:{n:1}"));
assertThat(result.getModifiedCount()).isEqualTo(2);
}
@Test
public void testUpdateWithIdQuery() {
collection.insertMany(Arrays.asList(json("_id: 1"), json("_id: 2")));
collection.updateMany(json("_id: {$gt:1}"), json("$set: {n:1}"));
List<Document> results = toArray(collection.find());
assertThat(results).containsOnly(json("_id: 1"), json("_id: 2, n:1"));
}
@Test
public void testUpdateWithObjectId() {
collection.insertOne(json("_id: {n:1}"));
UpdateResult result = collection.updateOne(json("_id: {n:1}"), json("$set: {a:1}"));
assertThat(result.getModifiedCount()).isEqualTo(1);
assertThat(collection.find().first()).isEqualTo(json("_id: {n:1}, a:1"));
}
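// the positional operator $ updates the array element that was matched by the query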
@Test
public void testUpdateArrayMatch() throws Exception {
collection.insertOne(json("_id:1, a:[{x:1,y:1}, {x:2,y:2}, {x:3,y:3}]"));
collection.updateOne(json("'a.x': 2"), json("$inc: {'a.$.y': 1}"));
assertThat(collection.find(json("'a.x': 2")).first()).isEqualTo(json("_id:1, a:[{x:1,y:1}, {x:2,y:3}, {x:3,y:3}]"));
collection.insertOne(json("{'array': [{'123a':{'name': 'old'}}]}"));
assertThat(collection.find(json("{'array.123a.name': 'old'}")).first()).isNotNull();
collection.updateOne(json("{'array.123a.name': 'old'}"), json("{$set: {'array.$.123a.name': 'new'}}"));
assertThat(collection.find(json("{'array.123a.name': 'new'}")).first()).isNotNull();
assertThat(collection.find(json("{'array.123a.name': 'old'}")).first()).isNull();
}
@Test
public void testMultiUpdateArrayMatch() throws Exception {
collection.insertOne(json("{}"));
collection.insertOne(json("x:[1,2,3]"));
collection.insertOne(json("x:99"));
collection.updateMany(json("x:2"), json("$inc:{'x.$': 1}"));
assertThat(collection.find(json("x:1")).first().get("x")).isEqualTo(Arrays.asList(1, 3, 3));
}
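// an upsert without a matching document inserts a document built from the query and the update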
@Test
public void testUpsert() {
UpdateResult result = collection.updateMany(json("n:'jon'"), json("$inc:{a:1}"), new UpdateOptions().upsert(true));
assertThat(result.getModifiedCount()).isEqualTo(0);
Document object = collection.find().first();
assertThat(result.getUpsertedId()).isEqualTo(new BsonObjectId(object.getObjectId("_id")));
object.remove("_id");
assertThat(object).isEqualTo(json("n:'jon', a:1"));
result = collection.updateOne(json("_id: 17, n:'jon'"), json("$inc:{a:1}"), new UpdateOptions().upsert(true));
assertThat(result.getUpsertedId()).isNull();
assertThat(collection.find(json("_id:17")).first()).isEqualTo(json("_id: 17, n:'jon', a:1"));
}
@Test
public void testUpsertFieldOrder() throws Exception {
collection.updateOne(json("'x.y': 2"), json("$inc: {a:7}"), new UpdateOptions().upsert(true));
Document obj = collection.find().first();
obj.remove("_id");
// this actually differs from the official MongoDB implementation
assertThat(obj).isEqualTo(json("x:{y:2}, a:7"));
}
@Test
public void testUpsertWithoutId() {
UpdateResult result = collection.updateOne(eq("a", 1), set("a", 2), new UpdateOptions().upsert(true));
assertThat(result.getModifiedCount()).isEqualTo(0);
assertThat(result.getUpsertedId()).isNotNull();
assertThat(collection.find().first().get("_id")).isInstanceOf(ObjectId.class);
assertThat(collection.find().first().get("a")).isEqualTo(2);
}
@Test
public void testUpsertOnIdWithPush() {
Document update1 = json("$push: {c: {a:1, b:2} }");
Document update2 = json("$push: {c: {a:3, b:4} }");
collection.updateOne(json("_id: 1"), update1, new UpdateOptions().upsert(true));
collection.updateOne(json("_id: 1"), update2, new UpdateOptions().upsert(true));
Document expected = json("_id: 1, c: [{a:1, b:2}, {a:3, b:4}]");
assertThat(collection.find(json("'c.a':3, 'c.b':4")).first()).isEqualTo(expected);
}
@Test
public void testUpsertWithConditional() {
Document query = json("_id: 1, b: {$gt: 5}");
Document update = json("$inc: {a: 1}");
UpdateResult updateResult = collection.updateOne(query, update, new UpdateOptions().upsert(true));
assertThat(updateResult.getModifiedCount()).isZero();
assertThat(updateResult.getMatchedCount()).isZero();
assertThat(collection.find().first()).isEqualTo(json("_id: 1, a: 1"));
}
// https://github.com/bwaldvogel/mongo-java-server/issues/29
@Test
public void testUpsertWithoutChange() {
collection.insertOne(json("_id: 1, a: 2, b: 3"));
Document query = json("_id: 1");
Document update = json("$set: {a: 2}");
UpdateResult updateResult = collection.updateOne(query, update, new UpdateOptions().upsert(true));
assertThat(updateResult.getModifiedCount()).isZero();
assertThat(updateResult.getMatchedCount()).isOne();
assertThat(collection.find().first()).isEqualTo(json("_id: 1, a: 2, b: 3"));
}
@Test
public void testUpsertWithEmbeddedQuery() {
collection.updateOne(json("_id: 1, 'e.i': 1"), json("$set: {a:1}"), new UpdateOptions().upsert(true));
assertThat(collection.find(json("_id: 1")).first()).isEqualTo(json("_id:1, e: {i:1}, a:1"));
}
@Test
public void testUpsertWithIdIn() throws Exception {
Document query = json("_id: {$in: [1]}");
Document update = json("$push: {n: {_id: 2 ,u : 3}}, $inc: {c: 4}");
Document expected = json("_id: 1, n: [{_id: 2 ,u : 3}], c: 4");
UpdateResult updateResult = collection.updateOne(query, update, new UpdateOptions().upsert(true));
assertThat(updateResult.getModifiedCount()).isZero();
assertThat(updateResult.getMatchedCount()).isZero();
// the ID generation actually differs from official MongoDB, which just
// creates a random object id
Document actual = collection.find().first();
assertThat(actual).isEqualTo(expected);
}
@Test
public void testUpdateWithMultiplyOperator() throws Exception {
Document object = json("_id: 1");
collection.insertOne(object);
collection.updateOne(object, json("$mul: {a: 2}, $set: {b: 2}"));
assertThat(collection.find(object).first()).isEqualTo(json("_id: 1, a: 0, b: 2"));
collection.updateOne(object, json("$mul: {b: 2.5}, $inc: {a: 0.5}"));
assertThat(collection.find(object).first()).isEqualTo(json("_id: 1, a: 0.5, b: 5.0"));
}
@Test
public void testUpdateWithIllegalMultiplyFails() throws Exception {
Document object = json("_id: 1, foo: 'x', bar: 1");
collection.insertOne(object);
try {
collection.updateOne(object, json("$mul: {_id: 2}"));
fail("MongoCommandException expected");
} catch (MongoCommandException e) {
assertThat(e.getMessage()).contains("_id");
}
try {
collection.updateOne(object, json("$mul: {foo: 2}"));
fail("MongoCommandException expected");
} catch (MongoCommandException e) {
assertThat(e.getMessage()).contains("cannot multiply value 'x'");
}
try {
collection.updateOne(object, json("$mul: {bar: 'x'}"));
fail("MongoCommandException expected");
} catch (MongoCommandException e) {
assertThat(e.getMessage()).contains("cannot multiply with non-numeric value");
}
}
@Test
public void testIsMaster() throws Exception {
Document isMaster = db.runCommand(new Document("isMaster", Integer.valueOf(1)));
assertThat(isMaster.getBoolean("ismaster")).isTrue();
assertThat(isMaster.getDate("localTime")).isInstanceOf(Date.class);
assertThat(isMaster.getInteger("maxBsonObjectSize")).isGreaterThan(1000);
assertThat(isMaster.getInteger("maxMessageSizeBytes")).isGreaterThan(isMaster.getInteger("maxBsonObjectSize"));
}
// https://github.com/foursquare/fongo/pull/26
// http://stackoverflow.com/questions/12403240/storing-null-vs-not-storing-the-key-at-all-in-mongodb
@Test
public void testFindWithNullOrNoFieldFilter() {
collection.insertOne(json("name: 'jon', group: 'group1'"));
collection.insertOne(json("name: 'leo', group: 'group1'"));
collection.insertOne(json("name: 'neil1', group: 'group2'"));
collection.insertOne(json("name: 'neil2', group: null"));
collection.insertOne(json("name: 'neil3'"));
// check {group: null} vs {group: {$exists: false}} filter
List<Document> objs = toArray(collection.find(json("group: null")));
assertThat(objs).as("should have two neils (neil2, neil3)").hasSize(2);
objs = toArray(collection.find(exists("group", false)));
assertThat(objs).as("should have one neils (neil3)").hasSize(1);
// same check but for fields which do not exist in DB
objs = toArray(collection.find(json("other: null")));
assertThat(objs).as("should return all documents").hasSize(5);
objs = toArray(collection.find(exists("other", false)));
assertThat(objs).as("should return all documents").hasSize(5);
}
@Test
public void testInsertsWithUniqueIndex() {
collection.createIndex(new Document("uniqueKeyField", 1), new IndexOptions().unique(true));
collection.insertOne(json("uniqueKeyField: 'abc1', afield: 'avalue'"));
collection.insertOne(json("uniqueKeyField: 'abc2', afield: 'avalue'"));
collection.insertOne(json("uniqueKeyField: 'abc3', afield: 'avalue'"));
try {
collection.insertOne(json("uniqueKeyField: 'abc2', afield: 'avalue'"));
fail("MongoWriteException expected");
} catch (MongoWriteException e) {
assertThat(e.getMessage()).contains("duplicate key error");
}
}
@Test
public void testInsertBinaryData() throws Exception {
collection.insertOne(new Document("test", new byte[] { 0x01, 0x02, 0x03 }));
}
// see https://github.com/bwaldvogel/mongo-java-server/issues/9
@Test
public void testUniqueIndexWithSubdocument() {
collection.createIndex(new Document("action.actionId", 1), new IndexOptions().unique(true));
collection.insertOne(json("action: 'abc1'"));
collection.insertOne(json("action: { actionId: 1 }"));
collection.insertOne(json("action: { actionId: 2 }"));
collection.insertOne(json("action: { actionId: 3 }"));
try {
collection.insertOne(json("action: { actionId: 1 }"));
fail("MongoWriteException expected");
} catch (MongoWriteException e) {
assertThat(e.getMessage()).contains("duplicate key error");
}
}
@Test
public void testAddNonUniqueIndexOnNonIdField() {
collection.createIndex(new Document("someField", 1), new IndexOptions().unique(false));
collection.insertOne(json("someField: 'abc'"));
collection.insertOne(json("someField: 'abc'"));
}
@Test
public void testCompoundUniqueIndicesNotSupportedAndThrowsException() {
try {
collection.createIndex(new Document("a", 1).append("b", 1), new IndexOptions().unique(true));
fail("MongoException expected");
} catch (MongoException e) {
// expected
}
}
@Test
public void testCursorOptionNoTimeout() throws Exception {
try (MongoCursor<Document> cursor = collection.find().noCursorTimeout(true).iterator()) {
assertFalse(cursor.hasNext());
}
}
@Test
public void testBulkInsert() throws Exception {
List<WriteModel<Document>> inserts = new ArrayList<>();
inserts.add(new InsertOneModel<>(json("_id: 1")));
inserts.add(new InsertOneModel<>(json("_id: 2")));
inserts.add(new InsertOneModel<>(json("_id: 3")));
BulkWriteResult result = collection.bulkWrite(inserts);
assertThat(result.getInsertedCount()).isEqualTo(3);
}
@Test
public void testBulkUpdateOrdered() throws Exception {
testBulkUpdate(true);
}
@Test
public void testBulkUpdateUnordered() throws Exception {
testBulkUpdate(false);
}
private void testBulkUpdate(boolean ordered) {
insertUpdateInBulk(ordered);
removeInBulk(ordered);
insertUpdateInBulkNoMatch(ordered);
}
@Test
public void testUpdateCurrentDateIllegalTypeSpecification() throws Exception {
Document object = json("_id: 1");
collection.insertOne(object);
try {
collection.updateOne(object, json("$currentDate: {lastModified: null}"));
fail("MongoCommandException expected");
} catch (MongoCommandException e) {
assertThat(e.getCode()).isEqualTo(2);
assertThat(e.getErrorMessage()).startsWith("NULL").contains("is not a valid type");
}
try {
collection.updateOne(object, json("$currentDate: {lastModified: 123.456}"));
fail("MongoCommandException expected");
} catch (MongoCommandException e) {
assertThat(e.getCode()).isEqualTo(2);
assertThat(e.getErrorMessage()).startsWith("Double").contains("is not a valid type");
}
try {
collection.updateOne(object, json("$currentDate: {lastModified: 'foo'}"));
fail("MongoCommandException expected");
} catch (MongoCommandException e) {
assertThat(e.getCode()).isEqualTo(2);
assertThat(e.getErrorMessage()).startsWith("String").contains("is not a valid type");
}
try {
collection.updateOne(object, json("$currentDate: {lastModified: {$type: 'foo'}}"));
fail("MongoCommandException expected");
} catch (MongoCommandException e) {
assertThat(e.getCode()).isEqualTo(2);
assertThat(e.getErrorMessage())
.startsWith("The '$type' string field is required to be 'date' or 'timestamp'");
}
assertThat(collection.find(object).first()).isEqualTo(object);
}
@Test
public void testUpdateCurrentDate() throws Exception {
Document object = json("_id: 1");
collection.insertOne(object);
collection.updateOne(object, json("$currentDate: {'x.lastModified': true}"));
assertThat(((Document) collection.find(object).first().get("x")).get("lastModified"))
.isInstanceOf(Date.class);
collection.updateOne(object, json("$currentDate: {'x.lastModified': {$type: 'date'}}"));
assertThat(((Document) collection.find(object).first().get("x")).get("lastModified"))
.isInstanceOf(Date.class);
collection.updateOne(object, json("$currentDate: {'x.lastModified': {$type: 'timestamp'}}"));
assertThat(((Document) collection.find(object).first().get("x")).get("lastModified"))
.isInstanceOf(BsonTimestamp.class);
}
@Test
public void testRenameField() throws Exception {
Document object = json("_id: 1, foo: 'x', bar: 'y'");
collection.insertOne(object);
collection.updateOne(json("_id: 1"), json("$rename: {foo: 'foo2', bar: 'bar2'}"));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, foo2: 'x', bar2: 'y'"));
collection.updateOne(json("_id: 1"), json("$rename: {'bar2': 'foo', foo2: 'bar'}"));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, bar: 'x', foo: 'y'"));
}
@Test
public void testRenameFieldIllegalValue() throws Exception {
Document object = json("_id: 1, foo: 'x', bar: 'y'");
collection.insertOne(object);
try {
collection.updateOne(json("_id: 1"), json("$rename: {foo: 12345}"));
fail("MongoCommandException expected");
} catch (MongoCommandException e) {
assertThat(e.getMessage()).contains("The 'to' field for $rename must be a string");
}
try {
collection.updateOne(json("_id: 1"), json("$rename: {'_id': 'id'}"));
fail("MongoCommandException expected");
} catch (MongoCommandException e) {
assertThat(e.getMessage()).contains("Mod on _id not allowed");
}
try {
collection.updateOne(json("_id: 1"), json("$rename: {foo: '_id'}"));
fail("MongoCommandException expected");
} catch (MongoCommandException e) {
assertThat(e.getMessage()).contains("Mod on _id not allowed");
}
try {
collection.updateOne(json("_id: 1"), json("$rename: {foo: 'bar', 'bar': 'bar2'}"));
fail("MongoCommandException expected");
} catch (MongoCommandException e) {
assertThat(e.getMessage()).contains("Cannot update 'bar' and 'bar' at the same time");
}
try {
collection.updateOne(json("_id: 1"), json("$rename: {bar: 'foo', bar2: 'foo'}"));
fail("MongoCommandException expected");
} catch (MongoCommandException e) {
assertThat(e.getMessage()).contains("Cannot update 'foo' and 'foo' at the same time");
}
}
@Test
public void testRenameCollection() throws Exception {
collection.insertOne(json("_id: 1"));
collection.insertOne(json("_id: 2"));
collection.insertOne(json("_id: 3"));
collection.renameCollection(new MongoNamespace(collection.getNamespace().getDatabaseName(), "other-collection-name"));
Collection<String> collectionNames = toArray(db.listCollectionNames());
assertThat(collectionNames).containsOnly("system.indexes", "other-collection-name");
assertThat(getCollection("other-collection-name").countDocuments()).isEqualTo(3);
}
@Test
public void testRenameCollection_targetAlreadyExists() throws Exception {
collection.insertOne(json("_id: 1"));
collection.insertOne(json("_id: 2"));
collection.insertOne(json("_id: 3"));
MongoCollection<Document> otherCollection = db.getCollection("other-collection-name");
otherCollection.insertOne(json("_id: 1"));
try {
collection.renameCollection(new MongoNamespace(db.getName(), "other-collection-name"));
fail("MongoCommandException expected");
} catch (MongoCommandException e) {
assertThat(e.getErrorMessage()).isEqualTo("target namespace already exists");
}
List<String> collectionNames = toArray(db.listCollectionNames());
assertThat(collectionNames).containsOnly("system.indexes", collection.getNamespace().getCollectionName(),
"other-collection-name");
assertThat(collection.countDocuments()).isEqualTo(3);
assertThat(getCollection("other-collection-name").countDocuments()).isEqualTo(1);
}
@Test
public void testRenameCollection_dropTarget() throws Exception {
collection.insertOne(json("_id: 1"));
collection.insertOne(json("_id: 2"));
collection.insertOne(json("_id: 3"));
MongoCollection<Document> otherCollection = db.getCollection("other-collection-name");
otherCollection.insertOne(json("_id: 1"));
collection.renameCollection(new MongoNamespace(db.getName(), "other-collection-name"),
new RenameCollectionOptions().dropTarget(true));
List<String> collectionNames = toArray(db.listCollectionNames());
assertThat(collectionNames).containsOnly("system.indexes", "other-collection-name");
assertThat(getCollection("other-collection-name").countDocuments()).isEqualTo(3);
}
@Test
public void testListIndexes_empty() throws Exception {
assertThat(collection.listIndexes()).isEmpty();
}
@Test
public void testListIndexes() throws Exception {
collection.insertOne(json("_id: 1"));
db.getCollection("other").insertOne(json("_id: 1"));
collection.createIndex(json("bla: 1"));
List<Document> indexInfo = toArray(collection.listIndexes());
assertThat(indexInfo).containsOnly( //
json("name:'_id_', ns:'testdb.testcoll', key:{_id:1}"), //
json("name:'_id_', ns:'testdb.other', key:{_id:1}"), //
json("name:'bla_1', ns:'testdb.testcoll', key:{bla:1}"));
}
@Test
public void testFieldSelection_deselectId() {
collection.insertOne(json("_id: 1, order:1, visits: 2"));
Document document = collection.find(json("{}")).projection(json("_id: 0")).first();
assertThat(document).isEqualTo(json("order:1, visits:2"));
}
@Test
public void testFieldSelection_deselectOneField() {
Document obj = json("_id: 1, order:1, visits: 2, eid: 12345");
collection.insertOne(obj);
Document document = collection.find(new Document()).projection(json("visits: 0")).first();
assertThat(document).isEqualTo(json("_id:1, order:1, eid: 12345"));
}
@Test
public void testFieldSelection_deselectTwoFields() {
Document obj = json("_id: 1, order:1, visits: 2, eid: 12345");
collection.insertOne(obj);
Document document = collection.find(new Document()).projection(json("visits: 0, eid: 0")).first();
assertThat(document).isEqualTo(json("_id:1, order:1"));
}
@Test
public void testFieldSelection_selectAndDeselectFields() {
Document obj = json("_id: 1, order:1, visits: 2, eid: 12345");
collection.insertOne(obj);
Document document = collection.find(new Document()).projection(json("visits: 0, eid: 1")).first();
assertThat(document).isEqualTo(json("_id:1, eid: 12345"));
}
@Test
public void testPullWithInPattern() {
collection.insertOne(json("_id: 1, tags: ['aa', 'bb', 'ab', 'cc']"));
collection.updateOne(json("_id: 1"), pullByFilter(in("tags", Pattern.compile("a+"))));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, tags: ['bb', 'cc']"));
}
@Test
public void testPullWithInPatternAnchored() {
collection.insertOne(json("_id: 1, tags: ['aa', 'bb', 'ab', 'cc']"));
collection.updateOne(json("_id: 1"), pullByFilter(in("tags", Pattern.compile("^a+$"))));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, tags: ['bb', 'ab', 'cc']"));
}
@Test
public void testPullWithInNumbers() {
collection.insertOne(json("_id: 1, values: [1, 2, 2.5, 3.0, 4]"));
collection.updateOne(json("_id: 1"), pullByFilter(in("values", Arrays.asList(2.0, 3, 4L))));
assertThat(collection.find().first()).isEqualTo(json("_id: 1, values: [1, 2.5]"));
}
@Test
public void testDocumentWithHashMap() {
Map<String, String> value = new HashMap<>();
value.put("foo", "bar");
collection.insertOne(new Document("_id", 1).append("map", value));
Bson document = collection.find().first();
assertThat(document).isEqualTo(json("{_id: 1, map: {foo: 'bar'}}"));
}
@Test
public void testFindAndOfOrs() throws Exception {
DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
collection.insertOne(new Document("_id", 1).append("published", true).append("startDate", dateFormat.parse("2015-03-01 13:20:05")));
collection.insertOne(new Document("_id", 2).append("published", true).append("expiration", dateFormat.parse("2020-12-31 18:00:00")));
collection.insertOne(new Document("_id", 3).append("published", true));
collection.insertOne(new Document("_id", 4).append("published", false));
collection.insertOne(new Document("_id", 5).append("published", true).append("startDate", dateFormat.parse("2017-01-01 00:00:00")));
collection.insertOne(new Document("_id", 6).append("published", true).append("expiration", dateFormat.parse("2016-01-01 00:00:00")));
Date now = dateFormat.parse("2016-01-01 00:00:00");
Bson query = and(
ne("published", false),
or(exists("startDate", false), lt("startDate", now)),
or(exists("expiration", false), gt("expiration", now))
);
List<Document> documents = toArray(collection.find(query).projection(json("_id: 1")));
assertThat(documents).containsOnly(json("_id: 1"), json("_id: 2"), json("_id: 3"));
}
@Test
public void testInOperatorWithNullValue() {
collection.insertMany(Arrays.asList(
json("_id: 1, a: 1"),
json("_id: 2, a: 2"),
json("_id: 3, a: 3"),
json("_id: 4, a: 4"),
json("_id: 5"))
);
Bson inQueryWithNull = in("a", 2, null, 3);
List<Document> results = toArray(collection.find(inQueryWithNull).projection(json("_id: 1")));
assertThat(results).containsExactly(
json("_id: 2"),
json("_id: 3"),
json("_id: 5")
);
}
@Test
public void testQueryWithReference() throws Exception {
collection.insertOne(json("_id: 1"));
String collectionName = collection.getNamespace().getCollectionName();
collection.insertOne(new Document("_id", 2).append("ref", new DBRef(collectionName, 1)));
collection.insertOne(new Document("_id", 3).append("ref", new DBRef(collectionName, 2)));
Document doc = collection.find(new Document("ref", new DBRef(collectionName, 1))).projection(json("_id: 1")).first();
assertThat(doc).isEqualTo(json("_id: 2"));
}
@Test
public void testQueryWithIllegalReference() throws Exception {
collection.insertOne(json("_id: 1"));
String collectionName = collection.getNamespace().getCollectionName();
collection.insertOne(new Document("_id", 2).append("ref", new DBRef(collectionName, 1)));
collection.insertOne(new Document("_id", 3).append("ref", new DBRef(collectionName, 2)));
try {
collection.find(json("ref: {$ref: 'coll'}")).first();
fail("MongoQueryException expected");
} catch (MongoQueryException e) {
assertThat(e.getCode()).isEqualTo(10068);
assertThat(e.getMessage()).contains("invalid operator: $ref");
}
}
@Test
public void testAndOrNorWithEmptyArray() throws Exception {
collection.insertOne(json("{}"));
assertMongoQueryException(and());
assertMongoQueryException(nor());
assertMongoQueryException(or());
}
@Test
public void testInsertLargeDocument() throws Exception {
insertAndFindLargeDocument(100, 1);
insertAndFindLargeDocument(1000, 2);
insertAndFindLargeDocument(10000, 3);
}
@Test
public void testInsertAndUpdateAsynchronously() throws Exception {
int numDocuments = 1000;
final CountDownLatch latch = new CountDownLatch(numDocuments);
final Queue<RuntimeException> errors = new LinkedBlockingQueue<>();
final Semaphore concurrentOperationsOnTheFly = new Semaphore(50); // prevent MongoWaitQueueFullException
for (int i = 1; i <= numDocuments; i++) {
final Document document = new Document("_id", i);
for (int j = 0; j < 10; j++) {
document.append("key-" + i + "-" + j, "value-" + i + "-" + j);
}
concurrentOperationsOnTheFly.acquire();
asyncCollection.insertOne(document, new SingleResultCallback<Void>() {
@Override
public void onResult(Void result, Throwable t) {
checkError("insert", t);
log.info("inserted {}", document);
final Document query = new Document("_id", document.getInteger("_id"));
asyncCollection.updateOne(query, Updates.set("updated", true), new SingleResultCallback<UpdateResult>() {
@Override
public void onResult(UpdateResult result, Throwable t) {
checkError("update", t);
log.info("updated {}: {}", query, result);
release();
}
});
}
private void checkError(String operation, Throwable t) {
if (t != null) {
log.error(operation + " of {} failed", document, t);
RuntimeException exception = new RuntimeException("Failed to " + operation + " " + document, t);
errors.add(exception);
release();
throw exception;
}
}
private void release() {
latch.countDown();
concurrentOperationsOnTheFly.release();
}
});
}
boolean success = latch.await(30, TimeUnit.SECONDS);
assertTrue(success);
if (!errors.isEmpty()) {
throw errors.poll();
}
log.info("finished");
for (int i = 1; i <= numDocuments; i++) {
Document query = new Document("_id", i);
Document document = collection.find(query).first();
assertThat(document).describedAs(query.toJson()).isNotNull();
assertThat(document.getBoolean("updated")).describedAs(document.toJson()).isTrue();
}
long count = collection.countDocuments();
assertThat(count).isEqualTo(numDocuments);
}
@Test
public void testAllQuery() throws Exception {
// see https://docs.mongodb.com/manual/reference/operator/query/all/
collection.insertOne(new Document("_id", new ObjectId("5234cc89687ea597eabee675"))
.append("code", "xyz")
.append("tags", Arrays.asList("school", "book", "bag", "headphone", "appliance"))
.append("qty", Arrays.asList(
new Document().append("size", "S").append("num", 10).append("color", "blue"),
new Document().append("size", "M").append("num", 45).append("color", "blue"),
new Document().append("size", "L").append("num", 100).append("color", "green")
)));
collection.insertOne(new Document("_id", new ObjectId("5234cc8a687ea597eabee676"))
.append("code", "abc")
.append("tags", Arrays.asList("appliance", "school", "book"))
.append("qty", Arrays.asList(
new Document().append("size", "6").append("num", 100).append("color", "green"),
new Document().append("size", "6").append("num", 50).append("color", "blue"),
new Document().append("size", "8").append("num", 100).append("color", "brown")
)));
collection.insertOne(new Document("_id", new ObjectId("5234ccb7687ea597eabee677"))
.append("code", "efg")
.append("tags", Arrays.asList("school", "book"))
.append("qty", Arrays.asList(
new Document().append("size", "S").append("num", 10).append("color", "blue"),
new Document().append("size", "M").append("num", 100).append("color", "blue"),
new Document().append("size", "L").append("num", 100).append("color", "green")
)));
collection.insertOne(new Document("_id", new ObjectId("52350353b2eff1353b349de9"))
.append("code", "ijk")
.append("tags", Arrays.asList("electronics", "school"))
.append("qty", Collections.singletonList(
new Document().append("size", "M").append("num", 100).append("color", "green")
)));
List<Document> documents = toArray(collection.find(json("{ tags: { $all: [ \"appliance\", \"school\", \"book\" ] } }")));
assertThat(documents).hasSize(2);
assertThat(documents.get(0).get("_id")).isEqualTo(new ObjectId("5234cc89687ea597eabee675"));
assertThat(documents.get(1).get("_id")).isEqualTo(new ObjectId("5234cc8a687ea597eabee676"));
}
@Test
public void testMatchesElementQuery() throws Exception {
collection.insertOne(json("_id: 1, results: [ 82, 85, 88 ]"));
collection.insertOne(json("_id: 2, results: [ 75, 88, 89 ]"));
List<Document> results = toArray(collection.find(json("results: { $elemMatch: { $gte: 80, $lt: 85 } }")));
assertThat(results).hasSize(1);
assertThat(results.get(0)).isEqualTo(json("\"_id\" : 1, \"results\" : [ 82, 85, 88 ]"));
}
@Test
public void testIllegalElementMatchQuery() throws Exception {
collection.insertOne(json("_id: 1, results: [ 82, 85, 88 ]"));
try {
collection.find(json("results: { $elemMatch: [ 85 ] }")).first();
fail("MongoQueryException expected");
} catch (MongoQueryException e) {
assertThat(e.getErrorCode()).isEqualTo(2);
assertThat(e.getErrorMessage()).isEqualTo("$elemMatch needs an Object");
}
try {
collection.find(json("results: { $elemMatch: 1 }")).first();
fail("MongoQueryException expected");
} catch (MongoQueryException e) {
assertThat(e.getErrorCode()).isEqualTo(2);
assertThat(e.getErrorMessage()).isEqualTo("$elemMatch needs an Object");
}
}
@Test
public void testQueryWithComment() throws Exception {
collection.insertOne(json("_id: 1, x: 2"));
collection.insertOne(json("_id: 2, x: 3"));
collection.insertOne(json("_id: 3, x: 4"));
List<Document> documents = toArray(collection.find(json("x: { $mod: [ 2, 0 ] }, $comment: \"Find even values.\"")));
assertThat(documents).hasSize(2);
assertThat(documents.get(0).get("_id")).isEqualTo(1);
assertThat(documents.get(1).get("_id")).isEqualTo(3);
}
private void insertAndFindLargeDocument(int numKeyValues, int id) {
Document document = new Document("_id", id);
for (int i = 0; i < numKeyValues; i++) {
document.put("key-" + i, "value-" + i);
}
collection.insertOne(document);
Document persistentDocument = collection.find(new Document("_id", id)).first();
assertThat(persistentDocument.keySet()).hasSize(numKeyValues + 1);
}
private void assertMongoQueryException(Bson filter) {
try {
collection.find(filter).first();
fail("MongoQueryException expected");
} catch (MongoQueryException e) {
assertThat(e.getCode()).isEqualTo(14816);
assertThat(e.getMessage()).contains("nonempty array");
}
}
private void insertUpdateInBulk(boolean ordered) {
List<WriteModel<Document>> ops = new ArrayList<>();
ops.add(new InsertOneModel<>(json("_id: 1, field: 'x'")));
ops.add(new InsertOneModel<>(json("_id: 2, field: 'x'")));
ops.add(new InsertOneModel<>(json("_id: 3, field: 'x'")));
ops.add(new UpdateManyModel<Document>(json("field: 'x'"), set("field", "y")));
BulkWriteResult result = collection.bulkWrite(ops, new BulkWriteOptions().ordered(ordered));
assertThat(result.getInsertedCount()).isEqualTo(3);
assertThat(result.getDeletedCount()).isEqualTo(0);
assertThat(result.getModifiedCount()).isEqualTo(3);
assertThat(result.getMatchedCount()).isEqualTo(3);
long totalDocuments = collection.countDocuments();
assertThat(totalDocuments).isEqualTo(3);
long documentsWithY = collection.countDocuments(json("field: 'y'"));
assertThat(documentsWithY).isEqualTo(3);
}
private void insertUpdateInBulkNoMatch(boolean ordered) {
collection.insertOne(json("foo: 'bar'"));
List<WriteModel<Document>> ops = new ArrayList<>();
ops.add(new UpdateOneModel<Document>(ne("foo", "bar"), set("field", "y")));
BulkWriteResult result = collection.bulkWrite(ops, new BulkWriteOptions().ordered(ordered));
assertThat(result.getInsertedCount()).isEqualTo(0);
assertThat(result.getDeletedCount()).isEqualTo(0);
assertThat(result.getModifiedCount()).isEqualTo(0);
assertThat(result.getMatchedCount()).isEqualTo(0);
}
private void removeInBulk(boolean ordered) {
DeleteManyModel<Document> deleteOp = new DeleteManyModel<>(json("field: 'y'"));
BulkWriteResult result = collection.bulkWrite(Collections.singletonList(deleteOp),
new BulkWriteOptions().ordered(ordered));
assertThat(result.getDeletedCount()).isEqualTo(3);
assertThat(collection.countDocuments()).isZero();
}
}
|
Use assertThatExceptionOfType instead of try/fail/catch
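For illustration only (not part of the original commit): with AssertJ, each try/fail/catch block in the old test file above collapses into one chained assertion via assertThatExceptionOfType. A minimal sketch against the duplicate-key test, reusing the test class's existing collection and json helpers:

import static org.assertj.core.api.Assertions.assertThatExceptionOfType;

@Test
public void testInsertsWithUniqueIndex() {
    collection.createIndex(new Document("uniqueKeyField", 1), new IndexOptions().unique(true));
    collection.insertOne(json("uniqueKeyField: 'abc1', afield: 'avalue'"));
    // Expected exception type, throwing call, and message check replace try/fail/catch in one chain.
    assertThatExceptionOfType(MongoWriteException.class)
        .isThrownBy(() -> collection.insertOne(json("uniqueKeyField: 'abc1', afield: 'avalue'")))
        .withMessageContaining("duplicate key error");
}

The same pattern covers the MongoCommandException and MongoQueryException cases above; only the exception class and the expected message fragment change.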
|
test-common/src/main/java/de/bwaldvogel/mongo/backend/AbstractBackendTest.java
|
Use assertThatExceptionOfType instead of try/fail/catch
|
|
Java
|
bsd-3-clause
|
0ccd2c15dc7e45f6b285fbc4eb15e2244a869203
| 0
|
fbastian/owltools,owlcollab/owltools,owlcollab/owltools,owlcollab/owltools,dhimmel/owltools,dhimmel/owltools,fbastian/owltools,owlcollab/owltools,owlcollab/owltools,fbastian/owltools,dhimmel/owltools,dhimmel/owltools,fbastian/owltools,fbastian/owltools,owlcollab/owltools,dhimmel/owltools,dhimmel/owltools,fbastian/owltools
|
package owltools.solrj;
import java.io.IOException;
import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.Logger;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.common.SolrInputDocument;
//import org.geneontology.lego.json.LegoShuntGraphTool;
import org.geneontology.lego.model.LegoLink;
import org.geneontology.lego.model.LegoNode;
import org.geneontology.lego.model.LegoTools.UnExpectedStructureException;
import org.geneontology.lego.model2.LegoGraph;
import org.geneontology.lego.model2.LegoUnitTools;
import org.geneontology.lego.model2.LegoUnit;
import org.semanticweb.owlapi.model.OWLClass;
import org.semanticweb.owlapi.model.OWLClassExpression;
import org.semanticweb.owlapi.model.OWLNamedIndividual;
import org.semanticweb.owlapi.model.OWLObject;
import org.semanticweb.owlapi.model.OWLObjectProperty;
import org.semanticweb.owlapi.model.OWLPropertyExpression;
import org.semanticweb.owlapi.reasoner.OWLReasoner;
import owltools.gaf.Bioentity;
import owltools.gaf.EcoTools;
import owltools.gaf.ExtensionExpression;
import owltools.gaf.GafDocument;
import owltools.gaf.GeneAnnotation;
import owltools.gaf.TaxonTools;
import owltools.gaf.WithInfo;
import owltools.graph.OWLGraphEdge;
import owltools.graph.OWLGraphWrapper;
import owltools.graph.OWLQuantifiedProperty;
import owltools.graph.shunt.OWLShuntEdge;
import owltools.graph.shunt.OWLShuntGraph;
import owltools.graph.shunt.OWLShuntNode;
import owltools.io.OWLPrettyPrinter;
import com.google.gson.*;
/**
 * A very specific class for the narrow use case of loading complex annotations from OWL into Solr.
*/
public class ComplexAnnotationSolrDocumentLoader extends AbstractSolrLoader {
private static Logger LOG = Logger.getLogger(ComplexAnnotationSolrDocumentLoader.class);
int doc_limit_trigger = 1000; // the number of documents to add before pushing out to solr
int current_doc_number;
private OWLGraphWrapper currentGraph = null;
private OWLReasoner currentReasoner = null;
private String currentGroupID = null;
private String currentGroupLabel = null;
private Set<OWLNamedIndividual> legoIndividuals = null;
public ComplexAnnotationSolrDocumentLoader(String url, OWLGraphWrapper g, OWLReasoner r, Set<OWLNamedIndividual> individuals, String agID, String agLabel) throws MalformedURLException {
super(url);
//setGraph(g);
current_doc_number = 0;
currentGraph = g;
currentReasoner = r;
legoIndividuals = individuals;
currentGroupID = agID;
currentGroupLabel = agLabel;
}
@Override
public void load() throws SolrServerException, IOException {
LOG.info("Loading complex annotation documents...");
if( currentGraph == null ){
LOG.info("ERROR? current OWLGraphWrapper graph from Lego is not apparently defined...");
}else{
LegoUnitTools lTools = new LegoUnitTools(currentGraph, currentReasoner);
//Set<OWLNamedIndividual> individuals = legoGraph.getSourceOntology().getIndividualsInSignature();
LegoGraph lUnitTools = null;
try {
//lUnitTools = lTools.createLegoGraph(individuals);
lUnitTools = lTools.createLegoGraph(legoIndividuals);
} catch (UnExpectedStructureException e) {
LOG.error("LegoUnitTools did not initialize.");
return;
}
List<LegoLink> links = lUnitTools.getLinks();
List<LegoNode> nodes = lUnitTools.getNodes();
List<LegoUnit> units = lUnitTools.getUnits();
OWLShuntGraph shuntGraph = createShuntGraph(links, nodes);
// // Store node information for later access during assembly.
// //Map<String, LegoNode> nInfo = new HashMap<String, LegoNode>();
// Map<String,LegoNode> nodeInfo = new HashMap<String,LegoNode>();
// Map<String,List<String>> nodeLoc = new HashMap<String,List<String>>();
// for( LegoNode n : nodes ){
//
// // Resolve node ID and label.
// if( n != null ){
//
// OWLClassExpression ntype = n.getType();
// String nid = null;
// String nlbl = null;
// if( ! ntype.isAnonymous() ){
// nid = n.getType().asOWLClass().getIRI().toString();
// nlbl = bestLabel(n.getType());
// }else{
// // TODO: What case is this.
// nid = ntype.toString();
// nlbl = ntype.toString();
// }
// LOG.info("\nnode (id): " + nid);
// LOG.info("node (lbl): " + nlbl);
// nodeInfo.put(nid, n);
//
// OWLClass e = n.getActiveEntity();
// if( e != null ){
// String aid = n.getActiveEntity().getIRI().toString();
// String albl = currentGraph.getLabelOrDisplayId(n.getActiveEntity());
// LOG.info("node-a (id): " + aid);
// LOG.info("node-a (lbl): " + albl);
// }
// if( n.isBp() ){
// LOG.info("node is process^");
// }
//
// // Collect cell information if possible.
// List<String> llist = new ArrayList<String>();
// Collection<OWLClassExpression> cell_loc = n.getCellularLocation();
// for( OWLClassExpression cell_loc_cls : cell_loc ){
// // First, the trivial transfer to the final set.
// String loc_id = currentGraph.getIdentifier(cell_loc_cls);
// String loc_lbl = bestLabel(cell_loc_cls);
// LOG.info("node location: " + loc_lbl + " (" + loc_id + ")");
//
// llist.add(loc_lbl);
//
// //// Ensure
// //if( ! nodeLoc.containsKey(nid) ){
// //
// //}
// }
// nodeLoc.put(nid, llist);
// }
// }
// Collect the high-level group information: topo graph and group label/id
//LegoShuntGraphTool shuntTool = new LegoShuntGraphTool();
//OWLShuntGraph shuntGraph = shuntTool.renderLego(lNodes, graph);
// TODO: This next bit is all temporary until we get real labels in somehow.
String groupID = currentGroupID;
String groupLabel = currentGroupLabel;
// Iterate over the participant nodes and collect the unit information.
for( LegoUnit u : units ){
SolrInputDocument doc = collect_unit_info(u, groupID, groupLabel, shuntGraph);
if( doc != null ){
add(doc);
// Incremental commits.
current_doc_number++;
if( current_doc_number % doc_limit_trigger == 0 ){
LOG.info("Processed " + doc_limit_trigger + " general ontology docs at " + current_doc_number + " and committing...");
incrementalAddAndCommit();
}
}
}
}
// Get the remainder of the docs in.
LOG.info("Doing clean-up (final) commit at " + current_doc_number + " complex annotation documents...");
addAllAndCommit();
LOG.info("Done.");
}
private OWLShuntGraph createShuntGraph(List<LegoLink> links, List<LegoNode> nodes) {
// Assemble the group shunt graph from available information.
// Most of the interesting stuff is happening with the meta-information.
OWLShuntGraph shuntGraph = new OWLShuntGraph();
OWLPrettyPrinter pp = new OWLPrettyPrinter(currentGraph);
// nodes
for( LegoNode node : nodes ){
String uid = node.getId().toString();
LOG.info("\nunit id: " + uid);
OWLShuntNode shuntNode = new OWLShuntNode(uid, uid);
// Try and get some info in there.
Map<String,Object> metadata = new HashMap<String,Object>();
OWLClass enabledByClass = node.getActiveEntity();
if( enabledByClass != null ){
String iid = enabledByClass.getIRI().toString();
String ilbl = bestLabel(enabledByClass);
metadata.put("enabled_by", ilbl);
LOG.info("unit enabled_by (id): " + iid);
LOG.info("unit enabled_by (lbl): " + ilbl);
}
OWLClassExpression type = node.getType();
if (node.isBp()){
String processLbl;
if (type == null ) {
processLbl = "Unknown Process";
}else if (type.isAnonymous() == false) {
OWLClass processClass = type.asOWLClass();
String iid = processClass.getIRI().toString();
processLbl = bestLabel(processClass);
LOG.info("unit process (id): " + iid);
}else{
processLbl = pp.render(type);
}
metadata.put("process", processLbl);
LOG.info("unit process (lbl): " + processLbl);
}
if (node.isMf() || node.isCmf()) {
String activityLbl;
if (type == null) {
// use custom label or GO:0003674 'molecular function'
activityLbl = "Unknown Activity";
}else if (type.isAnonymous() == false) {
OWLClass ln_oc = type.asOWLClass();
String iid = ln_oc.getIRI().toString();
activityLbl = bestLabel(ln_oc);
LOG.info("unit activity (id): " + iid);
}else {
activityLbl = pp.render(type);
}
metadata.put("activity", activityLbl);
LOG.info("unit activity (lbl): " + activityLbl);
}
Collection<OWLClassExpression> locations = node.getCellularLocation();
if (locations != null && !locations.isEmpty()) {
List<String> locationLabels = new ArrayList<String>();
for (OWLClassExpression ce : locations) {
String locationlbl;
if( ce.isAnonymous() == false ){
OWLClass locationClass = ce.asOWLClass();
//String locationId = locationClass.getIRI().toString();
locationlbl = bestLabel(locationClass);
}else {
locationlbl = pp.render(ce);
}
locationLabels.add(locationlbl);
}
// Add locationLabels to meta data map
metadata.put("location", locationLabels);
LOG.info("unit location (lbl): " + StringUtils.join(locationLabels, ", "));
}
// TODO decide on if and how to include the other class expressions
Collection<OWLClassExpression> others = node.getUnknowns();
if (others != null && ! others.isEmpty()) {
List<String> unknownLabels = new ArrayList<String>();
for (OWLClassExpression ce : others) {
String lbl = null;
if (ce.isAnonymous() == false) {
OWLClass otherClass = ce.asOWLClass();
lbl = bestLabel(otherClass);
}else {
lbl = pp.render(ce);
}
unknownLabels.add(lbl);
}
metadata.put("unknown", unknownLabels);
}
// Set meta-data and add to node assembly.
shuntNode.setMetadata(metadata);
shuntGraph.addNode(shuntNode);
}
// edges
for( LegoLink l : links ){
String sid = l.getSource().getIRI().toString();
String oid = l.getNamedTarget().getIRI().toString();
String pid = l.getProperty().asOWLObjectProperty().getIRI().toString();
LOG.info("\nlink (sid): " + sid);
LOG.info("link (sid): " + oid);
LOG.info("link (sid): " + pid);
OWLShuntEdge shuntEdge = new OWLShuntEdge(sid, oid, pid);
shuntGraph.addEdge(shuntEdge);
}
return shuntGraph;
}
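/**
 * Best-effort label lookup: prefer the graph label, fall back to the
 * identifier, and return "???" when no object is given.
 */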
private String bestLabel(OWLObject oc){
String label = "???";
if( oc != null ){
label = currentGraph.getLabel(oc);
if( label == null ){
label = currentGraph.getIdentifier(oc);
}
}
return label;
}
/**
* Convert a (probably url) string into something not terrible to use in real life.
*
* @param id
* @return
*/
private String lessTerribleID(String id){
String newID = new String(id);
newID = StringUtils.replace(newID, ":", "_");
newID = StringUtils.replace(newID, "/", "_");
newID = StringUtils.replace(newID, "#", "_");
return newID;
}
/**
 * Take the given unit and group information and build an index document (no commits).
 * Main wrapping for adding complex annotation documents to GOlr.
 * @param u
 * @param groupID
 * @param groupLabel
 * @param shuntGraph
*
* @return an input doc for add()
*/
public SolrInputDocument collect_unit_info(LegoUnit u, String groupID, String groupLabel, OWLShuntGraph shuntGraph) {
SolrInputDocument ca_doc = new SolrInputDocument();
// We'll be using the same is_a-part_of relation list a lot.
ArrayList<String> isap = new ArrayList<String>();
isap.add("BFO:0000050");
ca_doc.addField("document_category", "complex_annotation");
// annotation_unit
// annotation_unit_label
// TODO: This next bit is all temporary until we get real IDs and labels in somehow.
String unitID = u.getId().toString();
unitID = lessTerribleID(unitID);
ca_doc.addField("annotation_unit", unitID);
String unitLabel = u.toString(); // TODO: ???
//ca_doc.addField("annotation_unit_label", unitLabel);
ca_doc.addField("annotation_unit_label", "view");
// TODO: This sucks, but live with it for now for testing.
//ca_doc.addField("id", "???");
//ca_doc.addField("id", current_doc_number);
ca_doc.addField("id", unitID);
// annotation_group(_label)
ca_doc.addField("annotation_group", groupID);
ca_doc.addField("annotation_group_label", groupLabel);
// enabled_by(_label)
OWLClass oc = u.getEnabledBy();
String oc_id = currentGraph.getIdentifier(oc);
String oc_lbl = bestLabel(oc);
ca_doc.addField("enabled_by", oc_id);
ca_doc.addField("enabled_by_label", oc_lbl);
// process_class(_label)
// process_class_closure(_label)
// process_class_closure_map
OWLClassExpression process_ce = u.getProcess();
if( process_ce != null ){
// Get ready for the isa-part_of closure assembly.
OWLClass ln_oc = process_ce.asOWLClass();
ca_doc.addField("process_class", currentGraph.getIdentifier(ln_oc));
ca_doc.addField("process_class_label", bestLabel(ln_oc));
addClosureToDoc(isap, "process_class_closure", "process_class_closure_label", "process_class_closure_map", ln_oc, ca_doc);
}
// function_class(_label)
// function_class_closure(_label)
// function_class_closure_map
OWLClassExpression activity_ce = u.getActivity();
if( activity_ce != null ){
OWLClass ln_oc = activity_ce.asOWLClass();
ca_doc.addField("function_class", currentGraph.getIdentifier(ln_oc));
ca_doc.addField("function_class_label", bestLabel(ln_oc));
addClosureToDoc(isap, "function_class_closure", "function_class_closure_label", "function_class_closure_map", ln_oc, ca_doc);
}
// location_list(_label)
// location_list_map
// location_list_closure(_label)
// location_list_closure_map
// Caches for location_list.
Set<String> locIDSet = new HashSet<String>();
Set<String> locLabelSet = new HashSet<String>();
Map<String, String> locMap = new HashMap<String, String>();
// Caches for location_list_closure.
Set<String> locClosureIDSet = new HashSet<String>();
Set<String> locClosureLabelSet = new HashSet<String>();
Map<String, String> locClosureMap = new HashMap<String, String>();
// Collect painfully class by class.
Collection<OWLClass> cell_loc = u.getLocation();
for( OWLClassExpression cell_loc_cls : cell_loc ){
// First, the trivial transfer to the final set.
String loc_id = currentGraph.getIdentifier(cell_loc_cls);
String loc_lbl = bestLabel(cell_loc_cls);
//String loc_lbl = currentGraph.getLabelOrDisplayId(cell_loc_cls);
locIDSet.add(loc_id);
locLabelSet.add(loc_lbl);
locMap.put(loc_id, loc_lbl);
// Add closures to cache sets
List<String> loc_id_closure = currentGraph.getRelationIDClosure(cell_loc_cls, isap);
locClosureIDSet.addAll(loc_id_closure);
List<String> loc_label_closure = currentGraph.getRelationLabelClosure(cell_loc_cls, isap);
locClosureLabelSet.addAll(loc_label_closure);
Map<String, String> loc_closure_map = currentGraph.getRelationClosureMap(cell_loc_cls, isap);
locClosureMap.putAll(loc_closure_map);
}
// Process all collected caches into the document fields.
ca_doc.addField("location_list", locIDSet);
ca_doc.addField("location_list_label", locLabelSet);
ca_doc.addField("location_list_closure", locClosureIDSet);
ca_doc.addField("location_list_closure_label", locClosureLabelSet);
// Compile location maps to JSON.
if( ! locMap.isEmpty() ){
ca_doc.addField("location_list_map", gson.toJson(locMap));
}
if( ! locClosureMap.isEmpty() ){
ca_doc.addField("location_list_closure_map", gson.toJson(locClosureMap));
}
// topology_graph_json
//ca_doc.addField("topology_graph_json", shuntGraph.toJSON());
ca_doc.addField("topology_graph_json", shuntGraph.unsafeToJSON());
// LATER: panther_family(_label)
// LATER: taxon(_label)
// LATER: taxon_closure(_label)
// LATER: taxon_closure(_map)
// LATER: owl_blob_json
return ca_doc;
}
/*
* Add specified closure of OWLObject to the doc.
*/
private Map<String, String> addClosureToDoc(ArrayList<String> relations, String closureName, String closureNameLabel, String closureMap,
OWLObject cls, SolrInputDocument solr_doc){
// Add closures to doc; label and id.
List<String> idClosure = currentGraph.getRelationIDClosure(cls, relations);
List<String> labelClosure = currentGraph.getRelationLabelClosure(cls, relations);
solr_doc.addField(closureName, idClosure);
solr_doc.addField(closureNameLabel, labelClosure);
for( String tid : idClosure){
addFieldUnique(solr_doc, closureName, tid);
}
// Compile closure maps to JSON.
Map<String, String> cmap = currentGraph.getRelationClosureMap(cls, relations);
if( ! cmap.isEmpty() ){
String jsonized_cmap = gson.toJson(cmap);
solr_doc.addField(closureMap, jsonized_cmap);
}
return cmap;
}
}
|
OWLTools-Solr/src/main/java/owltools/solrj/ComplexAnnotationSolrDocumentLoader.java
|
package owltools.solrj;
import java.io.IOException;
import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.Logger;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.common.SolrInputDocument;
//import org.geneontology.lego.json.LegoShuntGraphTool;
import org.geneontology.lego.model.LegoLink;
import org.geneontology.lego.model.LegoNode;
import org.geneontology.lego.model.LegoTools.UnExpectedStructureException;
import org.geneontology.lego.model2.LegoGraph;
import org.geneontology.lego.model2.LegoUnitTools;
import org.geneontology.lego.model2.LegoUnit;
import org.semanticweb.owlapi.model.OWLClass;
import org.semanticweb.owlapi.model.OWLClassExpression;
import org.semanticweb.owlapi.model.OWLNamedIndividual;
import org.semanticweb.owlapi.model.OWLObject;
import org.semanticweb.owlapi.model.OWLObjectProperty;
import org.semanticweb.owlapi.model.OWLPropertyExpression;
import org.semanticweb.owlapi.reasoner.OWLReasoner;
import owltools.gaf.Bioentity;
import owltools.gaf.EcoTools;
import owltools.gaf.ExtensionExpression;
import owltools.gaf.GafDocument;
import owltools.gaf.GeneAnnotation;
import owltools.gaf.TaxonTools;
import owltools.gaf.WithInfo;
import owltools.graph.OWLGraphEdge;
import owltools.graph.OWLGraphWrapper;
import owltools.graph.OWLQuantifiedProperty;
import owltools.graph.shunt.OWLShuntEdge;
import owltools.graph.shunt.OWLShuntGraph;
import owltools.graph.shunt.OWLShuntNode;
import owltools.io.OWLPrettyPrinter;
import com.google.gson.*;
/**
 * A very specific class for the narrow use case of loading complex annotations from OWL into Solr.
*/
public class ComplexAnnotationSolrDocumentLoader extends AbstractSolrLoader {
private static Logger LOG = Logger.getLogger(ComplexAnnotationSolrDocumentLoader.class);
int doc_limit_trigger = 1000; // the number of documents to add before pushing out to solr
int current_doc_number;
private OWLGraphWrapper currentGraph = null;
private OWLReasoner currentReasoner = null;
private String currentGroupID = null;
private String currentGroupLabel = null;
private Set<OWLNamedIndividual> legoIndividuals = null;
public ComplexAnnotationSolrDocumentLoader(String url, OWLGraphWrapper g, OWLReasoner r, Set<OWLNamedIndividual> individuals, String agID, String agLabel) throws MalformedURLException {
super(url);
//setGraph(g);
current_doc_number = 0;
currentGraph = g;
currentReasoner = r;
legoIndividuals = individuals;
currentGroupID = agID;
currentGroupLabel = agLabel;
}
@Override
public void load() throws SolrServerException, IOException {
LOG.info("Loading complex annotation documents...");
if( currentGraph == null ){
LOG.info("ERROR? current OWLGraphWrapper graph from Lego is not apparently defined...");
}else{
LegoUnitTools lTools = new LegoUnitTools(currentGraph, currentReasoner);
//Set<OWLNamedIndividual> individuals = legoGraph.getSourceOntology().getIndividualsInSignature();
LegoGraph lUnitTools = null;
try {
//lUnitTools = lTools.createLegoGraph(individuals);
lUnitTools = lTools.createLegoGraph(legoIndividuals);
} catch (UnExpectedStructureException e) {
LOG.error("LegoUnitTools did not initialize.");
return;
}
List<LegoLink> links = lUnitTools.getLinks();
List<LegoNode> nodes = lUnitTools.getNodes();
List<LegoUnit> units = lUnitTools.getUnits();
OWLShuntGraph shuntGraph = createShuntGraph(links, nodes);
// // Store node information for later access during assembly.
// //Map<String, LegoNode> nInfo = new HashMap<String, LegoNode>();
// Map<String,LegoNode> nodeInfo = new HashMap<String,LegoNode>();
// Map<String,List<String>> nodeLoc = new HashMap<String,List<String>>();
// for( LegoNode n : nodes ){
//
// // Resolve node ID and label.
// if( n != null ){
//
// OWLClassExpression ntype = n.getType();
// String nid = null;
// String nlbl = null;
// if( ! ntype.isAnonymous() ){
// nid = n.getType().asOWLClass().getIRI().toString();
// nlbl = bestLabel(n.getType());
// }else{
// // TODO: What case is this.
// nid = ntype.toString();
// nlbl = ntype.toString();
// }
// LOG.info("\nnode (id): " + nid);
// LOG.info("node (lbl): " + nlbl);
// nodeInfo.put(nid, n);
//
// OWLClass e = n.getActiveEntity();
// if( e != null ){
// String aid = n.getActiveEntity().getIRI().toString();
// String albl = currentGraph.getLabelOrDisplayId(n.getActiveEntity());
// LOG.info("node-a (id): " + aid);
// LOG.info("node-a (lbl): " + albl);
// }
// if( n.isBp() ){
// LOG.info("node is process^");
// }
//
// // Collect cell information if possible.
// List<String> llist = new ArrayList<String>();
// Collection<OWLClassExpression> cell_loc = n.getCellularLocation();
// for( OWLClassExpression cell_loc_cls : cell_loc ){
// // First, the trivial transfer to the final set.
// String loc_id = currentGraph.getIdentifier(cell_loc_cls);
// String loc_lbl = bestLabel(cell_loc_cls);
// LOG.info("node location: " + loc_lbl + " (" + loc_id + ")");
//
// llist.add(loc_lbl);
//
// //// Ensure
// //if( ! nodeLoc.containsKey(nid) ){
// //
// //}
// }
// nodeLoc.put(nid, llist);
// }
// }
// Collect the high-level group information: topo graph and group label/id
//LegoShuntGraphTool shuntTool = new LegoShuntGraphTool();
//OWLShuntGraph shuntGraph = shuntTool.renderLego(lNodes, graph);
// TODO: This next bit is all temporary until we get real labels in somehow.
String groupID = currentGroupID;
String groupLabel = currentGroupLabel;
// Iterate over the participant nodes and collect the unit information.
for( LegoUnit u : units ){
SolrInputDocument doc = collect_unit_info(u, groupID, groupLabel, shuntGraph);
if( doc != null ){
add(doc);
// Incremental commits.
current_doc_number++;
if( current_doc_number % doc_limit_trigger == 0 ){
LOG.info("Processed " + doc_limit_trigger + " general ontology docs at " + current_doc_number + " and committing...");
incrementalAddAndCommit();
}
}
}
}
// Get the remainder of the docs in.
LOG.info("Doing clean-up (final) commit at " + current_doc_number + " complex annotation documents...");
addAllAndCommit();
LOG.info("Done.");
}
private OWLShuntGraph createShuntGraph(List<LegoLink> links, List<LegoNode> nodes) {
// Assemble the group shunt graph from available information.
// Most of the interesting stuff is happening with the meta-information.
OWLShuntGraph shuntGraph = new OWLShuntGraph();
OWLPrettyPrinter pp = new OWLPrettyPrinter(currentGraph);
// nodes
for( LegoNode node : nodes ){
String uid = node.getId().toString();
LOG.info("\nunit id: " + uid);
OWLShuntNode shuntNode = new OWLShuntNode(uid, uid);
// Try and get some info in there.
Map<String,Object> metadata = new HashMap<String,Object>();
OWLClass enabledByClass = node.getActiveEntity();
if( enabledByClass != null ){
String iid = enabledByClass.getIRI().toString();
String ilbl = bestLabel(enabledByClass);
metadata.put("enabled_by", ilbl);
LOG.info("unit enabled_by (id): " + iid);
LOG.info("unit enabled_by (lbl): " + ilbl);
}
OWLClassExpression type = node.getType();
if (node.isBp()){
String processLbl;
if (type == null ) {
processLbl = "Unknown Process";
}else if (type.isAnonymous() == false) {
OWLClass processClass = type.asOWLClass();
String iid = processClass.getIRI().toString();
processLbl = bestLabel(processClass);
LOG.info("unit process (id): " + iid);
}else{
processLbl = pp.render(type);
}
metadata.put("process", processLbl);
LOG.info("unit process (lbl): " + processLbl);
}
if (node.isMf() || node.isCmf()) {
String activityLbl;
if (type == null) {
// use custom label or GO:0003674 'molecular function'
activityLbl = "Unknown Activity";
}else if (type.isAnonymous() == false) {
OWLClass ln_oc = type.asOWLClass();
String iid = ln_oc.getIRI().toString();
activityLbl = bestLabel(ln_oc);
LOG.info("unit activity (id): " + iid);
}else {
activityLbl = pp.render(type);
}
metadata.put("activity", activityLbl);
LOG.info("unit activity (lbl): " + activityLbl);
}
Collection<OWLClassExpression> locations = node.getCellularLocation();
if (locations != null && !locations.isEmpty()) {
List<String> locationLabels = new ArrayList<String>();
for (OWLClassExpression ce : locations) {
String locationlbl;
if( ce.isAnonymous() == false ){
OWLClass locationClass = ce.asOWLClass();
//String locationId = locationClass.getIRI().toString();
locationlbl = bestLabel(locationClass);
}else {
locationlbl = pp.render(ce);
}
locationLabels.add(locationlbl);
}
// Add locationLabels to meta data map
metadata.put("location", locationLabels);
LOG.info("unit location (lbl): " + StringUtils.join(locationLabels, ", "));
}
// TODO decide on if and how to include the other class expressions
Collection<OWLClassExpression> others = node.getUnknowns();
if (others != null && !others.isEmpty()) {
for (OWLClassExpression ce : others) {
if (ce.isAnonymous() == false) {
OWLClass otherClass = ce.asOWLClass();
String lbl = bestLabel(otherClass);
}else {
String lbl = pp.render(ce);
}
}
}
shuntNode.setMetadata(metadata);
shuntGraph.addNode(shuntNode);
}
// edges
for( LegoLink l : links ){
String sid = l.getSource().getIRI().toString();
String oid = l.getNamedTarget().getIRI().toString();
String pid = l.getProperty().asOWLObjectProperty().getIRI().toString();
LOG.info("\nlink (sid): " + sid);
LOG.info("link (sid): " + oid);
LOG.info("link (sid): " + pid);
OWLShuntEdge shuntEdge = new OWLShuntEdge(sid, oid, pid);
shuntGraph.addEdge(shuntEdge);
}
return shuntGraph;
}
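/**
 * Best-effort label lookup: prefer the graph label, fall back to the
 * identifier, and return "???" when no object is given.
 */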
private String bestLabel(OWLObject oc){
String label = "???";
if( oc != null ){
label = currentGraph.getLabel(oc);
if( label == null ){
label = currentGraph.getIdentifier(oc);
}
}
return label;
}
/**
* Convert a (probably url) string into something not terrible to use in real life.
*
* @param id
* @return
*/
private String lessTerribleID(String id){
String newID = new String(id);
newID = StringUtils.replace(newID, ":", "_");
newID = StringUtils.replace(newID, "/", "_");
newID = StringUtils.replace(newID, "#", "_");
return newID;
}
/**
 * Take the given unit and group information and build an index document (no commits).
 * Main wrapping for adding complex annotation documents to GOlr.
 * @param u
 * @param groupID
 * @param groupLabel
 * @param shuntGraph
*
* @return an input doc for add()
*/
public SolrInputDocument collect_unit_info(LegoUnit u, String groupID, String groupLabel, OWLShuntGraph shuntGraph) {
SolrInputDocument ca_doc = new SolrInputDocument();
// We'll be using the same is_a-part_of relation list a lot.
ArrayList<String> isap = new ArrayList<String>();
isap.add("BFO:0000050");
ca_doc.addField("document_category", "complex_annotation");
// annotation_unit
// annotation_unit_label
// TODO: This next bit is all temporary until we get real IDs and labels in somehow.
String unitID = u.getId().toString();
unitID = lessTerribleID(unitID);
ca_doc.addField("annotation_unit", unitID);
String unitLabel = u.toString(); // TODO: ???
//ca_doc.addField("annotation_unit_label", unitLabel);
ca_doc.addField("annotation_unit_label", "view");
// TODO: This sucks, but live with it for now for testing.
//ca_doc.addField("id", "???");
//ca_doc.addField("id", current_doc_number);
ca_doc.addField("id", unitID);
// annotation_group(_label)
ca_doc.addField("annotation_group", groupID);
ca_doc.addField("annotation_group_label", groupLabel);
// enabled_by(_label)
OWLClass oc = u.getEnabledBy();
String oc_id = currentGraph.getIdentifier(oc);
String oc_lbl = bestLabel(oc);
ca_doc.addField("enabled_by", oc_id);
ca_doc.addField("enabled_by_label", oc_lbl);
// process_class(_label)
// process_class_closure(_label)
// process_class_closure_map
OWLClassExpression process_ce = u.getProcess();
if( process_ce != null ){
// Get ready for the isa-part_of closure assembly.
OWLClass ln_oc = process_ce.asOWLClass();
ca_doc.addField("process_class", currentGraph.getIdentifier(ln_oc));
ca_doc.addField("process_class_label", bestLabel(ln_oc));
addClosureToDoc(isap, "process_class_closure", "process_class_closure_label", "process_class_closure_map", ln_oc, ca_doc);
}
// function_class(_label)
// function_class_closure(_label)
// function_class_closure_map
OWLClassExpression activity_ce = u.getActivity();
if( activity_ce != null ){
OWLClass ln_oc = activity_ce.asOWLClass();
ca_doc.addField("function_class", currentGraph.getIdentifier(ln_oc));
ca_doc.addField("function_class_label", bestLabel(ln_oc));
addClosureToDoc(isap, "function_class_closure", "function_class_closure_label", "function_class_closure_map", ln_oc, ca_doc);
}
// location_list(_label)
// location_list_map
// location_list_closure(_label)
// location_list_closure_map
// Caches for location_list.
Set<String> locIDSet = new HashSet<String>();
Set<String> locLabelSet = new HashSet<String>();
Map<String, String> locMap = new HashMap<String, String>();
// Caches for location_list_closure.
Set<String> locClosureIDSet = new HashSet<String>();
Set<String> locClosureLabelSet = new HashSet<String>();
Map<String, String> locClosureMap = new HashMap<String, String>();
// Collect painfully class by class.
Collection<OWLClass> cell_loc = u.getLocation();
for( OWLClassExpression cell_loc_cls : cell_loc ){
// First, the trivial transfer to the final set.
String loc_id = currentGraph.getIdentifier(cell_loc_cls);
String loc_lbl = bestLabel(cell_loc_cls);
//String loc_lbl = currentGraph.getLabelOrDisplayId(cell_loc_cls);
locIDSet.add(loc_id);
locLabelSet.add(loc_lbl);
locMap.put(loc_id, loc_lbl);
// Add closures to cache sets
List<String> loc_id_closure = currentGraph.getRelationIDClosure(cell_loc_cls, isap);
locClosureIDSet.addAll(loc_id_closure);
List<String> loc_label_closure = currentGraph.getRelationLabelClosure(cell_loc_cls, isap);
locClosureLabelSet.addAll(loc_label_closure);
Map<String, String> loc_closure_map = currentGraph.getRelationClosureMap(cell_loc_cls, isap);
locClosureMap.putAll(loc_closure_map);
}
// Process all collected caches into the document fields.
ca_doc.addField("location_list", locIDSet);
ca_doc.addField("location_list_label", locLabelSet);
ca_doc.addField("location_list_closure", locClosureIDSet);
ca_doc.addField("location_list_closure_label", locClosureLabelSet);
// Compile location maps to JSON.
if( ! locMap.isEmpty() ){
ca_doc.addField("location_list_map", gson.toJson(locMap));
}
if( ! locClosureMap.isEmpty() ){
ca_doc.addField("location_list_closure_map", gson.toJson(locClosureMap));
}
// topology_graph_json
//ca_doc.addField("topology_graph_json", shuntGraph.toJSON());
ca_doc.addField("topology_graph_json", shuntGraph.unsafeToJSON());
// LATER: panther_family(_label)
// LATER: taxon(_label)
// LATER: taxon_closure(_label)
// LATER: taxon_closure(_map)
// LATER: owl_blob_json
return ca_doc;
}
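// Illustrative call pattern (sketch only; the variable names here are assumed, and the
// commit/flush policy is left to the caller as noted in the javadoc above):
//   SolrInputDocument doc = collect_unit_info(unit, groupID, groupLabel, shuntGraph);
//   server.add(doc);  // e.g. a SolrServer held by the surrounding loader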
/*
* Add specified closure of OWLObject to the doc.
*/
private Map<String, String> addClosureToDoc(ArrayList<String> relations, String closureName, String closureNameLabel, String closureMap,
OWLObject cls, SolrInputDocument solr_doc){
// Add closures to doc; label and id.
List<String> idClosure = currentGraph.getRelationIDClosure(cls, relations);
List<String> labelClosure = currentGraph.getRelationLabelClosure(cls, relations);
solr_doc.addField(closureName, idClosure);
solr_doc.addField(closureNameLabel, labelClosure);
for( String tid : idClosure){
addFieldUnique(solr_doc, closureName, tid);
}
// Compile closure maps to JSON.
Map<String, String> cmap = currentGraph.getRelationClosureMap(cls, relations);
if( ! cmap.isEmpty() ){
String jsonized_cmap = gson.toJson(cmap);
solr_doc.addField(closureMap, jsonized_cmap);
}
return cmap;
}
}
|
added label dump for unknown bits as a placeholder
git-svn-id: f705032614e1ff11fed11a7e506afa6fa6966044@1594 18f1da76-1bb4-b526-5913-e828fe20442d
|
OWLTools-Solr/src/main/java/owltools/solrj/ComplexAnnotationSolrDocumentLoader.java
|
added label dump for unknown bits as a placeholder
|
|
Java
|
mit
|
d3e5abe437dd8bf2f376f7f176e9087bbb9cc1b8
| 0
|
aayvazyan-tgm/dLock,jklepp-tgm/dLock
|
package tgm.hit.rtn.dlock;
import java.io.Serializable;
import java.util.Objects;
public class Peer implements Serializable{
private int port;
private String host;
public Peer(int port, String host) {
this.port = port;
this.host = host;
}
/**
* Getter for property 'host'.
*
* @return Value for property 'host'.
*/
public String getHost() {
return host;
}
/**
* Setter for property 'host'.
*
* @param host Value to set for property 'host'.
*/
public void setHost(String host) {
this.host = host;
}
/**
* Getter for property 'port'.
*
* @return Value for property 'port'.
*/
public int getPort() {
return port;
}
/**
* Setter for property 'port'.
*
* @param port Value to set for property 'port'.
*/
public void setPort(int port) {
this.port = port;
}
/**
* Checks if both Peers contain the same information (Port and Host)
* @param otherPeer the Peer to compare to.
* @return true if they contain the same information, false otherwise
*/
@Override
public boolean equals(Object otherPeer) {
if(otherPeer instanceof Peer) {
Peer storedPeer=(Peer)otherPeer;
if (this.host.equals(storedPeer.host)
&& this.port == storedPeer.port) return true;
}
return false;
}
/** Generates a hash code, using the port and host*/
@Override
public int hashCode() {
return Objects.hash(this.port,this.host);
}
}
|
src/main/java/tgm/hit/rtn/dlock/Peer.java
|
package tgm.hit.rtn.dlock;
import java.io.Serializable;
import java.util.Objects;
public class Peer implements Serializable{
private int port;
private String host;
public Peer(int port, String host) {
this.port = port;
this.host = host;
}
/**
* Getter for property 'host'.
*
* @return Value for property 'host'.
*/
public String getHost() {
return host;
}
/**
* Setter for property 'host'.
*
* @param host Value to set for property 'host'.
*/
public void setHost(String host) {
host = host;
}
/**
* Getter for property 'port'.
*
* @return Value for property 'port'.
*/
public int getPort() {
return port;
}
/**
* Setter for property 'port'.
*
* @param port Value to set for property 'port'.
*/
public void setPort(int port) {
this.port = port;
}
/**
* Checks if both Peers contain the same information (Port and Host)
* @param otherPeer the Peer to compare to.
* @return true if they contain the same information, false otherwise
*/
@Override
public boolean equals(Object otherPeer) {
if(otherPeer instanceof Peer) {
Peer storedPeer=(Peer)otherPeer;
if (this.host.equals(storedPeer.host)
&& this.port == storedPeer.port) return true;
}
return false;
}
/** Generates a hash code, using the port and host*/
@Override
public int hashCode() {
return Objects.hash(this.port,this.host);
}
}
|
fixed wrong setter
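For context, the defect corrected by this commit is the classic self-assignment bug in a setter: without the "this." qualifier the parameter shadows the field, so the assignment has no effect. A minimal sketch of the broken and fixed forms (class name is illustrative):
public class SelfAssignmentExample {
    private String host;
    // Buggy form: the parameter shadows the field, so this assigns the parameter to
    // itself and the field never changes (the "wrong setter" this commit fixes).
    public void setHostBroken(String host) {
        host = host;
    }
    // Fixed form: qualify the field with "this" so the value actually reaches it.
    public void setHost(String host) {
        this.host = host;
    }
}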
|
src/main/java/tgm/hit/rtn/dlock/Peer.java
|
fixed wrong setter
|
|
Java
|
mit
|
fdf240d1579430b9977e068471d00ba2ed848e6b
| 0
|
CruGlobal/godtools-api,CruGlobal/godtools-api
|
package org.cru.godtools.api.packages;
import com.amazonaws.services.s3.model.S3Object;
import org.ccci.util.time.Clock;
import org.cru.godtools.domain.authentication.AuthorizationRecord;
import org.cru.godtools.domain.authentication.AuthorizationService;
import org.cru.godtools.s3.GodToolsS3Client;
import org.jboss.logging.Logger;
import org.xml.sax.SAXException;
import javax.inject.Inject;
import javax.ws.rs.GET;
import javax.ws.rs.HeaderParam;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Response;
import javax.xml.parsers.ParserConfigurationException;
import java.io.IOException;
/**
* Contains RESTful endpoints for delivering GodTools "package" resources.
* - "packages" include translation XML files as well as images.
*
* For more information: https://github.com/CruGlobal/godtools-api/wiki/The-Packages-Endpoint
*
* Created by ryancarlson on 3/14/14.
*/
@Deprecated
@Path("/packages")
public class PackageResource
{
@Inject
AuthorizationService authService;
@Inject
GodToolsS3Client godToolsS3Client;
@Inject
Clock clock;
private Logger log = Logger.getLogger(this.getClass());
/**
* GET - get all packages for the language specified by @param languageCode.
*/
@GET
@Path("/{language}")
@Produces({"application/zip", "application/xml"})
public Response getAllPackagesForLanguage(@PathParam("language") String languageCode,
@HeaderParam("interpreter") Integer minimumInterpreterVersionHeader,
@QueryParam("compressed") String compressed,
@HeaderParam("Authorization") String authTokenHeader,
@QueryParam("Authorization") String authTokenParam) throws ParserConfigurationException, SAXException, IOException
{
log.info("Requesting all packages for language: " + languageCode);
AuthorizationRecord.checkAuthorization(authService.getAuthorizationRecord(authTokenParam, authTokenHeader), clock.currentDateTime());
S3Object packagesZippedFolder = godToolsS3Client.getPackagesZippedFolder(languageCode);
return Response
.ok(packagesZippedFolder.getObjectContent())
.type("application/zip")
.build();
}
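// Sample request against this endpoint (host, token, and application root are
// placeholders, not values from the repository):
//   GET /packages/en
//   Authorization: <auth-token>
// -> 200 OK, Content-Type: application/zip (the zipped packages folder from S3)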
/**
* GET - get all the package specified by @param packageCode for the language specified by @param languageCode.
*/
@GET
@Path("/{language}/{package}")
@Produces({"application/zip", "application/xml"})
public Response getPackage(@PathParam("language") String languageCode,
@PathParam("package") String packageCode,
@QueryParam("compressed") String compressed,
@HeaderParam("Authorization") String authTokenHeader,
@QueryParam("Authorization") String authTokenParam) throws Exception
{
log.info("Requesting package " + packageCode + " for language: " + languageCode);
AuthorizationRecord.checkAuthorization(authService.getAuthorizationRecord(authTokenParam, authTokenHeader), clock.currentDateTime());
return Response.status(Response.Status.NOT_FOUND).build();
}
}
|
src/main/java/org/cru/godtools/api/packages/PackageResource.java
|
package org.cru.godtools.api.packages;
import com.amazonaws.services.s3.model.S3Object;
import org.ccci.util.time.Clock;
import org.cru.godtools.domain.authentication.AuthorizationRecord;
import org.cru.godtools.domain.authentication.AuthorizationService;
import org.cru.godtools.s3.GodToolsS3Client;
import org.jboss.logging.Logger;
import org.xml.sax.SAXException;
import javax.inject.Inject;
import javax.ws.rs.GET;
import javax.ws.rs.HeaderParam;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Response;
import javax.xml.parsers.ParserConfigurationException;
import java.io.IOException;
/**
* Contains RESTful endpoints for delivering GodTools "package" resources.
* - "packages" include translation XML files as well as images.
*
* For more information: https://github.com/CruGlobal/godtools-api/wiki/The-Packages-Endpoint
*
* Created by ryancarlson on 3/14/14.
*/
@Path("/packages")
public class PackageResource
{
@Inject
AuthorizationService authService;
@Inject
GodToolsS3Client godToolsS3Client;
@Inject
Clock clock;
private Logger log = Logger.getLogger(this.getClass());
/**
* GET - get all packages for the language specified by @param languageCode.
*/
@GET
@Path("/{language}")
@Produces({"application/zip", "application/xml"})
public Response getAllPackagesForLanguage(@PathParam("language") String languageCode,
@HeaderParam("interpreter") Integer minimumInterpreterVersionHeader,
@QueryParam("compressed") String compressed,
@HeaderParam("Authorization") String authTokenHeader,
@QueryParam("Authorization") String authTokenParam) throws ParserConfigurationException, SAXException, IOException
{
log.info("Requesting all packages for language: " + languageCode);
AuthorizationRecord.checkAuthorization(authService.getAuthorizationRecord(authTokenParam, authTokenHeader), clock.currentDateTime());
S3Object packagesZippedFolder = godToolsS3Client.getPackagesZippedFolder(languageCode);
return Response
.ok(packagesZippedFolder.getObjectContent())
.type("application/zip")
.build();
}
/**
* GET - get all the package specified by @param packageCode for the language specified by @param languageCode.
*/
@GET
@Path("/{language}/{package}")
@Produces({"application/zip", "application/xml"})
public Response getPackage(@PathParam("language") String languageCode,
@PathParam("package") String packageCode,
@QueryParam("compressed") String compressed,
@HeaderParam("Authorization") String authTokenHeader,
@QueryParam("Authorization") String authTokenParam) throws Exception
{
log.info("Requesting package " + packageCode + " for language: " + languageCode);
AuthorizationRecord.checkAuthorization(authService.getAuthorizationRecord(authTokenParam, authTokenHeader), clock.currentDateTime());
return Response.status(Response.Status.NOT_FOUND).build();
}
}
|
deprecate the package resource
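The change itself is just the @Deprecated annotation added above the JAX-RS resource class; a minimal sketch of that pattern (resource name and path are illustrative, not part of the godtools-api code):
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.core.Response;

@Deprecated
@Path("/example")
public class ExampleResource {
    // The endpoints keep serving requests; the annotation only marks the class for
    // eventual removal and raises warnings where other code references it directly.
    @GET
    public Response get() {
        return Response.ok("still served").build();
    }
}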
|
src/main/java/org/cru/godtools/api/packages/PackageResource.java
|
deprecate the package resource
|
|
Java
|
mit
|
de2e87f68f0b52436dd76d6fdfda626fc6d6662c
| 0
|
juckele/vivarium,juckele/vivarium,juckele/vivarium
|
package io.vivarium.ga;
import io.vivarium.core.Blueprint;
import io.vivarium.core.Creature;
import io.vivarium.core.EntityType;
import io.vivarium.core.Species;
import io.vivarium.core.World;
import io.vivarium.serialization.SerializationEngine;
public class TimeToExtinctionFF extends SimulationBasedFitnessFunction
{
private int _initialPopulation;
private Blueprint _blueprint;
private double _simulationDuration;
public TimeToExtinctionFF(Blueprint blueprint, int initialPopulation, int simulationDuration)
{
this._blueprint = blueprint;
this._initialPopulation = initialPopulation;
this._simulationDuration = simulationDuration;
}
@Override
public double evaluate(Creature c)
{
// Build world
Blueprint instanceBlueprint = new SerializationEngine().makeCopy(_blueprint);
assert(instanceBlueprint.getSpecies().size() == 1);
Species instanceSpecies = instanceBlueprint.getSpecies().get(0);
instanceSpecies.setMutationRateExponent(Double.NEGATIVE_INFINITY);
Creature instanceCreature = new Creature(instanceSpecies, c);
World w = new World(instanceBlueprint);
for (int i = 0; i < _initialPopulation; i++)
{
w.addImmigrant(new Creature(instanceCreature));
}
// Run simulation
for (int i = 0; i < _simulationDuration; i++)
{
int count = w.getCount(EntityType.CREATURE);
if (count == 0)
{
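// Normalize time-to-extinction into [0, 1): dying out at tick i of an
// N-tick run scores i / N (e.g. extinction at tick 250 of 1000 scores 0.25);
// populations that survive the whole run score 1.0 below.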
return i / _simulationDuration;
}
w.tick();
}
return 1.0;
}
}
|
vivarium-ga/src/main/java/io/vivarium/ga/TimeToExtinctionFF.java
|
package io.vivarium.ga;
import io.vivarium.core.Blueprint;
import io.vivarium.core.Creature;
import io.vivarium.core.EntityType;
import io.vivarium.core.Species;
import io.vivarium.core.World;
import io.vivarium.serialization.SerializationEngine;
public class TimeToExtinctionFF extends SimulationBasedFitnessFunction
{
private int _initialPopulation;
private Blueprint _blueprint;
private double _simulationDuration;
public TimeToExtinctionFF(Blueprint blueprint, int initialPopulation, int simulationDuration)
{
this._blueprint = blueprint;
this._initialPopulation = initialPopulation;
this._simulationDuration = simulationDuration;
}
@Override
public double evaluate(Creature c)
{
// Build world
Blueprint instanceBlueprint = (Blueprint) new SerializationEngine().makeCopy(_blueprint);
assert(instanceBlueprint.getSpecies().size() == 1);
Species instanceSpecies = instanceBlueprint.getSpecies().get(0);
instanceSpecies.setMutationRateExponent(Double.NEGATIVE_INFINITY);
Creature instanceCreature = new Creature(instanceSpecies, c);
World w = new World(instanceBlueprint);
for (int i = 0; i < _initialPopulation; i++)
{
w.addImmigrant(new Creature(instanceCreature));
}
// Run simulation
for (int i = 0; i < _simulationDuration; i++)
{
int count = w.getCount(EntityType.CREATURE);
if (count == 0)
{
return i / _simulationDuration;
}
w.tick();
}
return 1.0;
}
}
|
Removed redundant cast
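Presumably the cast could be dropped because the copy helper's return type already matches its argument; a generic-method sketch of why that makes a cast redundant (the signature here is an assumption for illustration, not the actual SerializationEngine API):
public class CopySketch {
    // With a generic signature like this, the compiler infers T at the call site,
    // so a cast such as "(Blueprint) makeCopy(blueprint)" is unnecessary.
    static <T> T makeCopy(T original) {
        return original; // a real implementation would return a deep copy
    }

    public static void main(String[] args) {
        String copy = makeCopy("blueprint stand-in"); // no cast required
        System.out.println(copy);
    }
}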
|
vivarium-ga/src/main/java/io/vivarium/ga/TimeToExtinctionFF.java
|
Removed redundant cast
|
|
Java
|
mit
|
225b89d3f0daf3dbc5fb32527ea583bffb2be3c4
| 0
|
viromedia/viro,viromedia/viro,viromedia/viro,viromedia/viro
|
/**
* Copyright © 2016 Viro Media. All rights reserved.
*/
package com.viromedia.bridge.component;
import android.graphics.Bitmap;
import android.os.Handler;
import android.os.Looper;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.bridge.ReadableArray;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.uimanager.events.RCTEventEmitter;
import com.viro.renderer.jni.ImageJni;
import com.viro.renderer.jni.TextureFormat;
import com.viro.renderer.jni.TextureJni;
import com.viromedia.bridge.component.node.Scene;
import com.viromedia.bridge.utility.ImageDownloadListener;
import com.viromedia.bridge.utility.ImageDownloader;
import com.viromedia.bridge.utility.ViroEvents;
public class Image360 extends Component {
private static final float[] sDefaultRotation = {0, 0, 0};
private ReadableMap mSourceMap;
private float[] mRotation = sDefaultRotation;
private ImageJni mLatestImage;
private TextureJni mLatestTexture;
private TextureFormat mFormat = TextureFormat.RGBA8;
private Handler mMainHandler;
private boolean mImageNeedsDownload;
public Image360(ReactApplicationContext context) {
super(context);
mMainHandler = new Handler(Looper.getMainLooper());
mImageNeedsDownload = false;
}
public void setSource(ReadableMap source) {
mSourceMap = source;
mImageNeedsDownload = true;
}
public void setRotation(ReadableArray rotation) {
if (rotation == null) {
mRotation = sDefaultRotation;
} else {
float[] rotationArr = {(float) rotation.getDouble(0),
(float) rotation.getDouble(1), (float) rotation.getDouble(2)};
mRotation = rotationArr;
}
if (mScene != null) {
mScene.setBackgroundRotation(mRotation);
}
}
@Override
public void onPropsSet() {
super.onPropsSet();
if (!mImageNeedsDownload || mSourceMap == null) {
return;
}
ImageDownloader downloader = new ImageDownloader(getContext());
downloader.setTextureFormat(mFormat);
imageDownloadDidStart();
downloader.getImageAsync(mSourceMap, new ImageDownloadListener() {
@Override
public void completed(final Bitmap result) {
mMainHandler.post(new Runnable() {
public void run() {
if (mLatestImage != null) {
mLatestImage.destroy();
}
if (mLatestTexture != null) {
mLatestTexture.destroy();
}
mLatestImage = new ImageJni(result, mFormat);
mLatestTexture = new TextureJni(mLatestImage, mFormat, false);
if (mScene != null) {
mScene.setBackgroundImageTexture(mLatestTexture);
mScene.setBackgroundRotation(mRotation);
}
imageDownloadDidFinish();
}
});
}
});
mImageNeedsDownload = false;
}
@Override
public void onTearDown() {
if (mLatestImage != null) {
mLatestImage.destroy();
mLatestImage = null;
}
if (mLatestTexture != null) {
mLatestTexture.destroy();
mLatestTexture = null;
}
}
@Override
public void setScene(Scene scene) {
super.setScene(scene);
if (mLatestTexture != null) {
mScene.setBackgroundImageTexture(mLatestTexture);
mScene.setBackgroundRotation(mRotation);
}
}
public void setFormat(String format) {
mFormat = TextureFormat.forString(format);
mImageNeedsDownload = true;
}
private void imageDownloadDidStart() {
mReactContext.getJSModule(RCTEventEmitter.class).receiveEvent(
getId(),
ViroEvents.ON_LOAD_START,
null
);
}
private void imageDownloadDidFinish() {
mReactContext.getJSModule(RCTEventEmitter.class).receiveEvent(
getId(),
ViroEvents.ON_LOAD_END,
null
);
}
}
|
android/viro_bridge/src/main/java/com/viromedia/bridge/component/Image360.java
|
/**
* Copyright © 2016 Viro Media. All rights reserved.
*/
package com.viromedia.bridge.component;
import android.graphics.Bitmap;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.bridge.ReadableArray;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.uimanager.events.RCTEventEmitter;
import com.viro.renderer.jni.ImageJni;
import com.viro.renderer.jni.TextureFormat;
import com.viro.renderer.jni.TextureJni;
import com.viromedia.bridge.component.node.Scene;
import com.viromedia.bridge.utility.ImageDownloadListener;
import com.viromedia.bridge.utility.ImageDownloader;
import com.viromedia.bridge.utility.ViroEvents;
public class Image360 extends Component {
private static final float[] sDefaultRotation = {0, 0, 0};
private ReadableMap mSourceMap;
private float[] mRotation = sDefaultRotation;
private ImageJni mLatestImage;
private TextureJni mLatestTexture;
private TextureFormat mFormat = TextureFormat.RGBA8;
public Image360(ReactApplicationContext context) {
super(context);
}
public void setSource(ReadableMap source) {
mSourceMap = source;
}
public void setRotation(ReadableArray rotation) {
if (rotation == null) {
mRotation = sDefaultRotation;
} else {
float[] rotationArr = {(float) rotation.getDouble(0),
(float) rotation.getDouble(1), (float) rotation.getDouble(2)};
mRotation = rotationArr;
}
}
@Override
public void onPropsSet() {
super.onPropsSet();
ImageDownloader downloader = new ImageDownloader(getContext());
downloader.setTextureFormat(mFormat);
if (mSourceMap != null) {
imageDownloadDidStart();
downloader.getImageAsync(mSourceMap, new ImageDownloadListener() {
@Override
public void completed(Bitmap result) {
if (mLatestImage != null) {
mLatestImage.destroy();
}
if (mLatestTexture != null) {
mLatestTexture.destroy();
}
mLatestImage = new ImageJni(result, mFormat);
mLatestTexture = new TextureJni(mLatestImage, mFormat, false);
if (mScene != null) {
mScene.setBackgroundImageTexture(mLatestTexture);
mScene.setBackgroundRotation(mRotation);
}
imageDownloadDidFinish();
}
});
}
}
@Override
public void onTearDown() {
if (mLatestImage != null) {
mLatestImage.destroy();
mLatestImage = null;
}
if (mLatestTexture != null) {
mLatestTexture.destroy();
mLatestTexture = null;
}
}
@Override
public void setScene(Scene scene) {
super.setScene(scene);
if (mLatestTexture != null) {
mScene.setBackgroundImageTexture(mLatestTexture);
mScene.setBackgroundRotation(mRotation);
}
}
public void setFormat(String format) {
mFormat = TextureFormat.forString(format);
}
private void imageDownloadDidStart() {
mReactContext.getJSModule(RCTEventEmitter.class).receiveEvent(
getId(),
ViroEvents.ON_LOAD_START,
null
);
}
private void imageDownloadDidFinish() {
mReactContext.getJSModule(RCTEventEmitter.class).receiveEvent(
getId(),
ViroEvents.ON_LOAD_END,
null
);
}
}
|
VIRO-949: threading crash in Image360
The textures were being updated in the async
background thread. Also optimized to not download
when just rotation changes.
Former-commit-id: e84146520c2f648b5d74847143841db5ac6d7359
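The fix shown above marshals the texture update back onto the UI thread; the core pattern is an android.os.Handler bound to the main Looper, roughly as sketched here (field and method names are illustrative, not the bridge's actual ones):
import android.os.Handler;
import android.os.Looper;

class MainThreadPoster {
    private final Handler mainHandler = new Handler(Looper.getMainLooper());

    // Called from the image downloader's background callback; the texture/scene
    // mutation then runs on the main thread instead of the worker thread.
    void applyOnMainThread(Runnable applyTexture) {
        mainHandler.post(applyTexture);
    }
}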
|
android/viro_bridge/src/main/java/com/viromedia/bridge/component/Image360.java
|
VIRO-949: threading crash in Image360
|
|
Java
|
lgpl-2.1
|
4b0818122a52c2059b6c6947a99f0db41233b10f
| 0
|
concord-consortium/datagraph
|
/*
* Copyright (C) 2004 The Concord Consortium, Inc.,
* 10 Concord Crossing, Concord, MA 01741
*
* Web Site: http://www.concord.org
* Email: info@concord.org
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*
*/
/*
* Created on Mar 8, 2005
*
* TODO To change the template for this generated file go to
* Window - Preferences - Java - Code Style - Code Templates
*/
package org.concord.datagraph.state;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.FlowLayout;
import java.awt.Insets;
import java.awt.Window;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.EventObject;
import java.util.Vector;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JComponent;
import javax.swing.JDialog;
import javax.swing.JPanel;
import javax.swing.JToggleButton;
import javax.swing.SwingUtilities;
import javax.swing.text.ComponentView;
import org.concord.data.state.OTDataStore;
import org.concord.data.ui.DataFlowControlAction;
import org.concord.data.ui.DataFlowControlButton;
import org.concord.data.ui.DataFlowControlToolBar;
import org.concord.data.ui.DataStoreLabel;
import org.concord.data.ui.DataValueLabel;
import org.concord.datagraph.engine.ControllableDataGraphable;
import org.concord.datagraph.engine.DataGraphable;
import org.concord.datagraph.ui.DataGraph;
import org.concord.datagraph.ui.DrawDataGraphableAction;
import org.concord.datagraph.ui.SingleDataAxisGrid;
import org.concord.framework.data.stream.DataProducer;
import org.concord.framework.data.stream.ProducerDataStore;
import org.concord.framework.otrunk.OTObjectList;
import org.concord.framework.otrunk.view.OTObjectView;
import org.concord.framework.otrunk.view.OTViewContainer;
import org.concord.graph.engine.GraphableList;
import org.concord.graph.engine.SelectableList;
import org.concord.graph.event.GraphableListListener;
import org.concord.graph.examples.GraphWindowToolBar;
import org.concord.graph.ui.Grid2D;
import org.concord.graph.ui.SingleAxisGrid;
import org.concord.graph.util.ui.ResourceLoader;
import org.concord.swing.SelectableToggleButton;
/**
* @author scott
*
* TODO To change the template for this generated type comment go to
* Window - Preferences - Java - Code Style - Code Templates
*/
public class OTDataCollectorView
implements OTObjectView, GraphableListListener
{
OTDataCollector dataCollector;
protected OTViewContainer viewContainer;
DataGraph dataGraph;
SelectableList notesLayer;
DataGraphable sourceGraphable;
DataProducer sourceProducer;
DataValueLabel valueLabel;
OTDataStore dataStore = null;
JDialog dialog;
OTDataAxis xOTAxis;
OTDataAxis yOTAxis;
public OTDataCollectorView(OTDataCollector collector, OTViewContainer container)
{
dataCollector = collector;
viewContainer = container;
}
/* (non-Javadoc)
* @see org.concord.framework.otrunk.view.OTObjectView#getComponent(boolean)
*/
public JComponent getComponent(boolean editable)
{
dataGraph = new DataGraph();
dataGraph.changeToDataGraphToolbar();
dataGraph.setAutoFitMode(DataGraph.AUTO_SCROLL_RUNNING_MODE);
xOTAxis = dataCollector.getXDataAxis();
yOTAxis = dataCollector.getYDataAxis();
OTObjectList pfGraphables = dataCollector.getGraphables();
DataFlowControlToolBar toolBar = null;
dataGraph.setLimitsAxisWorld(xOTAxis.getMin(), xOTAxis.getMax(),
yOTAxis.getMin(), yOTAxis.getMax());
GraphWindowToolBar gwToolbar = dataGraph.getToolBar();
if(gwToolbar != null) {
// FIXME
gwToolbar.setVisible(editable);
}
Grid2D grid = dataGraph.getGrid();
SingleDataAxisGrid sXAxis = (SingleDataAxisGrid)grid.getXGrid();
DataGraphStateManager.setupAxisLabel(sXAxis, xOTAxis);
SingleDataAxisGrid sYAxis = (SingleDataAxisGrid)grid.getYGrid();
DataGraphStateManager.setupAxisLabel(sYAxis, yOTAxis);
Vector realGraphables = new Vector();
// for each list item get the data producer object
// add it to the data graph
for(int i=0; i<pfGraphables.size(); i++) {
OTDataGraphable otGraphable = (OTDataGraphable)pfGraphables.get(i);
OTDataStore dataStore = (OTDataStore)otGraphable.getDataStore();
// dProducer.getDataDescription().setDt(0.1f);
DataGraphable realGraphable = null;
if(dataStore == null) {
System.err.println("Trying to display graphable with out a data store");
continue;
}
realGraphable = dataGraph.createDataGraphable(dataStore,
otGraphable.getXColumn(),
otGraphable.getYColumn());
// realGraphable.addGraphableListener(this);
realGraphable.setColor(new Color(otGraphable.getColor()));
realGraphable.setConnectPoints(otGraphable.getConnectPoints());
realGraphable.setShowCrossPoint(otGraphable.getDrawMarks());
realGraphable.setLabel(otGraphable.getName());
realGraphables.add(realGraphable);
dataGraph.addBackgroundDataGraphable(realGraphable);
}
OTDataGraphable source = dataCollector.getSource();
if(source != null) {
sourceProducer = (DataProducer)source.getDataProducer();
dataStore = (OTDataStore)source.getDataStore();
String title = dataCollector.getTitle();
if(title == null) {
title = source.getName();
}
if(title != null) {
dataGraph.setTitle(title);
}
// dProducer.getDataDescription().setDt(0.1f);
if(source.getControllable()) {
sourceGraphable = new ControllableDataGraphable();
sourceGraphable.setDataStore(dataStore,
source.getXColumn(),
source.getYColumn());
// TODO need to add the sketch components here
JPanel bottomPanel = new JPanel(new FlowLayout());
JButton clearButton = new JButton("Clear");
clearButton.addActionListener(new ActionListener(){
public void actionPerformed(ActionEvent e){
dataGraph.reset();
}
});
DrawDataGraphableAction a = new DrawDataGraphableAction();
a.setDataGraphable((ControllableDataGraphable)sourceGraphable);
gwToolbar.addButton(new SelectableToggleButton(a), "Draw a function");
bottomPanel.add(clearButton);
dataGraph.add(bottomPanel, BorderLayout.SOUTH);
} else if(sourceProducer != null) {
// need to set the data store to be the data store for this
// graphable
if(dataStore != null && !dataCollector.getSingleValue()){
dataStore.setDataProducer(sourceProducer);
sourceGraphable = dataGraph.createDataGraphable(dataStore,
source.getXColumn(),
source.getYColumn());
} else {
sourceGraphable = dataGraph.createDataGraphable(sourceProducer,
source.getXColumn(),
source.getYColumn());
}
JPanel bottomPanel = new JPanel(new FlowLayout());
valueLabel = new DataValueLabel(sourceProducer);
valueLabel.setColumns(4);
bottomPanel.add(valueLabel);
if(!dataCollector.getSingleValue()) {
toolBar = createFlowToolBar();
bottomPanel.add(toolBar);
toolBar.addDataFlowObject((DataProducer)sourceProducer);
} else {
JButton record = new JButton("Record");
record.addActionListener(new ActionListener(){
public void actionPerformed(ActionEvent e){
sourceProducer.stop();
float currentValue = valueLabel.getValue();
int lastSample = dataStore.getTotalNumSamples();
dataStore.setValueAt(lastSample, 0, new Float(currentValue));
dataGraph.reset();
dialog.hide();
}
});
JButton cancel = new JButton("Cancel");
cancel.addActionListener(new ActionListener(){
public void actionPerformed(ActionEvent e){
sourceProducer.stop();
dataGraph.reset();
dialog.hide();
}
});
bottomPanel.add(record);
bottomPanel.add(cancel);
}
dataGraph.add(bottomPanel, BorderLayout.SOUTH);
}
if(sourceGraphable != null) {
sourceGraphable.setColor(new Color(source.getColor()));
sourceGraphable.setConnectPoints(source.getConnectPoints());
sourceGraphable.setShowCrossPoint(source.getDrawMarks());
sourceGraphable.setLabel(source.getName());
realGraphables.insertElementAt(sourceGraphable, 0);
dataGraph.addDataGraphable(sourceGraphable);
}
}
if(realGraphables.size() > 1) {
DataGraphableTree dTree = new DataGraphableTree();
// add legend to the left
for(int i=0; i<realGraphables.size(); i++){
dTree.addGraphable((DataGraphable)realGraphables.get(i));
}
dataGraph.add(dTree, BorderLayout.WEST);
}
GraphableList graphableList = dataGraph.getObjList();
graphableList.addGraphableListListener(this);
/*
JPanel graphWrapper = new JPanel(){
public void removeNotify()
{
System.err.println("got remove notify");
// FIXME need to only reset the sourceGraphable
// dataGraph.reset();
}
};
graphWrapper.setLayout(new BorderLayout());
graphWrapper.add(dataGraph, BorderLayout.CENTER);
return graphWrapper;
*/
if(dataCollector.getSingleValue()){
DataStoreLabel dataLabel = new DataStoreLabel(dataStore, 0);
if(!editable) return dataLabel;
JPanel svPanel = new JPanel(new FlowLayout());
dataLabel.setColumns(4);
svPanel.add(dataLabel);
JButton cDataButton = new JButton();
ImageIcon icon = ResourceLoader.getImageIcon("data_graph_button.gif", "Collect Data");
cDataButton.setIcon(icon);
cDataButton.setToolTipText(icon.getDescription());
cDataButton.setMargin(new Insets(2,2,2,2));
cDataButton.addActionListener(new ActionListener(){
public void actionPerformed(ActionEvent event)
{
boolean needPack = false;
if(dialog == null) {
dialog = new JDialog();
dialog.setSize(400,400);
needPack = true;
}
dialog.getContentPane().setLayout(new BorderLayout());
dialog.getContentPane().removeAll();
dialog.getContentPane().add(dataGraph, BorderLayout.CENTER);
/*
if(needPack){
dialog.pack();
}
*/
dialog.show();
sourceProducer.start();
dataGraph.start();
}
});
svPanel.add(cDataButton);
return svPanel;
}
dataGraph.setPreferredSize(new Dimension(400,320));
return dataGraph;
}
public DataFlowControlToolBar createFlowToolBar()
{
DataFlowControlToolBar toolbar =
new DataFlowControlToolBar(false);
DataFlowControlButton b = null;
b = new DataFlowControlButton(DataFlowControlAction.FLOW_CONTROL_START);
toolbar.add(b);
b = new DataFlowControlButton(DataFlowControlAction.FLOW_CONTROL_STOP);
toolbar.add(b);
b = new DataFlowControlButton(DataFlowControlAction.FLOW_CONTROL_RESET);
b.setText("Clear");
toolbar.add(b);
toolbar.addDataFlowObject(dataGraph);
return toolbar;
}
/**
* This only works for graphables that came from a loaded
* pfgraphables. It doesn't yet handle cases where new
* graphables are created by some external thing
*
*/
public void updateState()
{
Grid2D grid = dataGraph.getGrid();
xOTAxis.setMin((float)dataGraph.getMinXAxisWorld());
xOTAxis.setMax((float)dataGraph.getMaxXAxisWorld());
yOTAxis.setMin((float)dataGraph.getMinYAxisWorld());
yOTAxis.setMax((float)dataGraph.getMaxYAxisWorld());
SingleAxisGrid sXAxis = grid.getXGrid();
if(sXAxis.getAxisLabel() != null){
xOTAxis.setLabel(sXAxis.getAxisLabel());
}
SingleAxisGrid sYAxis = grid.getYGrid();
if(sYAxis.getAxisLabel() != null){
yOTAxis.setLabel(sYAxis.getAxisLabel());
}
OTDataGraphable source = dataCollector.getSource();
Color c = sourceGraphable.getColor();
source.setColor(c.getRGB());
source.setConnectPoints(sourceGraphable.isConnectPoints());
source.setDrawMarks(sourceGraphable.isShowCrossPoint());
source.setXColumn(sourceGraphable.getChannelX());
source.setYColumn(sourceGraphable.getChannelY());
}
/* (non-Javadoc)
* @see org.concord.graph.event.GraphableListListener#listGraphableAdded(java.util.EventObject)
*/
public void listGraphableAdded(EventObject e)
{
}
/* (non-Javadoc)
* @see org.concord.graph.event.GraphableListListener#listGraphableChanged(java.util.EventObject)
*/
public void listGraphableChanged(EventObject e)
{
updateState();
}
/* (non-Javadoc)
* @see org.concord.graph.event.GraphableListListener#listGraphableRemoved(java.util.EventObject)
*/
public void listGraphableRemoved(EventObject e)
{
}
}
|
src/org/concord/datagraph/state/OTDataCollectorView.java
|
/*
* Copyright (C) 2004 The Concord Consortium, Inc.,
* 10 Concord Crossing, Concord, MA 01741
*
* Web Site: http://www.concord.org
* Email: info@concord.org
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*
*/
/*
* Created on Mar 8, 2005
*
* TODO To change the template for this generated file go to
* Window - Preferences - Java - Code Style - Code Templates
*/
package org.concord.datagraph.state;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.FlowLayout;
import java.awt.Insets;
import java.awt.Window;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.EventObject;
import java.util.Vector;
import javax.swing.JButton;
import javax.swing.JComponent;
import javax.swing.JDialog;
import javax.swing.JPanel;
import javax.swing.JToggleButton;
import javax.swing.SwingUtilities;
import javax.swing.text.ComponentView;
import org.concord.data.state.OTDataStore;
import org.concord.data.ui.DataFlowControlAction;
import org.concord.data.ui.DataFlowControlButton;
import org.concord.data.ui.DataFlowControlToolBar;
import org.concord.data.ui.DataStoreLabel;
import org.concord.data.ui.DataValueLabel;
import org.concord.datagraph.engine.ControllableDataGraphable;
import org.concord.datagraph.engine.DataGraphable;
import org.concord.datagraph.ui.DataGraph;
import org.concord.datagraph.ui.DrawDataGraphableAction;
import org.concord.datagraph.ui.SingleDataAxisGrid;
import org.concord.framework.data.stream.DataProducer;
import org.concord.framework.data.stream.ProducerDataStore;
import org.concord.framework.otrunk.OTObjectList;
import org.concord.framework.otrunk.view.OTObjectView;
import org.concord.framework.otrunk.view.OTViewContainer;
import org.concord.graph.engine.GraphableList;
import org.concord.graph.engine.SelectableList;
import org.concord.graph.event.GraphableListListener;
import org.concord.graph.examples.GraphWindowToolBar;
import org.concord.graph.ui.Grid2D;
import org.concord.graph.ui.SingleAxisGrid;
import org.concord.graph.util.ui.ResourceLoader;
import org.concord.swing.SelectableToggleButton;
/**
* @author scott
*
* TODO To change the template for this generated type comment go to
* Window - Preferences - Java - Code Style - Code Templates
*/
public class OTDataCollectorView
implements OTObjectView, GraphableListListener
{
OTDataCollector dataCollector;
protected OTViewContainer viewContainer;
DataGraph dataGraph;
SelectableList notesLayer;
DataGraphable sourceGraphable;
DataProducer sourceProducer;
DataValueLabel valueLabel;
OTDataStore dataStore = null;
JDialog dialog;
OTDataAxis xOTAxis;
OTDataAxis yOTAxis;
public OTDataCollectorView(OTDataCollector collector, OTViewContainer container)
{
dataCollector = collector;
viewContainer = container;
}
/* (non-Javadoc)
* @see org.concord.framework.otrunk.view.OTObjectView#getComponent(boolean)
*/
public JComponent getComponent(boolean editable)
{
dataGraph = new DataGraph();
dataGraph.changeToDataGraphToolbar();
dataGraph.setAutoFitMode(DataGraph.AUTO_SCROLL_RUNNING_MODE);
xOTAxis = dataCollector.getXDataAxis();
yOTAxis = dataCollector.getYDataAxis();
OTObjectList pfGraphables = dataCollector.getGraphables();
DataFlowControlToolBar toolBar = null;
dataGraph.setLimitsAxisWorld(xOTAxis.getMin(), xOTAxis.getMax(),
yOTAxis.getMin(), yOTAxis.getMax());
GraphWindowToolBar gwToolbar = dataGraph.getToolBar();
if(gwToolbar != null) {
// FIXME
gwToolbar.setVisible(editable);
}
Grid2D grid = dataGraph.getGrid();
SingleDataAxisGrid sXAxis = (SingleDataAxisGrid)grid.getXGrid();
DataGraphStateManager.setupAxisLabel(sXAxis, xOTAxis);
SingleDataAxisGrid sYAxis = (SingleDataAxisGrid)grid.getYGrid();
DataGraphStateManager.setupAxisLabel(sYAxis, yOTAxis);
Vector realGraphables = new Vector();
// for each list item get the data producer object
// add it to the data graph
for(int i=0; i<pfGraphables.size(); i++) {
OTDataGraphable otGraphable = (OTDataGraphable)pfGraphables.get(i);
OTDataStore dataStore = (OTDataStore)otGraphable.getDataStore();
// dProducer.getDataDescription().setDt(0.1f);
DataGraphable realGraphable = null;
if(dataStore == null) {
System.err.println("Trying to display graphable with out a data store");
continue;
}
realGraphable = dataGraph.createDataGraphable(dataStore,
otGraphable.getXColumn(),
otGraphable.getYColumn());
// realGraphable.addGraphableListener(this);
realGraphable.setColor(new Color(otGraphable.getColor()));
realGraphable.setConnectPoints(otGraphable.getConnectPoints());
realGraphable.setShowCrossPoint(otGraphable.getDrawMarks());
realGraphable.setLabel(otGraphable.getName());
realGraphables.add(realGraphable);
dataGraph.addBackgroundDataGraphable(realGraphable);
}
OTDataGraphable source = dataCollector.getSource();
if(source != null) {
sourceProducer = (DataProducer)source.getDataProducer();
dataStore = (OTDataStore)source.getDataStore();
String title = dataCollector.getTitle();
if(title == null) {
title = source.getName();
}
if(title != null) {
dataGraph.setTitle(title);
}
// dProducer.getDataDescription().setDt(0.1f);
if(source.getControllable()) {
sourceGraphable = new ControllableDataGraphable();
sourceGraphable.setDataStore(dataStore,
source.getXColumn(),
source.getYColumn());
// TODO need to add the sketch components here
JPanel bottomPanel = new JPanel(new FlowLayout());
JButton clearButton = new JButton("Clear");
clearButton.addActionListener(new ActionListener(){
public void actionPerformed(ActionEvent e){
dataGraph.reset();
}
});
DrawDataGraphableAction a = new DrawDataGraphableAction();
a.setDataGraphable((ControllableDataGraphable)sourceGraphable);
gwToolbar.addButton(new SelectableToggleButton(a), "Draw a function");
bottomPanel.add(clearButton);
dataGraph.add(bottomPanel, BorderLayout.SOUTH);
} else if(sourceProducer != null) {
// need to set the data store to be the data store for this
// graphable
if(dataStore != null && !dataCollector.getSingleValue()){
dataStore.setDataProducer(sourceProducer);
sourceGraphable = dataGraph.createDataGraphable(dataStore,
source.getXColumn(),
source.getYColumn());
} else {
sourceGraphable = dataGraph.createDataGraphable(sourceProducer,
source.getXColumn(),
source.getYColumn());
}
JPanel bottomPanel = new JPanel(new FlowLayout());
valueLabel = new DataValueLabel(sourceProducer);
bottomPanel.add(valueLabel);
if(!dataCollector.getSingleValue()) {
toolBar = createFlowToolBar();
bottomPanel.add(toolBar);
toolBar.addDataFlowObject((DataProducer)sourceProducer);
} else {
JButton record = new JButton("Record");
record.addActionListener(new ActionListener(){
public void actionPerformed(ActionEvent e){
sourceProducer.stop();
float currentValue = valueLabel.getValue();
int lastSample = dataStore.getTotalNumSamples();
dataStore.setValueAt(lastSample, 0, new Float(currentValue));
dataGraph.reset();
dialog.hide();
}
});
JButton cancel = new JButton("Cancel");
cancel.addActionListener(new ActionListener(){
public void actionPerformed(ActionEvent e){
sourceProducer.stop();
dataGraph.reset();
dialog.hide();
}
});
bottomPanel.add(record);
bottomPanel.add(cancel);
}
dataGraph.add(bottomPanel, BorderLayout.SOUTH);
}
if(sourceGraphable != null) {
sourceGraphable.setColor(new Color(source.getColor()));
sourceGraphable.setConnectPoints(source.getConnectPoints());
sourceGraphable.setShowCrossPoint(source.getDrawMarks());
sourceGraphable.setLabel(source.getName());
realGraphables.insertElementAt(sourceGraphable, 0);
dataGraph.addDataGraphable(sourceGraphable);
}
}
if(realGraphables.size() > 1) {
DataGraphableTree dTree = new DataGraphableTree();
// add legend to the left
for(int i=0; i<realGraphables.size(); i++){
dTree.addGraphable((DataGraphable)realGraphables.get(i));
}
dataGraph.add(dTree, BorderLayout.WEST);
}
GraphableList graphableList = dataGraph.getObjList();
graphableList.addGraphableListListener(this);
/*
JPanel graphWrapper = new JPanel(){
public void removeNotify()
{
System.err.println("got remove notify");
// FIXME need to only reset the sourceGraphable
// dataGraph.reset();
}
};
graphWrapper.setLayout(new BorderLayout());
graphWrapper.add(dataGraph, BorderLayout.CENTER);
return graphWrapper;
*/
if(dataCollector.getSingleValue()){
DataStoreLabel dataLabel = new DataStoreLabel(dataStore, 0);
if(!editable) return dataLabel;
JPanel svPanel = new JPanel(new FlowLayout());
svPanel.add(dataLabel);
JButton cDataButton = new JButton();
cDataButton.setIcon(ResourceLoader.getImageIcon("data_graph_button.gif", "Collect Data"));
cDataButton.setMargin(new Insets(2,2,2,2));
cDataButton.addActionListener(new ActionListener(){
public void actionPerformed(ActionEvent event)
{
boolean needPack = false;
if(dialog == null) {
dialog = new JDialog();
dialog.setSize(400,400);
needPack = true;
}
dialog.getContentPane().setLayout(new BorderLayout());
dialog.getContentPane().removeAll();
dialog.getContentPane().add(dataGraph, BorderLayout.CENTER);
/*
if(needPack){
dialog.pack();
}
*/
dialog.show();
sourceProducer.start();
dataGraph.start();
}
});
svPanel.add(cDataButton);
return svPanel;
}
dataGraph.setPreferredSize(new Dimension(400,320));
return dataGraph;
}
public DataFlowControlToolBar createFlowToolBar()
{
DataFlowControlToolBar toolbar =
new DataFlowControlToolBar(false);
DataFlowControlButton b = null;
b = new DataFlowControlButton(DataFlowControlAction.FLOW_CONTROL_START);
toolbar.add(b);
b = new DataFlowControlButton(DataFlowControlAction.FLOW_CONTROL_STOP);
toolbar.add(b);
b = new DataFlowControlButton(DataFlowControlAction.FLOW_CONTROL_RESET);
b.setText("Clear");
toolbar.add(b);
toolbar.addDataFlowObject(dataGraph);
return toolbar;
}
/**
* This only works for graphables that came from a loaded
* pfgraphables. It doesn't yet handle cases where new
* graphables are created by some external thing
*
*/
public void updateState()
{
Grid2D grid = dataGraph.getGrid();
xOTAxis.setMin((float)dataGraph.getMinXAxisWorld());
xOTAxis.setMax((float)dataGraph.getMaxXAxisWorld());
yOTAxis.setMin((float)dataGraph.getMinYAxisWorld());
yOTAxis.setMax((float)dataGraph.getMaxYAxisWorld());
SingleAxisGrid sXAxis = grid.getXGrid();
if(sXAxis.getAxisLabel() != null){
xOTAxis.setLabel(sXAxis.getAxisLabel());
}
SingleAxisGrid sYAxis = grid.getYGrid();
if(sYAxis.getAxisLabel() != null){
yOTAxis.setLabel(sYAxis.getAxisLabel());
}
OTDataGraphable source = dataCollector.getSource();
Color c = sourceGraphable.getColor();
source.setColor(c.getRGB());
source.setConnectPoints(sourceGraphable.isConnectPoints());
source.setDrawMarks(sourceGraphable.isShowCrossPoint());
source.setXColumn(sourceGraphable.getChannelX());
source.setYColumn(sourceGraphable.getChannelY());
}
/* (non-Javadoc)
* @see org.concord.graph.event.GraphableListListener#listGraphableAdded(java.util.EventObject)
*/
public void listGraphableAdded(EventObject e)
{
}
/* (non-Javadoc)
* @see org.concord.graph.event.GraphableListListener#listGraphableChanged(java.util.EventObject)
*/
public void listGraphableChanged(EventObject e)
{
updateState();
}
/* (non-Javadoc)
* @see org.concord.graph.event.GraphableListListener#listGraphableRemoved(java.util.EventObject)
*/
public void listGraphableRemoved(EventObject e)
{
}
}
|
Added tooltip to collect data button and set column width on data label
git-svn-id: a0d2519504059b70a86a1ce51b726c2279190bad@3623 6e01202a-0783-4428-890a-84243c50cc2b
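The two UI tweaks described by this commit are standard Swing calls; a small sketch (plain Swing types stand in for DataValueLabel/DataStoreLabel, which are assumed to expose the same setColumns and button APIs):
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JTextField;

public class CollectButtonSketch {
    public static void main(String[] args) {
        ImageIcon icon = new ImageIcon("data_graph_button.gif", "Collect Data");
        JButton collectButton = new JButton(icon);
        collectButton.setToolTipText(icon.getDescription()); // tooltip taken from the icon description

        JTextField valueLabel = new JTextField();
        valueLabel.setColumns(4); // fixed column width for the value display
    }
}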
|
src/org/concord/datagraph/state/OTDataCollectorView.java
|
Added tooltip to collect data button and set column width on data label
|
|
Java
|
lgpl-2.1
|
4f55381b49bb8b52ec70c69778085830ff96cd75
| 0
|
cfallin/soot,cfallin/soot,mbenz89/soot,plast-lab/soot,mbenz89/soot,mbenz89/soot,anddann/soot,xph906/SootNew,xph906/SootNew,plast-lab/soot,xph906/SootNew,anddann/soot,cfallin/soot,anddann/soot,xph906/SootNew,plast-lab/soot,mbenz89/soot,cfallin/soot,anddann/soot
|
/* Soot - a J*va Optimization Framework
* Copyright (C) 2003 Ondrej Lhotak
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
package soot.jimple.spark.pag;
import java.util.HashSet;
import java.util.Set;
import soot.ArrayType;
import soot.Body;
import soot.Context;
import soot.EntryPoints;
import soot.G;
import soot.RefLikeType;
import soot.RefType;
import soot.Scene;
import soot.SootClass;
import soot.SootMethod;
import soot.Type;
import soot.Unit;
import soot.VoidType;
import soot.jimple.Stmt;
import soot.jimple.spark.builder.MethodNodeFactory;
import soot.util.NumberedString;
import soot.util.SingletonList;
import soot.util.queue.ChunkedQueue;
import soot.util.queue.QueueReader;
/** Part of a pointer assignment graph for a single method.
* @author Ondrej Lhotak
*/
public final class MethodPAG {
private PAG pag;
public PAG pag() { return pag; }
protected MethodPAG( PAG pag, SootMethod m ) {
this.pag = pag;
this.method = m;
this.nodeFactory = new MethodNodeFactory( pag, this );
}
private Set<Context> addedContexts;
/** Adds this method to the main PAG, with all VarNodes parameterized by
* varNodeParameter. */
public void addToPAG( Context varNodeParameter ) {
if( !hasBeenBuilt ) throw new RuntimeException();
if( varNodeParameter == null ) {
if( hasBeenAdded ) return;
hasBeenAdded = true;
} else {
if( addedContexts == null ) addedContexts = new HashSet<Context>();
if( !addedContexts.add( varNodeParameter ) ) return;
}
QueueReader<Node> reader = internalReader.clone();
while(reader.hasNext()) {
Node src = (Node) reader.next();
src = parameterize( src, varNodeParameter );
Node dst = (Node) reader.next();
dst = parameterize( dst, varNodeParameter );
pag.addEdge( src, dst );
}
reader = inReader.clone();
while(reader.hasNext()) {
Node src = (Node) reader.next();
Node dst = (Node) reader.next();
dst = parameterize( dst, varNodeParameter );
pag.addEdge( src, dst );
}
reader = outReader.clone();
while(reader.hasNext()) {
Node src = (Node) reader.next();
src = parameterize( src, varNodeParameter );
Node dst = (Node) reader.next();
pag.addEdge( src, dst );
}
}
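// Note on the queues read above: edges are stored as consecutive (src, dst) pairs,
// which is why each loop pulls two nodes per iteration before parameterizing them
// and handing the pair to pag.addEdge().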
public void addInternalEdge( Node src, Node dst ) {
if( src == null ) return;
internalEdges.add( src );
internalEdges.add( dst );
if (hasBeenAdded) {
pag.addEdge(src, dst);
}
}
public void addInEdge( Node src, Node dst ) {
if( src == null ) return;
inEdges.add( src );
inEdges.add( dst );
if (hasBeenAdded) {
pag.addEdge(src, dst);
}
}
public void addOutEdge( Node src, Node dst ) {
if( src == null ) return;
outEdges.add( src );
outEdges.add( dst );
if (hasBeenAdded) {
pag.addEdge(src, dst);
}
}
private final ChunkedQueue<Node> internalEdges = new ChunkedQueue<Node>();
private final ChunkedQueue<Node> inEdges = new ChunkedQueue<Node>();
private final ChunkedQueue<Node> outEdges = new ChunkedQueue<Node>();
private final QueueReader<Node> internalReader = internalEdges.reader();
private final QueueReader<Node> inReader = inEdges.reader();
private final QueueReader<Node> outReader = outEdges.reader();
SootMethod method;
public SootMethod getMethod() { return method; }
protected MethodNodeFactory nodeFactory;
public MethodNodeFactory nodeFactory() { return nodeFactory; }
public static MethodPAG v( PAG pag, SootMethod m ) {
MethodPAG ret = G.v().MethodPAG_methodToPag.get( m );
if( ret == null ) {
ret = new MethodPAG( pag, m );
G.v().MethodPAG_methodToPag.put( m, ret );
}
return ret;
}
public void build() {
if( hasBeenBuilt ) return;
hasBeenBuilt = true;
if( method.isNative() ) {
if( pag().getOpts().simulate_natives() ) {
buildNative();
}
} else {
if( method.isConcrete() && !method.isPhantom() ) {
buildNormal();
}
}
addMiscEdges();
}
protected VarNode parameterize( LocalVarNode vn, Context varNodeParameter ) {
SootMethod m = vn.getMethod();
if( m != method && m != null ) throw new RuntimeException( "VarNode "+vn+" with method "+m+" parameterized in method "+method );
//System.out.println( "parameterizing "+vn+" with "+varNodeParameter );
return pag().makeContextVarNode( vn, varNodeParameter );
}
protected FieldRefNode parameterize( FieldRefNode frn, Context varNodeParameter ) {
return pag().makeFieldRefNode(
(VarNode) parameterize( frn.getBase(), varNodeParameter ),
frn.getField() );
}
public Node parameterize( Node n, Context varNodeParameter ) {
if( varNodeParameter == null ) return n;
if( n instanceof LocalVarNode )
return parameterize( (LocalVarNode) n, varNodeParameter);
if( n instanceof FieldRefNode )
return parameterize( (FieldRefNode) n, varNodeParameter);
return n;
}
protected boolean hasBeenAdded = false;
protected boolean hasBeenBuilt = false;
protected void buildNormal() {
Body b = method.retrieveActiveBody();
for (Unit u : b.getUnits())
nodeFactory.handleStmt( (Stmt) u );
}
protected void buildNative() {
ValNode thisNode = null;
ValNode retNode = null;
if( !method.isStatic() ) {
thisNode = (ValNode) nodeFactory.caseThis();
}
if( method.getReturnType() instanceof RefLikeType ) {
retNode = (ValNode) nodeFactory.caseRet();
}
ValNode[] args = new ValNode[ method.getParameterCount() ];
for( int i = 0; i < method.getParameterCount(); i++ ) {
if( !( method.getParameterType(i) instanceof RefLikeType ) ) continue;
args[i] = (ValNode) nodeFactory.caseParm(i);
}
pag.nativeMethodDriver.process( method, thisNode, retNode, args );
}
private final static String mainSubSignature =
SootMethod.getSubSignature( "main", new SingletonList<Type>
( ArrayType.v(RefType.v("java.lang.String"), 1) ), VoidType.v() );
protected void addMiscEdges() {
// Add node for parameter (String[]) in main method
final String signature = method.getSignature();
if( method.getSubSignature().equals( mainSubSignature )) {
addInEdge( pag().nodeFactory().caseArgv(), nodeFactory.caseParm(0) );
} else
if(signature.equals(
"<java.lang.Thread: void <init>(java.lang.ThreadGroup,java.lang.String)>" ) ) {
addInEdge( pag().nodeFactory().caseMainThread(), nodeFactory.caseThis() );
addInEdge( pag().nodeFactory().caseMainThreadGroup(), nodeFactory.caseParm( 0 ) );
} else
if (signature.equals(
"<java.lang.ref.Finalizer: void <init>(java.lang.Object)>")) {
addInEdge( nodeFactory.caseThis(), pag().nodeFactory().caseFinalizeQueue());
} else
if (signature.equals(
"<java.lang.ref.Finalizer: void runFinalizer()>")) {
addInEdge(pag.nodeFactory().caseFinalizeQueue(), nodeFactory.caseThis());
} else
if (signature.equals(
"<java.lang.ref.Finalizer: void access$100(java.lang.Object)>")) {
addInEdge(pag.nodeFactory().caseFinalizeQueue(), nodeFactory.caseParm(0));
} else
if (signature.equals(
"<java.lang.ClassLoader: void <init>()>")) {
addInEdge(pag.nodeFactory().caseDefaultClassLoader(), nodeFactory.caseThis());
} else
if (signature.equals("<java.lang.Thread: void exit()>")) {
addInEdge(pag.nodeFactory().caseMainThread(), nodeFactory.caseThis());
} else
if (signature.equals(
"<java.security.PrivilegedActionException: void <init>(java.lang.Exception)>")) {
addInEdge(pag.nodeFactory().caseThrow(), nodeFactory.caseParm(0));
addInEdge(pag.nodeFactory().casePrivilegedActionException(), nodeFactory.caseThis());
}
if (method.getNumberedSubSignature().equals(sigCanonicalize)) {
SootClass cl = method.getDeclaringClass();
while (true) {
if (cl.equals(Scene.v().getSootClass("java.io.FileSystem"))) {
addInEdge(pag.nodeFactory().caseCanonicalPath(), nodeFactory.caseRet());
}
if (!cl.hasSuperclass())
break;
cl = cl.getSuperclass();
}
}
boolean isImplicit = false;
for (SootMethod implicitMethod : EntryPoints.v().implicit()) {
if (implicitMethod.getNumberedSubSignature().equals(
method.getNumberedSubSignature())) {
isImplicit = true;
break;
}
}
if (isImplicit) {
SootClass c = method.getDeclaringClass();
outer: do {
while (!c.getName().equals("java.lang.ClassLoader")) {
if (!c.hasSuperclass()) {
break outer;
}
c = c.getSuperclass();
}
if (method.getName().equals("<init>"))
continue;
addInEdge(pag().nodeFactory().caseDefaultClassLoader(),
nodeFactory.caseThis());
addInEdge(pag().nodeFactory().caseMainClassNameString(),
nodeFactory.caseParm(0));
} while (false);
}
}
protected final NumberedString sigCanonicalize = Scene.v().getSubSigNumberer().
findOrAdd("java.lang.String canonicalize(java.lang.String)");
}
|
src/soot/jimple/spark/pag/MethodPAG.java
|
/* Soot - a J*va Optimization Framework
* Copyright (C) 2003 Ondrej Lhotak
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
package soot.jimple.spark.pag;
import java.util.HashSet;
import java.util.Set;
import soot.ArrayType;
import soot.Body;
import soot.Context;
import soot.EntryPoints;
import soot.G;
import soot.RefLikeType;
import soot.RefType;
import soot.Scene;
import soot.SootClass;
import soot.SootMethod;
import soot.Type;
import soot.Unit;
import soot.VoidType;
import soot.jimple.Stmt;
import soot.jimple.spark.builder.MethodNodeFactory;
import soot.util.NumberedString;
import soot.util.SingletonList;
import soot.util.queue.ChunkedQueue;
import soot.util.queue.QueueReader;
/** Part of a pointer assignment graph for a single method.
* @author Ondrej Lhotak
*/
public final class MethodPAG {
private PAG pag;
public PAG pag() { return pag; }
protected MethodPAG( PAG pag, SootMethod m ) {
this.pag = pag;
this.method = m;
this.nodeFactory = new MethodNodeFactory( pag, this );
}
private Set<Context> addedContexts;
/** Adds this method to the main PAG, with all VarNodes parameterized by
* varNodeParameter. */
public void addToPAG( Context varNodeParameter ) {
if( !hasBeenBuilt ) throw new RuntimeException();
if( varNodeParameter == null ) {
if( hasBeenAdded ) return;
hasBeenAdded = true;
} else {
if( addedContexts == null ) addedContexts = new HashSet<Context>();
if( !addedContexts.add( varNodeParameter ) ) return;
}
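// Each queue stores edges as consecutive (src, dst) node pairs, so the cloned
// readers below are drained two nodes at a time. Internal edges have both ends
// parameterized by the context; in-edges parameterize only the destination and
// out-edges only the source, since that is the end owned by this method.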
QueueReader<Node> reader = internalReader.clone();
while(reader.hasNext()) {
Node src = (Node) reader.next();
src = parameterize( src, varNodeParameter );
Node dst = (Node) reader.next();
dst = parameterize( dst, varNodeParameter );
pag.addEdge( src, dst );
}
reader = inReader.clone();
while(reader.hasNext()) {
Node src = (Node) reader.next();
Node dst = (Node) reader.next();
dst = parameterize( dst, varNodeParameter );
pag.addEdge( src, dst );
}
reader = outReader.clone();
while(reader.hasNext()) {
Node src = (Node) reader.next();
src = parameterize( src, varNodeParameter );
Node dst = (Node) reader.next();
pag.addEdge( src, dst );
}
}
public void addInternalEdge( Node src, Node dst ) {
if( src == null ) return;
internalEdges.add( src );
internalEdges.add( dst );
if (hasBeenAdded) {
pag.addEdge(src, dst);
}
}
public void addInEdge( Node src, Node dst ) {
if( src == null ) return;
inEdges.add( src );
inEdges.add( dst );
if (hasBeenAdded) {
pag.addEdge(src, dst);
}
}
public void addOutEdge( Node src, Node dst ) {
if( src == null ) return;
outEdges.add( src );
outEdges.add( dst );
if (hasBeenAdded) {
pag.addEdge(src, dst);
}
}
private final ChunkedQueue<Node> internalEdges = new ChunkedQueue<Node>();
private final ChunkedQueue<Node> inEdges = new ChunkedQueue<Node>();
private final ChunkedQueue<Node> outEdges = new ChunkedQueue<Node>();
private final QueueReader<Node> internalReader = internalEdges.reader();
private final QueueReader<Node> inReader = inEdges.reader();
private final QueueReader<Node> outReader = outEdges.reader();
SootMethod method;
public SootMethod getMethod() { return method; }
protected MethodNodeFactory nodeFactory;
public MethodNodeFactory nodeFactory() { return nodeFactory; }
public static MethodPAG v( PAG pag, SootMethod m ) {
MethodPAG ret = G.v().MethodPAG_methodToPag.get( m );
if( ret == null ) {
ret = new MethodPAG( pag, m );
G.v().MethodPAG_methodToPag.put( m, ret );
}
return ret;
}
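// Illustrative sketch of how a caller is expected to drive this class (the
// surrounding driver code is assumed; only v(), build() and addToPAG() are
// taken from this file):
//
// MethodPAG mpag = MethodPAG.v(pag, method); // cached, one instance per SootMethod
// mpag.build(); // populate the internal/in/out edge queues
// mpag.addToPAG(null); // null context: add the edges context-insensitively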
public void build() {
if( hasBeenBuilt ) return;
hasBeenBuilt = true;
if( method.isNative() ) {
if( pag().getOpts().simulate_natives() ) {
buildNative();
}
} else {
if( method.isConcrete() && !method.isPhantom() ) {
buildNormal();
}
}
addMiscEdges();
}
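// The parameterize(...) overloads below wrap nodes owned by this method
// (LocalVarNodes, and FieldRefNodes based on them) in the given calling
// context, so one MethodPAG can be added to the PAG once per context without
// rebuilding its edge queues.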
protected VarNode parameterize( LocalVarNode vn, Context varNodeParameter ) {
SootMethod m = vn.getMethod();
if( m != method && m != null ) throw new RuntimeException( "VarNode "+vn+" with method "+m+" parameterized in method "+method );
//System.out.println( "parameterizing "+vn+" with "+varNodeParameter );
return pag().makeContextVarNode( vn, varNodeParameter );
}
protected FieldRefNode parameterize( FieldRefNode frn, Context varNodeParameter ) {
return pag().makeFieldRefNode(
(VarNode) parameterize( frn.getBase(), varNodeParameter ),
frn.getField() );
}
public Node parameterize( Node n, Context varNodeParameter ) {
if( varNodeParameter == null ) return n;
if( n instanceof LocalVarNode )
return parameterize( (LocalVarNode) n, varNodeParameter);
if( n instanceof FieldRefNode )
return parameterize( (FieldRefNode) n, varNodeParameter);
return n;
}
protected boolean hasBeenAdded = false;
protected boolean hasBeenBuilt = false;
protected void buildNormal() {
Body b = method.retrieveActiveBody();
for (Unit u : b.getUnits())
nodeFactory.handleStmt( (Stmt) u );
}
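// Native methods have no Jimple body to walk, so buildNative() collects the
// reference-typed this, return and parameter nodes and hands them to the PAG's
// nativeMethodDriver, which applies a per-signature summary for the natives it
// recognizes.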
protected void buildNative() {
ValNode thisNode = null;
ValNode retNode = null;
if( !method.isStatic() ) {
thisNode = (ValNode) nodeFactory.caseThis();
}
if( method.getReturnType() instanceof RefLikeType ) {
retNode = (ValNode) nodeFactory.caseRet();
}
ValNode[] args = new ValNode[ method.getParameterCount() ];
for( int i = 0; i < method.getParameterCount(); i++ ) {
if( !( method.getParameterType(i) instanceof RefLikeType ) ) continue;
args[i] = (ValNode) nodeFactory.caseParm(i);
}
pag.nativeMethodDriver.process( method, thisNode, retNode, args );
}
protected void addMiscEdges() {
// Add node for parameter (String[]) in main method
final String signature = method.getSignature();
if( method.getSubSignature().equals( SootMethod.getSubSignature( "main", new SingletonList<Type>
( ArrayType.v(RefType.v("java.lang.String"), 1) ), VoidType.v() ) ) ) {
addInEdge( pag().nodeFactory().caseArgv(), nodeFactory.caseParm(0) );
} else
if(signature.equals(
"<java.lang.Thread: void <init>(java.lang.ThreadGroup,java.lang.String)>" ) ) {
addInEdge( pag().nodeFactory().caseMainThread(), nodeFactory.caseThis() );
addInEdge( pag().nodeFactory().caseMainThreadGroup(), nodeFactory.caseParm( 0 ) );
} else
if (signature.equals(
"<java.lang.ref.Finalizer: void <init>(java.lang.Object)>")) {
addInEdge( nodeFactory.caseThis(), pag().nodeFactory().caseFinalizeQueue());
} else
if (signature.equals(
"<java.lang.ref.Finalizer: void runFinalizer()>")) {
addInEdge(pag.nodeFactory().caseFinalizeQueue(), nodeFactory.caseThis());
} else
if (signature.equals(
"<java.lang.ref.Finalizer: void access$100(java.lang.Object)>")) {
addInEdge(pag.nodeFactory().caseFinalizeQueue(), nodeFactory.caseParm(0));
} else
if (signature.equals(
"<java.lang.ClassLoader: void <init>()>")) {
addInEdge(pag.nodeFactory().caseDefaultClassLoader(), nodeFactory.caseThis());
} else
if (signature.equals("<java.lang.Thread: void exit()>")) {
addInEdge(pag.nodeFactory().caseMainThread(), nodeFactory.caseThis());
} else
if (signature.equals(
"<java.security.PrivilegedActionException: void <init>(java.lang.Exception)>")) {
addInEdge(pag.nodeFactory().caseThrow(), nodeFactory.caseParm(0));
addInEdge(pag.nodeFactory().casePrivilegedActionException(), nodeFactory.caseThis());
}
if (method.getNumberedSubSignature().equals(sigCanonicalize)) {
SootClass cl = method.getDeclaringClass();
while (true) {
if (cl.equals(Scene.v().getSootClass("java.io.FileSystem"))) {
addInEdge(pag.nodeFactory().caseCanonicalPath(), nodeFactory.caseRet());
}
if (!cl.hasSuperclass())
break;
cl = cl.getSuperclass();
}
}
boolean isImplicit = false;
for (SootMethod implicitMethod : EntryPoints.v().implicit()) {
if (implicitMethod.getNumberedSubSignature().equals(
method.getNumberedSubSignature())) {
isImplicit = true;
break;
}
}
if (isImplicit) {
SootClass c = method.getDeclaringClass();
outer: do {
while (!c.getName().equals("java.lang.ClassLoader")) {
if (!c.hasSuperclass()) {
break outer;
}
c = c.getSuperclass();
}
if (method.getName().equals("<init>"))
continue;
addInEdge(pag().nodeFactory().caseDefaultClassLoader(),
nodeFactory.caseThis());
addInEdge(pag().nodeFactory().caseMainClassNameString(),
nodeFactory.caseParm(0));
} while (false);
}
}
protected final NumberedString sigCanonicalize = Scene.v().getSubSigNumberer().
findOrAdd("java.lang.String canonicalize(java.lang.String)");
}
|
tiny performance improvement
|
src/soot/jimple/spark/pag/MethodPAG.java
|
tiny performance improvement
|
|
Java
|
apache-2.0
|
21b24bb215d0ac202a00b247fd8e183f73022584
| 0
|
GerHobbelt/closure-compiler,tiobe/closure-compiler,tdelmas/closure-compiler,ChadKillingsworth/closure-compiler,shantanusharma/closure-compiler,shantanusharma/closure-compiler,monetate/closure-compiler,vobruba-martin/closure-compiler,Pimm/closure-compiler,google/closure-compiler,brad4d/closure-compiler,tiobe/closure-compiler,mprobst/closure-compiler,ChadKillingsworth/closure-compiler,anomaly/closure-compiler,google/closure-compiler,shantanusharma/closure-compiler,Yannic/closure-compiler,Yannic/closure-compiler,GerHobbelt/closure-compiler,MatrixFrog/closure-compiler,Pimm/closure-compiler,anomaly/closure-compiler,google/closure-compiler,anomaly/closure-compiler,shantanusharma/closure-compiler,monetate/closure-compiler,ChadKillingsworth/closure-compiler,Dominator008/closure-compiler,Yannic/closure-compiler,tdelmas/closure-compiler,monetate/closure-compiler,ChadKillingsworth/closure-compiler,Yannic/closure-compiler,MatrixFrog/closure-compiler,nawawi/closure-compiler,brad4d/closure-compiler,MatrixFrog/closure-compiler,GerHobbelt/closure-compiler,nawawi/closure-compiler,GerHobbelt/closure-compiler,google/closure-compiler,tdelmas/closure-compiler,brad4d/closure-compiler,mprobst/closure-compiler,monetate/closure-compiler,vobruba-martin/closure-compiler,nawawi/closure-compiler,tiobe/closure-compiler,Dominator008/closure-compiler,vobruba-martin/closure-compiler,mprobst/closure-compiler,tiobe/closure-compiler,anomaly/closure-compiler,tdelmas/closure-compiler,Dominator008/closure-compiler,vobruba-martin/closure-compiler,nawawi/closure-compiler,mprobst/closure-compiler,Pimm/closure-compiler,MatrixFrog/closure-compiler
|
/*
* Copyright 2008 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import static com.google.javascript.jscomp.CheckAccessControls.BAD_PACKAGE_PROPERTY_ACCESS;
import static com.google.javascript.jscomp.CheckAccessControls.BAD_PRIVATE_GLOBAL_ACCESS;
import static com.google.javascript.jscomp.CheckAccessControls.BAD_PRIVATE_PROPERTY_ACCESS;
import static com.google.javascript.jscomp.CheckAccessControls.BAD_PROPERTY_OVERRIDE_IN_FILE_WITH_FILEOVERVIEW_VISIBILITY;
import static com.google.javascript.jscomp.CheckAccessControls.BAD_PROTECTED_PROPERTY_ACCESS;
import static com.google.javascript.jscomp.CheckAccessControls.CONST_PROPERTY_DELETED;
import static com.google.javascript.jscomp.CheckAccessControls.CONST_PROPERTY_REASSIGNED_VALUE;
import static com.google.javascript.jscomp.CheckAccessControls.CONVENTION_MISMATCH;
import static com.google.javascript.jscomp.CheckAccessControls.DEPRECATED_CLASS;
import static com.google.javascript.jscomp.CheckAccessControls.DEPRECATED_CLASS_REASON;
import static com.google.javascript.jscomp.CheckAccessControls.DEPRECATED_NAME;
import static com.google.javascript.jscomp.CheckAccessControls.DEPRECATED_NAME_REASON;
import static com.google.javascript.jscomp.CheckAccessControls.DEPRECATED_PROP;
import static com.google.javascript.jscomp.CheckAccessControls.DEPRECATED_PROP_REASON;
import static com.google.javascript.jscomp.CheckAccessControls.EXTEND_FINAL_CLASS;
import static com.google.javascript.jscomp.CheckAccessControls.PRIVATE_OVERRIDE;
import static com.google.javascript.jscomp.CheckAccessControls.VISIBILITY_MISMATCH;
import com.google.common.collect.ImmutableList;
/**
* Tests for {@link CheckAccessControls}.
*
* @author nicksantos@google.com (Nick Santos)
*/
public final class CheckAccessControlsTest extends TypeICompilerTestCase {
private static final DiagnosticGroup NTI_CONST =
new DiagnosticGroup(
GlobalTypeInfo.CONST_WITHOUT_INITIALIZER,
GlobalTypeInfo.COULD_NOT_INFER_CONST_TYPE,
GlobalTypeInfo.MISPLACED_CONST_ANNOTATION,
NewTypeInference.CONST_REASSIGNED,
NewTypeInference.CONST_PROPERTY_REASSIGNED,
NewTypeInference.CONST_PROPERTY_DELETED);
public CheckAccessControlsTest() {
super(CompilerTypeTestCase.DEFAULT_EXTERNS);
parseTypeInfo = true;
enableClosurePass();
enableRewriteClosureCode();
}
@Override
protected int getNumRepetitions() {
return 1;
}
@Override
protected CompilerPass getProcessor(final Compiler compiler) {
return new CheckAccessControls(compiler, true);
}
@Override
protected CompilerOptions getOptions() {
CompilerOptions options = super.getOptions();
options.setWarningLevel(DiagnosticGroups.ACCESS_CONTROLS, CheckLevel.ERROR);
options.setWarningLevel(DiagnosticGroups.CONSTANT_PROPERTY, CheckLevel.ERROR);
// Disable NTI's native const checks so as to suppress duplicate warnings that
// prevent us from testing the const checks of CheckAccessControls itself.
options.setWarningLevel(NTI_CONST, CheckLevel.OFF);
return options;
}
private void testDepName(String js, String errorMessage) {
test(js, null, DEPRECATED_NAME_REASON, null, errorMessage);
}
private void testDepProp(String js, String errorMessage) {
test(js, null, DEPRECATED_PROP_REASON, null, errorMessage);
}
private void testDepClass(String js, String errorMessage) {
test(js, null, DEPRECATED_CLASS_REASON, null, errorMessage);
}
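// The three helpers above route through test(js, null, <DEPRECATED_*_REASON>,
// null, errorMessage), so each call checks both the diagnostic kind and the
// exact human-readable message reported by CheckAccessControls.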
public void testDeprecatedFunctionNoReason() {
testError("/** @deprecated */ function f() {} function g() { f(); }", DEPRECATED_NAME);
}
public void testDeprecatedFunction() {
testDepName(
"/** @deprecated Some Reason */ function f() {} function g() { f(); }",
"Variable f has been deprecated: Some Reason");
}
public void testWarningOnDeprecatedConstVariable() {
testDepName(
"/** @deprecated Another reason */ var f = 4; function g() { alert(f); }",
"Variable f has been deprecated: Another reason");
}
public void testThatNumbersArentDeprecated() {
testSame("/** @deprecated */ var f = 4; var h = 3; function g() { alert(h); }");
}
public void testDeprecatedFunctionVariable() {
testDepName(
"/** @deprecated I like g... */ var f = function() {}; function g() { f(); }",
"Variable f has been deprecated: I like g...");
}
public void testNoWarningInGlobalScope() {
testSame("var goog = {}; goog.makeSingleton = function(x) {};"
+ "/** @deprecated */ function f() {} goog.makeSingleton(f);");
}
public void testNoWarningInGlobalScopeForCall() {
testDepName(
"/** @deprecated Some global scope */ function f() {} f();",
"Variable f has been deprecated: Some global scope");
}
public void testNoWarningInDeprecatedFunction() {
testSame("/** @deprecated */ function f() {} /** @deprecated */ function g() { f(); }");
}
public void testWarningInNormalClass() {
testDepName(
"/** @deprecated FooBar */ function f() {}"
+ "/** @constructor */ var Foo = function() {}; "
+ "Foo.prototype.bar = function() { f(); }",
"Variable f has been deprecated: FooBar");
}
public void testWarningForProperty1() {
testDepProp(
"/** @constructor */ function Foo() {}"
+ "/** @deprecated A property is bad */ Foo.prototype.bar = 3;"
+ "Foo.prototype.baz = function() { alert((new Foo()).bar); };",
"Property bar of type Foo has been deprecated: A property is bad");
}
public void testWarningForProperty2() {
testDepProp(
"/** @constructor */ function Foo() {}"
+ "/** @deprecated Zee prop, it is deprecated! */ Foo.prototype.bar = 3;"
+ "Foo.prototype.baz = function() { alert(this.bar); };",
"Property bar of type Foo has been deprecated: Zee prop, it is deprecated!");
}
public void testWarningForDeprecatedClass() {
testDepClass(
"/** @constructor \n* @deprecated Use the class 'Bar' */ function Foo() {} "
+ "function f() { new Foo(); }",
"Class Foo has been deprecated: Use the class 'Bar'");
}
public void testWarningForDeprecatedClassNoReason() {
testError(
"/** @constructor \n* @deprecated */ function Foo() {} " + "function f() { new Foo(); }",
DEPRECATED_CLASS);
}
public void testNoWarningForDeprecatedClassInstance() {
testSame("/** @constructor \n * @deprecated */ function Foo() {} "
+ "/** @param {Foo} x */ function f(x) { return x; }");
}
public void testWarningForDeprecatedSuperClass() {
testDepClass(
"/** @constructor \n * @deprecated Superclass to the rescue! */ function Foo() {} "
+ "/** @constructor \n * @extends {Foo} */ function SubFoo() {}"
+ "function f() { new SubFoo(); }",
"Class SubFoo has been deprecated: Superclass to the rescue!");
}
public void testWarningForDeprecatedSuperClass2() {
testDepClass(
"/** @constructor \n * @deprecated Its only weakness is Kryptoclass */ function Foo() {} "
+ "/** @const */ var namespace = {}; "
+ "/** @constructor \n * @extends {Foo} */ "
+ "namespace.SubFoo = function() {}; "
+ "function f() { new namespace.SubFoo(); }",
"Class namespace.SubFoo has been deprecated: Its only weakness is Kryptoclass");
}
public void testWarningForPrototypeProperty() {
// TODO(aravindpg): in NTI the string representation of prototype object types is less than
// ideal due to the way NTI represents them. Fix if possible.
String js =
"/** @constructor */ function Foo() {}"
+ "/** @deprecated It is now in production, use that model... */ Foo.prototype.bar = 3;"
+ "Foo.prototype.baz = function() { alert(Foo.prototype.bar); };";
this.mode = TypeInferenceMode.OTI_ONLY;
testDepProp(
js,
"Property bar of type Foo.prototype has been deprecated:"
+ " It is now in production, use that model...");
this.mode = TypeInferenceMode.NTI_ONLY;
testDepProp(
js,
"Property bar of type Object{bar:?, baz:function(this:Foo):?} has been deprecated:"
+ " It is now in production, use that model...");
}
public void testNoWarningForNumbers() {
testSame("/** @constructor */ function Foo() {}"
+ "/** @deprecated */ Foo.prototype.bar = 3;"
+ "Foo.prototype.baz = function() { alert(3); };");
}
public void testWarningForMethod1() {
testDepProp(
"/** @constructor */ function Foo() {}"
+ "/** @deprecated There is a madness to this method */"
+ "Foo.prototype.bar = function() {};"
+ "Foo.prototype.baz = function() { this.bar(); };",
"Property bar of type Foo has been deprecated: There is a madness to this method");
}
public void testWarningForMethod2() {
testDepProp(
"/** @constructor */ function Foo() {}"
+ "/** @deprecated Stop the ringing! */ Foo.prototype.bar;"
+ "Foo.prototype.baz = function() { this.bar(); };",
"Property bar of type Foo has been deprecated: Stop the ringing!");
}
public void testNoWarningInDeprecatedClass() {
testSame("/** @deprecated */ function f() {} "
+ "/** @constructor \n * @deprecated */ "
+ "var Foo = function() {}; "
+ "Foo.prototype.bar = function() { f(); }");
}
public void testNoWarningOnDeclaration() {
testSame("/** @constructor */ function F() {\n"
+ " /**\n"
+ " * @type {number}\n"
+ " * @deprecated Use something else.\n"
+ " */\n"
+ " this.code;\n"
+ "}");
}
public void testNoWarningInDeprecatedClass2() {
testSame("/** @deprecated */ function f() {} "
+ "/** @constructor \n * @deprecated */ "
+ "var Foo = function() {}; "
+ "Foo.bar = function() { f(); }");
}
public void testNoWarningInDeprecatedStaticMethod() {
testSame("/** @deprecated */ function f() {} "
+ "/** @constructor */ "
+ "var Foo = function() {}; "
+ "/** @deprecated */ Foo.bar = function() { f(); }");
}
public void testWarningInStaticMethod() {
testDepName(
"/** @deprecated crazy! */ function f() {} "
+ "/** @constructor */ "
+ "var Foo = function() {}; "
+ "Foo.bar = function() { f(); }",
"Variable f has been deprecated: crazy!");
}
public void testDeprecatedObjLitKey() {
testDepProp(
"/** @const */ var f = {};"
+ "/** @deprecated It is literally not used anymore */ f.foo = 3;"
+ "function g() { return f.foo; }",
"Property foo of type f has been deprecated: It is literally not used anymore");
}
public void testWarningForSubclassMethod() {
testDepProp(
"/** @constructor */ function Foo() {}"
+ "Foo.prototype.bar = function() {};"
+ "/** @constructor \n * @extends {Foo} */ function SubFoo() {}"
+ "/** @deprecated I have a parent class! */ SubFoo.prototype.bar = function() {};"
+ "function f() { (new SubFoo()).bar(); };",
"Property bar of type SubFoo has been deprecated: I have a parent class!");
}
public void testWarningForSuperClassWithDeprecatedSubclassMethod() {
testSame("/** @constructor */ function Foo() {}"
+ "Foo.prototype.bar = function() {};"
+ "/** @constructor \n * @extends {Foo} */ function SubFoo() {}"
+ "/** @deprecated \n * @override */ SubFoo.prototype.bar = "
+ "function() {};"
+ "function f() { (new Foo()).bar(); };");
}
public void testWarningForSuperclassMethod() {
testDepProp(
"/** @constructor */ function Foo() {}"
+ "/** @deprecated I have a child class! */ Foo.prototype.bar = function() {};"
+ "/** @constructor \n * @extends {Foo} */ function SubFoo() {}"
+ "SubFoo.prototype.bar = function() {};"
+ "function f() { (new SubFoo()).bar(); };",
"Property bar of type SubFoo has been deprecated: I have a child class!");
}
public void testWarningForSuperclassMethod2() {
testDepProp(
"/** @constructor */ function Foo() {}"
+ "/** @deprecated I have another child class... \n* @protected */"
+ "Foo.prototype.bar = function() {};"
+ "/** @constructor \n * @extends {Foo} */ function SubFoo() {}"
+ "/** @protected */SubFoo.prototype.bar = function() {};"
+ "function f() { (new SubFoo()).bar(); };",
"Property bar of type SubFoo has been deprecated: I have another child class...");
}
public void testWarningForBind() {
// NTI reports NTI_REDECLARED_PROPERTY here, which is as intended. If this were a new
// property and not the existing `bind`, then we'd report the deprecation warning as expected
// (see testAutoboxedDeprecatedProperty and testAutoboxedPrivateProperty).
this.mode = TypeInferenceMode.OTI_ONLY;
testDepProp(
"/** @deprecated I'm bound to this method... */ Function.prototype.bind = function() {};"
+ "(function() {}).bind();",
"Property bind of type function has been deprecated: I'm bound to this method...");
}
public void testWarningForDeprecatedClassInGlobalScope() {
testDepClass(
"/** @constructor \n * @deprecated I'm a very worldly object! */ var Foo = function() {};"
+ "new Foo();",
"Class Foo has been deprecated: I'm a very worldly object!");
}
public void testNoWarningForPrototypeCopying() {
testSame("/** @constructor */ var Foo = function() {};"
+ "Foo.prototype.bar = function() {};"
+ "/** @deprecated */ Foo.prototype.baz = Foo.prototype.bar;"
+ "(new Foo()).bar();");
}
public void testNoWarningOnDeprecatedPrototype() {
// This used to cause an NPE.
testSame("/** @constructor */ var Foo = function() {};"
+ "/** @deprecated */ Foo.prototype = {};"
+ "Foo.prototype.bar = function() {};");
}
public void testPrivateAccessForNames() {
testSame("/** @private */ function foo_() {}; foo_();");
testError(new String[] {"/** @private */ function foo_() {};", "foo_();"},
BAD_PRIVATE_GLOBAL_ACCESS);
}
public void testPrivateAccessForNames2() {
// Private by convention
testSame("function foo_() {}; foo_();");
testError(new String[] {"function foo_() {};", "foo_();"}, BAD_PRIVATE_GLOBAL_ACCESS);
}
public void testPrivateAccessForProperties1() {
testSame("/** @constructor */ function Foo() {}"
+ "/** @private */ Foo.prototype.bar_ = function() {};"
+ "Foo.prototype.baz = function() { this.bar_(); }; (new Foo).bar_();");
}
public void testPrivateAccessForProperties2() {
testSame(new String[] {
"/** @constructor */ function Foo() {}",
"/** @private */ Foo.prototype.bar_ = function() {};"
+ "Foo.prototype.baz = function() { this.bar_(); }; (new Foo).bar_();"});
}
public void testPrivateAccessForProperties3() {
testSame(new String[] {
"/** @constructor */ function Foo() {}"
+ "/** @private */ Foo.prototype.bar_ = function() {}; (new Foo).bar_();",
"Foo.prototype.baz = function() { this.bar_(); };"});
}
public void testPrivateAccessForProperties4() {
// If a prototype property is defined via a computed access in a separate file from the
// constructor itself, then when running with NTI we fail to recognize that property as being a
// prototype property. This is enough of a corner case that we are fine with allowing it.
// If they are in the same file then things work as expected
// (see testPrivateAccessForProperties4b).
this.mode = TypeInferenceMode.OTI_ONLY;
testSame(new String[] {
"/** @constructor */ function Foo() {}"
+ "/** @private */ Foo.prototype.bar_ = function() {};",
"Foo.prototype['baz'] = function() { (new Foo()).bar_(); };"});
}
public void testPrivateAccessForProperties4a() {
// Identical to 4 except the computed access
testSame(new String[] {
"/** @constructor */ function Foo() {}"
+ "/** @private */ Foo.prototype.bar_ = function() {};",
"Foo.prototype.baz = function() { (new Foo()).bar_(); };"});
}
public void testPrivateAccessForProperties4b() {
testSame(
"/** @constructor */ function Foo() {}"
+ "/** @private */ Foo.prototype.bar_ = function() {};"
+ "Foo.prototype['baz'] = function() { (new Foo()).bar_(); };");
}
public void testPrivateAccessForProperties5() {
test(
new String[] {
LINE_JOINER.join(
"/** @constructor */",
"function Parent () {",
" /** @private */",
" this.prop = 'foo';",
"};"),
LINE_JOINER.join(
"/**",
" * @constructor",
" * @extends {Parent}",
" */",
"function Child() {",
" this.prop = 'asdf';",
"}",
"Child.prototype = new Parent();")
},
null,
BAD_PRIVATE_PROPERTY_ACCESS,
null,
"Access to private property prop of Parent not allowed here.");
}
public void testPrivateAccessForProperties6() {
test(
new String[] {
LINE_JOINER.join(
"goog.provide('x.y.z.Parent');",
"",
"/** @constructor */",
"x.y.z.Parent = function() {",
" /** @private */",
" this.prop = 'foo';",
"};"),
LINE_JOINER.join(
"goog.require('x.y.z.Parent');",
"",
"/**",
" * @constructor",
" * @extends {x.y.z.Parent}",
" */",
"function Child() {",
" this.prop = 'asdf';",
"}",
"Child.prototype = new x.y.z.Parent();")
},
null,
BAD_PRIVATE_PROPERTY_ACCESS,
null,
"Access to private property prop of x.y.z.Parent not allowed here.");
}
public void testPrivateAccess_googModule() {
test(
new String[] {
LINE_JOINER.join(
"goog.module('example.one');",
"/** @constructor */ function C() {};",
"/** @private */ C.prototype.m = function() {};",
"exports = C;"),
LINE_JOINER.join(
"goog.module('example.two');",
"var one = goog.require('example.one');",
"(new one()).m();"),
},
null,
BAD_PRIVATE_PROPERTY_ACCESS,
null,
// TODO(tbreisacher): The type name in the error message should be "example.one" instead of
// module$exports$example$one
"Access to private property m of module$exports$example$one not allowed here.");
}
public void testNoPrivateAccessForProperties1() {
testError(new String[] {
"/** @constructor */ function Foo() {} (new Foo).bar_();",
"/** @private */ Foo.prototype.bar_ = function() {};"
+ "Foo.prototype.baz = function() { this.bar_(); };"},
BAD_PRIVATE_PROPERTY_ACCESS);
}
public void testNoPrivateAccessForProperties2() {
testError(new String[] {
"/** @constructor */ function Foo() {} "
+ "/** @private */ Foo.prototype.bar_ = function() {};"
+ "Foo.prototype.baz = function() { this.bar_(); };",
"(new Foo).bar_();"},
BAD_PRIVATE_PROPERTY_ACCESS);
}
public void testNoPrivateAccessForProperties3() {
testError(new String[] {
"/** @constructor */ function Foo() {} "
+ "/** @private */ Foo.prototype.bar_ = function() {};",
"/** @constructor */ function OtherFoo() { (new Foo).bar_(); }"},
BAD_PRIVATE_PROPERTY_ACCESS);
}
public void testNoPrivateAccessForProperties4() {
testError(new String[] {
"/** @constructor */ function Foo() {} "
+ "/** @private */ Foo.prototype.bar_ = function() {};",
"/** @constructor \n * @extends {Foo} */ "
+ "function SubFoo() { this.bar_(); }"},
BAD_PRIVATE_PROPERTY_ACCESS);
}
public void testNoPrivateAccessForProperties5() {
testError(new String[] {
"/** @constructor */ function Foo() {} "
+ "/** @private */ Foo.prototype.bar_ = function() {};",
"/** @constructor \n * @extends {Foo} */ "
+ "function SubFoo() {};"
+ "SubFoo.prototype.baz = function() { this.bar_(); }"},
BAD_PRIVATE_PROPERTY_ACCESS);
}
public void testNoPrivateAccessForProperties6() {
// Overriding a private property with a non-private property
// in a different file causes problems.
testError(new String[] {
"/** @constructor */ function Foo() {} "
+ "/** @private */ Foo.prototype.bar_ = function() {};",
"/** @constructor \n * @extends {Foo} */ "
+ "function SubFoo() {};"
+ "SubFoo.prototype.bar_ = function() {};"},
BAD_PRIVATE_PROPERTY_ACCESS);
}
public void testNoPrivateAccessForProperties6a() {
// Same as above, except with namespaced constructors
testError(new String[] {
"/** @const */ var ns = {};"
+ "/** @constructor */ ns.Foo = function() {}; "
+ "/** @private */ ns.Foo.prototype.bar_ = function() {};",
"/** @constructor \n * @extends {ns.Foo} */ "
+ "ns.SubFoo = function() {};"
+ "ns.SubFoo.prototype.bar_ = function() {};"},
BAD_PRIVATE_PROPERTY_ACCESS);
}
public void testNoPrivateAccessForProperties7() {
// It's OK to override a private property with a non-private property
// in the same file, but you'll get yelled at when you try to use it.
testError(new String[] {
"/** @constructor */ function Foo() {} "
+ "/** @private */ Foo.prototype.bar_ = function() {};"
+ "/** @constructor \n * @extends {Foo} */ "
+ "function SubFoo() {};"
+ "SubFoo.prototype.bar_ = function() {};",
"SubFoo.prototype.baz = function() { this.bar_(); }"},
BAD_PRIVATE_PROPERTY_ACCESS);
}
public void testNoPrivateAccessForProperties8() {
testError(new String[] {
"/** @constructor */ function Foo() { /** @private */ this.bar_ = 3; }",
"/** @constructor \n * @extends {Foo} */ "
+ "function SubFoo() { /** @private */ this.bar_ = 3; };"},
PRIVATE_OVERRIDE);
}
public void testNoPrivateAccessForProperties9() {
testError(new String[] {
"/** @constructor */ function Foo() {}"
+ "Foo.prototype = {"
+ "/** @private */ bar_: 3"
+ "}",
"new Foo().bar_;"},
BAD_PRIVATE_PROPERTY_ACCESS);
}
public void testNoPrivateAccessForProperties10() {
testError(new String[] {
"/** @constructor */ function Foo() {}"
+ "Foo.prototype = {"
+ "/** @private */ bar_: function() {}"
+ "}",
"new Foo().bar_();"},
BAD_PRIVATE_PROPERTY_ACCESS);
}
public void testNoPrivateAccessForProperties11() {
testError(new String[] {
"/** @constructor */ function Foo() {}"
+ "Foo.prototype = {"
+ "/** @private */ get bar_() { return 1; }"
+ "}",
"var a = new Foo().bar_;"},
BAD_PRIVATE_PROPERTY_ACCESS);
}
public void testNoPrivateAccessForProperties12() {
testError(new String[] {
"/** @constructor */ function Foo() {}"
+ "Foo.prototype = {"
+ "/** @private */ set bar_(x) { this.barValue = x; }"
+ "}",
"new Foo().bar_ = 1;"},
BAD_PRIVATE_PROPERTY_ACCESS);
}
public void testNoPrivateAccessForNamespaces() {
testError(new String[] {
"/** @const */ var foo = {};\n"
+ "/** @private */ foo.bar_ = function() {};",
"foo.bar_();"},
BAD_PRIVATE_PROPERTY_ACCESS);
}
public void testProtectedAccessForProperties1() {
testSame(new String[] {
"/** @constructor */ function Foo() {}"
+ "/** @protected */ Foo.prototype.bar = function() {};"
+ "(new Foo).bar();",
"Foo.prototype.baz = function() { this.bar(); };"});
}
public void testProtectedAccessForProperties2() {
testSame(new String[] {
"/** @constructor */ function Foo() {}"
+ "/** @protected */ Foo.prototype.bar = function() {};"
+ "(new Foo).bar();",
"/** @constructor \n * @extends {Foo} */"
+ "function SubFoo() { this.bar(); }"});
}
public void testProtectedAccessForProperties3() {
testSame(new String[] {
"/** @constructor */ function Foo() {}"
+ "/** @protected */ Foo.prototype.bar = function() {};"
+ "(new Foo).bar();",
"/** @constructor \n * @extends {Foo} */"
+ "function SubFoo() { }"
+ "SubFoo.baz = function() { (new Foo).bar(); }"});
}
public void testProtectedAccessForProperties4() {
testSame(new String[] {
"/** @constructor */ function Foo() {}"
+ "/** @protected */ Foo.bar = function() {};",
"/** @constructor \n * @extends {Foo} */"
+ "function SubFoo() { Foo.bar(); }"});
}
public void testProtectedAccessForProperties5() {
testSame(new String[] {
"/** @constructor */ function Foo() {}"
+ "/** @protected */ Foo.prototype.bar = function() {};"
+ "(new Foo).bar();",
"/** @constructor \n * @extends {Foo} */"
+ "var SubFoo = function() { this.bar(); }"});
}
public void testProtectedAccessForProperties6() {
testSame(new String[] {
"/** @const */ var goog = {};"
+ "/** @constructor */ goog.Foo = function() {};"
+ "/** @protected */ goog.Foo.prototype.bar = function() {};",
"/** @constructor \n * @extends {goog.Foo} */"
+ "goog.SubFoo = function() { this.bar(); };"});
}
public void testProtectedAccessForProperties7() {
testSame(new String[] {
"/** @constructor */ var Foo = function() {};"
+ "Foo.prototype = { /** @protected */ bar: function() {} }",
"/** @constructor \n * @extends {Foo} */"
+ "var SubFoo = function() { this.bar(); };"
+ "SubFoo.prototype = { moo: function() { this.bar(); }};"});
}
public void testProtectedAccessForProperties8() {
testSame(new String[] {
"/** @constructor */ var Foo = function() {};"
+ "Foo.prototype = { /** @protected */ bar: function() {} }",
"/** @constructor \n * @extends {Foo} */"
+ "var SubFoo = function() {};"
+ "SubFoo.prototype = { get moo() { this.bar(); }};"});
}
public void testProtectedAccessForProperties9() {
testSame(new String[] {
"/** @constructor */ var Foo = function() {};"
+ "Foo.prototype = { /** @protected */ bar: function() {} }",
"/** @constructor \n * @extends {Foo} */"
+ "var SubFoo = function() {};"
+ "SubFoo.prototype = { set moo(val) { this.x = this.bar(); }};"});
}
public void testProtectedAccessForProperties10() {
// NTI throws NTI_CTOR_IN_DIFFERENT_SCOPE
testSame(ImmutableList.of(
SourceFile.fromCode(
"foo.js",
"/** @constructor */ var Foo = function() {};"
+ "/** @protected */ Foo.prototype.bar = function() {};"),
SourceFile.fromCode(
"sub_foo.js",
"/** @constructor @extends {Foo} */"
+ "var SubFoo = function() {};"
+ "(/** @suppress {newCheckTypes} */ function() {"
+ "SubFoo.prototype.baz = function() { this.bar(); }"
+ "})();")));
}
public void testProtectedAccessForProperties11() {
test(ImmutableList.of(
SourceFile.fromCode(
"foo.js",
LINE_JOINER.join(
"goog.provide('Foo');",
"/** @interface */ Foo = function() {};",
"/** @protected */ Foo.prop = {};")),
SourceFile.fromCode(
"bar.js",
LINE_JOINER.join(
"goog.require('Foo');",
"/** @constructor @implements {Foo} */",
"function Bar() { Foo.prop; };"))),
null, null);
}
public void testProtectedAccessForProperties12() {
test(ImmutableList.of(
SourceFile.fromCode(
"a.js",
LINE_JOINER.join(
"goog.provide('A');",
"/** @constructor */",
"var A = function() {",
" /**",
" * @type {?String}",
" * @protected",
" */",
" this.prop;",
"}")),
SourceFile.fromCode(
"b.js",
LINE_JOINER.join(
"goog.require('A');",
"/**",
" * @constructor",
" * @extends {A}",
" */",
"var B = function() {",
" this.prop.length;",
" this.prop.length;",
"};"))),
null, null);
}
// FYI: Java warns for the b1.method access in c.js.
// Instead of following that in NTI, we chose to follow the behavior of
// the old JSCompiler type checker, to make migration easier.
public void testProtectedAccessForProperties13() {
test(ImmutableList.of(
SourceFile.fromCode(
"a.js",
LINE_JOINER.join(
"goog.provide('A');",
"/** @constructor */",
"var A = function() {}",
"/** @protected */",
"A.prototype.method = function() {};")),
SourceFile.fromCode(
"b1.js",
LINE_JOINER.join(
"goog.require('A');",
"goog.provide('B1');",
"/** @constructor @extends {A} */",
"var B1 = function() {};",
"/** @override */",
"B1.prototype.method = function() {};")),
SourceFile.fromCode(
"b2.js",
LINE_JOINER.join(
"goog.require('A');",
"goog.provide('B2');",
"/** @constructor @extends {A} */",
"var B2 = function() {};",
"/** @override */",
"B2.prototype.method = function() {};")),
SourceFile.fromCode(
"c.js",
LINE_JOINER.join(
"goog.require('B1');",
"goog.require('B2');",
"/**",
" * @param {!B1} b1",
" * @constructor",
" * @extends {B2}",
" */",
"var C = function(b1) {",
" var x = b1.method();",
"};"))),
null, null);
}
public void testNoProtectedAccessForProperties1() {
testError(new String[] {
"/** @constructor */ function Foo() {} "
+ "/** @protected */ Foo.prototype.bar = function() {};",
"(new Foo).bar();"},
BAD_PROTECTED_PROPERTY_ACCESS);
}
public void testNoProtectedAccessForProperties2() {
testError(new String[] {
"/** @constructor */ function Foo() {} "
+ "/** @protected */ Foo.prototype.bar = function() {};",
"/** @constructor */ function OtherFoo() { (new Foo).bar(); }"},
BAD_PROTECTED_PROPERTY_ACCESS);
}
public void testNoProtectedAccessForProperties3() {
testError(new String[] {
"/** @constructor */ function Foo() {} "
+ "/** @constructor \n * @extends {Foo} */ "
+ "function SubFoo() {}"
+ "/** @protected */ SubFoo.prototype.bar = function() {};",
"/** @constructor \n * @extends {Foo} */ "
+ "function SubberFoo() { (new SubFoo).bar(); }"},
BAD_PROTECTED_PROPERTY_ACCESS);
}
public void testNoProtectedAccessForProperties4() {
testError(new String[] {
"/** @constructor */ function Foo() { (new SubFoo).bar(); } ",
"/** @constructor \n * @extends {Foo} */ "
+ "function SubFoo() {}"
+ "/** @protected */ SubFoo.prototype.bar = function() {};",
},
BAD_PROTECTED_PROPERTY_ACCESS);
}
public void testNoProtectedAccessForProperties5() {
testError(new String[] {
"/** @const */ var goog = {};"
+ "/** @constructor */ goog.Foo = function() {};"
+ "/** @protected */ goog.Foo.prototype.bar = function() {};",
"/** @constructor */"
+ "goog.NotASubFoo = function() { (new goog.Foo).bar(); };"},
BAD_PROTECTED_PROPERTY_ACCESS);
}
public void testNoProtectedAccessForProperties6() {
testError(new String[] {
"/** @constructor */ function Foo() {}"
+ "Foo.prototype = {"
+ "/** @protected */ bar: 3"
+ "}",
"new Foo().bar;"},
BAD_PROTECTED_PROPERTY_ACCESS);
}
public void testNoProtectedAccessForProperties7() {
testError(new String[] {
"/** @constructor */ function Foo() {}"
+ "Foo.prototype = {"
+ "/** @protected */ bar: function() {}"
+ "}",
"new Foo().bar();"},
BAD_PROTECTED_PROPERTY_ACCESS);
}
public void testPackagePrivateAccessForNames() {
test(
ImmutableList.of(
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/** @constructor */\n"
+ "function Parent() {\n"
+ "/** @package */\n"
+ "this.prop = 'foo';\n"
+ "}\n;"),
SourceFile.fromCode(
Compiler.joinPathParts("baz", "quux.js"),
"/**"
+ " * @constructor\n"
+ " * @extends {Parent}\n"
+ " */\n"
+ "function Child() {\n"
+ " this.prop = 'asdf';\n"
+ "}\n"
+ "Child.prototype = new Parent();")),
null, BAD_PACKAGE_PROPERTY_ACCESS);
}
public void testPackagePrivateAccessForProperties1() {
testSame("/** @constructor */ function Foo() {}"
+ "/** @package */ Foo.prototype.bar = function() {};"
+ "Foo.prototype.baz = function() { this.bar(); }; (new Foo).bar();");
}
public void testPackagePrivateAccessForProperties2() {
testSame(ImmutableList.of(
SourceFile.fromCode(Compiler.joinPathParts("foo", "bar.js"),
"/** @constructor */ function Foo() {}"),
SourceFile.fromCode(
Compiler.joinPathParts("baz", "quux.js"),
"/** @package */ Foo.prototype.bar = function() {};"
+ "Foo.prototype.baz = function() { this.bar(); }; (new Foo).bar();")));
}
public void testPackagePrivateAccessForProperties3() {
testSame(ImmutableList.of(
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/** @constructor */ function Foo() {}"
+ "/** @package */ Foo.prototype.bar = function() {}; (new Foo).bar();"),
SourceFile.fromCode(Compiler.joinPathParts("foo", "baz.js"),
"Foo.prototype.baz = function() { this.bar(); };")));
}
public void testPackagePrivateAccessForProperties4() {
testSame(ImmutableList.of(
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/** @constructor */ function Foo() {}"
+ "/** @package */ Foo.prototype.bar = function() {};"),
SourceFile.fromCode(
Compiler.joinPathParts("foo", "baz.js"),
"Foo.prototype['baz'] = function() { (new Foo()).bar(); };")));
}
public void testPackagePrivateAccessForProperties5() {
test(
ImmutableList.of(
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/** @constructor */\n"
+ "function Parent () {\n"
+ " /** @package */\n"
+ " this.prop = 'foo';\n"
+ "};"),
SourceFile.fromCode(
Compiler.joinPathParts("baz", "quux.js"),
"/**\n"
+ " * @constructor\n"
+ " * @extends {Parent}\n"
+ " */\n"
+ "function Child() {\n"
+ " this.prop = 'asdf';\n"
+ "}\n"
+ "Child.prototype = new Parent();")),
null, BAD_PACKAGE_PROPERTY_ACCESS);
}
public void testNoPackagePrivateAccessForProperties1() {
test(
ImmutableList.of(
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/** @constructor */ function Foo() {} (new Foo).bar();"),
SourceFile.fromCode(
Compiler.joinPathParts("baz", "quux.js"),
"/** @package */ Foo.prototype.bar = function() {};"
+ "Foo.prototype.baz = function() { this.bar(); };")),
null, BAD_PACKAGE_PROPERTY_ACCESS);
}
public void testNoPackagePrivateAccessForProperties2() {
test(
ImmutableList.of(
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/** @constructor */ function Foo() {} "
+ "/** @package */ Foo.prototype.bar = function() {};"
+ "Foo.prototype.baz = function() { this.bar(); };"),
SourceFile.fromCode(Compiler.joinPathParts("baz", "quux.js"), "(new Foo).bar();")),
null, BAD_PACKAGE_PROPERTY_ACCESS);
}
public void testNoPackagePrivateAccessForProperties3() {
test(
ImmutableList.of(
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/** @constructor */ function Foo() {} "
+ "/** @package */ Foo.prototype.bar = function() {};"),
SourceFile.fromCode(
Compiler.joinPathParts("baz", "quux.js"),
"/** @constructor */ function OtherFoo() { (new Foo).bar(); }")),
null, BAD_PACKAGE_PROPERTY_ACCESS);
}
public void testNoPackagePrivateAccessForProperties4() {
test(
ImmutableList.of(
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/** @constructor */ function Foo() {} "
+ "/** @package */ Foo.prototype.bar = function() {};"),
SourceFile.fromCode(
Compiler.joinPathParts("baz", "quux.js"),
"/** @constructor \n * @extends {Foo} */ "
+ "function SubFoo() { this.bar(); }")),
null, BAD_PACKAGE_PROPERTY_ACCESS);
}
public void testNoPackagePrivateAccessForNamespaces() {
test(
ImmutableList.of(
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/** @const */ var foo = {};\n"
+ "/** @package */ foo.bar = function() {};"),
SourceFile.fromCode(Compiler.joinPathParts("baz", "quux.js"), "foo.bar();")),
null, BAD_PACKAGE_PROPERTY_ACCESS);
}
public void testNoPackagePrivateAccessForProperties5() {
test(
ImmutableList.of(
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/** @constructor */ function Foo() {} "
+ "/** @package */ Foo.prototype.bar = function() {};"),
SourceFile.fromCode(
Compiler.joinPathParts("baz", "quux.js"),
"/** @constructor \n * @extends {Foo} */ "
+ "function SubFoo() {};"
+ "SubFoo.prototype.baz = function() { this.bar(); }")),
null, BAD_PACKAGE_PROPERTY_ACCESS);
}
public void testNoPackagePrivateAccessForProperties6() {
// Overriding a package-private property with a non-package-private property
// in a different file causes problems.
test(
ImmutableList.of(
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/** @constructor */ function Foo() {} "
+ "/** @package */ Foo.prototype.bar = function() {};"),
SourceFile.fromCode(
Compiler.joinPathParts("baz", "quux.js"),
"/** @constructor \n * @extends {Foo} */ "
+ "function SubFoo() {};"
+ "SubFoo.prototype.bar = function() {};")),
null, BAD_PACKAGE_PROPERTY_ACCESS);
}
public void testNoPackagePrivateAccessForProperties7() {
// It's OK to override a package-private property with a
// non-package-private property in the same file, but you'll get
// yelled at when you try to use it.
test(
ImmutableList.of(
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/** @constructor */ function Foo() {} "
+ "/** @package */ Foo.prototype.bar = function() {};"
+ "/** @constructor \n * @extends {Foo} */ "
+ "function SubFoo() {};"
+ "SubFoo.prototype.bar = function() {};"),
SourceFile.fromCode(
Compiler.joinPathParts("baz", "quux.js"),
"SubFoo.prototype.baz = function() { this.bar(); }")),
null, BAD_PACKAGE_PROPERTY_ACCESS);
}
public void
testOverrideWithoutVisibilityRedeclInFileWithFileOverviewVisibilityNotAllowed_OneFile() {
testError(
"/**\n"
+ "* @fileoverview\n"
+ "* @package\n"
+ "*/\n"
+ "/** @struct @constructor */\n"
+ "Foo = function() {};\n"
+ "/** @private */\n"
+ "Foo.prototype.privateMethod_ = function() {};\n"
+ "/** @struct @constructor @extends {Foo} */\n"
+ "Bar = function() {};\n"
+ "/** @override */\n"
+ "Bar.prototype.privateMethod_ = function() {};\n",
BAD_PROPERTY_OVERRIDE_IN_FILE_WITH_FILEOVERVIEW_VISIBILITY);
}
public void testNamespacedFunctionDoesNotNeedVisibilityRedeclInFileWithFileOverviewVisibility() {
testSame("/**\n"
+ " * @fileoverview\n"
+ " * @package\n"
+ " */\n"
+ "/** @return {string} */\n"
+ "foo.bar = function() { return 'asdf'; };");
}
public void
testOverrideWithoutVisibilityRedeclInFileWithFileOverviewVisibilityNotAllowed_TwoFiles() {
testError(new String[] {
"/** @struct @constructor */\n"
+ "Foo = function() {};\n"
+ "/** @protected */\n"
+ "Foo.prototype.protectedMethod = function() {};\n",
" /**\n"
+ "* @fileoverview \n"
+ "* @package\n"
+ "*/\n"
+ "/** @struct @constructor @extends {Foo} */\n"
+ "Bar = function() {};\n"
+ "/** @override */\n"
+ "Bar.prototype.protectedMethod = function() {};\n"},
BAD_PROPERTY_OVERRIDE_IN_FILE_WITH_FILEOVERVIEW_VISIBILITY);
}
public void testOverrideWithoutVisibilityRedeclInFileWithNoFileOverviewOk() {
testSame("/** @struct @constructor */\n"
+ "Foo = function() {};\n"
+ "/** @private */\n"
+ "Foo.prototype.privateMethod_ = function() {};\n"
+ "/** @struct @constructor @extends {Foo} */\n"
+ "Bar = function() {};\n"
+ "/** @override */\n"
+ "Bar.prototype.privateMethod_ = function() {};\n");
}
public void testOverrideWithoutVisibilityRedeclInFileWithNoFileOverviewVisibilityOk() {
testSame("/**\n"
+ " * @fileoverview\n"
+ " */\n"
+ "/** @struct @constructor */\n"
+ "Foo = function() {};\n"
+ "/** @private */\n"
+ "Foo.prototype.privateMethod_ = function() {};\n"
+ "/** @struct @constructor @extends {Foo} */\n"
+ "Bar = function() {};\n"
+ "/** @override */\n"
+ "Bar.prototype.privateMethod_ = function() {};\n");
}
public void testOverrideWithVisibilityRedeclInFileWithFileOverviewVisibilityOk_OneFile() {
testSame("/**\n"
+ " * @fileoverview\n"
+ " * @package\n"
+ " */\n"
+ "/** @struct @constructor */\n"
+ "Foo = function() {};\n"
+ "/** @private */\n"
+ "Foo.prototype.privateMethod_ = function() {};\n"
+ "/** @struct @constructor @extends {Foo} */\n"
+ "Bar = function() {};\n"
+ "/** @override @private */\n"
+ "Bar.prototype.privateMethod_ = function() {};\n");
}
public void testOverrideWithVisibilityRedeclInFileWithFileOverviewVisibilityOk_TwoFiles() {
testSame(new String[] {
"/** @struct @constructor */\n"
+ "Foo = function() {};\n"
+ "/** @protected */\n"
+ "Foo.prototype.protectedMethod = function() {};\n",
" /**\n"
+ "* @fileoverview\n"
+ "* @package\n"
+ "*/\n"
+ "/** @struct @constructor @extends {Foo} */\n"
+ "Bar = function() {};\n"
+ "/** @override @protected */\n"
+ "Bar.prototype.protectedMethod = function() {};\n"});
}
public void testPublicFileOverviewVisibilityDoesNotApplyToNameWithExplicitPackageVisibility() {
test(
ImmutableList.of(
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/**\n"
+ " * @fileoverview\n"
+ " * @public\n"
+ " */\n"
+ "/** @constructor @package */ function Foo() {};"),
SourceFile.fromCode(Compiler.joinPathParts("baz", "quux.js"), "new Foo();")),
null, BAD_PACKAGE_PROPERTY_ACCESS);
}
public void testPackageFileOverviewVisibilityDoesNotApplyToNameWithExplicitPublicVisibility() {
testSame(ImmutableList.of(
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/**\n"
+ " * @fileoverview\n"
+ " * @package\n"
+ " */\n"
+ "/** @constructor @public */ function Foo() {};"),
SourceFile.fromCode(Compiler.joinPathParts("baz", "quux.js"), "new Foo();")));
}
public void testPackageFileOverviewVisibilityAppliesToNameWithoutExplicitVisibility() {
test(
ImmutableList.of(
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/**\n"
+ " * @fileoverview\n"
+ " * @package\n"
+ " */\n"
+ "/** @constructor */\n"
+ "var Foo = function() {};\n"),
SourceFile.fromCode(Compiler.joinPathParts("baz", "quux.js"), "new Foo();")),
null, BAD_PACKAGE_PROPERTY_ACCESS);
}
public void
testPackageFileOverviewVisibilityDoesNotApplyToPropertyWithExplicitPublicVisibility() {
testSame(ImmutableList.of(
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/**\n"
+ " * @fileoverview\n"
+ " * @package\n"
+ " */\n"
+ "/** @constructor */\n"
+ "Foo = function() {};\n"
+ "/** @public */\n"
+ "Foo.prototype.bar = function() {};\n"),
SourceFile.fromCode(
Compiler.joinPathParts("baz", "quux.js"),
"var foo = new Foo();\n"
+ "foo.bar();")));
}
public void testFileoverviewVisibilityDoesNotApplyToGoogProvidedNamespace1() {
// Don't compare the generated JsDoc. It includes annotations we're not interested in,
// like @inherited.
compareJsDoc = false;
test(
ImmutableList.of(
SourceFile.fromCode("foo.js", "goog.provide('foo');"),
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/**\n"
+ " * @fileoverview\n"
+ " * @package\n"
+ " */\n"
+ "goog.provide('foo.bar');"),
SourceFile.fromCode("bar.js", "goog.require('foo')")),
ImmutableList.of(SourceFile.fromCode("foo.js", "var foo={};"),
SourceFile.fromCode(Compiler.joinPathParts("foo", "bar.js"), "foo.bar={};"),
SourceFile.fromCode("bar.js", "")),
null, null);
compareJsDoc = true;
}
public void testFileoverviewVisibilityDoesNotApplyToGoogProvidedNamespace2() {
// Don't compare the generated JsDoc. It includes annotations we're not interested in,
// like @inherited.
compareJsDoc = false;
test(
ImmutableList.of(
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/**\n"
+ " * @fileoverview\n"
+ " * @package\n"
+ " */\n"
+ "goog.provide('foo.bar');"),
SourceFile.fromCode("foo.js", "goog.provide('foo');"),
SourceFile.fromCode(
"bar.js",
"goog.require('foo');\n"
+ "var x = foo;")),
ImmutableList.of(SourceFile.fromCode(Compiler.joinPathParts("foo", "bar.js"),
"var foo={};foo.bar={};"),
SourceFile.fromCode("foo.js", ""), SourceFile.fromCode("bar.js", "var x=foo")),
null, null);
compareJsDoc = true;
}
public void testFileoverviewVisibilityDoesNotApplyToGoogProvidedNamespace3() {
// Don't compare the generated JsDoc. It includes annotations we're not interested in,
// like @inherited.
compareJsDoc = false;
test(
ImmutableList.of(
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/**\n"
+ " * @fileoverview\n"
+ " * @package\n"
+ " */\n"
+ "goog.provide('one.two');\n"
+ "one.two.three = function(){};"),
SourceFile.fromCode(
"baz.js",
"goog.require('one.two');\n"
+ "var x = one.two;")),
ImmutableList.of(
SourceFile.fromCode(Compiler.joinPathParts("foo", "bar.js"),
"var one={};one.two={};one.two.three=function(){};"),
SourceFile.fromCode("baz.js", "var x=one.two")),
null, null);
compareJsDoc = true;
}
public void testFileoverviewVisibilityDoesNotApplyToGoogProvidedNamespace4() {
test(
ImmutableList.of(
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/**\n"
+ " * @fileoverview\n"
+ " * @package\n"
+ " */\n"
+ "goog.provide('one.two');\n"
+ "one.two.three = function(){};"),
SourceFile.fromCode(
"baz.js",
"goog.require('one.two');\n"
+ "var x = one.two.three();")),
null, BAD_PACKAGE_PROPERTY_ACCESS);
}
public void
testPublicFileOverviewVisibilityDoesNotApplyToPropertyWithExplicitPackageVisibility() {
test(
ImmutableList.of(
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/**\n"
+ " * @fileoverview\n"
+ " * @public\n"
+ " */\n"
+ "/** @constructor */\n"
+ "Foo = function() {};\n"
+ "/** @package */\n"
+ "Foo.prototype.bar = function() {};\n"),
SourceFile.fromCode(
Compiler.joinPathParts("baz", "quux.js"),
"var foo = new Foo();\n"
+ "foo.bar();")),
null, BAD_PACKAGE_PROPERTY_ACCESS);
}
public void testPublicFileOverviewVisibilityAppliesToPropertyWithoutExplicitVisibility() {
testSame(ImmutableList.of(
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/**\n"
+ " * @fileoverview\n"
+ " * @public\n"
+ " */\n"
+ "/** @constructor */\n"
+ "Foo = function() {};\n"
+ "Foo.prototype.bar = function() {};\n"),
SourceFile.fromCode(
Compiler.joinPathParts("baz", "quux.js"),
"var foo = new Foo();\n"
+ "foo.bar();")));
}
public void testPackageFileOverviewVisibilityAppliesToPropertyWithoutExplicitVisibility() {
test(
ImmutableList.of(
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/**\n"
+ " * @fileoverview\n"
+ " * @package\n"
+ " */\n"
+ "/** @constructor */\n"
+ "Foo = function() {};\n"
+ "Foo.prototype.bar = function() {};\n"),
SourceFile.fromCode(
Compiler.joinPathParts("baz", "quux.js"),
"var foo = new Foo();\n"
+ "foo.bar();")),
null, BAD_PACKAGE_PROPERTY_ACCESS);
}
public void testFileOverviewVisibilityComesFromDeclarationFileNotUseFile() {
test(
ImmutableList.of(
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/**\n"
+ " * @fileoverview\n"
+ " * @package\n"
+ " */\n"
+ "/** @constructor */\n"
+ "Foo = function() {};\n"
+ "Foo.prototype.bar = function() {};\n"),
SourceFile.fromCode(
Compiler.joinPathParts("baz", "quux.js"),
"/**\n"
+ " * @fileoverview\n"
+ " * @public\n"
+ " */\n"
+ "var foo = new Foo();\n"
+ "foo.bar();")),
null, BAD_PACKAGE_PROPERTY_ACCESS);
}
public void testNoExceptionsWithBadConstructors1() {
testSame(new String[] {"function Foo() { (new SubFoo).bar(); } "
+ "/** @constructor */ function SubFoo() {}"
+ "/** @protected */ SubFoo.prototype.bar = function() {};"});
}
public void testNoExceptionsWithBadConstructors2() {
testSame(new String[] {"/** @constructor */ function Foo() {} "
+ "Foo.prototype.bar = function() {};"
+ "/** @constructor */"
+ "function SubFoo() {}"
+ "/** @protected */ "
+ "SubFoo.prototype.bar = function() { (new Foo).bar(); };"});
}
public void testGoodOverrideOfProtectedProperty() {
testSame(new String[] {
"/** @constructor */ function Foo() { } "
+ "/** @protected */ Foo.prototype.bar = function() {};",
"/** @constructor \n * @extends {Foo} */ "
+ "function SubFoo() {}"
+ "/** @inheritDoc */ SubFoo.prototype.bar = function() {};",
});
}
public void testBadOverrideOfProtectedProperty() {
testError(new String[] {
"/** @constructor */ function Foo() { } "
+ "/** @protected */ Foo.prototype.bar = function() {};",
"/** @constructor \n * @extends {Foo} */ "
+ "function SubFoo() {}"
+ "/** @private */ SubFoo.prototype.bar = function() {};",
},
VISIBILITY_MISMATCH);
}
public void testBadOverrideOfPrivateProperty() {
testError(new String[] {
"/** @constructor */ function Foo() { } "
+ "/** @private */ Foo.prototype.bar = function() {};",
"/** @constructor \n * @extends {Foo} */ "
+ "function SubFoo() {}"
+ "/** @protected */ SubFoo.prototype.bar = function() {};",
},
PRIVATE_OVERRIDE);
testSame(new String[] {
"/** @constructor */ function Foo() { } "
+ "/** @private */ Foo.prototype.bar = function() {};",
"/** @constructor \n * @extends {Foo} */ "
+ "function SubFoo() {}"
+ "/** @override \n *@suppress{visibility} */\n"
+ " SubFoo.prototype.bar = function() {};",
});
}
public void testAccessOfStaticMethodOnPrivateConstructor() {
testSame(new String[] {
"/** @constructor \n * @private */ function Foo() { } "
+ "Foo.create = function() { return new Foo(); };",
"Foo.create()",
});
}
public void testAccessOfStaticMethodOnPrivateQualifiedConstructor() {
testSame(new String[] {
"/** @const */ var goog = {};"
+ "/** @constructor \n * @private */ goog.Foo = function() { }; "
+ "goog.Foo.create = function() { return new goog.Foo(); };",
"goog.Foo.create()",
});
}
public void testInstanceofOfPrivateConstructor() {
testSame(new String[] {
"/** @const */ var goog = {};"
+ "/** @constructor \n * @private */ goog.Foo = function() { }; "
+ "goog.Foo.create = function() { return new goog.Foo(); };",
"goog instanceof goog.Foo",
});
}
public void testOkAssignmentOfDeprecatedProperty() {
testSame("/** @constructor */ function Foo() {"
+ " /** @deprecated */ this.bar = 3;"
+ "}");
}
public void testBadReadOfDeprecatedProperty() {
testDepProp(
"/** @constructor */ function Foo() {"
+ " /** @deprecated GRR */ this.bar = 3;"
+ " this.baz = this.bar;"
+ "}",
"Property bar of type Foo has been deprecated: GRR");
}
public void testAutoboxedDeprecatedProperty() {
test(DEFAULT_EXTERNS,
"/** @deprecated %s */ String.prototype.prop;"
+ "function f() { return 'x'.prop; }",
(String) null, DEPRECATED_PROP_REASON, null);
}
public void testAutoboxedPrivateProperty() {
test(
// externs
DEFAULT_EXTERNS + "/** @private */ String.prototype.prop;",
"function f() { return 'x'.prop; }",
(String) null, // no output
BAD_PRIVATE_PROPERTY_ACCESS, null);
}
public void testNullableDeprecatedProperty() {
testError(
"/** @constructor */ function Foo() {}"
+ "/** @deprecated */ Foo.prototype.length;"
+ "/** @param {?Foo} x */ function f(x) { return x.length; }",
DEPRECATED_PROP);
}
public void testNullablePrivateProperty() {
testError(new String[] {
"/** @constructor */ function Foo() {}"
+ "/** @private */ Foo.prototype.length;",
"/** @param {?Foo} x */ function f(x) { return x.length; }"},
BAD_PRIVATE_PROPERTY_ACCESS);
}
public void testPrivatePropertyByConvention1() {
testError(new String[] {
"/** @constructor */ function Foo() {}\n"
+ "/** @type {number} */ Foo.prototype.length_;\n",
"/** @param {?Foo} x */ function f(x) { return x.length_; }\n"},
BAD_PRIVATE_PROPERTY_ACCESS);
}
public void testPrivatePropertyByConvention2() {
testError(new String[] {
"/** @constructor */ function Foo() {\n"
+ " /** @type {number} */ this.length_ = 1;\n"
+ "}\n"
+ "/** @type {number} */ Foo.prototype.length_;\n",
"/** @param {Foo} x */ function f(x) { return x.length_; }\n"},
BAD_PRIVATE_PROPERTY_ACCESS);
}
public void testDeclarationAndConventionConflict1() {
testError(
"/** @constructor */ function Foo() {} /** @protected */ Foo.prototype.length_;",
CONVENTION_MISMATCH);
}
public void testDeclarationAndConventionConflict2() {
testError(
"/** @constructor */ function Foo() {}\n"
+ "/** @public {number} */ Foo.prototype.length_;\n",
CONVENTION_MISMATCH);
}
public void testDeclarationAndConventionConflict3() {
testError(
"/** @constructor */ function Foo() { /** @protected */ this.length_ = 1;\n}\n",
CONVENTION_MISMATCH);
}
public void testDeclarationAndConventionConflict4a() {
testError(
"/** @constructor */ function Foo() {}"
+ "Foo.prototype = { /** @protected */ length_: 1 }\n"
+ "new Foo().length_",
CONVENTION_MISMATCH);
}
public void testDeclarationAndConventionConflict4b() {
testError(
"/** @const */ var NS = {}; /** @constructor */ NS.Foo = function() {};"
+ "NS.Foo.prototype = { /** @protected */ length_: 1 };\n"
+ "(new NS.Foo()).length_;",
CONVENTION_MISMATCH);
}
public void testDeclarationAndConventionConflict5() {
testError(
"/** @constructor */ function Foo() {}\n"
+ "Foo.prototype = { /** @protected */ get length_() { return 1; } }\n",
CONVENTION_MISMATCH);
}
public void testDeclarationAndConventionConflict6() {
testError(
"/** @constructor */ function Foo() {}\n"
+ "Foo.prototype = { /** @protected */ set length_(x) { } }\n",
CONVENTION_MISMATCH);
}
public void testDeclarationAndConventionConflict7() {
testError("/** @public */ var Foo_;", CONVENTION_MISMATCH);
}
public void testDeclarationAndConventionConflict8() {
testError("/** @package */ var Foo_;", CONVENTION_MISMATCH);
}
public void testDeclarationAndConventionConflict9() {
testError("/** @protected */ var Foo_;", CONVENTION_MISMATCH);
}
public void testConstantProperty1a() {
testError(
"/** @constructor */ function A() {"
+ "/** @const */ this.bar = 3;}"
+ "/** @constructor */ function B() {"
+ "/** @const */ this.bar = 3;this.bar += 4;}",
CONST_PROPERTY_REASSIGNED_VALUE);
}
public void testConstantProperty1b() {
testError(
"/** @constructor */ function A() {"
+ "this.BAR = 3;}"
+ "/** @constructor */ function B() {"
+ "this.BAR = 3;this.BAR += 4;}",
CONST_PROPERTY_REASSIGNED_VALUE);
}
public void testConstantProperty2a() {
testError(
"/** @constructor */ function Foo() {}"
+ "/** @const */ Foo.prototype.prop = 2;"
+ "var foo = new Foo();"
+ "foo.prop = 3;",
CONST_PROPERTY_REASSIGNED_VALUE);
}
public void testConstantProperty2b() {
testError(
"/** @constructor */ function Foo() {}"
+ "Foo.prototype.PROP = 2;"
+ "var foo = new Foo();"
+ "foo.PROP = 3;",
CONST_PROPERTY_REASSIGNED_VALUE);
}
public void testConstantProperty3a() {
testSame("/** @constructor */ function Foo() {}\n"
+ "/** @type {number} */ Foo.prototype.PROP = 2;\n"
+ "/** @suppress {duplicate|const} */ Foo.prototype.PROP = 3;\n");
}
public void testConstantProperty3b() {
testSame("/** @constructor */ function Foo() {}\n"
+ "/** @const */ Foo.prototype.prop = 2;\n"
+ "/** @suppress {const} */ Foo.prototype.prop = 3;\n");
}
public void testNamespaceConstantProperty1() {
testError(
""
+ "/** @const */ var o = {};\n"
+ "/** @const */ o.x = 1;"
+ "o.x = 2;",
CONST_PROPERTY_REASSIGNED_VALUE);
}
public void testNamespaceConstantProperty2() {
// NTI requires an @const annotation on namespaces, as in testNamespaceConstantProperty1.
// This is the only difference between the two tests.
this.mode = TypeInferenceMode.OTI_ONLY;
testError(
"var o = {};\n"
+ "/** @const */ o.x = 1;\n"
+ "o.x = 2;\n",
CONST_PROPERTY_REASSIGNED_VALUE);
}
public void testNamespaceConstantProperty2a() {
testSame("/** @const */ var o = {};\n"
+ "/** @const */ o.x = 1;\n"
+ "/** @const */ var o2 = {};\n"
+ "/** @const */ o2.x = 1;\n");
}
public void testNamespaceConstantProperty3() {
testError(
"/** @const */ var o = {};\n"
+ "/** @const */ o.x = 1;"
+ "o.x = 2;",
CONST_PROPERTY_REASSIGNED_VALUE);
}
public void testConstantProperty3a1() {
testSame("var o = { /** @const */ x: 1 };"
+ "o.x = 2;");
}
public void testConstantProperty3a2() {
// The old type checker should report this but it doesn't.
// NTI reports CONST_PROPERTY_REASSIGNED.
testSame("/** @const */ var o = { /** @const */ x: 1 };"
+ "o.x = 2;");
}
public void testConstantProperty3b1() {
// We should report this but we don't.
testSame("var o = { XYZ: 1 };"
+ "o.XYZ = 2;");
}
public void testConstantProperty3b2() {
// NTI reports NTI_REDECLARED_PROPERTY
this.mode = TypeInferenceMode.OTI_ONLY;
// The old type checker should report this but it doesn't.
testSame("/** @const */ var o = { XYZ: 1 };"
+ "o.XYZ = 2;");
}
public void testConstantProperty4() {
testError(
"/** @constructor */ function cat(name) {}"
+ "/** @const */ cat.test = 1;"
+ "cat.test *= 2;",
CONST_PROPERTY_REASSIGNED_VALUE);
}
public void testConstantProperty4b() {
testError(
"/** @constructor */ function cat(name) {}"
+ "cat.TEST = 1;"
+ "cat.TEST *= 2;",
CONST_PROPERTY_REASSIGNED_VALUE);
}
public void testConstantProperty5() {
testError(
"/** @constructor */ function Foo() { this.prop = 1;}"
+ "/** @const */ Foo.prototype.prop;"
+ "Foo.prototype.prop = 2",
CONST_PROPERTY_REASSIGNED_VALUE);
}
public void testConstantProperty6() {
testError(
"/** @constructor */ function Foo() { this.prop = 1;}"
+ "/** @const */ Foo.prototype.prop = 2;",
CONST_PROPERTY_REASSIGNED_VALUE);
}
public void testConstantProperty7() {
testSame("/** @constructor */ function Foo() {} "
+ "Foo.prototype.bar_ = function() {};"
+ "/** @constructor \n * @extends {Foo} */ "
+ "function SubFoo() {};"
+ "/** @const */ /** @override */ SubFoo.prototype.bar_ = function() {};"
+ "SubFoo.prototype.baz = function() { this.bar_(); }");
}
public void testConstantProperty8() {
testSame("/** @const */ var o = { /** @const */ x: 1 };"
+ "var y = o.x;");
}
public void testConstantProperty9() {
testSame("/** @constructor */ function A() {"
+ "/** @const */ this.bar = 3;}"
+ "/** @constructor */ function B() {"
+ "this.bar = 4;}");
}
public void testConstantProperty10a() {
testSame("/** @constructor */ function Foo() { this.prop = 1;}"
+ "/** @const */ Foo.prototype.prop;");
}
public void testConstantProperty10b() {
// NTI reports NTI_REDECLARED_PROPERTY
this.mode = TypeInferenceMode.OTI_ONLY;
testSame("/** @constructor */ function Foo() { this.PROP = 1;}"
+ "Foo.prototype.PROP;");
}
public void testConstantProperty11() {
testError(
"/** @constructor */ function Foo() {}"
+ "/** @const */ Foo.prototype.bar;"
+ "/**\n"
+ " * @constructor\n"
+ " * @extends {Foo}\n"
+ " */ function SubFoo() { this.bar = 5; this.bar = 6; }",
CONST_PROPERTY_REASSIGNED_VALUE);
}
public void testConstantProperty12() {
// NTI deliberately disallows this pattern (separate declaration and initialization
// of const properties). (b/30205953)
this.mode = TypeInferenceMode.OTI_ONLY;
testSame("/** @constructor */ function Foo() {}"
+ "/** @const */ Foo.prototype.bar;"
+ "/**\n"
+ " * @constructor\n"
+ " * @extends {Foo}\n"
+ " */ function SubFoo() { this.bar = 5; }"
+ "/**\n"
+ " * @constructor\n"
+ " * @extends {Foo}\n"
+ " */ function SubFoo2() { this.bar = 5; }");
}
public void testConstantProperty13() {
testError(
"/** @constructor */ function Foo() {}"
+ "/** @const */ Foo.prototype.bar;"
+ "/**\n"
+ " * @constructor\n"
+ " * @extends {Foo}\n"
+ " */ function SubFoo() { this.bar = 5; }"
+ "/**\n"
+ " * @constructor\n"
+ " * @extends {SubFoo}\n"
+ " */ function SubSubFoo() { this.bar = 5; }",
CONST_PROPERTY_REASSIGNED_VALUE);
}
public void testConstantProperty14() {
testError(
"/** @constructor */ function Foo() {"
+ "/** @const */ this.bar = 3; delete this.bar; }",
CONST_PROPERTY_DELETED);
}
public void testConstantPropertyInExterns() {
String externs =
DEFAULT_EXTERNS
+ "/** @constructor */ function Foo() {};\n"
+ "/** @const */ Foo.prototype.PROP;";
String js = "var f = new Foo(); f.PROP = 1; f.PROP = 2;";
test(externs, js, (String) null, CONST_PROPERTY_REASSIGNED_VALUE, null);
}
public void testConstantProperty15() {
testSame("/** @constructor */ function Foo() {};\n"
+ "Foo.CONST = 100;\n"
+ "/** @type {Foo} */\n"
+ "var foo = new Foo();\n"
+ "/** @type {number} */\n"
+ "foo.CONST = Foo.CONST;");
}
public void testConstantProperty15a() {
testError(
"/** @constructor */ function Foo() { this.CONST = 100; };\n"
+ "/** @type {Foo} */\n"
+ "var foo = new Foo();\n"
+ "/** @type {number} */\n"
+ "foo.CONST = 0;",
CONST_PROPERTY_REASSIGNED_VALUE);
}
public void testConstantProperty15b() {
testError(
"/** @constructor */ function Foo() {};\n"
+ "Foo.prototype.CONST = 100;\n"
+ "/** @type {Foo} */\n"
+ "var foo = new Foo();\n"
+ "/** @type {number} */\n"
+ "foo.CONST = 0;",
CONST_PROPERTY_REASSIGNED_VALUE);
}
public void testConstantProperty15c() {
testError(
""
+ "/** @constructor */ function Bar() {this.CONST = 100;};\n"
+ "/** @constructor \n @extends {Bar} */ function Foo() {};\n"
+ "/** @type {Foo} */\n"
+ "var foo = new Foo();\n"
+ "/** @type {number} */\n"
+ "foo.CONST = 0;",
CONST_PROPERTY_REASSIGNED_VALUE);
}
public void testConstantProperty16() {
testSame("/** @constructor */ function Foo() {};\n"
+ "Foo.CONST = 100;\n"
+ "/** @constructor */ function Bar() {};\n"
+ "Bar.CONST = 100;\n");
}
public void testConstantProperty17() {
testSame("function Foo() {};\n"
+ "Foo.CONST = 100;\n"
+ "function Bar() {};\n"
+ "Bar.CONST = 100;\n");
}
public void testConstantProperty18() {
testSame("/** @param {string} a */\n"
+ "function Foo(a) {};\n"
+ "Foo.CONST = 100;\n"
+ "/** @param {string} a */\n"
+ "function Bar(a) {};\n"
+ "Bar.CONST = 100;\n");
}
public void testConstantProperty19() {
testSame("/** @param {string} a */\n"
+ "function Foo(a) {};\n"
+ "Foo.CONST = 100;\n"
+ "/** @param {number} a */\n"
+ "function Bar(a) {};\n"
+ "Bar.CONST = 100;\n");
}
public void testSuppressConstantProperty() {
testSame("/** @constructor */ function A() {"
+ "/** @const */ this.bar = 3;}"
+ "/**\n"
+ " * @suppress {constantProperty}\n"
+ " * @constructor\n"
+ " */ function B() { /** @const */ this.bar = 3; this.bar += 4; }");
}
public void testSuppressConstantProperty2() {
testSame("/** @constructor */ function A() {"
+ "/** @const */ this.bar = 3;}"
+ "/**\n"
+ " * @suppress {const}\n"
+ " * @constructor\n"
+ " */ function B() {"
+ "/** @const */ this.bar = 3;this.bar += 4;}");
}
public void testFinalClassCannotBeSubclassed() {
testError(
LINE_JOINER.join(
"/**",
" * @constructor",
" * @final",
" */ var Foo = function() {};",
"/**",
" * @constructor",
" * @extends {Foo}*",
" */ var Bar = function() {};"),
EXTEND_FINAL_CLASS);
testError(
LINE_JOINER.join(
"/**",
" * @constructor",
" * @final",
" */ function Foo() {};",
"/**",
" * @constructor",
" * @extends {Foo}*",
" */ function Bar() {};"),
EXTEND_FINAL_CLASS);
testSame(
LINE_JOINER.join(
"/**",
" * @constructor",
" * @const",
" */ var Foo = function() {};",
"/**",
" * @constructor",
" * @extends {Foo}",
" */ var Bar = function() {};"));
}
public void testCircularPrototypeLink() {
// NOTE: this also yields a useful warning, but we don't check for it in this test:
// WARNING - Cycle detected in inheritance chain of type Foo
// This warning already has a test: TypeCheckTest::testPrototypeLoop.
testError(
LINE_JOINER.join(
"/** @constructor @extends {Foo} */ function Foo() {}",
"/** @const */ Foo.prop = 1;",
"Foo.prop = 2;"),
CONST_PROPERTY_REASSIGNED_VALUE);
// In OTI this next test causes a stack overflow.
this.mode = TypeInferenceMode.NTI_ONLY;
testError(
LINE_JOINER.join(
"/** @constructor */ function Foo() {}",
"/** @type {!Foo} */ Foo.prototype = new Foo();",
"/** @const */ Foo.prop = 1;",
"Foo.prop = 2;"),
CONST_PROPERTY_REASSIGNED_VALUE);
}
}
|
test/com/google/javascript/jscomp/CheckAccessControlsTest.java
|
/*
* Copyright 2008 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import static com.google.javascript.jscomp.CheckAccessControls.BAD_PACKAGE_PROPERTY_ACCESS;
import static com.google.javascript.jscomp.CheckAccessControls.BAD_PRIVATE_GLOBAL_ACCESS;
import static com.google.javascript.jscomp.CheckAccessControls.BAD_PRIVATE_PROPERTY_ACCESS;
import static com.google.javascript.jscomp.CheckAccessControls.BAD_PROPERTY_OVERRIDE_IN_FILE_WITH_FILEOVERVIEW_VISIBILITY;
import static com.google.javascript.jscomp.CheckAccessControls.BAD_PROTECTED_PROPERTY_ACCESS;
import static com.google.javascript.jscomp.CheckAccessControls.CONST_PROPERTY_DELETED;
import static com.google.javascript.jscomp.CheckAccessControls.CONST_PROPERTY_REASSIGNED_VALUE;
import static com.google.javascript.jscomp.CheckAccessControls.CONVENTION_MISMATCH;
import static com.google.javascript.jscomp.CheckAccessControls.DEPRECATED_CLASS;
import static com.google.javascript.jscomp.CheckAccessControls.DEPRECATED_CLASS_REASON;
import static com.google.javascript.jscomp.CheckAccessControls.DEPRECATED_NAME;
import static com.google.javascript.jscomp.CheckAccessControls.DEPRECATED_NAME_REASON;
import static com.google.javascript.jscomp.CheckAccessControls.DEPRECATED_PROP;
import static com.google.javascript.jscomp.CheckAccessControls.DEPRECATED_PROP_REASON;
import static com.google.javascript.jscomp.CheckAccessControls.EXTEND_FINAL_CLASS;
import static com.google.javascript.jscomp.CheckAccessControls.PRIVATE_OVERRIDE;
import static com.google.javascript.jscomp.CheckAccessControls.VISIBILITY_MISMATCH;
import com.google.common.collect.ImmutableList;
/**
* Tests for {@link CheckAccessControls}.
*
* @author nicksantos@google.com (Nick Santos)
*/
public final class CheckAccessControlsTest extends TypeICompilerTestCase {
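// NTI's own const-related diagnostics, grouped here so that getOptions() below can turn them
// off; otherwise their duplicate warnings would mask the const checks of CheckAccessControls
// that these tests exercise.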
private static final DiagnosticGroup NTI_CONST =
new DiagnosticGroup(
GlobalTypeInfo.CONST_WITHOUT_INITIALIZER,
GlobalTypeInfo.COULD_NOT_INFER_CONST_TYPE,
GlobalTypeInfo.MISPLACED_CONST_ANNOTATION,
NewTypeInference.CONST_REASSIGNED,
NewTypeInference.CONST_PROPERTY_REASSIGNED,
NewTypeInference.CONST_PROPERTY_DELETED);
public CheckAccessControlsTest() {
super(CompilerTypeTestCase.DEFAULT_EXTERNS);
parseTypeInfo = true;
enableClosurePass();
enableRewriteClosureCode();
}
@Override
protected int getNumRepetitions() {
return 1;
}
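// The pass under test. The boolean argument is assumed here to be the switch that also enables
// the coding-convention (trailing-underscore) checks exercised by the
// testPrivatePropertyByConvention* and CONVENTION_MISMATCH tests below.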
@Override
protected CompilerPass getProcessor(final Compiler compiler) {
return new CheckAccessControls(compiler, true);
}
@Override
protected CompilerOptions getOptions() {
CompilerOptions options = super.getOptions();
options.setWarningLevel(DiagnosticGroups.ACCESS_CONTROLS, CheckLevel.ERROR);
options.setWarningLevel(DiagnosticGroups.CONSTANT_PROPERTY, CheckLevel.ERROR);
// Disable NTI's native const checks so as to suppress duplicate warnings that
// prevent us from testing the const checks of CheckAccessControls itself.
options.setWarningLevel(NTI_CONST, CheckLevel.OFF);
return options;
}
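// Helpers: each asserts that the snippet triggers the *_REASON variant of the corresponding
// deprecation diagnostic and that the reported message matches errorMessage.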
private void testDepName(String js, String errorMessage) {
test(js, null, DEPRECATED_NAME_REASON, null, errorMessage);
}
private void testDepProp(String js, String errorMessage) {
test(js, null, DEPRECATED_PROP_REASON, null, errorMessage);
}
private void testDepClass(String js, String errorMessage) {
test(js, null, DEPRECATED_CLASS_REASON, null, errorMessage);
}
public void testDeprecatedFunctionNoReason() {
testError("/** @deprecated */ function f() {} function g() { f(); }", DEPRECATED_NAME);
}
public void testDeprecatedFunction() {
testDepName(
"/** @deprecated Some Reason */ function f() {} function g() { f(); }",
"Variable f has been deprecated: Some Reason");
}
public void testWarningOnDeprecatedConstVariable() {
testDepName(
"/** @deprecated Another reason */ var f = 4; function g() { alert(f); }",
"Variable f has been deprecated: Another reason");
}
public void testThatNumbersArentDeprecated() {
testSame("/** @deprecated */ var f = 4; var h = 3; function g() { alert(h); }");
}
public void testDeprecatedFunctionVariable() {
testDepName(
"/** @deprecated I like g... */ var f = function() {}; function g() { f(); }",
"Variable f has been deprecated: I like g...");
}
public void testNoWarningInGlobalScope() {
testSame("var goog = {}; goog.makeSingleton = function(x) {};"
+ "/** @deprecated */ function f() {} goog.makeSingleton(f);");
}
public void testNoWarningInGlobalScopeForCall() {
testDepName(
"/** @deprecated Some global scope */ function f() {} f();",
"Variable f has been deprecated: Some global scope");
}
public void testNoWarningInDeprecatedFunction() {
testSame("/** @deprecated */ function f() {} /** @deprecated */ function g() { f(); }");
}
public void testWarningInNormalClass() {
testDepName(
"/** @deprecated FooBar */ function f() {}"
+ "/** @constructor */ var Foo = function() {}; "
+ "Foo.prototype.bar = function() { f(); }",
"Variable f has been deprecated: FooBar");
}
public void testWarningForProperty1() {
testDepProp(
"/** @constructor */ function Foo() {}"
+ "/** @deprecated A property is bad */ Foo.prototype.bar = 3;"
+ "Foo.prototype.baz = function() { alert((new Foo()).bar); };",
"Property bar of type Foo has been deprecated: A property is bad");
}
public void testWarningForProperty2() {
testDepProp(
"/** @constructor */ function Foo() {}"
+ "/** @deprecated Zee prop, it is deprecated! */ Foo.prototype.bar = 3;"
+ "Foo.prototype.baz = function() { alert(this.bar); };",
"Property bar of type Foo has been deprecated: Zee prop, it is deprecated!");
}
public void testWarningForDeprecatedClass() {
testDepClass(
"/** @constructor \n* @deprecated Use the class 'Bar' */ function Foo() {} "
+ "function f() { new Foo(); }",
"Class Foo has been deprecated: Use the class 'Bar'");
}
public void testWarningForDeprecatedClassNoReason() {
testError(
"/** @constructor \n* @deprecated */ function Foo() {} " + "function f() { new Foo(); }",
DEPRECATED_CLASS);
}
public void testNoWarningForDeprecatedClassInstance() {
testSame("/** @constructor \n * @deprecated */ function Foo() {} "
+ "/** @param {Foo} x */ function f(x) { return x; }");
}
public void testWarningForDeprecatedSuperClass() {
testDepClass(
"/** @constructor \n * @deprecated Superclass to the rescue! */ function Foo() {} "
+ "/** @constructor \n * @extends {Foo} */ function SubFoo() {}"
+ "function f() { new SubFoo(); }",
"Class SubFoo has been deprecated: Superclass to the rescue!");
}
public void testWarningForDeprecatedSuperClass2() {
testDepClass(
"/** @constructor \n * @deprecated Its only weakness is Kryptoclass */ function Foo() {} "
+ "/** @const */ var namespace = {}; "
+ "/** @constructor \n * @extends {Foo} */ "
+ "namespace.SubFoo = function() {}; "
+ "function f() { new namespace.SubFoo(); }",
"Class namespace.SubFoo has been deprecated: Its only weakness is Kryptoclass");
}
public void testWarningForPrototypeProperty() {
// TODO(aravindpg): in NTI the string representation of prototype object types is less than
// ideal due to the way NTI represents them. Fix if possible.
String js =
"/** @constructor */ function Foo() {}"
+ "/** @deprecated It is now in production, use that model... */ Foo.prototype.bar = 3;"
+ "Foo.prototype.baz = function() { alert(Foo.prototype.bar); };";
this.mode = TypeInferenceMode.OTI_ONLY;
testDepProp(
js,
"Property bar of type Foo.prototype has been deprecated:"
+ " It is now in production, use that model...");
this.mode = TypeInferenceMode.NTI_ONLY;
testDepProp(
js,
"Property bar of type Object{bar:?, baz:function(this:Foo):?} has been deprecated:"
+ " It is now in production, use that model...");
}
public void testNoWarningForNumbers() {
testSame("/** @constructor */ function Foo() {}"
+ "/** @deprecated */ Foo.prototype.bar = 3;"
+ "Foo.prototype.baz = function() { alert(3); };");
}
public void testWarningForMethod1() {
testDepProp(
"/** @constructor */ function Foo() {}"
+ "/** @deprecated There is a madness to this method */"
+ "Foo.prototype.bar = function() {};"
+ "Foo.prototype.baz = function() { this.bar(); };",
"Property bar of type Foo has been deprecated: There is a madness to this method");
}
public void testWarningForMethod2() {
testDepProp(
"/** @constructor */ function Foo() {}"
+ "/** @deprecated Stop the ringing! */ Foo.prototype.bar;"
+ "Foo.prototype.baz = function() { this.bar(); };",
"Property bar of type Foo has been deprecated: Stop the ringing!");
}
public void testNoWarningInDeprecatedClass() {
testSame("/** @deprecated */ function f() {} "
+ "/** @constructor \n * @deprecated */ "
+ "var Foo = function() {}; "
+ "Foo.prototype.bar = function() { f(); }");
}
public void testNoWarningOnDeclaration() {
testSame("/** @constructor */ function F() {\n"
+ " /**\n"
+ " * @type {number}\n"
+ " * @deprecated Use something else.\n"
+ " */\n"
+ " this.code;\n"
+ "}");
}
public void testNoWarningInDeprecatedClass2() {
testSame("/** @deprecated */ function f() {} "
+ "/** @constructor \n * @deprecated */ "
+ "var Foo = function() {}; "
+ "Foo.bar = function() { f(); }");
}
public void testNoWarningInDeprecatedStaticMethod() {
testSame("/** @deprecated */ function f() {} "
+ "/** @constructor */ "
+ "var Foo = function() {}; "
+ "/** @deprecated */ Foo.bar = function() { f(); }");
}
public void testWarningInStaticMethod() {
testDepName(
"/** @deprecated crazy! */ function f() {} "
+ "/** @constructor */ "
+ "var Foo = function() {}; "
+ "Foo.bar = function() { f(); }",
"Variable f has been deprecated: crazy!");
}
public void testDeprecatedObjLitKey() {
testDepProp(
"/** @const */ var f = {};"
+ "/** @deprecated It is literally not used anymore */ f.foo = 3;"
+ "function g() { return f.foo; }",
"Property foo of type f has been deprecated: It is literally not used anymore");
}
public void testWarningForSubclassMethod() {
testDepProp(
"/** @constructor */ function Foo() {}"
+ "Foo.prototype.bar = function() {};"
+ "/** @constructor \n * @extends {Foo} */ function SubFoo() {}"
+ "/** @deprecated I have a parent class! */ SubFoo.prototype.bar = function() {};"
+ "function f() { (new SubFoo()).bar(); };",
"Property bar of type SubFoo has been deprecated: I have a parent class!");
}
public void testWarningForSuperClassWithDeprecatedSubclassMethod() {
testSame("/** @constructor */ function Foo() {}"
+ "Foo.prototype.bar = function() {};"
+ "/** @constructor \n * @extends {Foo} */ function SubFoo() {}"
+ "/** @deprecated \n * @override */ SubFoo.prototype.bar = "
+ "function() {};"
+ "function f() { (new Foo()).bar(); };");
}
public void testWarningForSuperclassMethod() {
testDepProp(
"/** @constructor */ function Foo() {}"
+ "/** @deprecated I have a child class! */ Foo.prototype.bar = function() {};"
+ "/** @constructor \n * @extends {Foo} */ function SubFoo() {}"
+ "SubFoo.prototype.bar = function() {};"
+ "function f() { (new SubFoo()).bar(); };",
"Property bar of type SubFoo has been deprecated: I have a child class!");
}
public void testWarningForSuperclassMethod2() {
testDepProp(
"/** @constructor */ function Foo() {}"
+ "/** @deprecated I have another child class... \n* @protected */"
+ "Foo.prototype.bar = function() {};"
+ "/** @constructor \n * @extends {Foo} */ function SubFoo() {}"
+ "/** @protected */SubFoo.prototype.bar = function() {};"
+ "function f() { (new SubFoo()).bar(); };",
"Property bar of type SubFoo has been deprecated: I have another child class...");
}
public void testWarningForBind() {
// NTI reports NTI_REDECLARED_PROPERTY here, which is as intended. If this were a new
// property and not the existing `bind`, then we'd report the deprecation warning as expected
// (see testAutoboxedDeprecatedProperty and testAutoboxedPrivateProperty).
this.mode = TypeInferenceMode.OTI_ONLY;
testDepProp(
"/** @deprecated I'm bound to this method... */ Function.prototype.bind = function() {};"
+ "(function() {}).bind();",
"Property bind of type function has been deprecated: I'm bound to this method...");
}
public void testWarningForDeprecatedClassInGlobalScope() {
testDepClass(
"/** @constructor \n * @deprecated I'm a very worldly object! */ var Foo = function() {};"
+ "new Foo();",
"Class Foo has been deprecated: I'm a very worldly object!");
}
public void testNoWarningForPrototypeCopying() {
testSame("/** @constructor */ var Foo = function() {};"
+ "Foo.prototype.bar = function() {};"
+ "/** @deprecated */ Foo.prototype.baz = Foo.prototype.bar;"
+ "(new Foo()).bar();");
}
public void testNoWarningOnDeprecatedPrototype() {
// This used to cause an NPE.
testSame("/** @constructor */ var Foo = function() {};"
+ "/** @deprecated */ Foo.prototype = {};"
+ "Foo.prototype.bar = function() {};");
}
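// The access-control tests below split code across pseudo-files (each String[] element or
// SourceFile is one file): @private members may be used anywhere in their defining file,
// while any use from another file is reported as BAD_PRIVATE_GLOBAL_ACCESS or
// BAD_PRIVATE_PROPERTY_ACCESS.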
public void testPrivateAccessForNames() {
testSame("/** @private */ function foo_() {}; foo_();");
testError(new String[] {"/** @private */ function foo_() {};", "foo_();"},
BAD_PRIVATE_GLOBAL_ACCESS);
}
public void testPrivateAccessForNames2() {
// Private by convention
testSame("function foo_() {}; foo_();");
testError(new String[] {"function foo_() {};", "foo_();"}, BAD_PRIVATE_GLOBAL_ACCESS);
}
public void testPrivateAccessForProperties1() {
testSame("/** @constructor */ function Foo() {}"
+ "/** @private */ Foo.prototype.bar_ = function() {};"
+ "Foo.prototype.baz = function() { this.bar_(); }; (new Foo).bar_();");
}
public void testPrivateAccessForProperties2() {
testSame(new String[] {
"/** @constructor */ function Foo() {}",
"/** @private */ Foo.prototype.bar_ = function() {};"
+ "Foo.prototype.baz = function() { this.bar_(); }; (new Foo).bar_();"});
}
public void testPrivateAccessForProperties3() {
testSame(new String[] {
"/** @constructor */ function Foo() {}"
+ "/** @private */ Foo.prototype.bar_ = function() {}; (new Foo).bar_();",
"Foo.prototype.baz = function() { this.bar_(); };"});
}
public void testPrivateAccessForProperties4() {
// If a prototype property is defined via a computed access in a separate file from the
// constructor itself, then when running with NTI we fail to recognize that property as being a
// prototype property. This is enough of a corner case that we are fine with allowing it.
// If they are in the same file then things work as expected
// (see testPrivateAccessForProperties4b).
this.mode = TypeInferenceMode.OTI_ONLY;
testSame(new String[] {
"/** @constructor */ function Foo() {}"
+ "/** @private */ Foo.prototype.bar_ = function() {};",
"Foo.prototype['baz'] = function() { (new Foo()).bar_(); };"});
}
public void testPrivateAccessForProperties4a() {
// Identical to 4 except that the property access is not computed.
testSame(new String[] {
"/** @constructor */ function Foo() {}"
+ "/** @private */ Foo.prototype.bar_ = function() {};",
"Foo.prototype.baz = function() { (new Foo()).bar_(); };"});
}
public void testPrivateAccessForProperties4b() {
testSame(
"/** @constructor */ function Foo() {}"
+ "/** @private */ Foo.prototype.bar_ = function() {};"
+ "Foo.prototype['baz'] = function() { (new Foo()).bar_(); };");
}
public void testPrivateAccessForProperties5() {
testError(new String[] {
"/** @constructor */\n"
+ "function Parent () {\n"
+ " /** @private */\n"
+ " this.prop = 'foo';\n"
+ "};",
"/**\n"
+ " * @constructor\n"
+ " * @extends {Parent}\n"
+ " */\n"
+ "function Child() {\n"
+ " this.prop = 'asdf';\n"
+ "}\n"
+ "Child.prototype = new Parent();"},
BAD_PRIVATE_PROPERTY_ACCESS);
}
public void testPrivateAccess_googModule() {
test(
new String[] {
LINE_JOINER.join(
"goog.module('example.one');",
"/** @constructor */ function C() {};",
"/** @private */ C.prototype.m = function() {};",
"exports = C;"),
LINE_JOINER.join(
"goog.module('example.two');",
"var one = goog.require('example.one');",
"(new one()).m();"),
},
null,
BAD_PRIVATE_PROPERTY_ACCESS,
null,
// TODO(tbreisacher): The type name in the error message should be "example.one" instead of
// module$exports$example$one
"Access to private property m of module$exports$example$one not allowed here.");
}
public void testNoPrivateAccessForProperties1() {
testError(new String[] {
"/** @constructor */ function Foo() {} (new Foo).bar_();",
"/** @private */ Foo.prototype.bar_ = function() {};"
+ "Foo.prototype.baz = function() { this.bar_(); };"},
BAD_PRIVATE_PROPERTY_ACCESS);
}
public void testNoPrivateAccessForProperties2() {
testError(new String[] {
"/** @constructor */ function Foo() {} "
+ "/** @private */ Foo.prototype.bar_ = function() {};"
+ "Foo.prototype.baz = function() { this.bar_(); };",
"(new Foo).bar_();"},
BAD_PRIVATE_PROPERTY_ACCESS);
}
public void testNoPrivateAccessForProperties3() {
testError(new String[] {
"/** @constructor */ function Foo() {} "
+ "/** @private */ Foo.prototype.bar_ = function() {};",
"/** @constructor */ function OtherFoo() { (new Foo).bar_(); }"},
BAD_PRIVATE_PROPERTY_ACCESS);
}
public void testNoPrivateAccessForProperties4() {
testError(new String[] {
"/** @constructor */ function Foo() {} "
+ "/** @private */ Foo.prototype.bar_ = function() {};",
"/** @constructor \n * @extends {Foo} */ "
+ "function SubFoo() { this.bar_(); }"},
BAD_PRIVATE_PROPERTY_ACCESS);
}
public void testNoPrivateAccessForProperties5() {
testError(new String[] {
"/** @constructor */ function Foo() {} "
+ "/** @private */ Foo.prototype.bar_ = function() {};",
"/** @constructor \n * @extends {Foo} */ "
+ "function SubFoo() {};"
+ "SubFoo.prototype.baz = function() { this.bar_(); }"},
BAD_PRIVATE_PROPERTY_ACCESS);
}
public void testNoPrivateAccessForProperties6() {
// Overriding a private property with a non-private property
// in a different file causes problems.
testError(new String[] {
"/** @constructor */ function Foo() {} "
+ "/** @private */ Foo.prototype.bar_ = function() {};",
"/** @constructor \n * @extends {Foo} */ "
+ "function SubFoo() {};"
+ "SubFoo.prototype.bar_ = function() {};"},
BAD_PRIVATE_PROPERTY_ACCESS);
}
public void testNoPrivateAccessForProperties6a() {
// Same as above, except with namespaced constructors
testError(new String[] {
"/** @const */ var ns = {};"
+ "/** @constructor */ ns.Foo = function() {}; "
+ "/** @private */ ns.Foo.prototype.bar_ = function() {};",
"/** @constructor \n * @extends {ns.Foo} */ "
+ "ns.SubFoo = function() {};"
+ "ns.SubFoo.prototype.bar_ = function() {};"},
BAD_PRIVATE_PROPERTY_ACCESS);
}
public void testNoPrivateAccessForProperties7() {
// It's OK to override a private property with a non-private property
// in the same file, but you'll get yelled at when you try to use it.
testError(new String[] {
"/** @constructor */ function Foo() {} "
+ "/** @private */ Foo.prototype.bar_ = function() {};"
+ "/** @constructor \n * @extends {Foo} */ "
+ "function SubFoo() {};"
+ "SubFoo.prototype.bar_ = function() {};",
"SubFoo.prototype.baz = function() { this.bar_(); }"},
BAD_PRIVATE_PROPERTY_ACCESS);
}
public void testNoPrivateAccessForProperties8() {
testError(new String[] {
"/** @constructor */ function Foo() { /** @private */ this.bar_ = 3; }",
"/** @constructor \n * @extends {Foo} */ "
+ "function SubFoo() { /** @private */ this.bar_ = 3; };"},
PRIVATE_OVERRIDE);
}
public void testNoPrivateAccessForProperties9() {
testError(new String[] {
"/** @constructor */ function Foo() {}"
+ "Foo.prototype = {"
+ "/** @private */ bar_: 3"
+ "}",
"new Foo().bar_;"},
BAD_PRIVATE_PROPERTY_ACCESS);
}
public void testNoPrivateAccessForProperties10() {
testError(new String[] {
"/** @constructor */ function Foo() {}"
+ "Foo.prototype = {"
+ "/** @private */ bar_: function() {}"
+ "}",
"new Foo().bar_();"},
BAD_PRIVATE_PROPERTY_ACCESS);
}
public void testNoPrivateAccessForProperties11() {
testError(new String[] {
"/** @constructor */ function Foo() {}"
+ "Foo.prototype = {"
+ "/** @private */ get bar_() { return 1; }"
+ "}",
"var a = new Foo().bar_;"},
BAD_PRIVATE_PROPERTY_ACCESS);
}
public void testNoPrivateAccessForProperties12() {
testError(new String[] {
"/** @constructor */ function Foo() {}"
+ "Foo.prototype = {"
+ "/** @private */ set bar_(x) { this.barValue = x; }"
+ "}",
"new Foo().bar_ = 1;"},
BAD_PRIVATE_PROPERTY_ACCESS);
}
public void testNoPrivateAccessForNamespaces() {
testError(new String[] {
"/** @const */ var foo = {};\n"
+ "/** @private */ foo.bar_ = function() {};",
"foo.bar_();"},
BAD_PRIVATE_PROPERTY_ACCESS);
}
public void testProtectedAccessForProperties1() {
testSame(new String[] {
"/** @constructor */ function Foo() {}"
+ "/** @protected */ Foo.prototype.bar = function() {};"
+ "(new Foo).bar();",
"Foo.prototype.baz = function() { this.bar(); };"});
}
public void testProtectedAccessForProperties2() {
testSame(new String[] {
"/** @constructor */ function Foo() {}"
+ "/** @protected */ Foo.prototype.bar = function() {};"
+ "(new Foo).bar();",
"/** @constructor \n * @extends {Foo} */"
+ "function SubFoo() { this.bar(); }"});
}
public void testProtectedAccessForProperties3() {
testSame(new String[] {
"/** @constructor */ function Foo() {}"
+ "/** @protected */ Foo.prototype.bar = function() {};"
+ "(new Foo).bar();",
"/** @constructor \n * @extends {Foo} */"
+ "function SubFoo() { }"
+ "SubFoo.baz = function() { (new Foo).bar(); }"});
}
public void testProtectedAccessForProperties4() {
testSame(new String[] {
"/** @constructor */ function Foo() {}"
+ "/** @protected */ Foo.bar = function() {};",
"/** @constructor \n * @extends {Foo} */"
+ "function SubFoo() { Foo.bar(); }"});
}
public void testProtectedAccessForProperties5() {
testSame(new String[] {
"/** @constructor */ function Foo() {}"
+ "/** @protected */ Foo.prototype.bar = function() {};"
+ "(new Foo).bar();",
"/** @constructor \n * @extends {Foo} */"
+ "var SubFoo = function() { this.bar(); }"});
}
public void testProtectedAccessForProperties6() {
testSame(new String[] {
"/** @const */ var goog = {};"
+ "/** @constructor */ goog.Foo = function() {};"
+ "/** @protected */ goog.Foo.prototype.bar = function() {};",
"/** @constructor \n * @extends {goog.Foo} */"
+ "goog.SubFoo = function() { this.bar(); };"});
}
public void testProtectedAccessForProperties7() {
testSame(new String[] {
"/** @constructor */ var Foo = function() {};"
+ "Foo.prototype = { /** @protected */ bar: function() {} }",
"/** @constructor \n * @extends {Foo} */"
+ "var SubFoo = function() { this.bar(); };"
+ "SubFoo.prototype = { moo: function() { this.bar(); }};"});
}
public void testProtectedAccessForProperties8() {
testSame(new String[] {
"/** @constructor */ var Foo = function() {};"
+ "Foo.prototype = { /** @protected */ bar: function() {} }",
"/** @constructor \n * @extends {Foo} */"
+ "var SubFoo = function() {};"
+ "SubFoo.prototype = { get moo() { this.bar(); }};"});
}
public void testProtectedAccessForProperties9() {
testSame(new String[] {
"/** @constructor */ var Foo = function() {};"
+ "Foo.prototype = { /** @protected */ bar: function() {} }",
"/** @constructor \n * @extends {Foo} */"
+ "var SubFoo = function() {};"
+ "SubFoo.prototype = { set moo(val) { this.x = this.bar(); }};"});
}
public void testProtectedAccessForProperties10() {
// NTI throws NTI_CTOR_IN_DIFFERENT_SCOPE
testSame(ImmutableList.of(
SourceFile.fromCode(
"foo.js",
"/** @constructor */ var Foo = function() {};"
+ "/** @protected */ Foo.prototype.bar = function() {};"),
SourceFile.fromCode(
"sub_foo.js",
"/** @constructor @extends {Foo} */"
+ "var SubFoo = function() {};"
+ "(/** @suppress {newCheckTypes} */ function() {"
+ "SubFoo.prototype.baz = function() { this.bar(); }"
+ "})();")));
}
public void testProtectedAccessForProperties11() {
test(ImmutableList.of(
SourceFile.fromCode(
"foo.js",
LINE_JOINER.join(
"goog.provide('Foo');",
"/** @interface */ Foo = function() {};",
"/** @protected */ Foo.prop = {};")),
SourceFile.fromCode(
"bar.js",
LINE_JOINER.join(
"goog.require('Foo');",
"/** @constructor @implements {Foo} */",
"function Bar() { Foo.prop; };"))),
null, null);
}
public void testProtectedAccessForProperties12() {
test(ImmutableList.of(
SourceFile.fromCode(
"a.js",
LINE_JOINER.join(
"goog.provide('A');",
"/** @constructor */",
"var A = function() {",
" /**",
" * @type {?String}",
" * @protected",
" */",
" this.prop;",
"}")),
SourceFile.fromCode(
"b.js",
LINE_JOINER.join(
"goog.require('A');",
"/**",
" * @constructor",
" * @extends {A}",
" */",
"var B = function() {",
" this.prop.length;",
" this.prop.length;",
"};"))),
null, null);
}
// FYI: Java warns for the b1.method access in c.js.
// Instead of following that in NTI, we chose to follow the behavior of
// the old JSCompiler type checker, to make migration easier.
public void testProtectedAccessForProperties13() {
test(ImmutableList.of(
SourceFile.fromCode(
"a.js",
LINE_JOINER.join(
"goog.provide('A');",
"/** @constructor */",
"var A = function() {}",
"/** @protected */",
"A.prototype.method = function() {};")),
SourceFile.fromCode(
"b1.js",
LINE_JOINER.join(
"goog.require('A');",
"goog.provide('B1');",
"/** @constructor @extends {A} */",
"var B1 = function() {};",
"/** @override */",
"B1.prototype.method = function() {};")),
SourceFile.fromCode(
"b2.js",
LINE_JOINER.join(
"goog.require('A');",
"goog.provide('B2');",
"/** @constructor @extends {A} */",
"var B2 = function() {};",
"/** @override */",
"B2.prototype.method = function() {};")),
SourceFile.fromCode(
"c.js",
LINE_JOINER.join(
"goog.require('B1');",
"goog.require('B2');",
"/**",
" * @param {!B1} b1",
" * @constructor",
" * @extends {B2}",
" */",
"var C = function(b1) {",
" var x = b1.method();",
"};"))),
null, null);
}
public void testNoProtectedAccessForProperties1() {
testError(new String[] {
"/** @constructor */ function Foo() {} "
+ "/** @protected */ Foo.prototype.bar = function() {};",
"(new Foo).bar();"},
BAD_PROTECTED_PROPERTY_ACCESS);
}
public void testNoProtectedAccessForProperties2() {
testError(new String[] {
"/** @constructor */ function Foo() {} "
+ "/** @protected */ Foo.prototype.bar = function() {};",
"/** @constructor */ function OtherFoo() { (new Foo).bar(); }"},
BAD_PROTECTED_PROPERTY_ACCESS);
}
public void testNoProtectedAccessForProperties3() {
testError(new String[] {
"/** @constructor */ function Foo() {} "
+ "/** @constructor \n * @extends {Foo} */ "
+ "function SubFoo() {}"
+ "/** @protected */ SubFoo.prototype.bar = function() {};",
"/** @constructor \n * @extends {Foo} */ "
+ "function SubberFoo() { (new SubFoo).bar(); }"},
BAD_PROTECTED_PROPERTY_ACCESS);
}
public void testNoProtectedAccessForProperties4() {
testError(new String[] {
"/** @constructor */ function Foo() { (new SubFoo).bar(); } ",
"/** @constructor \n * @extends {Foo} */ "
+ "function SubFoo() {}"
+ "/** @protected */ SubFoo.prototype.bar = function() {};",
},
BAD_PROTECTED_PROPERTY_ACCESS);
}
public void testNoProtectedAccessForProperties5() {
testError(new String[] {
"/** @const */ var goog = {};"
+ "/** @constructor */ goog.Foo = function() {};"
+ "/** @protected */ goog.Foo.prototype.bar = function() {};",
"/** @constructor */"
+ "goog.NotASubFoo = function() { (new goog.Foo).bar(); };"},
BAD_PROTECTED_PROPERTY_ACCESS);
}
public void testNoProtectedAccessForProperties6() {
testError(new String[] {
"/** @constructor */ function Foo() {}"
+ "Foo.prototype = {"
+ "/** @protected */ bar: 3"
+ "}",
"new Foo().bar;"},
BAD_PROTECTED_PROPERTY_ACCESS);
}
public void testNoProtectedAccessForProperties7() {
testError(new String[] {
"/** @constructor */ function Foo() {}"
+ "Foo.prototype = {"
+ "/** @protected */ bar: function() {}"
+ "}",
"new Foo().bar();"},
BAD_PROTECTED_PROPERTY_ACCESS);
}
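// The @package tests below rely on file paths: access is allowed between files in the same
// directory (e.g. foo/bar.js and foo/baz.js) and reported as BAD_PACKAGE_PROPERTY_ACCESS
// across directories (e.g. foo/ vs. baz/), which is what the joinPathParts pairs encode.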
public void testPackagePrivateAccessForNames() {
test(
ImmutableList.of(
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/** @constructor */\n"
+ "function Parent() {\n"
+ "/** @package */\n"
+ "this.prop = 'foo';\n"
+ "}\n;"),
SourceFile.fromCode(
Compiler.joinPathParts("baz", "quux.js"),
"/**"
+ " * @constructor\n"
+ " * @extends {Parent}\n"
+ " */\n"
+ "function Child() {\n"
+ " this.prop = 'asdf';\n"
+ "}\n"
+ "Child.prototype = new Parent();")),
null, BAD_PACKAGE_PROPERTY_ACCESS);
}
public void testPackagePrivateAccessForProperties1() {
testSame("/** @constructor */ function Foo() {}"
+ "/** @package */ Foo.prototype.bar = function() {};"
+ "Foo.prototype.baz = function() { this.bar(); }; (new Foo).bar();");
}
public void testPackagePrivateAccessForProperties2() {
testSame(ImmutableList.of(
SourceFile.fromCode(Compiler.joinPathParts("foo", "bar.js"),
"/** @constructor */ function Foo() {}"),
SourceFile.fromCode(
Compiler.joinPathParts("baz", "quux.js"),
"/** @package */ Foo.prototype.bar = function() {};"
+ "Foo.prototype.baz = function() { this.bar(); }; (new Foo).bar();")));
}
public void testPackagePrivateAccessForProperties3() {
testSame(ImmutableList.of(
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/** @constructor */ function Foo() {}"
+ "/** @package */ Foo.prototype.bar = function() {}; (new Foo).bar();"),
SourceFile.fromCode(Compiler.joinPathParts("foo", "baz.js"),
"Foo.prototype.baz = function() { this.bar(); };")));
}
public void testPackagePrivateAccessForProperties4() {
testSame(ImmutableList.of(
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/** @constructor */ function Foo() {}"
+ "/** @package */ Foo.prototype.bar = function() {};"),
SourceFile.fromCode(
Compiler.joinPathParts("foo", "baz.js"),
"Foo.prototype['baz'] = function() { (new Foo()).bar(); };")));
}
public void testPackagePrivateAccessForProperties5() {
test(
ImmutableList.of(
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/** @constructor */\n"
+ "function Parent () {\n"
+ " /** @package */\n"
+ " this.prop = 'foo';\n"
+ "};"),
SourceFile.fromCode(
Compiler.joinPathParts("baz", "quux.js"),
"/**\n"
+ " * @constructor\n"
+ " * @extends {Parent}\n"
+ " */\n"
+ "function Child() {\n"
+ " this.prop = 'asdf';\n"
+ "}\n"
+ "Child.prototype = new Parent();")),
null, BAD_PACKAGE_PROPERTY_ACCESS);
}
public void testNoPackagePrivateAccessForProperties1() {
test(
ImmutableList.of(
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/** @constructor */ function Foo() {} (new Foo).bar();"),
SourceFile.fromCode(
Compiler.joinPathParts("baz", "quux.js"),
"/** @package */ Foo.prototype.bar = function() {};"
+ "Foo.prototype.baz = function() { this.bar(); };")),
null, BAD_PACKAGE_PROPERTY_ACCESS);
}
public void testNoPackagePrivateAccessForProperties2() {
test(
ImmutableList.of(
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/** @constructor */ function Foo() {} "
+ "/** @package */ Foo.prototype.bar = function() {};"
+ "Foo.prototype.baz = function() { this.bar(); };"),
SourceFile.fromCode(Compiler.joinPathParts("baz", "quux.js"), "(new Foo).bar();")),
null, BAD_PACKAGE_PROPERTY_ACCESS);
}
public void testNoPackagePrivateAccessForProperties3() {
test(
ImmutableList.of(
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/** @constructor */ function Foo() {} "
+ "/** @package */ Foo.prototype.bar = function() {};"),
SourceFile.fromCode(
Compiler.joinPathParts("baz", "quux.js"),
"/** @constructor */ function OtherFoo() { (new Foo).bar(); }")),
null, BAD_PACKAGE_PROPERTY_ACCESS);
}
public void testNoPackagePrivateAccessForProperties4() {
test(
ImmutableList.of(
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/** @constructor */ function Foo() {} "
+ "/** @package */ Foo.prototype.bar = function() {};"),
SourceFile.fromCode(
Compiler.joinPathParts("baz", "quux.js"),
"/** @constructor \n * @extends {Foo} */ "
+ "function SubFoo() { this.bar(); }")),
null, BAD_PACKAGE_PROPERTY_ACCESS);
}
public void testNoPackagePrivateAccessForNamespaces() {
test(
ImmutableList.of(
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/** @const */ var foo = {};\n"
+ "/** @package */ foo.bar = function() {};"),
SourceFile.fromCode(Compiler.joinPathParts("baz", "quux.js"), "foo.bar();")),
null, BAD_PACKAGE_PROPERTY_ACCESS);
}
public void testNoPackagePrivateAccessForProperties5() {
test(
ImmutableList.of(
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/** @constructor */ function Foo() {} "
+ "/** @package */ Foo.prototype.bar = function() {};"),
SourceFile.fromCode(
Compiler.joinPathParts("baz", "quux.js"),
"/** @constructor \n * @extends {Foo} */ "
+ "function SubFoo() {};"
+ "SubFoo.prototype.baz = function() { this.bar(); }")),
null, BAD_PACKAGE_PROPERTY_ACCESS);
}
public void testNoPackagePrivateAccessForProperties6() {
// Overriding a package-private property with a non-package-private property
// in a different file causes problems.
test(
ImmutableList.of(
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/** @constructor */ function Foo() {} "
+ "/** @package */ Foo.prototype.bar = function() {};"),
SourceFile.fromCode(
Compiler.joinPathParts("baz", "quux.js"),
"/** @constructor \n * @extends {Foo} */ "
+ "function SubFoo() {};"
+ "SubFoo.prototype.bar = function() {};")),
null, BAD_PACKAGE_PROPERTY_ACCESS);
}
public void testNoPackagePrivateAccessForProperties7() {
// It's OK to override a package-private property with a
// non-package-private property in the same file, but you'll get
// yelled at when you try to use it.
test(
ImmutableList.of(
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/** @constructor */ function Foo() {} "
+ "/** @package */ Foo.prototype.bar = function() {};"
+ "/** @constructor \n * @extends {Foo} */ "
+ "function SubFoo() {};"
+ "SubFoo.prototype.bar = function() {};"),
SourceFile.fromCode(
Compiler.joinPathParts("baz", "quux.js"),
"SubFoo.prototype.baz = function() { this.bar(); }")),
null, BAD_PACKAGE_PROPERTY_ACCESS);
}
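// The tests below cover @fileoverview visibility: when a file declares a default visibility in
// its @fileoverview block, an @override in that file must restate the property's visibility
// explicitly, otherwise BAD_PROPERTY_OVERRIDE_IN_FILE_WITH_FILEOVERVIEW_VISIBILITY is reported.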
public void
testOverrideWithoutVisibilityRedeclInFileWithFileOverviewVisibilityNotAllowed_OneFile() {
testError(
"/**\n"
+ "* @fileoverview\n"
+ "* @package\n"
+ "*/\n"
+ "/** @struct @constructor */\n"
+ "Foo = function() {};\n"
+ "/** @private */\n"
+ "Foo.prototype.privateMethod_ = function() {};\n"
+ "/** @struct @constructor @extends {Foo} */\n"
+ "Bar = function() {};\n"
+ "/** @override */\n"
+ "Bar.prototype.privateMethod_ = function() {};\n",
BAD_PROPERTY_OVERRIDE_IN_FILE_WITH_FILEOVERVIEW_VISIBILITY);
}
public void testNamespacedFunctionDoesNotNeedVisibilityRedeclInFileWithFileOverviewVisibility() {
testSame("/**\n"
+ " * @fileoverview\n"
+ " * @package\n"
+ " */\n"
+ "/** @return {string} */\n"
+ "foo.bar = function() { return 'asdf'; };");
}
public void
testOverrideWithoutVisibilityRedeclInFileWithFileOverviewVisibilityNotAllowed_TwoFiles() {
testError(new String[] {
"/** @struct @constructor */\n"
+ "Foo = function() {};\n"
+ "/** @protected */\n"
+ "Foo.prototype.protectedMethod = function() {};\n",
" /**\n"
+ "* @fileoverview \n"
+ "* @package\n"
+ "*/\n"
+ "/** @struct @constructor @extends {Foo} */\n"
+ "Bar = function() {};\n"
+ "/** @override */\n"
+ "Bar.prototype.protectedMethod = function() {};\n"},
BAD_PROPERTY_OVERRIDE_IN_FILE_WITH_FILEOVERVIEW_VISIBILITY);
}
public void testOverrideWithoutVisibilityRedeclInFileWithNoFileOverviewOk() {
testSame("/** @struct @constructor */\n"
+ "Foo = function() {};\n"
+ "/** @private */\n"
+ "Foo.prototype.privateMethod_ = function() {};\n"
+ "/** @struct @constructor @extends {Foo} */\n"
+ "Bar = function() {};\n"
+ "/** @override */\n"
+ "Bar.prototype.privateMethod_ = function() {};\n");
}
public void testOverrideWithoutVisibilityRedeclInFileWithNoFileOverviewVisibilityOk() {
testSame("/**\n"
+ " * @fileoverview\n"
+ " */\n"
+ "/** @struct @constructor */\n"
+ "Foo = function() {};\n"
+ "/** @private */\n"
+ "Foo.prototype.privateMethod_ = function() {};\n"
+ "/** @struct @constructor @extends {Foo} */\n"
+ "Bar = function() {};\n"
+ "/** @override */\n"
+ "Bar.prototype.privateMethod_ = function() {};\n");
}
public void testOverrideWithVisibilityRedeclInFileWithFileOverviewVisibilityOk_OneFile() {
testSame("/**\n"
+ " * @fileoverview\n"
+ " * @package\n"
+ " */\n"
+ "/** @struct @constructor */\n"
+ "Foo = function() {};\n"
+ "/** @private */\n"
+ "Foo.prototype.privateMethod_ = function() {};\n"
+ "/** @struct @constructor @extends {Foo} */\n"
+ "Bar = function() {};\n"
+ "/** @override @private */\n"
+ "Bar.prototype.privateMethod_ = function() {};\n");
}
public void testOverrideWithVisibilityRedeclInFileWithFileOverviewVisibilityOk_TwoFiles() {
testSame(new String[] {
"/** @struct @constructor */\n"
+ "Foo = function() {};\n"
+ "/** @protected */\n"
+ "Foo.prototype.protectedMethod = function() {};\n",
" /**\n"
+ "* @fileoverview\n"
+ "* @package\n"
+ "*/\n"
+ "/** @struct @constructor @extends {Foo} */\n"
+ "Bar = function() {};\n"
+ "/** @override @protected */\n"
+ "Bar.prototype.protectedMethod = function() {};\n"});
}
public void testPublicFileOverviewVisibilityDoesNotApplyToNameWithExplicitPackageVisibility() {
test(
ImmutableList.of(
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/**\n"
+ " * @fileoverview\n"
+ " * @public\n"
+ " */\n"
+ "/** @constructor @package */ function Foo() {};"),
SourceFile.fromCode(Compiler.joinPathParts("baz", "quux.js"), "new Foo();")),
null, BAD_PACKAGE_PROPERTY_ACCESS);
}
public void testPackageFileOverviewVisibilityDoesNotApplyToNameWithExplicitPublicVisibility() {
testSame(ImmutableList.of(
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/**\n"
+ " * @fileoverview\n"
+ " * @package\n"
+ " */\n"
+ "/** @constructor @public */ function Foo() {};"),
SourceFile.fromCode(Compiler.joinPathParts("baz", "quux.js"), "new Foo();")));
}
public void testPackageFileOverviewVisibilityAppliesToNameWithoutExplicitVisibility() {
test(
ImmutableList.of(
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/**\n"
+ " * @fileoverview\n"
+ " * @package\n"
+ " */\n"
+ "/** @constructor */\n"
+ "var Foo = function() {};\n"),
SourceFile.fromCode(Compiler.joinPathParts("baz", "quux.js"), "new Foo();")),
null, BAD_PACKAGE_PROPERTY_ACCESS);
}
public void
testPackageFileOverviewVisibilityDoesNotApplyToPropertyWithExplicitPublicVisibility() {
testSame(ImmutableList.of(
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/**\n"
+ " * @fileoverview\n"
+ " * @package\n"
+ " */\n"
+ "/** @constructor */\n"
+ "Foo = function() {};\n"
+ "/** @public */\n"
+ "Foo.prototype.bar = function() {};\n"),
SourceFile.fromCode(
Compiler.joinPathParts("baz", "quux.js"),
"var foo = new Foo();\n"
+ "foo.bar();")));
}
public void testFileoverviewVisibilityDoesNotApplyToGoogProvidedNamespace1() {
// Don't compare the generated JsDoc. It includes annotations we're not interested in,
// like @inherited.
compareJsDoc = false;
test(
ImmutableList.of(
SourceFile.fromCode("foo.js", "goog.provide('foo');"),
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/**\n"
+ " * @fileoverview\n"
+ " * @package\n"
+ " */\n"
+ "goog.provide('foo.bar');"),
SourceFile.fromCode("bar.js", "goog.require('foo')")),
ImmutableList.of(SourceFile.fromCode("foo.js", "var foo={};"),
SourceFile.fromCode(Compiler.joinPathParts("foo", "bar.js"), "foo.bar={};"),
SourceFile.fromCode("bar.js", "")),
null, null);
compareJsDoc = true;
}
public void testFileoverviewVisibilityDoesNotApplyToGoogProvidedNamespace2() {
// Don't compare the generated JsDoc. It includes annotations we're not interested in,
// like @inherited.
compareJsDoc = false;
test(
ImmutableList.of(
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/**\n"
+ " * @fileoverview\n"
+ " * @package\n"
+ " */\n"
+ "goog.provide('foo.bar');"),
SourceFile.fromCode("foo.js", "goog.provide('foo');"),
SourceFile.fromCode(
"bar.js",
"goog.require('foo');\n"
+ "var x = foo;")),
ImmutableList.of(SourceFile.fromCode(Compiler.joinPathParts("foo", "bar.js"),
"var foo={};foo.bar={};"),
SourceFile.fromCode("foo.js", ""), SourceFile.fromCode("bar.js", "var x=foo")),
null, null);
compareJsDoc = true;
}
public void testFileoverviewVisibilityDoesNotApplyToGoogProvidedNamespace3() {
// Don't compare the generated JsDoc. It includes annotations we're not interested in,
// like @inherited.
compareJsDoc = false;
test(
ImmutableList.of(
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/**\n"
+ " * @fileoverview\n"
+ " * @package\n"
+ " */\n"
+ "goog.provide('one.two');\n"
+ "one.two.three = function(){};"),
SourceFile.fromCode(
"baz.js",
"goog.require('one.two');\n"
+ "var x = one.two;")),
ImmutableList.of(
SourceFile.fromCode(Compiler.joinPathParts("foo", "bar.js"),
"var one={};one.two={};one.two.three=function(){};"),
SourceFile.fromCode("baz.js", "var x=one.two")),
null, null);
compareJsDoc = true;
}
public void testFileoverviewVisibilityDoesNotApplyToGoogProvidedNamespace4() {
test(
ImmutableList.of(
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/**\n"
+ " * @fileoverview\n"
+ " * @package\n"
+ " */\n"
+ "goog.provide('one.two');\n"
+ "one.two.three = function(){};"),
SourceFile.fromCode(
"baz.js",
"goog.require('one.two');\n"
+ "var x = one.two.three();")),
null, BAD_PACKAGE_PROPERTY_ACCESS);
}
public void
testPublicFileOverviewVisibilityDoesNotApplyToPropertyWithExplicitPackageVisibility() {
test(
ImmutableList.of(
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/**\n"
+ " * @fileoverview\n"
+ " * @public\n"
+ " */\n"
+ "/** @constructor */\n"
+ "Foo = function() {};\n"
+ "/** @package */\n"
+ "Foo.prototype.bar = function() {};\n"),
SourceFile.fromCode(
Compiler.joinPathParts("baz", "quux.js"),
"var foo = new Foo();\n"
+ "foo.bar();")),
null, BAD_PACKAGE_PROPERTY_ACCESS);
}
public void testPublicFileOverviewVisibilityAppliesToPropertyWithoutExplicitVisibility() {
testSame(ImmutableList.of(
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/**\n"
+ " * @fileoverview\n"
+ " * @public\n"
+ " */\n"
+ "/** @constructor */\n"
+ "Foo = function() {};\n"
+ "Foo.prototype.bar = function() {};\n"),
SourceFile.fromCode(
Compiler.joinPathParts("baz", "quux.js"),
"var foo = new Foo();\n"
+ "foo.bar();")));
}
public void testPackageFileOverviewVisibilityAppliesToPropertyWithoutExplicitVisibility() {
test(
ImmutableList.of(
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/**\n"
+ " * @fileoverview\n"
+ " * @package\n"
+ " */\n"
+ "/** @constructor */\n"
+ "Foo = function() {};\n"
+ "Foo.prototype.bar = function() {};\n"),
SourceFile.fromCode(
Compiler.joinPathParts("baz", "quux.js"),
"var foo = new Foo();\n"
+ "foo.bar();")),
null, BAD_PACKAGE_PROPERTY_ACCESS);
}
public void testFileOverviewVisibilityComesFromDeclarationFileNotUseFile() {
test(
ImmutableList.of(
SourceFile.fromCode(
Compiler.joinPathParts("foo", "bar.js"),
"/**\n"
+ " * @fileoverview\n"
+ " * @package\n"
+ " */\n"
+ "/** @constructor */\n"
+ "Foo = function() {};\n"
+ "Foo.prototype.bar = function() {};\n"),
SourceFile.fromCode(
Compiler.joinPathParts("baz", "quux.js"),
"/**\n"
+ " * @fileoverview\n"
+ " * @public\n"
+ " */\n"
+ "var foo = new Foo();\n"
+ "foo.bar();")),
null, BAD_PACKAGE_PROPERTY_ACCESS);
}
public void testNoExceptionsWithBadConstructors1() {
testSame(new String[] {"function Foo() { (new SubFoo).bar(); } "
+ "/** @constructor */ function SubFoo() {}"
+ "/** @protected */ SubFoo.prototype.bar = function() {};"});
}
public void testNoExceptionsWithBadConstructors2() {
testSame(new String[] {"/** @constructor */ function Foo() {} "
+ "Foo.prototype.bar = function() {};"
+ "/** @constructor */"
+ "function SubFoo() {}"
+ "/** @protected */ "
+ "SubFoo.prototype.bar = function() { (new Foo).bar(); };"});
}
public void testGoodOverrideOfProtectedProperty() {
testSame(new String[] {
"/** @constructor */ function Foo() { } "
+ "/** @protected */ Foo.prototype.bar = function() {};",
"/** @constructor \n * @extends {Foo} */ "
+ "function SubFoo() {}"
+ "/** @inheritDoc */ SubFoo.prototype.bar = function() {};",
});
}
public void testBadOverrideOfProtectedProperty() {
testError(new String[] {
"/** @constructor */ function Foo() { } "
+ "/** @protected */ Foo.prototype.bar = function() {};",
"/** @constructor \n * @extends {Foo} */ "
+ "function SubFoo() {}"
+ "/** @private */ SubFoo.prototype.bar = function() {};",
},
VISIBILITY_MISMATCH);
}
public void testBadOverrideOfPrivateProperty() {
testError(new String[] {
"/** @constructor */ function Foo() { } "
+ "/** @private */ Foo.prototype.bar = function() {};",
"/** @constructor \n * @extends {Foo} */ "
+ "function SubFoo() {}"
+ "/** @protected */ SubFoo.prototype.bar = function() {};",
},
PRIVATE_OVERRIDE);
testSame(new String[] {
"/** @constructor */ function Foo() { } "
+ "/** @private */ Foo.prototype.bar = function() {};",
"/** @constructor \n * @extends {Foo} */ "
+ "function SubFoo() {}"
+ "/** @override \n *@suppress{visibility} */\n"
+ " SubFoo.prototype.bar = function() {};",
});
}
public void testAccessOfStaticMethodOnPrivateConstructor() {
testSame(new String[] {
"/** @constructor \n * @private */ function Foo() { } "
+ "Foo.create = function() { return new Foo(); };",
"Foo.create()",
});
}
public void testAccessOfStaticMethodOnPrivateQualifiedConstructor() {
testSame(new String[] {
"/** @const */ var goog = {};"
+ "/** @constructor \n * @private */ goog.Foo = function() { }; "
+ "goog.Foo.create = function() { return new goog.Foo(); };",
"goog.Foo.create()",
});
}
public void testInstanceofOfPrivateConstructor() {
testSame(new String[] {
"/** @const */ var goog = {};"
+ "/** @constructor \n * @private */ goog.Foo = function() { }; "
+ "goog.Foo.create = function() { return new goog.Foo(); };",
"goog instanceof goog.Foo",
});
}
public void testOkAssignmentOfDeprecatedProperty() {
testSame("/** @constructor */ function Foo() {"
+ " /** @deprecated */ this.bar = 3;"
+ "}");
}
public void testBadReadOfDeprecatedProperty() {
testDepProp(
"/** @constructor */ function Foo() {"
+ " /** @deprecated GRR */ this.bar = 3;"
+ " this.baz = this.bar;"
+ "}",
"Property bar of type Foo has been deprecated: GRR");
}
public void testAutoboxedDeprecatedProperty() {
test(DEFAULT_EXTERNS,
"/** @deprecated %s */ String.prototype.prop;"
+ "function f() { return 'x'.prop; }",
(String) null, DEPRECATED_PROP_REASON, null);
}
public void testAutoboxedPrivateProperty() {
test(
// externs
DEFAULT_EXTERNS + "/** @private */ String.prototype.prop;",
"function f() { return 'x'.prop; }",
(String) null, // no output
BAD_PRIVATE_PROPERTY_ACCESS, null);
}
public void testNullableDeprecatedProperty() {
testError(
"/** @constructor */ function Foo() {}"
+ "/** @deprecated */ Foo.prototype.length;"
+ "/** @param {?Foo} x */ function f(x) { return x.length; }",
DEPRECATED_PROP);
}
public void testNullablePrivateProperty() {
testError(new String[] {
"/** @constructor */ function Foo() {}"
+ "/** @private */ Foo.prototype.length;",
"/** @param {?Foo} x */ function f(x) { return x.length; }"},
BAD_PRIVATE_PROPERTY_ACCESS);
}
public void testPrivatePropertyByConvention1() {
testError(new String[] {
"/** @constructor */ function Foo() {}\n"
+ "/** @type {number} */ Foo.prototype.length_;\n",
"/** @param {?Foo} x */ function f(x) { return x.length_; }\n"},
BAD_PRIVATE_PROPERTY_ACCESS);
}
public void testPrivatePropertyByConvention2() {
testError(new String[] {
"/** @constructor */ function Foo() {\n"
+ " /** @type {number} */ this.length_ = 1;\n"
+ "}\n"
+ "/** @type {number} */ Foo.prototype.length_;\n",
"/** @param {Foo} x */ function f(x) { return x.length_; }\n"},
BAD_PRIVATE_PROPERTY_ACCESS);
}
public void testDeclarationAndConventionConflict1() {
testError(
"/** @constructor */ function Foo() {} /** @protected */ Foo.prototype.length_;",
CONVENTION_MISMATCH);
}
public void testDeclarationAndConventionConflict2() {
testError(
"/** @constructor */ function Foo() {}\n"
+ "/** @public {number} */ Foo.prototype.length_;\n",
CONVENTION_MISMATCH);
}
public void testDeclarationAndConventionConflict3() {
testError(
"/** @constructor */ function Foo() { /** @protected */ this.length_ = 1;\n}\n",
CONVENTION_MISMATCH);
}
public void testDeclarationAndConventionConflict4a() {
testError(
"/** @constructor */ function Foo() {}"
+ "Foo.prototype = { /** @protected */ length_: 1 }\n"
+ "new Foo().length_",
CONVENTION_MISMATCH);
}
public void testDeclarationAndConventionConflict4b() {
testError(
"/** @const */ var NS = {}; /** @constructor */ NS.Foo = function() {};"
+ "NS.Foo.prototype = { /** @protected */ length_: 1 };\n"
+ "(new NS.Foo()).length_;",
CONVENTION_MISMATCH);
}
public void testDeclarationAndConventionConflict5() {
testError(
"/** @constructor */ function Foo() {}\n"
+ "Foo.prototype = { /** @protected */ get length_() { return 1; } }\n",
CONVENTION_MISMATCH);
}
public void testDeclarationAndConventionConflict6() {
testError(
"/** @constructor */ function Foo() {}\n"
+ "Foo.prototype = { /** @protected */ set length_(x) { } }\n",
CONVENTION_MISMATCH);
}
public void testDeclarationAndConventionConflict7() {
testError("/** @public */ var Foo_;", CONVENTION_MISMATCH);
}
public void testDeclarationAndConventionConflict8() {
testError("/** @package */ var Foo_;", CONVENTION_MISMATCH);
}
public void testDeclarationAndConventionConflict9() {
testError("/** @protected */ var Foo_;", CONVENTION_MISMATCH);
}
public void testConstantProperty1a() {
testError(
"/** @constructor */ function A() {"
+ "/** @const */ this.bar = 3;}"
+ "/** @constructor */ function B() {"
+ "/** @const */ this.bar = 3;this.bar += 4;}",
CONST_PROPERTY_REASSIGNED_VALUE);
}
public void testConstantProperty1b() {
testError(
"/** @constructor */ function A() {"
+ "this.BAR = 3;}"
+ "/** @constructor */ function B() {"
+ "this.BAR = 3;this.BAR += 4;}",
CONST_PROPERTY_REASSIGNED_VALUE);
}
public void testConstantProperty2a() {
testError(
"/** @constructor */ function Foo() {}"
+ "/** @const */ Foo.prototype.prop = 2;"
+ "var foo = new Foo();"
+ "foo.prop = 3;",
CONST_PROPERTY_REASSIGNED_VALUE);
}
public void testConstantProperty2b() {
testError(
"/** @constructor */ function Foo() {}"
+ "Foo.prototype.PROP = 2;"
+ "var foo = new Foo();"
+ "foo.PROP = 3;",
CONST_PROPERTY_REASSIGNED_VALUE);
}
public void testConstantProperty3a() {
testSame("/** @constructor */ function Foo() {}\n"
+ "/** @type {number} */ Foo.prototype.PROP = 2;\n"
+ "/** @suppress {duplicate|const} */ Foo.prototype.PROP = 3;\n");
}
public void testConstantProperty3b() {
testSame("/** @constructor */ function Foo() {}\n"
+ "/** @const */ Foo.prototype.prop = 2;\n"
+ "/** @suppress {const} */ Foo.prototype.prop = 3;\n");
}
public void testNamespaceConstantProperty1() {
testError(
""
+ "/** @const */ var o = {};\n"
+ "/** @const */ o.x = 1;"
+ "o.x = 2;",
CONST_PROPERTY_REASSIGNED_VALUE);
}
public void testNamespaceConstantProperty2() {
// NTI requires an @const annotation on namespaces, as in testNamespaceConstantProperty1.
// This is the only difference between the two tests.
this.mode = TypeInferenceMode.OTI_ONLY;
testError(
"var o = {};\n"
+ "/** @const */ o.x = 1;\n"
+ "o.x = 2;\n",
CONST_PROPERTY_REASSIGNED_VALUE);
}
public void testNamespaceConstantProperty2a() {
testSame("/** @const */ var o = {};\n"
+ "/** @const */ o.x = 1;\n"
+ "/** @const */ var o2 = {};\n"
+ "/** @const */ o2.x = 1;\n");
}
public void testNamespaceConstantProperty3() {
testError(
"/** @const */ var o = {};\n"
+ "/** @const */ o.x = 1;"
+ "o.x = 2;",
CONST_PROPERTY_REASSIGNED_VALUE);
}
public void testConstantProperty3a1() {
testSame("var o = { /** @const */ x: 1 };"
+ "o.x = 2;");
}
public void testConstantProperty3a2() {
// The old type checker should report this but it doesn't.
// NTI reports CONST_PROPERTY_REASSIGNED.
testSame("/** @const */ var o = { /** @const */ x: 1 };"
+ "o.x = 2;");
}
public void testConstantProperty3b1() {
// We should report this but we don't.
testSame("var o = { XYZ: 1 };"
+ "o.XYZ = 2;");
}
public void testConstantProperty3b2() {
// NTI reports NTI_REDECLARED_PROPERTY
this.mode = TypeInferenceMode.OTI_ONLY;
// The old type checker should report this but it doesn't.
testSame("/** @const */ var o = { XYZ: 1 };"
+ "o.XYZ = 2;");
}
public void testConstantProperty4() {
testError(
"/** @constructor */ function cat(name) {}"
+ "/** @const */ cat.test = 1;"
+ "cat.test *= 2;",
CONST_PROPERTY_REASSIGNED_VALUE);
}
public void testConstantProperty4b() {
testError(
"/** @constructor */ function cat(name) {}"
+ "cat.TEST = 1;"
+ "cat.TEST *= 2;",
CONST_PROPERTY_REASSIGNED_VALUE);
}
public void testConstantProperty5() {
testError(
"/** @constructor */ function Foo() { this.prop = 1;}"
+ "/** @const */ Foo.prototype.prop;"
+ "Foo.prototype.prop = 2",
CONST_PROPERTY_REASSIGNED_VALUE);
}
public void testConstantProperty6() {
testError(
"/** @constructor */ function Foo() { this.prop = 1;}"
+ "/** @const */ Foo.prototype.prop = 2;",
CONST_PROPERTY_REASSIGNED_VALUE);
}
public void testConstantProperty7() {
testSame("/** @constructor */ function Foo() {} "
+ "Foo.prototype.bar_ = function() {};"
+ "/** @constructor \n * @extends {Foo} */ "
+ "function SubFoo() {};"
+ "/** @const */ /** @override */ SubFoo.prototype.bar_ = function() {};"
+ "SubFoo.prototype.baz = function() { this.bar_(); }");
}
public void testConstantProperty8() {
testSame("/** @const */ var o = { /** @const */ x: 1 };"
+ "var y = o.x;");
}
public void testConstantProperty9() {
testSame("/** @constructor */ function A() {"
+ "/** @const */ this.bar = 3;}"
+ "/** @constructor */ function B() {"
+ "this.bar = 4;}");
}
public void testConstantProperty10a() {
testSame("/** @constructor */ function Foo() { this.prop = 1;}"
+ "/** @const */ Foo.prototype.prop;");
}
public void testConstantProperty10b() {
// NTI reports NTI_REDECLARED_PROPERTY
this.mode = TypeInferenceMode.OTI_ONLY;
testSame("/** @constructor */ function Foo() { this.PROP = 1;}"
+ "Foo.prototype.PROP;");
}
public void testConstantProperty11() {
testError(
"/** @constructor */ function Foo() {}"
+ "/** @const */ Foo.prototype.bar;"
+ "/**\n"
+ " * @constructor\n"
+ " * @extends {Foo}\n"
+ " */ function SubFoo() { this.bar = 5; this.bar = 6; }",
CONST_PROPERTY_REASSIGNED_VALUE);
}
public void testConstantProperty12() {
// NTI deliberately disallows this pattern (separate declaration and initialization
// of const properties). (b/30205953)
this.mode = TypeInferenceMode.OTI_ONLY;
testSame("/** @constructor */ function Foo() {}"
+ "/** @const */ Foo.prototype.bar;"
+ "/**\n"
+ " * @constructor\n"
+ " * @extends {Foo}\n"
+ " */ function SubFoo() { this.bar = 5; }"
+ "/**\n"
+ " * @constructor\n"
+ " * @extends {Foo}\n"
+ " */ function SubFoo2() { this.bar = 5; }");
}
public void testConstantProperty13() {
testError(
"/** @constructor */ function Foo() {}"
+ "/** @const */ Foo.prototype.bar;"
+ "/**\n"
+ " * @constructor\n"
+ " * @extends {Foo}\n"
+ " */ function SubFoo() { this.bar = 5; }"
+ "/**\n"
+ " * @constructor\n"
+ " * @extends {SubFoo}\n"
+ " */ function SubSubFoo() { this.bar = 5; }",
CONST_PROPERTY_REASSIGNED_VALUE);
}
public void testConstantProperty14() {
testError(
"/** @constructor */ function Foo() {"
+ "/** @const */ this.bar = 3; delete this.bar; }",
CONST_PROPERTY_DELETED);
}
public void testConstantPropertyInExterns() {
String externs =
DEFAULT_EXTERNS
+ "/** @constructor */ function Foo() {};\n"
+ "/** @const */ Foo.prototype.PROP;";
String js = "var f = new Foo(); f.PROP = 1; f.PROP = 2;";
test(externs, js, (String) null, CONST_PROPERTY_REASSIGNED_VALUE, null);
}
public void testConstantProperty15() {
testSame("/** @constructor */ function Foo() {};\n"
+ "Foo.CONST = 100;\n"
+ "/** @type {Foo} */\n"
+ "var foo = new Foo();\n"
+ "/** @type {number} */\n"
+ "foo.CONST = Foo.CONST;");
}
public void testConstantProperty15a() {
testError(
"/** @constructor */ function Foo() { this.CONST = 100; };\n"
+ "/** @type {Foo} */\n"
+ "var foo = new Foo();\n"
+ "/** @type {number} */\n"
+ "foo.CONST = 0;",
CONST_PROPERTY_REASSIGNED_VALUE);
}
public void testConstantProperty15b() {
testError(
"/** @constructor */ function Foo() {};\n"
+ "Foo.prototype.CONST = 100;\n"
+ "/** @type {Foo} */\n"
+ "var foo = new Foo();\n"
+ "/** @type {number} */\n"
+ "foo.CONST = 0;",
CONST_PROPERTY_REASSIGNED_VALUE);
}
public void testConstantProperty15c() {
testError(
""
+ "/** @constructor */ function Bar() {this.CONST = 100;};\n"
+ "/** @constructor \n @extends {Bar} */ function Foo() {};\n"
+ "/** @type {Foo} */\n"
+ "var foo = new Foo();\n"
+ "/** @type {number} */\n"
+ "foo.CONST = 0;",
CONST_PROPERTY_REASSIGNED_VALUE);
}
public void testConstantProperty16() {
testSame("/** @constructor */ function Foo() {};\n"
+ "Foo.CONST = 100;\n"
+ "/** @constructor */ function Bar() {};\n"
+ "Bar.CONST = 100;\n");
}
public void testConstantProperty17() {
testSame("function Foo() {};\n"
+ "Foo.CONST = 100;\n"
+ "function Bar() {};\n"
+ "Bar.CONST = 100;\n");
}
public void testConstantProperty18() {
testSame("/** @param {string} a */\n"
+ "function Foo(a) {};\n"
+ "Foo.CONST = 100;\n"
+ "/** @param {string} a */\n"
+ "function Bar(a) {};\n"
+ "Bar.CONST = 100;\n");
}
public void testConstantProperty19() {
testSame("/** @param {string} a */\n"
+ "function Foo(a) {};\n"
+ "Foo.CONST = 100;\n"
+ "/** @param {number} a */\n"
+ "function Bar(a) {};\n"
+ "Bar.CONST = 100;\n");
}
public void testSuppressConstantProperty() {
testSame("/** @constructor */ function A() {"
+ "/** @const */ this.bar = 3;}"
+ "/**\n"
+ " * @suppress {constantProperty}\n"
+ " * @constructor\n"
+ " */ function B() { /** @const */ this.bar = 3; this.bar += 4; }");
}
public void testSuppressConstantProperty2() {
testSame("/** @constructor */ function A() {"
+ "/** @const */ this.bar = 3;}"
+ "/**\n"
+ " * @suppress {const}\n"
+ " * @constructor\n"
+ " */ function B() {"
+ "/** @const */ this.bar = 3;this.bar += 4;}");
}
public void testFinalClassCannotBeSubclassed() {
testError(
LINE_JOINER.join(
"/**",
" * @constructor",
" * @final",
" */ var Foo = function() {};",
"/**",
" * @constructor",
" * @extends {Foo}*",
" */ var Bar = function() {};"),
EXTEND_FINAL_CLASS);
testError(
LINE_JOINER.join(
"/**",
" * @constructor",
" * @final",
" */ function Foo() {};",
"/**",
" * @constructor",
" * @extends {Foo}*",
" */ function Bar() {};"),
EXTEND_FINAL_CLASS);
testSame(
LINE_JOINER.join(
"/**",
" * @constructor",
" * @const",
" */ var Foo = function() {};",
"/**",
" * @constructor",
" * @extends {Foo}",
" */ var Bar = function() {};"));
}
public void testCircularPrototypeLink() {
// NOTE: this does yield a useful warning, except we don't check for it in this test:
// WARNING - Cycle detected in inheritance chain of type Foo
// This warning already has a test: TypeCheckTest::testPrototypeLoop.
testError(
LINE_JOINER.join(
"/** @constructor @extends {Foo} */ function Foo() {}",
"/** @const */ Foo.prop = 1;",
"Foo.prop = 2;"),
CONST_PROPERTY_REASSIGNED_VALUE);
// In OTI this next test causes a stack overflow.
this.mode = TypeInferenceMode.NTI_ONLY;
testError(
LINE_JOINER.join(
"/** @constructor */ function Foo() {}",
"/** @type {!Foo} */ Foo.prototype = new Foo();",
"/** @const */ Foo.prop = 1;",
"Foo.prop = 2;"),
CONST_PROPERTY_REASSIGNED_VALUE);
}
}
|
In CheckAccessControlsTest, check that the error message is as expected.
This is to guard against regressions in an upcoming change which will change that message in some cases.
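A minimal sketch of the kind of assertion this refers to (hypothetical; the exact CompilerTestCase overload and the wording of the diagnostic message may differ from what the suite actually uses):

    testError(
        "/** @constructor */ function Foo() {}"
        + "/** @private */ Foo.prototype.bar_ = function() {};"
        + "(new Foo()).bar_();",
        BAD_PRIVATE_PROPERTY_ACCESS,
        "Access to private property bar_ of Foo not allowed here.");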
-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=138539813
|
test/com/google/javascript/jscomp/CheckAccessControlsTest.java
|
In CheckAccessControlsTest, check that the error message is as expected.
|
|
Java
|
apache-2.0
|
fb2b8780f5552175806aa1502398ee3479a8b875
| 0
|
angcyo/RLibrary,angcyo/RLibrary
|
package com.angcyo.uiview.widget;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Rect;
import android.graphics.drawable.ColorDrawable;
import android.graphics.drawable.Drawable;
import android.support.annotation.DrawableRes;
import android.support.annotation.Px;
import android.support.v4.content.ContextCompat;
import android.text.Layout;
import android.text.Selection;
import android.text.Spannable;
import android.text.SpannableStringBuilder;
import android.text.Spanned;
import android.text.TextPaint;
import android.text.TextUtils;
import android.text.method.LinkMovementMethod;
import android.text.method.MovementMethod;
import android.text.style.CharacterStyle;
import android.text.style.ClickableSpan;
import android.text.style.ImageSpan;
import android.util.AttributeSet;
import android.util.Patterns;
import android.view.MotionEvent;
import android.view.View;
import android.widget.TextView;
import com.angcyo.library.utils.L;
import com.angcyo.uiview.R;
import com.angcyo.uiview.skin.SkinHelper;
import com.angcyo.uiview.utils.RTextPaint;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Copyright (C) 2016,深圳市红鸟网络科技股份有限公司 All rights reserved.
* Project:
* Description: supports @mention rendering, web links shown with a logo icon, emoji, and folded (collapsed) display.
* Created by: Robi
* Created on: 2017/04/24 15:48
* Modified by: Robi
* Modified on: 2017/04/24 15:48
* Notes:
* Version: 1.0.0
*/
public class RExTextView extends RTextView {
/**
* Regex for web URLs
*/
public final static Pattern patternUrl = Patterns.WEB_URL;//Pattern.compile("(http|ftp|https):\\/\\/[\\w\\-_]+(\\.[\\w\\-_]+)+([\\w\\-\\.:+#]*[\\w\\-+#])?");
/**
* Regex for @mention members
*/
public final static Pattern patternMention = Pattern.compile("<m id='(\\d+)'>([^<>]+)</m>");
/**
* Regex for digits only
*/
public final static Pattern patternNumber = Pattern.compile("^\\d+$");
/**
* Regex for phone numbers
*/
public final static Pattern patternPhone = Pattern.compile("\\d{3}-\\d{8}|\\d{3}-\\d{7}|\\d{4}-\\d{8}|\\d{4}-\\d{7}|1+[34578]+\\d{9}|\\d{8}|\\d{7}");
protected ImageTextSpan.OnImageSpanClick mOnImageSpanClick;
private int maxShowLine = -1;//maximum number of lines to show; when exceeded, "...see all" is appended
private String foldString;
private int mImageSpanTextColor = ImageTextSpan.getDefaultColor();
public RExTextView(Context context) {
super(context);
}
public RExTextView(Context context, AttributeSet attrs) {
super(context, attrs);
}
public RExTextView(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
}
/**
* Returns whether str consists of digits only
*/
public static boolean isNumber(String str) {
Pattern compile = Pattern.compile("^\\d+$");
Matcher matcher = compile.matcher(str);
//matcher.group(matcher.groupCount())
return matcher.find();
}
public void setImageSpanTextColor(int imageSpanTextColor) {
mImageSpanTextColor = imageSpanTextColor;
}
@Override
protected void initView() {
super.initView();
foldString = getResources().getString(R.string.see_all);
}
@Override
protected void onAttachedToWindow() {
super.onAttachedToWindow();
//setMovementMethod(ImageClickMethod.getInstance());
}
@Override
protected MovementMethod getDefaultMovementMethod() {
return ImageClickMethod.getInstance();
}
@Override
public boolean onTouchEvent(MotionEvent event) {
super.onTouchEvent(event);
return ImageClickMethod.isTouchInSpan;
}
/**
* Sets the maximum number of lines allowed to be shown
*/
public void setMaxShowLine(int maxShowLine) {
this.maxShowLine = maxShowLine;
if (maxShowLine < 0) {
setMaxLines(Integer.MAX_VALUE);
} else {
setEllipsize(TextUtils.TruncateAt.END);
setMaxLines(maxShowLine);
}
}
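// Illustrative (hypothetical) usage of the folding feature; the values below are examples only:
//
// RExTextView exTextView = ...; // obtained from the layout
// exTextView.setFoldString("...see all"); // label appended after the folded text
// exTextView.setMaxShowLine(3); // fold anything longer than 3 lines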
@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
// Layout layout = getLayout();
// if (layout != null) {
// int lines = layout.getLineCount();
// L.e("call: onDraw([canvas])-> line Count:" + lines);
// if (lines > 0) {
// //返回折叠的字符数
// if (layout.getEllipsisCount(lines - 1) > 0) {
// L.e("call: onDraw([canvas])-> getEllipsisCount:");
// }
// }
// }
}
public void setOnImageSpanClick(ImageTextSpan.OnImageSpanClick onImageSpanClick) {
mOnImageSpanClick = onImageSpanClick;
}
@Override
public void setText(CharSequence text, BufferType type) {
if (isInEditMode()) {
super.setText(text, type);
return;
}
if (TextUtils.isEmpty(text)) {
super.setText(text, type);
} else {
SpannableStringBuilder spanBuilder = new SpannableStringBuilder(text);
patternUrl(spanBuilder, text);//match URLs first (highest priority)
patternMention(spanBuilder, text);
patternPhone(spanBuilder, text);
afterPattern(spanBuilder, text);
super.setText(spanBuilder, type);
}
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
Layout layout = getLayout();
if (maxShowLine > 0 && layout != null) {
int lines = layout.getLineCount();
if (lines > 0) {
if (lines > maxShowLine) {
//folding is required
CharSequence sequence = getText();
if (sequence instanceof Spannable) {
String more = "...";
String foldString = getFoldString();
if (sequence.length() <= more.length() + foldString.length()) {
setMaxShowLine(-1);//case where there are too many line-break characters; disable folding
return;
}
Spannable spannable = (Spannable) sequence;
int lineStart = layout.getLineStart(maxShowLine);//index in the string of the first character of that line
int startPosition = lineStart - more.length() - foldString.length();
if (startPosition < 0) {
spannable.setSpan(new ImageTextSpan(getContext(), getTextSize(), getCurrentTextColor(), more),
lineStart - 1, lineStart, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
return;
}
int start = findStartPosition(spannable, startPosition);
int offset = more.length();//(sequence.length() % 2 == 0) ? 4 : 3;
if (!TextUtils.isEmpty(more)) {
spannable.setSpan(new ImageTextSpan(getContext(), getTextSize(), getCurrentTextColor(), more),
start, start + offset, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
}
if (!TextUtils.isEmpty(foldString)) {
spannable.setSpan(new ImageTextSpan(getContext(), getTextSize(), foldString),
start + offset, spannable.length(), Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
// spannable.setSpan(new ImageTextSpan(getContext(), getTextSize(), foldString),
// layout.getLineStart(maxShowLine - 1), layout.getLineStart(maxShowLine - 1) + foldString.length(), Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
}
//setMeasuredDimension(getMeasuredWidth(), (int) (getMeasuredHeight() + density() * 140));
}
}
}
}
//int lastLineHeight = getLastLineHeight();
//float descent = getPaint().descent();
//setMeasuredDimension(getMeasuredWidth(), (int) (getMeasuredHeight() + density() * 40));
}
private int getLastLineHeight() {
Layout layout = getLayout();
if (layout != null) {
int lineCount = layout.getLineCount();
if (lineCount > 0) {
//height of the line bottom measured from the top of the view; the last line's lineTop usually equals the view height
return layout.getLineTop(lineCount) - layout.getLineTop(lineCount - 1);
}
}
return 0;
}
@Override
public boolean canScrollVertically(int direction) {
if (maxShowLine > 0) {
return false;
}
return super.canScrollVertically(direction);
}
@Override
public void scrollTo(@Px int x, @Px int y) {
if (maxShowLine > 0) {
return;
}
super.scrollTo(x, y);
}
private int findStartPosition(Spannable spannable, int startWidthPosition) {
CharacterStyle[] oldSpans = spannable.getSpans(startWidthPosition, spannable.length(), CharacterStyle.class);
int position = startWidthPosition;
for (CharacterStyle oldSpan : oldSpans) {
int spanStart = spannable.getSpanStart(oldSpan);
int spanEnd = spannable.getSpanEnd(oldSpan);
if (spanStart <= startWidthPosition && spanEnd > startWidthPosition) {
position = spanStart;
}
if (spanStart >= startWidthPosition) {
spannable.removeSpan(oldSpan);
}
}
return position;
}
private String getFoldString() {
return foldString;
}
public void setFoldString(String foldString) {
this.foldString = foldString;
}
/**
* Hook for subclasses
*/
protected void afterPattern(SpannableStringBuilder spanBuilder, CharSequence text) {
}
/**
* Matches URL links
*/
protected void patternUrl(SpannableStringBuilder builder, CharSequence input) {
Matcher matcher = patternUrl.matcher(input);
while (matcher.find()) {
int start = matcher.start();
int end = matcher.end();
CharSequence text = matcher.group();//input.subSequence(start, end);
builder.setSpan(new ImageTextSpan(getContext(),
ImageTextSpan.initDrawable(getContext(),
R.drawable.base_link_ico, getTextSize()),
getContext().getString(R.string.url_link_tip),
text.toString())
.setOnImageSpanClick(mOnImageSpanClick)
.setTextColor(mImageSpanTextColor),
start, end, Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);
}
}
/**
* Matches @mentions (contacts)
*/
protected void patternMention(SpannableStringBuilder builder, CharSequence input) {
//<m id='60763'>@爱你是一种习惯i<\/m> <m id='61145'>@爱情水深王八多<\/m> <m id='61536'>@爱苦、但亦甜<\/m>
//String p ;//"<m id='\\d+'>\\w+</m>";
Matcher matcher = patternMention.matcher(input);
while (matcher.find()) {
int start = matcher.start();
int end = matcher.end();
if (!isInOtherSpan(builder, input.length(), start, end)) {
builder.setSpan(new ImageTextSpan(getContext(), ImageTextSpan.initDrawable(getTextSize()), matcher.group(2), matcher.group(1))
.setOnImageSpanClick(mOnImageSpanClick)
.setTextColor(mImageSpanTextColor),
start, end, Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);
}
}
}
/**
* Matches phone numbers
*/
protected void patternPhone(SpannableStringBuilder builder, CharSequence input) {
Matcher matcher = patternPhone.matcher(input);
while (matcher.find()) {
int start = matcher.start();
int end = matcher.end();
if (!isInOtherSpan(builder, input.length(), start, end)) {
builder.setSpan(new ImageTextSpan(getContext(), ImageTextSpan.initDrawable(getTextSize()),
matcher.group(), matcher.group())
.setOnImageSpanClick(mOnImageSpanClick)
.setTextColor(mImageSpanTextColor),
start, end, Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);
}
}
}
/**
* Checks whether the start/end positions to examine already fall inside another span
*/
private boolean isInOtherSpan(SpannableStringBuilder builder, int length, int startPosition, int endPosition) {
ImageTextSpan[] imageTextSpans = builder.getSpans(0, length, ImageTextSpan.class);
List<int[]> spanRange = new ArrayList<>();
for (ImageTextSpan span : imageTextSpans) {
int spanStart = builder.getSpanStart(span);
int spanEnd = builder.getSpanEnd(span);
spanRange.add(new int[]{spanStart, spanEnd});
}
boolean result = false;
for (int[] range : spanRange) {
if (startPosition >= range[0] && startPosition <= range[1]) {
result = true;
break;
}
if (endPosition >= range[0] && endPosition <= range[1]) {
result = true;
break;
}
}
return result;
}
/**
* Supports image-only, text-only, and mixed image/text rendering, plus click events.
* Must be used together with {@link ImageClickMethod} for clicks to work
*/
public static class ImageTextSpan extends ImageSpan {
static float downX = -1, downY = -1;
static boolean isTouchDown = false;
OnImageSpanClick mOnImageSpanClick;
private String mShowContent = "";//text to draw
private Context mContext;
private int mImageSize;//computed image width
private int space;//spacing between the text and the image
private int textColor;//text color
private Rect tempRect = new Rect();
private String url;//link url
private Rect mTextBounds;
private int mSpanWidth;
/**
* Whether the span can be clicked
*/
private boolean canClick = true;
/**
* Whether the pressed (touch) effect is enabled
*/
private boolean enableTouchEffect = false;
/**
* Builds an ImageSpan used only to display text
*/
public ImageTextSpan(Context context, float textSize, String showContent) {
this(context, textSize, -1, showContent);
}
public ImageTextSpan(Context context, float textSize, int textColor, String showContent) {
super(initDrawable(textSize), ALIGN_BASELINE);
this.mShowContent = showContent;
init(context);
setCanClick(false);
if (textColor != -1) {
setTextColor(textColor);
}
}
public ImageTextSpan(Context context, Drawable d, String showContent, String url) {
super(d, ALIGN_BASELINE);
this.url = url;
mShowContent = showContent;
init(context);
}
public ImageTextSpan(Context context, @DrawableRes int resourceId, String show, String url) {
super(context, resourceId, ALIGN_BASELINE);
this.mShowContent = show;
this.url = url;
init(context);
}
/**
* Sets the image height automatically based on the text size
*/
public static Drawable initDrawable(Context context, @DrawableRes int resourceId, float textSize) {
Drawable drawable = ContextCompat.getDrawable(context, resourceId);
int height = drawable.getIntrinsicHeight();
int width = drawable.getIntrinsicWidth();
TextPaint textPaint = new TextPaint(Paint.ANTI_ALIAS_FLAG);
textPaint.setTextSize(textSize);
float textHeight = textPaint.descent() - textPaint.ascent(); //(int) RTextPaint.getTextHeight(textPaint);
// if (textHeight > height) {
// int offset = textHeight - height + textPaint.getFontMetricsInt().descent / 2;
// InsetDrawable insetDrawable = new InsetDrawable(drawable, 0, offset, 0, 0);
// insetDrawable.setBounds(0, 0, width, textHeight);
// return insetDrawable;
// } else {
// drawable.setBounds(0, 0, width, height);
// return drawable;
// }
//drawable.setBounds(0, 0, width, (int) Math.max(height, textHeight));
drawable.setBounds(0, 0, width, (int) -textPaint.ascent()/*(int) Math.max(height, textHeight)*/);
return drawable;
}
/**
* Drawable for an ImageSpan that displays text only
*/
public static Drawable initDrawable(float textSize) {
Drawable drawable = new ColorDrawable(Color.WHITE);
TextPaint textPaint = new TextPaint(Paint.ANTI_ALIAS_FLAG);
textPaint.setTextSize(textSize);
int textHeight = (int) (textPaint.descent());// - textPaint.ascent());//(int) RTextPaint.getTextHeight(textPaint);
//drawable.setBounds(0, 0, 1, textHeight);
drawable.setBounds(0, 0, 0, 0);
return drawable;
}
public static int getDefaultColor() {
return Color.parseColor("#507daf");
}
public ImageTextSpan setOnImageSpanClick(OnImageSpanClick onImageSpanClick) {
mOnImageSpanClick = onImageSpanClick;
if (mOnImageSpanClick == null) {
canClick = false;
} else {
canClick = true;
}
return this;
}
private void init(Context context) {
mContext = context;
space = (int) (2 * mContext.getResources().getDisplayMetrics().density);
setDefaultTextColor();
}
@Override
public int getSize(Paint paint, CharSequence text, int start, int end, Paint.FontMetricsInt fm) {
if (TextUtils.isEmpty(mShowContent)) {
mImageSize = super.getSize(paint, text, start, end, fm);
mSpanWidth = mImageSize;
return mSpanWidth;
} else {
String string = mShowContent;
mTextBounds = getTextBounds(paint, string);
mImageSize = super.getSize(paint, text, start, end, fm);
mSpanWidth = mImageSize + space + mTextBounds.width() + space;
return mSpanWidth;
}
}
@Override
public void draw(Canvas canvas, CharSequence text, int start, int end, float x, int top, int y, int bottom, Paint paint) {
if (TextUtils.isEmpty(mShowContent)) {
super.draw(canvas, text, start, end, x, top, y, bottom, paint);
} else {
tempRect.set((int) x, top, ((int) (x + mSpanWidth + space + space)), bottom);
if (isTouchDown && tempRect.contains(((int) downX), (int) downY)) {
if (enableTouchEffect) {
paint.setColor(SkinHelper.getTranColor(textColor, 0x80));
canvas.drawRect(tempRect, paint);
} else {
paint.setColor(Color.TRANSPARENT);
canvas.drawRect(tempRect, paint);
}
}
super.draw(canvas, text, start, end, x, top, y, bottom, paint);
paint.setColor(textColor);//defaults to black
int height = bottom - top;//height of the drawing area
String string = mShowContent;
int textHeight = (int) RTextPaint.getTextHeight(paint);
//draw the text vertically centered against the image
float textY;
textY = y /*+ textHeight / 2 + height / 2 */ /*- paint.getFontMetricsInt().descent*/;
if (y == bottom) {
textY = y - paint.descent();
}
// if (paint.getFontMetricsInt().descent > 0) {
// textY = top + textHeight / 2 + height / 2 - paint.getFontMetricsInt().descent / 2;
// } else {
// textY = top + textHeight / 2 + height / 2 - paint.getFontMetricsInt().descent;
// }
if (top != y) {
canvas.drawText(string,
x + mImageSize + space,
textY,
paint);
}
}
}
public ImageTextSpan setDefaultTextColor() {
setTextColor(getDefaultColor());//default text color
return this;
}
public ImageTextSpan setTextColor(int textColor) {
this.textColor = textColor;
return this;
}
public int getShowTextLength() {
return mShowContent.length();
}
public boolean isCanClick() {
return canClick;
}
public ImageTextSpan setCanClick(boolean canClick) {
this.canClick = canClick;
return this;
}
/**
* Click handler
*/
public void onClick(TextView view) {
L.e("call: onClick([view])-> " + mShowContent + " : " + url);
if (mOnImageSpanClick != null) {
if (!mOnImageSpanClick.onClick(view, mShowContent, url)) {
if (patternUrl.matcher(url).matches()) {
mOnImageSpanClick.onUrlClick(view, url);
} else if (patternPhone.matcher(url).matches()) {
mOnImageSpanClick.onPhoneClick(view, url);
} else if (patternNumber.matcher(url).matches()) {
mOnImageSpanClick.onMentionClick(view, url);
}
}
}
}
public void onTouchUp(final TextView view) {
isTouchDown = false;
downX = -1;
downY = -1;
view.postInvalidate();//works around a bug where the pressed background would not disappear inside a RecyclerView
}
public void onTouchDown(final TextView view, float x, float y) {
isTouchDown = true;
downX = x;
downY = y;
view.postDelayed(new Runnable() {
@Override
public void run() {
onTouchUp(view);
}
}, 300);//auto-cancel after 300 ms
}
public void onTouchCancel(TextView view, float x, float y) {
onTouchUp(view);
}
public Rect getTextBounds(Paint paint, String text) {
tempRect.set(0, 0, 0, 0);
if (TextUtils.isEmpty(text)) {
return tempRect;
}
paint.getTextBounds(text, 0, text.length(), tempRect);
return tempRect;
}
@Override
public String getSource() {
return mShowContent;
}
public static abstract class OnImageSpanClick {
public void onUrlClick(TextView view, String url) {
}
public void onMentionClick(TextView view, String mention) {
}
public void onPhoneClick(TextView view, String phone) {
}
/**
* @return Only when this returns false will {@link OnImageSpanClick#onUrlClick(TextView, String)}
* and {@link OnImageSpanClick#onMentionClick(TextView, String)} be called back
*/
public boolean onClick(TextView view, String showContent, String url) {
return false;
}
}
}
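// Illustrative (hypothetical) listener wiring for the span click callbacks; the handler below is an example only:
//
// exTextView.setOnImageSpanClick(new ImageTextSpan.OnImageSpanClick() {
//     @Override
//     public boolean onClick(TextView view, String showContent, String url) {
//         return false; // returning false lets onUrlClick/onMentionClick/onPhoneClick run
//     }
//     @Override
//     public void onUrlClick(TextView view, String url) {
//         // open the url, e.g. in a browser or an in-app web view
//     }
// });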
public static class ImageClickMethod extends LinkMovementMethod {
/**
* Whether the touch hit a span
*/
public static boolean isTouchInSpan = false;
private static ImageClickMethod sInstance;
public static ImageClickMethod getInstance() {
if (sInstance == null)
sInstance = new ImageClickMethod();
return sInstance;
}
@Override
public boolean onTouchEvent(TextView widget, Spannable buffer, MotionEvent event) {
int action = event.getAction();
if (action == MotionEvent.ACTION_UP ||
action == MotionEvent.ACTION_DOWN ||
action == MotionEvent.ACTION_MOVE ||
action == MotionEvent.ACTION_CANCEL) {
int x = (int) event.getX();
int y = (int) event.getY();
x -= widget.getTotalPaddingLeft();
y -= widget.getTotalPaddingTop();
x += widget.getScrollX();
y += widget.getScrollY();
Layout layout = widget.getLayout();
int line = layout.getLineForVertical(y);
int off = layout.getOffsetForHorizontal(line, x);
ImageTextSpan[] link = buffer.getSpans(off, off, ImageTextSpan.class);
if (link.length > 0) {
ImageTextSpan imageTextSpan = link[0];
int spanStart = buffer.getSpanStart(imageTextSpan);
int spanEnd = buffer.getSpanEnd(imageTextSpan);
int showTextLength = imageTextSpan.getShowTextLength();
int top = layout.getLineTop(line);
int bottom = layout.getLineTop(line + 1);
float left = layout.getPrimaryHorizontal(spanStart);
float right = layout.getPrimaryHorizontal(spanStart + showTextLength);
if (imageTextSpan.isCanClick() && (x >= left && x <= right) /*(off >= spanStart && off <= spanStart + showTextLength)*/) {
if (action == MotionEvent.ACTION_UP) {
imageTextSpan.onTouchUp(widget);
imageTextSpan.onClick(widget);
isTouchInSpan = false;
} else if (action == MotionEvent.ACTION_DOWN) {
isTouchInSpan = true;
imageTextSpan.onTouchDown(widget, event.getX(), event.getY());
Selection.setSelection(buffer,
spanStart,
spanEnd);
} else if (action == MotionEvent.ACTION_MOVE) {
//link[0].onTouchMove(widget, event.getX(), event.getY());
return super.onTouchEvent(widget, buffer, event);
} else if (action == MotionEvent.ACTION_CANCEL) {
isTouchInSpan = false;
imageTextSpan.onTouchCancel(widget, event.getX(), event.getY());
return super.onTouchEvent(widget, buffer, event);
}
return true;
} else {
Selection.removeSelection(buffer);
}
} else {
Selection.removeSelection(buffer);
}
}
return super.onTouchEvent(widget, buffer, event);
}
}
/**
* Plain clickable text span
*/
@Deprecated
public static class ClickableTextSpan extends ClickableSpan {
String show, content;
private int textColor;//text color
public ClickableTextSpan(String show, String content) {
this.show = show;
this.content = content;
textColor = Color.parseColor("#507daf");//default text color
}
@Override
public void onClick(View widget) {
L.i("onClick @: " + content);
}
@Override
public void updateDrawState(TextPaint ds) {
//set the background color
ds.bgColor = SkinHelper.getTranColor(textColor, 0x80);
//set the foreground color
//ds.setColor(getResources().getColor(R.color.theme_color_accent));
ds.setColor(textColor);
}
}
}
|
uiview/src/main/java/com/angcyo/uiview/widget/RExTextView.java
|
package com.angcyo.uiview.widget;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Rect;
import android.graphics.drawable.ColorDrawable;
import android.graphics.drawable.Drawable;
import android.support.annotation.DrawableRes;
import android.support.annotation.Px;
import android.support.v4.content.ContextCompat;
import android.text.Layout;
import android.text.Selection;
import android.text.Spannable;
import android.text.SpannableStringBuilder;
import android.text.Spanned;
import android.text.TextPaint;
import android.text.TextUtils;
import android.text.method.LinkMovementMethod;
import android.text.method.MovementMethod;
import android.text.style.CharacterStyle;
import android.text.style.ClickableSpan;
import android.text.style.ImageSpan;
import android.util.AttributeSet;
import android.util.Patterns;
import android.view.MotionEvent;
import android.view.View;
import android.widget.TextView;
import com.angcyo.library.utils.L;
import com.angcyo.uiview.R;
import com.angcyo.uiview.skin.SkinHelper;
import com.angcyo.uiview.utils.RTextPaint;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Copyright (C) 2016,深圳市红鸟网络科技股份有限公司 All rights reserved.
* Project:
* Description: supports @mention rendering, web links shown with a logo icon, emoji, and folded (collapsed) display.
* Created by: Robi
* Created on: 2017/04/24 15:48
* Modified by: Robi
* Modified on: 2017/04/24 15:48
* Notes:
* Version: 1.0.0
*/
public class RExTextView extends RTextView {
/**
* Regex for web URLs
*/
public final static Pattern patternUrl = Patterns.WEB_URL;//Pattern.compile("(http|ftp|https):\\/\\/[\\w\\-_]+(\\.[\\w\\-_]+)+([\\w\\-\\.:+#]*[\\w\\-+#])?");
/**
* Regex for @mention members
*/
public final static Pattern patternMention = Pattern.compile("<m id='(\\d+)'>([^<>]+)</m>");
/**
* Regex for digits only
*/
public final static Pattern patternNumber = Pattern.compile("^\\d+$");
/**
* Regex for phone numbers
*/
public final static Pattern patternPhone = Pattern.compile("\\d{3}-\\d{8}|\\d{3}-\\d{7}|\\d{4}-\\d{8}|\\d{4}-\\d{7}|1+[34578]+\\d{9}|\\d{8}|\\d{7}");
protected ImageTextSpan.OnImageSpanClick mOnImageSpanClick;
private int maxShowLine = -1;//maximum number of lines to show; when exceeded, "...see all" is appended
private String foldString;
private int mImageSpanTextColor = ImageTextSpan.getDefaultColor();
public RExTextView(Context context) {
super(context);
}
public RExTextView(Context context, AttributeSet attrs) {
super(context, attrs);
}
public RExTextView(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
}
/**
* Returns whether str consists of digits only
*/
public static boolean isNumber(String str) {
Pattern compile = Pattern.compile("^\\d+$");
Matcher matcher = compile.matcher(str);
//matcher.group(matcher.groupCount())
return matcher.find();
}
public void setImageSpanTextColor(int imageSpanTextColor) {
mImageSpanTextColor = imageSpanTextColor;
}
@Override
protected void initView() {
super.initView();
foldString = getResources().getString(R.string.see_all);
}
@Override
protected void onAttachedToWindow() {
super.onAttachedToWindow();
//setMovementMethod(ImageClickMethod.getInstance());
}
@Override
protected MovementMethod getDefaultMovementMethod() {
return ImageClickMethod.getInstance();
}
@Override
public boolean onTouchEvent(MotionEvent event) {
super.onTouchEvent(event);
return ImageClickMethod.isTouchInSpan;
}
/**
* Sets the maximum number of lines allowed to be shown
*/
public void setMaxShowLine(int maxShowLine) {
this.maxShowLine = maxShowLine;
if (maxShowLine < 0) {
setMaxLines(Integer.MAX_VALUE);
} else {
setEllipsize(TextUtils.TruncateAt.END);
setMaxLines(maxShowLine);
}
}
@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
// Layout layout = getLayout();
// if (layout != null) {
// int lines = layout.getLineCount();
// L.e("call: onDraw([canvas])-> line Count:" + lines);
// if (lines > 0) {
// //返回折叠的字符数
// if (layout.getEllipsisCount(lines - 1) > 0) {
// L.e("call: onDraw([canvas])-> getEllipsisCount:");
// }
// }
// }
}
public void setOnImageSpanClick(ImageTextSpan.OnImageSpanClick onImageSpanClick) {
mOnImageSpanClick = onImageSpanClick;
}
@Override
public void setText(CharSequence text, BufferType type) {
if (isInEditMode()) {
super.setText(text, type);
return;
}
if (TextUtils.isEmpty(text)) {
super.setText(text, type);
} else {
SpannableStringBuilder spanBuilder = new SpannableStringBuilder(text);
patternUrl(spanBuilder, text);//match URLs first (highest priority)
patternMention(spanBuilder, text);
patternPhone(spanBuilder, text);
afterPattern(spanBuilder, text);
super.setText(spanBuilder, type);
}
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
Layout layout = getLayout();
if (maxShowLine > 0 && layout != null) {
int lines = layout.getLineCount();
if (lines > 0) {
if (lines > maxShowLine) {
//folding is required
CharSequence sequence = getText();
if (sequence instanceof Spannable) {
String more = "...";
String foldString = getFoldString();
if (sequence.length() <= more.length() + foldString.length()) {
setMaxShowLine(-1);//case where there are too many line-break characters; disable folding
return;
}
Spannable spannable = (Spannable) sequence;
int lineStart = layout.getLineStart(maxShowLine);//index in the string of the first character of that line
int startPosition = lineStart - more.length() - foldString.length();
if (startPosition < 0) {
spannable.setSpan(new ImageTextSpan(getContext(), getTextSize(), getCurrentTextColor(), more),
lineStart - 1, lineStart, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
return;
}
int start = findStartPosition(spannable, startPosition);
int offset = more.length();//(sequence.length() % 2 == 0) ? 4 : 3;
if (!TextUtils.isEmpty(more)) {
spannable.setSpan(new ImageTextSpan(getContext(), getTextSize(), getCurrentTextColor(), more),
start, start + offset, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
}
if (!TextUtils.isEmpty(foldString)) {
spannable.setSpan(new ImageTextSpan(getContext(), getTextSize(), foldString),
start + offset, spannable.length(), Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
// spannable.setSpan(new ImageTextSpan(getContext(), getTextSize(), foldString),
// layout.getLineStart(maxShowLine - 1), layout.getLineStart(maxShowLine - 1) + foldString.length(), Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
}
//setMeasuredDimension(getMeasuredWidth(), (int) (getMeasuredHeight() + density() * 140));
}
}
}
}
//int lastLineHeight = getLastLineHeight();
//float descent = getPaint().descent();
//setMeasuredDimension(getMeasuredWidth(), (int) (getMeasuredHeight() + density() * 40));
}
private int getLastLineHeight() {
Layout layout = getLayout();
if (layout != null) {
int lineCount = layout.getLineCount();
if (lineCount > 0) {
//height of the line bottom measured from the top of the view; the last line's lineTop usually equals the view height
return layout.getLineTop(lineCount) - layout.getLineTop(lineCount - 1);
}
}
return 0;
}
@Override
public boolean canScrollVertically(int direction) {
if (maxShowLine > 0) {
return false;
}
return super.canScrollVertically(direction);
}
@Override
public void scrollTo(@Px int x, @Px int y) {
if (maxShowLine > 0) {
return;
}
super.scrollTo(x, y);
}
private int findStartPosition(Spannable spannable, int startWidthPosition) {
CharacterStyle[] oldSpans = spannable.getSpans(startWidthPosition, spannable.length(), CharacterStyle.class);
int position = startWidthPosition;
for (CharacterStyle oldSpan : oldSpans) {
int spanStart = spannable.getSpanStart(oldSpan);
int spanEnd = spannable.getSpanEnd(oldSpan);
if (spanStart <= startWidthPosition && spanEnd > startWidthPosition) {
position = spanStart;
}
if (spanStart >= startWidthPosition) {
spannable.removeSpan(oldSpan);
}
}
return position;
}
private String getFoldString() {
return foldString;
}
public void setFoldString(String foldString) {
this.foldString = foldString;
}
/**
* Hook for subclasses
*/
protected void afterPattern(SpannableStringBuilder spanBuilder, CharSequence text) {
}
/**
* Matches URL links
*/
protected void patternUrl(SpannableStringBuilder builder, CharSequence input) {
Matcher matcher = patternUrl.matcher(input);
while (matcher.find()) {
int start = matcher.start();
int end = matcher.end();
CharSequence text = matcher.group();//input.subSequence(start, end);
builder.setSpan(new ImageTextSpan(getContext(),
ImageTextSpan.initDrawable(getContext(),
R.drawable.base_link_ico, getTextSize()),
getContext().getString(R.string.url_link_tip),
text.toString())
.setOnImageSpanClick(mOnImageSpanClick)
.setTextColor(mImageSpanTextColor),
start, end, Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);
}
}
/**
* Matches @mentions (contacts)
*/
protected void patternMention(SpannableStringBuilder builder, CharSequence input) {
//<m id='60763'>@爱你是一种习惯i<\/m> <m id='61145'>@爱情水深王八多<\/m> <m id='61536'>@爱苦、但亦甜<\/m>
//String p ;//"<m id='\\d+'>\\w+</m>";
Matcher matcher = patternMention.matcher(input);
while (matcher.find()) {
int start = matcher.start();
int end = matcher.end();
if (!isInOtherSpan(builder, input.length(), start, end)) {
builder.setSpan(new ImageTextSpan(getContext(), ImageTextSpan.initDrawable(getTextSize()), matcher.group(2), matcher.group(1))
.setOnImageSpanClick(mOnImageSpanClick)
.setTextColor(mImageSpanTextColor),
start, end, Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);
}
}
}
/**
* Matches phone numbers
*/
protected void patternPhone(SpannableStringBuilder builder, CharSequence input) {
Matcher matcher = patternPhone.matcher(input);
while (matcher.find()) {
int start = matcher.start();
int end = matcher.end();
if (!isInOtherSpan(builder, input.length(), start, end)) {
builder.setSpan(new ImageTextSpan(getContext(), ImageTextSpan.initDrawable(getTextSize()),
matcher.group(), matcher.group())
.setOnImageSpanClick(mOnImageSpanClick)
.setTextColor(mImageSpanTextColor),
start, end, Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);
}
}
}
/**
* Checks whether the start/end positions to examine already fall inside another span
*/
private boolean isInOtherSpan(SpannableStringBuilder builder, int length, int startPosition, int endPosition) {
ImageTextSpan[] imageTextSpans = builder.getSpans(0, length, ImageTextSpan.class);
List<int[]> spanRange = new ArrayList<>();
for (ImageTextSpan span : imageTextSpans) {
int spanStart = builder.getSpanStart(span);
int spanEnd = builder.getSpanEnd(span);
spanRange.add(new int[]{spanStart, spanEnd});
}
boolean result = false;
for (int[] range : spanRange) {
if (startPosition >= range[0] && startPosition <= range[1]) {
result = true;
break;
}
if (endPosition >= range[0] && endPosition <= range[1]) {
result = true;
break;
}
}
return result;
}
/**
* Supports image-only, text-only, and mixed image/text rendering, plus click events.
* Must be used together with {@link ImageClickMethod} for clicks to work
*/
public static class ImageTextSpan extends ImageSpan {
static float downX = -1, downY = -1;
static boolean isTouchDown = false;
OnImageSpanClick mOnImageSpanClick;
private String mShowContent = "";//text to draw
private Context mContext;
private int mImageSize;//computed image width
private int space;//spacing between the text and the image
private int textColor;//text color
private Rect tempRect = new Rect();
private String url;//link url
private Rect mTextBounds;
private int mSpanWidth;
/**
* Whether the span can be clicked
*/
private boolean canClick = true;
/**
* Whether the pressed (touch) effect is enabled
*/
private boolean enableTouchEffect = false;
/**
* Builds an ImageSpan used only to display text
*/
public ImageTextSpan(Context context, float textSize, String showContent) {
this(context, textSize, -1, showContent);
}
public ImageTextSpan(Context context, float textSize, int textColor, String showContent) {
super(initDrawable(textSize), ALIGN_BASELINE);
this.mShowContent = showContent;
init(context);
setCanClick(false);
if (textColor != -1) {
setTextColor(textColor);
}
}
public ImageTextSpan(Context context, Drawable d, String showContent, String url) {
super(d, ALIGN_BASELINE);
this.url = url;
mShowContent = showContent;
init(context);
}
public ImageTextSpan(Context context, @DrawableRes int resourceId, String show, String url) {
super(context, resourceId, ALIGN_BASELINE);
this.mShowContent = show;
this.url = url;
init(context);
}
/**
* Sets the image height automatically based on the text size
*/
public static Drawable initDrawable(Context context, @DrawableRes int resourceId, float textSize) {
Drawable drawable = ContextCompat.getDrawable(context, resourceId);
int height = drawable.getIntrinsicHeight();
int width = drawable.getIntrinsicWidth();
TextPaint textPaint = new TextPaint(Paint.ANTI_ALIAS_FLAG);
textPaint.setTextSize(textSize);
float textHeight = textPaint.descent() - textPaint.ascent(); //(int) RTextPaint.getTextHeight(textPaint);
// if (textHeight > height) {
// int offset = textHeight - height + textPaint.getFontMetricsInt().descent / 2;
// InsetDrawable insetDrawable = new InsetDrawable(drawable, 0, offset, 0, 0);
// insetDrawable.setBounds(0, 0, width, textHeight);
// return insetDrawable;
// } else {
// drawable.setBounds(0, 0, width, height);
// return drawable;
// }
//drawable.setBounds(0, 0, width, (int) Math.max(height, textHeight));
drawable.setBounds(0, 0, width, (int) Math.max(height, textHeight));
return drawable;
}
/**
* Drawable for an ImageSpan that displays text only
*/
public static Drawable initDrawable(float textSize) {
Drawable drawable = new ColorDrawable(Color.WHITE);
TextPaint textPaint = new TextPaint(Paint.ANTI_ALIAS_FLAG);
textPaint.setTextSize(textSize);
int textHeight = (int) (textPaint.descent());// - textPaint.ascent());//(int) RTextPaint.getTextHeight(textPaint);
//drawable.setBounds(0, 0, 1, textHeight);
drawable.setBounds(0, 0, 0, 0);
return drawable;
}
public static int getDefaultColor() {
return Color.parseColor("#507daf");
}
public ImageTextSpan setOnImageSpanClick(OnImageSpanClick onImageSpanClick) {
mOnImageSpanClick = onImageSpanClick;
if (mOnImageSpanClick == null) {
canClick = false;
} else {
canClick = true;
}
return this;
}
private void init(Context context) {
mContext = context;
space = (int) (2 * mContext.getResources().getDisplayMetrics().density);
setDefaultTextColor();
}
@Override
public int getSize(Paint paint, CharSequence text, int start, int end, Paint.FontMetricsInt fm) {
if (TextUtils.isEmpty(mShowContent)) {
mImageSize = super.getSize(paint, text, start, end, fm);
mSpanWidth = mImageSize;
return mSpanWidth;
} else {
String string = mShowContent;
mTextBounds = getTextBounds(paint, string);
mImageSize = super.getSize(paint, text, start, end, fm);
mSpanWidth = mImageSize + space + mTextBounds.width() + space;
return mSpanWidth;
}
}
@Override
public void draw(Canvas canvas, CharSequence text, int start, int end, float x, int top, int y, int bottom, Paint paint) {
if (TextUtils.isEmpty(mShowContent)) {
super.draw(canvas, text, start, end, x, top, y, bottom, paint);
} else {
tempRect.set((int) x, top, ((int) (x + mSpanWidth + space + space)), bottom);
if (isTouchDown && tempRect.contains(((int) downX), (int) downY)) {
if (enableTouchEffect) {
paint.setColor(SkinHelper.getTranColor(textColor, 0x80));
canvas.drawRect(tempRect, paint);
} else {
paint.setColor(Color.TRANSPARENT);
canvas.drawRect(tempRect, paint);
}
}
super.draw(canvas, text, start, end, x, top, y, bottom, paint);
paint.setColor(textColor);//defaults to black
int height = bottom - top;//height of the drawing area
String string = mShowContent;
int textHeight = (int) RTextPaint.getTextHeight(paint);
//draw the text vertically centered against the image
float textY;
textY = y /*+ textHeight / 2 + height / 2 */ /*- paint.getFontMetricsInt().descent*/;
if (y == bottom) {
textY = y - paint.descent();
}
// if (paint.getFontMetricsInt().descent > 0) {
// textY = top + textHeight / 2 + height / 2 - paint.getFontMetricsInt().descent / 2;
// } else {
// textY = top + textHeight / 2 + height / 2 - paint.getFontMetricsInt().descent;
// }
if (top != y) {
canvas.drawText(string,
x + mImageSize + space,
textY,
paint);
}
}
}
public ImageTextSpan setDefaultTextColor() {
setTextColor(getDefaultColor());//default text color
return this;
}
public ImageTextSpan setTextColor(int textColor) {
this.textColor = textColor;
return this;
}
public int getShowTextLength() {
return mShowContent.length();
}
public boolean isCanClick() {
return canClick;
}
public ImageTextSpan setCanClick(boolean canClick) {
this.canClick = canClick;
return this;
}
/**
* Click handler
*/
public void onClick(TextView view) {
L.e("call: onClick([view])-> " + mShowContent + " : " + url);
if (mOnImageSpanClick != null) {
if (!mOnImageSpanClick.onClick(view, mShowContent, url)) {
if (patternUrl.matcher(url).matches()) {
mOnImageSpanClick.onUrlClick(view, url);
} else if (patternPhone.matcher(url).matches()) {
mOnImageSpanClick.onPhoneClick(view, url);
} else if (patternNumber.matcher(url).matches()) {
mOnImageSpanClick.onMentionClick(view, url);
}
}
}
}
public void onTouchUp(final TextView view) {
isTouchDown = false;
downX = -1;
downY = -1;
view.postInvalidate();//works around a bug where the pressed background would not disappear inside a RecyclerView
}
public void onTouchDown(final TextView view, float x, float y) {
isTouchDown = true;
downX = x;
downY = y;
view.postDelayed(new Runnable() {
@Override
public void run() {
onTouchUp(view);
}
}, 300);//auto-cancel after 300 ms
}
public void onTouchCancel(TextView view, float x, float y) {
onTouchUp(view);
}
public Rect getTextBounds(Paint paint, String text) {
tempRect.set(0, 0, 0, 0);
if (TextUtils.isEmpty(text)) {
return tempRect;
}
paint.getTextBounds(text, 0, text.length(), tempRect);
return tempRect;
}
@Override
public String getSource() {
return mShowContent;
}
public static abstract class OnImageSpanClick {
public void onUrlClick(TextView view, String url) {
}
public void onMentionClick(TextView view, String mention) {
}
public void onPhoneClick(TextView view, String phone) {
}
/**
* @return Only when this returns false will {@link OnImageSpanClick#onUrlClick(TextView, String)}
* and {@link OnImageSpanClick#onMentionClick(TextView, String)} be called back
*/
public boolean onClick(TextView view, String showContent, String url) {
return false;
}
}
}
public static class ImageClickMethod extends LinkMovementMethod {
/**
* Whether the touch hit a span
*/
public static boolean isTouchInSpan = false;
private static ImageClickMethod sInstance;
public static ImageClickMethod getInstance() {
if (sInstance == null)
sInstance = new ImageClickMethod();
return sInstance;
}
@Override
public boolean onTouchEvent(TextView widget, Spannable buffer, MotionEvent event) {
int action = event.getAction();
if (action == MotionEvent.ACTION_UP ||
action == MotionEvent.ACTION_DOWN ||
action == MotionEvent.ACTION_MOVE ||
action == MotionEvent.ACTION_CANCEL) {
int x = (int) event.getX();
int y = (int) event.getY();
x -= widget.getTotalPaddingLeft();
y -= widget.getTotalPaddingTop();
x += widget.getScrollX();
y += widget.getScrollY();
Layout layout = widget.getLayout();
int line = layout.getLineForVertical(y);
int off = layout.getOffsetForHorizontal(line, x);
ImageTextSpan[] link = buffer.getSpans(off, off, ImageTextSpan.class);
if (link.length > 0) {
ImageTextSpan imageTextSpan = link[0];
int spanStart = buffer.getSpanStart(imageTextSpan);
int spanEnd = buffer.getSpanEnd(imageTextSpan);
int showTextLength = imageTextSpan.getShowTextLength();
int top = layout.getLineTop(line);
int bottom = layout.getLineTop(line + 1);
float left = layout.getPrimaryHorizontal(spanStart);
float right = layout.getPrimaryHorizontal(spanStart + showTextLength);
if (imageTextSpan.isCanClick() && (x >= left && x <= right) /*(off >= spanStart && off <= spanStart + showTextLength)*/) {
if (action == MotionEvent.ACTION_UP) {
imageTextSpan.onTouchUp(widget);
imageTextSpan.onClick(widget);
isTouchInSpan = false;
} else if (action == MotionEvent.ACTION_DOWN) {
isTouchInSpan = true;
imageTextSpan.onTouchDown(widget, event.getX(), event.getY());
Selection.setSelection(buffer,
spanStart,
spanEnd);
} else if (action == MotionEvent.ACTION_MOVE) {
//link[0].onTouchMove(widget, event.getX(), event.getY());
return super.onTouchEvent(widget, buffer, event);
} else if (action == MotionEvent.ACTION_CANCEL) {
isTouchInSpan = false;
imageTextSpan.onTouchCancel(widget, event.getX(), event.getY());
return super.onTouchEvent(widget, buffer, event);
}
return true;
} else {
Selection.removeSelection(buffer);
}
} else {
Selection.removeSelection(buffer);
}
}
return super.onTouchEvent(widget, buffer, event);
}
}
/**
* Plain clickable text span
*/
@Deprecated
public static class ClickableTextSpan extends ClickableSpan {
String show, content;
private int textColor;//text color
public ClickableTextSpan(String show, String content) {
this.show = show;
this.content = content;
textColor = Color.parseColor("#507daf");//default text color
}
@Override
public void onClick(View widget) {
L.i("onClick @: " + content);
}
@Override
public void updateDrawState(TextPaint ds) {
//set the background color
ds.bgColor = SkinHelper.getTranColor(textColor, 0x80);
//set the foreground color
//ds.setColor(getResources().getColor(R.color.theme_color_accent));
ds.setColor(textColor);
}
}
}
|
Fix image span display.
|
uiview/src/main/java/com/angcyo/uiview/widget/RExTextView.java
|
Fix image span display.
|
|
Java
|
apache-2.0
|
bd1f09b6d250c9b0e92dd31a3eaf7f0d21809d24
| 0
|
enternoescape/opendct,enternoescape/opendct,enternoescape/opendct
|
/*
* Copyright 2016 The OpenDCT Authors. All Rights Reserved
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Copyright (c) 2010 Nicolas George
* Copyright (c) 2011 Stefano Sabatini
* Copyright (c) 2014 Andrey Utkin
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package opendct.video.ffmpeg;
import opendct.config.Config;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.bytedeco.javacpp.*;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import static opendct.video.ffmpeg.FFmpegUtil.*;
import static org.bytedeco.javacpp.avcodec.*;
import static org.bytedeco.javacpp.avfilter.*;
import static org.bytedeco.javacpp.avformat.*;
import static org.bytedeco.javacpp.avutil.*;
public class FFmpegTranscoder implements FFmpegStreamProcessor {
private static final Logger logger = LogManager.getLogger(FFmpegTranscoder.class);
private static final long WRAP_0 = 8589934592L;
private static final long WRAP_1 = 8589934592L / 2L;
private static final long WRAP_0_LOW = WRAP_0 - 90000;
private static final long WRAP_0_HIGH = WRAP_0 + 90000;
private static final long WRAP_1_LOW = WRAP_1 - 90000;
private static final long WRAP_1_HIGH = WRAP_1 + 90000;
private static final int TS_TIME_BASE = 90000;
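// For reference: MPEG-TS PTS/DTS values are 33-bit numbers in a 90 kHz clock, so they wrap at
// 2^33 = 8,589,934,592 ticks (roughly 26.5 hours). WRAP_1 is the halfway point, and the
// LOW/HIGH bounds are one second (90,000 ticks) on either side of each wrap point.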
private boolean switching = false;
private long switchTimeout = 0;
private FFmpegWriter newWriter = null;
private FFmpegWriter newWriter2 = null;
private String newFilename = null;
private final Object switchLock = new Object();
private static Map<Pointer, Integer> permissionMap = new HashMap<>();
private static int permissionWeight = 0;
private static int transcodeLimit =
Config.getInteger("consumer.ffmpeg.transcode_limit",
(Runtime.getRuntime().availableProcessors() - 1) * 2);
private static final float dts_delta_threshold = 10;
private long firstDtsByStreamIndex[] = new long[0];
private long firstPtsByStreamIndex[] = new long[0];
private long lastDtsByStreamIndex[] = new long[0];
private long lastPtsByStreamIndex[] = new long[0];
boolean mpegTsCbrEnabled = false;
private AtomicInteger encodedFrames[] = new AtomicInteger[0];
private long startTime = 0;
private AVDictionary muxerDict;
private FFmpegContext ctx = null;
private boolean interlaced = false;
private FilteringContext filter_ctx[] = new FilteringContext[0];
private class FilteringContext {
private AVFilterContext buffersink_ctx;
private AVFilterContext buffersrc_ctx;
private AVFilterGraph filter_graph;
}
/**
* Request permission to use transcoding resources.
*
* @param opaque A unique pointer for the requesting FFmpeg context.
* @param weight The assigned weight to the transcoding job.
* @return <i>true</i> if the transcoding is allowed to proceed.
*/
public static synchronized boolean getTranscodePermission(Pointer opaque, int weight) {
if (opaque == null) {
return false;
}
Integer checkWeight = permissionMap.get(opaque);
// The capture device is asking for permission, but the transcode limit has been reached.
// The stream can only be remuxed now.
if (checkWeight == null && permissionWeight + weight > transcodeLimit) {
return false;
} else if (checkWeight != null) {
returnTranscodePermission(opaque);
if (permissionWeight + weight > transcodeLimit) {
return false;
}
}
permissionWeight += weight;
permissionMap.put(opaque, weight);
return true;
}
public static synchronized void returnTranscodePermission(Pointer opaque) {
if (opaque == null) {
return;
}
Integer weight = permissionMap.get(opaque);
if (weight != null) {
permissionWeight -= weight;
permissionMap.remove(opaque);
}
}
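// A minimal usage sketch (hypothetical caller, not part of this class): permission should be
// requested and returned with the same opaque pointer, ideally in a try/finally so a failed
// transcode cannot leak its weight:
//
//   if (getTranscodePermission(ctx.OPAQUE, weight)) {
//       try {
//           // ... transcode ...
//       } finally {
//           returnTranscodePermission(ctx.OPAQUE);
//       }
//   } else {
//       // Fall back to remuxing only.
//   }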
@Override
public boolean switchOutput(String newFilename, FFmpegWriter writer, FFmpegWriter writer2) {
synchronized (switchLock) {
logger.info("SWITCH started.");
this.newFilename = newFilename;
newWriter = writer;
newWriter2 = writer2;
switchTimeout = System.currentTimeMillis() + 10000;
switching = true;
while (switching && !ctx.isInterrupted()) {
try {
switchLock.wait(500);
// The timeout will also manage a situation whereby this is called and
// the other end is not currently running.
if (switching && System.currentTimeMillis() > switchTimeout) {
logger.warn("SWITCH timed out.");
return false;
}
} catch (InterruptedException e) {
logger.info("SWITCH wait was interrupted.");
}
}
}
return true;
}
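// The switch is a simple handshake: this method publishes the new filename and writers, sets
// 'switching' and waits on switchLock; the loop in streamOutput() calls switchStreamOutput()
// on the next flagged video key frame (or once the 10 second timeout passes), clears
// 'switching' and calls switchLock.notifyAll() to release this method.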
@Override
public void initStreamOutput(FFmpegContext ctx, String outputFilename,
FFmpegWriter writer, FFmpegWriter writer2)
throws FFmpegException, InterruptedException {
initStreamOutput(ctx, outputFilename, writer, writer2, true);
}
/**
* Initialize output stream after detection has already been performed.
*
* @param ctx The FFmpeg context with inputs populated from input stream detection.
* @param outputFilename The name of the file that will be written. This is only a hint to the
* muxer about what file format is desired.
* @param writer The writer to be used for output.
* @param writer2 An optional second writer for the secondary (CCExtractor) output; may be null.
* @param firstRun If <i>false</i> this will skip various things such as interlace detection and
* displaying the input stream information.
* @throws FFmpegException This is thrown if there are any problems that cannot be handled.
* @throws InterruptedException This is thrown if initialization is interrupted.
*/
public void initStreamOutput(FFmpegContext ctx, String outputFilename,
FFmpegWriter writer, FFmpegWriter writer2, boolean firstRun)
throws FFmpegException, InterruptedException {
mpegTsCbrEnabled = FFmpegConfig.getUseMpegTsCBR();
long muxRate = 0;
int ret;
this.ctx = ctx;
logger.info("Initializing FFmpeg transcoder stream output.");
if (ctx.isInterrupted()) {
throw new InterruptedException(FFMPEG_INIT_INTERRUPTED);
}
if (firstRun) {
ctx.dumpInputFormat();
}
ctx.secondaryStream = writer2 != null;
int numInputStreams = ctx.avfCtxInput.nb_streams();
// Creates the output container and AVFormatContext.
ctx.allocAvfContainerOutputContext(outputFilename);
if (ctx.secondaryStream) {
ctx.allocAvfContainerOutputContext2(outputFilename);
ctx.streamMap2 = new OutputStreamMap[numInputStreams];
for (int i = 0; i < ctx.streamMap2.length; i++) {
ctx.streamMap2[i] = new OutputStreamMap();
ctx.streamMap2[i].iStream = ctx.avfCtxInput.streams(i);
ctx.streamMap2[i].iCodecContext = ctx.streamMap2[i].iStream.codec();
ctx.streamMap2[i].iCodecType = ctx.streamMap2[i].iCodecContext.codec_type();
ctx.streamMap2[i].iCodecRational = ctx.streamMap2[i].iCodecContext.time_base();
ctx.streamMap2[i].iStreamRational = ctx.streamMap2[i].iStream.time_base();
}
}
lastDtsByStreamIndex = new long[numInputStreams];
Arrays.fill(lastDtsByStreamIndex, Integer.MIN_VALUE);
lastPtsByStreamIndex = new long[numInputStreams];
Arrays.fill(lastPtsByStreamIndex, Integer.MIN_VALUE);
firstDtsByStreamIndex = new long[numInputStreams];
Arrays.fill(firstDtsByStreamIndex, Integer.MIN_VALUE);
firstPtsByStreamIndex = new long[numInputStreams];
Arrays.fill(firstPtsByStreamIndex, Integer.MIN_VALUE);
ctx.streamMap = new OutputStreamMap[numInputStreams];
for (int i = 0; i < ctx.streamMap.length; i++) {
ctx.streamMap[i] = new OutputStreamMap();
ctx.streamMap[i].iStream = ctx.avfCtxInput.streams(i);
ctx.streamMap[i].iCodecContext = ctx.streamMap[i].iStream.codec();
ctx.streamMap[i].iCodecType = ctx.streamMap[i].iCodecContext.codec_type();
ctx.streamMap[i].iCodecRational = ctx.streamMap[i].iCodecContext.time_base();
ctx.streamMap[i].iStreamRational = ctx.streamMap[i].iStream.time_base();
}
if (firstRun) {
encodedFrames = new AtomicInteger[numInputStreams];
for (int i = 0; i < encodedFrames.length; i++) {
encodedFrames[i] = new AtomicInteger(0);
}
}
if (ctx.isInterrupted()) {
throw new InterruptedException(FFMPEG_INIT_INTERRUPTED);
}
int videoHeight;
int videoWidth;
if (ctx.videoInCodecCtx != null) {
videoHeight = ctx.videoInCodecCtx.height();
videoWidth = ctx.videoInCodecCtx.width();
} else {
videoHeight = 0;
videoWidth = 0;
}
if (firstRun) {
AVCodec videoCodec = null;
if (ctx.videoInCodecCtx != null) {
videoCodec = avcodec_find_decoder(ctx.videoInCodecCtx.codec_id());
}
if (ctx.encodeProfile != null && ctx.videoInCodecCtx != null && ctx.preferredVideo > NO_STREAM_IDX) {
ctx.videoEncodeSettings = ctx.encodeProfile.getVideoEncoderMap(
videoWidth,
videoHeight,
ctx.encodeProfile.getVideoEncoderCodec(
videoCodec));
// Remove the encoder profile if we cannot get permission to transcode. This will
// prevent any possible future attempts.
String weightStr = ctx.videoEncodeSettings.get("encode_weight");
int weight = 2;
if (weightStr != null) {
try {
weight = Integer.parseInt(weightStr);
} catch (NumberFormatException e) {
logger.error("Unable to parse '{}' into an integer, using the default {}.",
weightStr, weight);
}
} else {
logger.warn("encode_weight is not set. Using default {}.", weight);
}
if (!getTranscodePermission(ctx.OPAQUE, weight)) {
ctx.encodeProfile = null;
}
} else {
if (ctx.encodeProfile != null) {
// Everything needed to make a correct decision is not available. Remux only.
logger.warn("ctx.videoCodecCtx was null or there was no preferred video when" +
" trying to get permission to transcode. Remuxing instead.");
ctx.encodeProfile = null;
}
}
interlaced = ctx.encodeProfile != null &&
ctx.encodeProfile.canInterlaceDetect(videoHeight, videoWidth) &&
fastDeinterlaceDetection();
}
if (ctx.isInterrupted()) {
throw new InterruptedException(FFMPEG_INIT_INTERRUPTED);
}
if (ctx.encodeProfile != null &&
ctx.encodeProfile.canTranscodeVideo(
interlaced,
avcodec_get_name(ctx.videoInCodecCtx.codec_id()).getString(),
videoHeight,
videoWidth)) {
ret = avcodec_open2(ctx.videoInCodecCtx,
avcodec_find_decoder(ctx.videoInCodecCtx.codec_id()), (PointerPointer<AVDictionary>) null);
if (ret < 0) {
throw new FFmpegException("Failed to open decoder for stream #" + ctx.preferredVideo, ret);
}
if ((ctx.videoOutStream = addTranscodeVideoStreamToContext(ctx, ctx.preferredVideo, ctx.encodeProfile)) == null) {
// If transcoding is not possible, we will just copy it.
logger.warn("Unable to set up transcoding. The stream will be copied.");
if ((ctx.videoOutStream = addCopyStreamToContext(ctx.avfCtxOutput, ctx.avfCtxInput.streams(ctx.preferredVideo))) == null) {
throw new FFmpegException("Could not find a video stream", -1);
}
} else {
ctx.streamMap[ctx.preferredVideo].transcode = true;
}
} else {
if ((ctx.videoOutStream = addCopyStreamToContext(ctx.avfCtxOutput, ctx.avfCtxInput.streams(ctx.preferredVideo))) == null) {
throw new FFmpegException("Could not find a video stream", -1);
}
}
if (ctx.secondaryStream) {
if ((ctx.videoOutStream2 = addCopyStreamToContext(ctx.avfCtxOutput2, ctx.avfCtxInput.streams(ctx.preferredVideo))) == null) {
throw new FFmpegException("Could not find a video stream for output 2", -1);
}
ctx.streamMap2[ctx.preferredVideo].outStreamIndex = ctx.videoOutStream2.id();
ctx.streamMap2[ctx.preferredVideo].oCodecRational = ctx.videoOutStream2.codec().time_base();
ctx.streamMap2[ctx.preferredVideo].oStreamRational = ctx.videoOutStream2.time_base();
ctx.streamMap2[ctx.preferredVideo].oCodecContext = ctx.videoOutStream2.codec();
ctx.streamMap2[ctx.preferredVideo].oStream = ctx.videoOutStream2;
}
if ((ctx.audioOutStream = addCopyStreamToContext(ctx.avfCtxOutput, ctx.avfCtxInput.streams(ctx.preferredAudio))) == null) {
throw new FFmpegException("Could not find a audio stream", -1);
}
ctx.streamMap[ctx.preferredVideo].outStreamIndex = ctx.videoOutStream.id();
ctx.streamMap[ctx.preferredVideo].oCodecRational = ctx.videoOutStream.codec().time_base();
ctx.streamMap[ctx.preferredVideo].oStreamRational = ctx.videoOutStream.time_base();
ctx.streamMap[ctx.preferredVideo].oCodecContext = ctx.videoOutStream.codec();
ctx.streamMap[ctx.preferredVideo].oStream = ctx.videoOutStream;
ctx.streamMap[ctx.preferredAudio].outStreamIndex = ctx.audioOutStream.id();
ctx.streamMap[ctx.preferredAudio].oCodecRational = ctx.audioOutStream.codec().time_base();
ctx.streamMap[ctx.preferredAudio].oStreamRational = ctx.audioOutStream.time_base();
ctx.streamMap[ctx.preferredAudio].oCodecContext = ctx.audioOutStream.codec();
ctx.streamMap[ctx.preferredAudio].oStream = ctx.audioOutStream;
for (int i = 0; i < numInputStreams; ++i) {
if (ctx.streamMap[i].outStreamIndex != NO_STREAM_IDX) {
continue;
}
AVCodecContext codecCtx = getCodecContext(ctx.avfCtxInput.streams(i));
if (codecCtx != null) {
AVStream avsOutput = addCopyStreamToContext(ctx.avfCtxOutput, ctx.avfCtxInput.streams(i));
if (avsOutput != null) {
ctx.streamMap[i].outStreamIndex = avsOutput.id();
ctx.streamMap[i].oCodecRational = avsOutput.codec().time_base();
ctx.streamMap[i].oStreamRational = avsOutput.time_base();
ctx.streamMap[i].oCodecContext = avsOutput.codec();
ctx.streamMap[i].oStream = avsOutput;
}
}
}
if (ctx.isInterrupted()) {
throw new InterruptedException(FFMPEG_INIT_INTERRUPTED);
}
filter_ctx = new FilteringContext[numInputStreams];
if ((ret = initFilters()) < 0) {
throw new FFmpegException("initFilters: Unable to allocate filters.", ret);
}
ctx.dumpOutputFormat();
ctx.allocIoOutputContext(writer);
if (ctx.secondaryStream) {
ctx.dumpOutputFormat2("(CCExtractor)");
ctx.allocIoOutputContext2(writer2);
ret = avformat_write_header(ctx.avfCtxOutput2, (PointerPointer<avutil.AVDictionary>) null);
if (ret < 0) {
deallocFilterGraphs();
throw new FFmpegException("Error while writing header to file 2 '" + outputFilename + "'", ret);
}
}
if (ctx.isInterrupted()) {
deallocFilterGraphs();
throw new InterruptedException(FFMPEG_INIT_INTERRUPTED);
}
logger.debug("Writing header");
if (mpegTsCbrEnabled && !ctx.streamMap[ctx.preferredVideo].transcode && outputFilename.endsWith(".ts")) {
// This value will generally be the safest guess 99% of the time. If the value is guessed
// too low, there will be issues during remuxing and we can't go back and fix a live
// stream.
muxRate = ctx.avfCtxInput.streams(ctx.preferredVideo).codec().rc_max_rate();
// No normal broadcast content should be over 20mb/s.
if (muxRate == 0 || muxRate > 20000000) {
muxRate = 20000000;
}
// Audio + margin of error.
muxRate += 2000000;
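// Worked example (assuming a typical ATSC broadcast at 19,392,658 b/s): the rate is under the
// 20,000,000 b/s cap, so after the 2,000,000 b/s audio/overhead margin the muxrate becomes
// 21,392,658 b/s, i.e. about 21,392 kb/s in the log line below.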
logger.debug("Using MPEG-TS CBR {} kb/s.", muxRate / 1000);
muxerDict = new AVDictionary(null);
av_dict_set_int(muxerDict, "muxrate", muxRate, 0);
av_dict_set_int(muxerDict, "pat_period", 1, 0);
av_dict_set_int(muxerDict, "sdt_period", 10, 0);
ret = avformat_write_header(ctx.avfCtxOutput, muxerDict);
} else {
muxerDict = new AVDictionary(null);
av_dict_set_int(muxerDict, "pat_period", 1, 0);
av_dict_set_int(muxerDict, "sdt_period", 10, 0);
ret = avformat_write_header(ctx.avfCtxOutput, muxerDict);
}
if (ret < 0) {
deallocFilterGraphs();
throw new FFmpegException("Error while writing header to file '" + outputFilename + "'", ret);
}
logger.info("Initialized FFmpeg transcoder stream output.");
firstRun = false;
}
private void deallocFilterGraphs() {
if (filter_ctx == null) {
return;
}
for (int i = 0; i < filter_ctx.length; i++) {
if (filter_ctx[i].buffersink_ctx != null) {
avfilter_free(filter_ctx[i].buffersink_ctx);
filter_ctx[i].buffersink_ctx = null;
}
if (filter_ctx[i].buffersrc_ctx != null) {
avfilter_free(filter_ctx[i].buffersrc_ctx);
filter_ctx[i].buffersrc_ctx = null;
}
if (filter_ctx[i].filter_graph != null) {
avfilter_graph_free(filter_ctx[i].filter_graph);
filter_ctx[i].filter_graph = null;
}
}
filter_ctx = null;
}
private boolean fastDeinterlaceDetection() throws FFmpegException {
int ret = avcodec_open2(ctx.videoInCodecCtx,
avcodec_find_decoder(ctx.videoInCodecCtx.codec_id()), (PointerPointer<AVDictionary>) null);
if (ret < 0) {
throw new FFmpegException("Failed to open decoder for stream #" + ctx.preferredVideo, ret);
}
AVPacket packet = new AVPacket();
packet.data(null);
packet.size(0);
int got_frame[] = new int[] { 0 };
AVFrame frame;
// This number will increase as interlaced flags are found. If the interlace threshold is not
// reached within this many decoded frames, give up.
int frameLimit = 90;
// This is the absolute frame limit. Once this number is reached the method will return
// that this is not interlaced content.
int absFrameLimit = frameLimit * 2;
int totalFrames = 0;
int interThresh = 3;
int interFrames = 0;
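// Example of how these counters interact: with interThresh at 3, three interlaced frames seen
// within the (extending) frame limit ends detection early as interlaced; otherwise decoding
// stops at the frame limit, the absolute limit of 180 frames, or the one second wall-clock
// cap below, and the content is treated as progressive.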
try {
if (ctx.SEEK_BUFFER != null) {
ctx.SEEK_BUFFER.setNoWrap(true);
}
long stopTime = System.currentTimeMillis() + 1000;
while(!ctx.isInterrupted()) {
if (System.currentTimeMillis() > stopTime) {
break;
}
ret = av_read_frame(ctx.avfCtxInput, packet);
if (ret < 0) {
if (ret != AVERROR_EOF) {
logger.error("Error reading frame during interlaced detection: {}", ret);
}
break;
}
int inputStreamIndex = packet.stream_index();
AVStream stream = ctx.avfCtxInput.streams(inputStreamIndex);
if (inputStreamIndex >= ctx.streamMap.length ||
inputStreamIndex != ctx.preferredVideo) {
// Packets that are not part of the preferred video stream are released and skipped during
// detection; only the preferred video stream is decoded here.
av_packet_unref(packet);
continue;
}
frame = av_frame_alloc();
if (frame == null) {
throw new FFmpegException("av_frame_alloc: Unable to allocate frame.", -1);
}
av_packet_rescale_ts(packet,
stream.time_base(),
stream.codec().time_base());
logger.debug("Decoding video frame {} for interlace detection. {} frames interlaced.", totalFrames, interFrames);
ret = avcodec_decode_video2(stream.codec(), frame,
got_frame, packet);
av_packet_rescale_ts(packet,
stream.codec().time_base(),
stream.time_base());
if (ret < 0) {
av_frame_free(frame);
//av_packet_unref(packet);
logger.error("Decoding failed");
continue;
}
if (got_frame[0] != 0) {
int interlaced = frame.interlaced_frame();
interFrames += interlaced;
frameLimit += interlaced;
// Do not retain decoded packets. The RAM usage will get insane very quickly.
}
av_frame_free(frame);
av_packet_unref(packet);
if (interFrames >= interThresh) {
logger.info("Content is interlaced.");
return true;
} else if (totalFrames++ >= frameLimit || totalFrames >= absFrameLimit) {
break;
}
}
} catch (FFmpegException e) {
logger.error("Deinterlace detection exception => ", e);
} finally {
avcodec_close(ctx.videoInCodecCtx);
/*if (interFrames < interThresh) {
// Return to the start.
avio_seek(ctx.avfCtxInput.pb(), 0, 0);
}*/
if (ctx.SEEK_BUFFER != null) {
ctx.SEEK_BUFFER.setNoWrap(false);
}
}
return false;
}
@Override
public synchronized void streamOutput() throws FFmpegException {
int ret = 0;
long lastPreOffsetDts[] = new long[firstDtsByStreamIndex.length];
Arrays.fill(lastPreOffsetDts, 0);
// This value will be adjusted as needed to keep the entire stream on the same time code.
//long tsOffset = 0;
long tsOffsets[] = new long[firstDtsByStreamIndex.length];
Arrays.fill(tsOffsets, 0);
// This value indicates what streams are currently in use and are desirable to be in
// agreement with the other streams.
boolean tsActiveOffsets[] = new boolean[firstDtsByStreamIndex.length];
Arrays.fill(tsActiveOffsets, false);
// This is set when the offset for any stream has changed, so we know it needs to be synced.
boolean tsOffsetChanged = false;
// This value is used to determine when to force the offsets to sync up. Currently the loop
// will try to sync up for up to 120 frames that are not out of sync before everything is
// forced to sync to the largest offset.
int tsOffsetAttempts = 0;
int tsOffsetAttemptLimit = 120;
// This is incremented by frame duration. It does not distinguish between streams, so if
// multiple streams are being corrected, this value will increase faster than the assumed
// duration. This is an acceptable compromise vs iterating an array or calculating the most
// accurate value on each update.
long lastErrorTime = 0;
long lastErrorTimeLimit = 5 * TS_TIME_BASE;
// This is the number of times the error time has increased due to an error.
int errorCounter = 0;
// This is the maximum number of errors within the error time limit allowed before the
// discontinuity tolerance is increased.
int errorLimit = 50;
// This is the number of times the discontinuity tolerance has been increased.
int adjustments = 0;
// This is the last stream that adjusted its offset. This is used as the correct offset
// value after everything is synced up.
int lastToAdjust = 0;
// This is the number of ticks off +/- from the expected timestamp allowed before corrective
// action is taken. This number will increase automatically if it is determined to be too
// low.
int discontinuityTolerance = 3500000;
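// Worked example of the escalation above: in the 90 kHz time base the initial tolerance of
// 3,500,000 ticks is roughly 39 seconds of stream time. If more than 50 corrections happen
// before 5 seconds (5 * 90,000 ticks) of corrected frame durations accumulate, the tolerance
// doubles to 7,000,000 ticks and the counters reset.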
long expectedDts;
// This is the most frames to skip while waiting for a key frame to arrive before starting
// without one.
int maxFramesToStart = 768;
final boolean fixingEnabled = FFmpegConfig.getFixStream();
final boolean useCodecTimebase = FFmpegConfig.getUseCompatiblityTimebase();
int switchFlag;
long dts;
long pts;
long preOffsetDts;
long preOffsetPts;
long increment;
long diff = 0;
// Used when streaming first starts to keep the first frame from being a known bad frame.
// This helps with some players that otherwise would just assume it must be a bad video.
boolean firstFrame = true;
AVPacket packet = new AVPacket();
packet.data(null);
packet.size(0);
AVPacket copyPacket = new AVPacket();
copyPacket.data(null);
copyPacket.size(0);
//AVStream iavStream;
//AVCodecContext iavCodecContext;
int inputStreamIndex;
int outputStreamIndex;
int codecType;
int got_frame[] = new int[] { 0 };
// This needs to start out null or Java complains about the cleanup.
AVFrame frame = null;
try {
startTime = System.currentTimeMillis();
while (true) {
ret = av_read_frame(ctx.avfCtxInput, packet);
if (ret < 0) {
break;
}
inputStreamIndex = packet.stream_index();
if (inputStreamIndex >= ctx.streamMap.length ||
(outputStreamIndex = ctx.streamMap[inputStreamIndex].outStreamIndex)
== NO_STREAM_IDX) {
av_packet_unref(packet);
continue;
}
preOffsetDts = packet.dts();
preOffsetPts = packet.pts();
// Discard all frames that don't have any timestamps since especially without a
// presentation timestamp, the frame will never be displayed anyway.
if (preOffsetDts == AV_NOPTS_VALUE || preOffsetPts == AV_NOPTS_VALUE) {
/*logger.debug("stream {}, dts == AV_NOPTS_VALUE || pts == AV_NOPTS_VALUE," +
" discarding frame.", inputStreamIndex);*/
av_packet_unref(packet);
continue;
}
lastPreOffsetDts[inputStreamIndex] = preOffsetDts;
if (firstFrame) {
boolean keyPacket = (packet.flags() & AV_PKT_FLAG_KEY) > 0;
boolean corruptPacket = (packet.flags() & AV_PKT_FLAG_CORRUPT) > 0;
if ((!corruptPacket && keyPacket && outputStreamIndex == 0) || maxFramesToStart <= 0) {
firstFrame = false;
} else {
maxFramesToStart -= 1;
av_packet_unref(packet);
continue;
}
long minDts = packet.dts();
for (int i = 0; i < lastPreOffsetDts.length; i++) {
if (lastPreOffsetDts[i] > 0) {
minDts = Math.min(minDts, lastPreOffsetDts[i]);
}
}
Arrays.fill(lastDtsByStreamIndex, 0);
Arrays.fill(lastPtsByStreamIndex, 0);
Arrays.fill(tsOffsets, -minDts);
}
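// Example of the baseline above (hypothetical values): if the first usable video packet has a
// dts of 1,234,567 and the lowest dts seen on any stream so far is 1,200,000, every offset
// starts at -1,200,000, so output timestamps for all streams begin close to zero.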
if (switching && outputStreamIndex == 0) {
switchFlag = packet.flags() & AV_PKT_FLAG_KEY;
// Check if we are at least on a flagged video key frame. Then switch before the
// frame is actually processed. This ensures that if we are muxing, we are
// starting with hopefully an I frame and if we are transcoding this is likely a
// good transition point.
if (switchFlag > 0 || System.currentTimeMillis() >= switchTimeout) {
logger.debug("Video key frame flag: {}", switchFlag);
synchronized (switchLock) {
try {
switchStreamOutput();
long minDts = packet.dts();
for (int i = 0; i < lastPreOffsetDts.length; i++) {
if (lastPreOffsetDts[i] > 0) {
minDts = Math.min(minDts, lastPreOffsetDts[i]);
}
}
Arrays.fill(lastDtsByStreamIndex, 0);
Arrays.fill(lastPtsByStreamIndex, 0);
Arrays.fill(tsOffsets, -minDts);
errorCounter = 0;
} catch (InterruptedException e) {
logger.debug("Switching was interrupted.");
av_packet_unref(packet);
break;
}
switching = false;
switchLock.notifyAll();
}
logger.info("SWITCH successful: {}ms.",
System.currentTimeMillis() - (switchTimeout - 10000));
}
}
if (lastErrorTime > lastErrorTimeLimit) {
errorCounter = 0;
adjustments = 0;
lastErrorTime = 0;
}
if (errorCounter > errorLimit) {
discontinuityTolerance *= 2;
logger.info("adjusting tolerance to {}. errors = {}, adjustments = {}",
discontinuityTolerance, errorCounter, adjustments);
errorCounter = 0;
adjustments += 1;
lastErrorTime = 0;
}
dts = preOffsetDts + tsOffsets[inputStreamIndex];
pts = preOffsetPts + tsOffsets[inputStreamIndex];
// These are referenced several times. This keeps these variables from constantly
// being copied into the JVM.
//iavStream = ctx.streamMap[inputStreamIndex].iStream;
//iavCodecContext = ctx.streamMap[inputStreamIndex].iCodecContext;
codecType = ctx.streamMap[inputStreamIndex].iCodecType;
if ((codecType == AVMEDIA_TYPE_VIDEO ||
codecType == AVMEDIA_TYPE_AUDIO) &&
lastDtsByStreamIndex[inputStreamIndex] > 0) {
tsActiveOffsets[inputStreamIndex] = true;
// There are probably many other situations that could come up making this value
// incorrect, but this is only optimizing for typical MPEG-TS input and
// MPEG-TS/PS output. This has the potential to introduce a rounding error since
// it is not based on the stream time base rational.
increment = Math.max(packet.duration(), 0);
expectedDts = (lastDtsByStreamIndex[inputStreamIndex] + increment); // & 0x1ffffffffL;
diff = dts - expectedDts;
if (fixingEnabled &&
(diff > discontinuityTolerance || diff < -increment * 4)) {
errorCounter += 1;
lastErrorTime += increment;
long oldDts = dts;
long oldPts = pts;
long oldOffset = tsOffsets[inputStreamIndex];
dts -= diff;
pts -= diff;
tsOffsets[inputStreamIndex] -= diff;
tsOffsetChanged = true;
lastToAdjust = inputStreamIndex;
logger.debug("fixing stream {} timestamp discontinuity diff = {}," +
" offset = {}, new offset = {}," +
" preoff dts = {}, dts = {}, new dts {}, last dts = {}," +
" preoff pts = {}, pts = {}, new pts = {}, last pts = {}",
inputStreamIndex, diff,
oldOffset, tsOffsets[inputStreamIndex],
preOffsetDts, oldDts, dts, lastDtsByStreamIndex[inputStreamIndex],
preOffsetPts, oldPts, pts, lastPtsByStreamIndex[inputStreamIndex]);
} else if (tsOffsetChanged) {
// If the offset is changed for any one of the streams, we need to make sure
// they are all using the same offset once the event is over or they may
// slowly get out of sync.
long maxOffset = Long.MIN_VALUE;
long minOffset = Long.MAX_VALUE;
for (int i = 0; i < tsOffsets.length; i++) {
if (!tsActiveOffsets[i]) {
continue;
}
if (minOffset == Long.MAX_VALUE || tsOffsets[i] < minOffset) {
minOffset = tsOffsets[i];
}
if (maxOffset == Long.MIN_VALUE || tsOffsets[i] > maxOffset) {
maxOffset = tsOffsets[i];
}
}
long offsetDiff = maxOffset - minOffset;
if ((offsetDiff > -discontinuityTolerance &&
offsetDiff < discontinuityTolerance) ||
tsOffsetAttempts > tsOffsetAttemptLimit) {
// This is a good spot to correct the offset before it reaches the long
// wrap around limit. If this is going backwards so much that it wraps
// around backwards you likely would have noticed just based on the poor
// playback of the stream. Subtracting this value will effectively
// result in the exact same offset.
if (tsOffsetAttempts > tsOffsetAttemptLimit) {
logger.debug("force sync offsets {} to {}, diff = {}, attempts = {}",
tsOffsets, tsOffsets[lastToAdjust], offsetDiff, tsOffsetAttempts);
} else {
logger.debug("sync offsets {} to {}, diff = {}, attempts = {}",
tsOffsets, tsOffsets[lastToAdjust], offsetDiff, tsOffsetAttempts);
}
tsOffsetChanged = false;
tsOffsetAttempts = 0;
for (int i = 0; i < tsOffsets.length; i++) {
tsOffsets[i] = tsOffsets[lastToAdjust];
}
lastToAdjust = ctx.preferredVideo;
} else {
tsOffsetAttempts += 1;
}
} else if (!fixingEnabled && diff > 162000000) {
// If the stream is more than 30 minutes ahead, discard it. Leaving it alone
// will do nothing but break things since we are not trying to fix errors.
logger.debug("discarding frame stream {}, dts {} - last dts {}" +
" > 162000000, pts {}, last pts {}",
inputStreamIndex, dts, lastDtsByStreamIndex[inputStreamIndex],
pts, lastPtsByStreamIndex[inputStreamIndex]);
errorCounter += 1;
lastErrorTime += increment;
av_packet_unref(packet);
continue;
}
if (dts <= lastDtsByStreamIndex[inputStreamIndex]) {
// If the decode time stamp is equal to the last one, discard the frame.
// There isn't a simple way to know if the frame can be put in the assumed
// correct place without putting a ripple in the timeline.
if (lastDtsByStreamIndex[inputStreamIndex] == dts) {
logger.debug("discarding frame stream {}, dts {} == last dts {}," +
" pts {}, last pts {}",
inputStreamIndex, dts, lastDtsByStreamIndex[inputStreamIndex],
pts, lastPtsByStreamIndex[inputStreamIndex]);
av_packet_unref(packet);
continue;
}
// If the pts is still greater than the last pts, fix the dts so it can be
// muxed. This helps retain H.264 B frames when the decode timestamps are
// out of order.
if (pts > lastPtsByStreamIndex[inputStreamIndex] &&
pts > lastDtsByStreamIndex[inputStreamIndex]) {
long oldDts = dts;
dts = lastDtsByStreamIndex[inputStreamIndex] + 1;
logger.debug("re-ordering stream {}, diff = {}, offset = {}" +
" preoff dts = {}, dts = {}, new dts = {}," +
" last dts = {}," +
" preoff pts = {}, pts = {} > last pts = {}",
inputStreamIndex, diff, tsOffsets[inputStreamIndex],
preOffsetDts, oldDts, dts,
lastDtsByStreamIndex[inputStreamIndex],
preOffsetPts, pts, lastPtsByStreamIndex[inputStreamIndex]);
} else {
logger.debug("discarding packet stream {}," +
" dts {} < last dts {}," +
" pts {} <= last pts {}",
inputStreamIndex,
dts, lastDtsByStreamIndex[inputStreamIndex],
pts, lastPtsByStreamIndex[inputStreamIndex]);
av_packet_unref(packet);
continue;
}
}
}
packet.dts(dts);
packet.pts(pts);
lastDtsByStreamIndex[inputStreamIndex] = dts;
lastPtsByStreamIndex[inputStreamIndex] = pts;
if (ctx.secondaryStream && inputStreamIndex == ctx.preferredVideo) {
av_copy_packet(copyPacket, packet);
av_packet_copy_props(copyPacket, packet);
//logPacket(ctx.avfCtxInput, copyPacket, "copy2-in");
// remux this frame without re-encoding
av_packet_rescale_ts(copyPacket,
ctx.streamMap2[inputStreamIndex].iStreamRational,
ctx.streamMap2[inputStreamIndex].oStreamRational);
//logPacket(ctx.avfCtxInput, copyPacket, "copy2-out");
copyPacket.stream_index(ctx.streamMap2[inputStreamIndex].outStreamIndex);
ret = av_interleaved_write_frame(ctx.avfCtxOutput2, copyPacket);
if (ret < 0) {
logger.error("Error from av_interleaved_write_frame output 2: {}", ret);
}
}
//logger.trace("Demuxer gave frame of streamIndex {}", inputStreamIndex);
if (filter_ctx[inputStreamIndex].filter_graph != null) {
//logger.trace("Going to re-encode & filter the frame");
frame = av_frame_alloc();
if (frame == null) {
throw new FFmpegException("av_frame_alloc: Unable to allocate frame.",
ENOMEM);
}
//logPacket(ctx.avfCtxInput, packet, "trans-dec-in");
av_packet_rescale_ts(packet,
ctx.streamMap[inputStreamIndex].iStreamRational,
ctx.streamMap[inputStreamIndex].iCodecRational);
//logPacket(ctx.avfCtxInput, packet, "trans-dec-out");
if (codecType == AVMEDIA_TYPE_VIDEO) {
ret = avcodec_decode_video2(
ctx.streamMap[inputStreamIndex].iCodecContext, frame,
got_frame, packet);
} else {
ret = avcodec_decode_audio4(
ctx.streamMap[inputStreamIndex].iCodecContext, frame,
got_frame, packet);
}
if (ret < 0) {
av_frame_free(frame);
av_packet_unref(packet);
logger.error("Decoding failed");
continue;
}
if (got_frame[0] != 0) {
frame.pts(av_frame_get_best_effort_timestamp(frame));
ret = filterEncodeWriteFrame(frame, inputStreamIndex);
av_frame_free(frame);
if (ret < 0) {
logger.error("Error from filterEncodeWriteFrame: {}", ret);
//throw new FFmpegException("Error from filterEncodeWriteFrame.", ret);
}
} else {
av_frame_free(frame);
}
} else {
//logPacket(ctx.avfCtxInput, packet, "copy-in");
// remux this frame without re-encoding
av_packet_rescale_ts(packet,
ctx.streamMap[inputStreamIndex].iStreamRational,
ctx.streamMap[inputStreamIndex].oStreamRational);
packet.pos(-1);
//logPacket(ctx.avfCtxInput, packet, "copy-out");
packet.stream_index(ctx.streamMap[inputStreamIndex].outStreamIndex);
ret = av_interleaved_write_frame(ctx.avfCtxOutput, packet);
if (ret < 0) {
logger.error("Error from av_interleaved_write_frame: {}", ret);
}
}
av_packet_unref(packet);
}
int numInputStreams = ctx.avfCtxInput.nb_streams();
// flush filters and encoders
for (int i = 0; i < numInputStreams; i++) {
if (filter_ctx != null && i < filter_ctx.length) {
// flush filter
if (filter_ctx[i].filter_graph == null)
continue;
ret = filterEncodeWriteFrame(null, i);
if (ret < 0) {
logger.error("Flushing filter failed: {}", ret);
}
}
// flush encoder
ret = flushEncoder(i);
if (ret < 0) {
logger.error("Flushing encoder failed: {}", ret);
}
}
ret = av_write_trailer(ctx.avfCtxOutput);
} finally {
returnTranscodePermission(ctx.OPAQUE);
// Cleanup.
endStreamOutput(packet, frame);
logger.info("FFmpeg transcoder ended with code {}", ret);
}
}
private void switchStreamOutput() throws FFmpegException, InterruptedException {
int ret;
int numInputStreams = ctx.avfCtxInput.nb_streams();
// flush filters and encoders
for (int i = 0; i < numInputStreams; i++) {
if (filter_ctx != null && i < filter_ctx.length) {
// flush filter
if (filter_ctx[i].filter_graph == null)
continue;
ret = filterEncodeWriteFrame(null, i);
if (ret < 0) {
logger.error("Flushing filter failed: {}", ret);
}
}
/* flush encoder */
ret = flushEncoder(i);
if (ret < 0) {
logger.error("Flushing encoder failed: {}", ret);
}
}
av_write_trailer(ctx.avfCtxOutput);
deallocFilterGraphs();
ctx.deallocOutputContext();
if (ctx.secondaryStream) {
av_write_trailer(ctx.avfCtxOutput2);
ctx.deallocOutputContext2();
}
if (ctx.isInterrupted()) {
return;
}
initStreamOutput(ctx, newFilename, newWriter, newWriter2, false);
}
private void endStreamOutput(AVPacket packet, AVFrame frame) {
av_packet_unref(packet);
av_frame_free(frame);
deallocFilterGraphs();
}
private int initFilter(FilteringContext fctx, AVCodecContext dec_ctx,
AVCodecContext enc_ctx, AVStream out_stream, String filter_spec,
AVCodec encoder, AVDictionary dict) throws FFmpegException {
int ret = 0;
int decCodecType;
avfilter.AVFilter buffersrc;
avfilter.AVFilter buffersink;
avfilter.AVFilterContext buffersrc_ctx;
avfilter.AVFilterContext buffersink_ctx;
avfilter.AVFilterInOut outputs = avfilter_inout_alloc();
avfilter.AVFilterInOut inputs = avfilter_inout_alloc();
avfilter.AVFilterGraph filter_graph = avfilter_graph_alloc();
try {
decCodecType = dec_ctx.codec_type();
if (outputs == null || inputs == null || filter_graph == null) {
throw new FFmpegException("Not enough memory available", ENOMEM);
}
if (decCodecType == AVMEDIA_TYPE_VIDEO) {
buffersrc = avfilter_get_by_name("buffer");
buffersink = avfilter_get_by_name("buffersink");
if (buffersrc == null || buffersink == null) {
throw new FFmpegException("Filtering source or sink element not found",
AVERROR_UNKNOWN);
}
String parameters = String.format(
"video_size=%dx%d:pix_fmt=%d:time_base=%d/%d:frame_rate=%d/%d:pixel_aspect=%d/%d",
dec_ctx.width(), dec_ctx.height(), dec_ctx.pix_fmt(),
dec_ctx.time_base().num(), dec_ctx.time_base().den(),
dec_ctx.framerate().num(), dec_ctx.framerate().den(),
dec_ctx.sample_aspect_ratio().num(), dec_ctx.sample_aspect_ratio().den());
/*String parameters = String.format(
"video_size=%dx%d:pix_fmt=%d:time_base=%d/%d:pixel_aspect=%d/%d",
dec_ctx.width(), dec_ctx.height(), dec_ctx.pix_fmt(),
dec_ctx.time_base().num(), dec_ctx.time_base().den(),
dec_ctx.sample_aspect_ratio().num(), dec_ctx.sample_aspect_ratio().den());*/
ret = avfilter_graph_create_filter(buffersrc_ctx = new AVFilterContext(null),
buffersrc, "in", parameters, null, filter_graph);
if (ret < 0) {
throw new FFmpegException("Cannot create buffer source", ret);
}
ret = avfilter_graph_create_filter(buffersink_ctx = new AVFilterContext(null),
buffersink, "out", null, null, filter_graph);
if (ret < 0) {
throw new FFmpegException("Cannot create buffer sink", ret);
}
BytePointer setBin = new BytePointer(4);
setBin.asByteBuffer().putInt(enc_ctx.pix_fmt());
ret = av_opt_set_bin(buffersink_ctx, "pix_fmts", setBin, 4, AV_OPT_SEARCH_CHILDREN);
if (ret < 0) {
throw new FFmpegException("Cannot set pixel format", ret);
}
} else if (decCodecType == AVMEDIA_TYPE_AUDIO) {
buffersrc = avfilter_get_by_name("abuffer");
buffersink = avfilter_get_by_name("abuffersink");
if (buffersrc == null || buffersink == null) {
throw new FFmpegException("filtering source or sink element not found", AVERROR_UNKNOWN);
}
if (dec_ctx.channel_layout() == 0) {
dec_ctx.channel_layout(av_get_default_channel_layout(dec_ctx.channels()));
}
String parameters = String.format(
"time_base=%d/%d:sample_rate=%d:sample_fmt=%s:channel_layout=0x%x",
dec_ctx.time_base().num(), dec_ctx.time_base().den(), dec_ctx.sample_rate(),
av_get_sample_fmt_name(dec_ctx.sample_fmt()).getString(),
dec_ctx.channel_layout());
ret = avfilter_graph_create_filter(buffersrc_ctx = new AVFilterContext(), buffersrc, "in",
parameters, null, filter_graph);
if (ret < 0) {
throw new FFmpegException("Cannot create audio buffer source", ret);
}
ret = avfilter_graph_create_filter(buffersink_ctx = new AVFilterContext(), buffersink, "out",
null, null, filter_graph);
if (ret < 0) {
throw new FFmpegException("Cannot create audio buffer sink", ret);
}
BytePointer setBin = new BytePointer(4);
setBin.asByteBuffer().putInt(enc_ctx.sample_fmt());
av_opt_set_bin(buffersink_ctx, "sample_fmts", setBin, 4, AV_OPT_SEARCH_CHILDREN);
if (ret < 0) {
throw new FFmpegException("Cannot set output sample format", ret);
}
setBin = new BytePointer(8);
setBin.asByteBuffer().putLong(enc_ctx.channel_layout());
av_opt_set_bin(buffersink_ctx, "channel_layouts", setBin, 8, AV_OPT_SEARCH_CHILDREN);
if (ret < 0) {
throw new FFmpegException("Cannot set output channel layout", ret);
}
setBin = new BytePointer(4);
setBin.asByteBuffer().putInt(enc_ctx.sample_rate());
av_opt_set_bin(buffersink_ctx, "sample_rates", setBin, 4, AV_OPT_SEARCH_CHILDREN);
if (ret < 0) {
throw new FFmpegException("Cannot set output sample rate", ret);
}
} else {
throw new FFmpegException("initFilter: Not audio or video.", AVERROR_UNKNOWN);
}
// Endpoints for the filter graph.
outputs.name(av_strdup(new BytePointer("in")));
outputs.filter_ctx(buffersrc_ctx);
outputs.pad_idx(0);
outputs.next(null);
inputs.name(av_strdup(new BytePointer("out")));
inputs.filter_ctx(buffersink_ctx);
inputs.pad_idx(0);
inputs.next(null);
if (outputs.name() == null || inputs.name() == null) {
throw new FFmpegException("av_strdup: Not enough memory.", ENOMEM);
}
ret = avfilter_graph_parse_ptr(filter_graph, filter_spec,
inputs, outputs, null);
if (ret < 0) {
throw new FFmpegException("avfilter_graph_parse_ptr: Unable to create.", ret);
}
ret = avfilter_graph_config(filter_graph, null);
if (ret < 0) {
throw new FFmpegException("avfilter_graph_config: Unable to create.", ret);
}
/* Fill FilteringContext */
fctx.buffersrc_ctx = buffersrc_ctx;
fctx.buffersink_ctx = buffersink_ctx;
fctx.filter_graph = filter_graph;
AVFilterContext outFilterContext = avfilter_graph_get_filter(filter_graph, "out");
if (outFilterContext == null) {
throw new FFmpegException("avfilter_graph_get_filter: Unable to get 'out' filter.", AVERROR_UNKNOWN);
}
int outFilterInputs = outFilterContext.nb_inputs();
if (outFilterInputs == 1) {
if (decCodecType == AVMEDIA_TYPE_VIDEO) {
int height;
int width;
int format;
AVRational ar;
AVRational fr;
AVRational tb;
if (logger.isDebugEnabled()) {
height = enc_ctx.height();
width = enc_ctx.width();
format = enc_ctx.pix_fmt();
ar = enc_ctx.sample_aspect_ratio();
fr = enc_ctx.framerate();
tb = enc_ctx.time_base();
logger.debug("Before filter: h:{} w:{} fmt:{} ar:{}/{} fr:{}/{} tb:{}/{}",
height, width, format, ar.num(), ar.den(), fr.num(), fr.den(), tb.num(), tb.den());
}
AVFilterLink input = outFilterContext.inputs(0);
height = input.h();
width = input.w();
format = input.format();
ar = input.sample_aspect_ratio();
fr = input.frame_rate();
tb = input.time_base();
if (logger.isDebugEnabled()) {
logger.debug("After filter: h:{} w:{} fmt:{} ar:{}/{} fr:{}/{} tb: {}/{}",
height, width, format, ar.num(), ar.den(), fr.num(), fr.den(), tb.num(), tb.den());
}
enc_ctx.height(height);
enc_ctx.width(width);
enc_ctx.pix_fmt(format);
enc_ctx.sample_aspect_ratio(ar);
enc_ctx.framerate(fr);
enc_ctx.time_base(tb);
ret = avcodec_open2(enc_ctx, encoder, dict);
av_dict_free(dict);
if (ret < 0) {
logger.error("Cannot open video encoder. Error {}.", ret);
}
}
} else {
throw new FFmpegException("nb_inputs: 'out' filter has " + outFilterInputs + " inputs.", AVERROR_UNKNOWN);
}
} catch (FFmpegException e) {
if (filter_graph != null) {
avfilter_graph_free(filter_graph);
}
throw e;
} finally {
avfilter_inout_free(inputs);
avfilter_inout_free(outputs);
}
return ret;
}
private int initFilters() throws FFmpegException {
String filter_spec;
int ret;
int codecType;
int nbStreams = ctx.avfCtxInput.nb_streams();
filter_ctx = new FilteringContext[nbStreams];
for (int i = 0; i < nbStreams; i++) {
filter_ctx[i] = new FilteringContext();
codecType = ctx.avfCtxInput.streams(i).codec().codec_type();
if ( !ctx.streamMap[i].transcode || !(
codecType == AVMEDIA_TYPE_AUDIO ||
codecType == AVMEDIA_TYPE_VIDEO)) {
continue;
}
filter_ctx[i].buffersrc_ctx = new AVFilterContext();
filter_ctx[i].buffersink_ctx = new AVFilterContext();
filter_ctx[i].filter_graph = new AVFilterGraph();
if (codecType == AVMEDIA_TYPE_VIDEO) {
if (interlaced) {
filter_spec = ctx.videoEncodeSettings.get("deinterlace_filter");
} else {
filter_spec = ctx.videoEncodeSettings.get("progressive_filter");
}
if (filter_spec == null) {
filter_spec = "fps=fps=opendct_fps:round=near";
logger.warn("No filter was specified. Using 'fps=fps=opendct_fps:round=near'." +
" To avoid this message, set 'deinterlace_filter' and" +
" 'progressive_filter' to 'null' or 'fps=fps=opendct_fps:round=near'" +
" in the profile.");
} else {
if (filter_spec.contains("opendct_")) {
AVRational fullRate = ctx.avfCtxInput.streams(i).codec().framerate();
AVRational halfRate = av_mul_q(fullRate, av_make_q(1, 2));
AVRational doubleRate = av_mul_q(fullRate, av_make_q(2, 1));
filter_spec = filter_spec.replace("opendct_hfps", halfRate.num() + "/" + halfRate.den());
filter_spec = filter_spec.replace("opendct_fps", fullRate.num() + "/" + fullRate.den());
filter_spec = filter_spec.replace("opendct_dfps", doubleRate.num() + "/" + doubleRate.den());
}
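// Example substitution (hypothetical 29.97 fps input, framerate 30000/1001): a profile filter
// such as "yadif,fps=fps=opendct_fps:round=near" becomes
// "yadif,fps=fps=30000/1001:round=near"; opendct_dfps expands to 60000/1001 and opendct_hfps
// to the corresponding half rate.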
}
} else {
filter_spec = "anull"; /* passthrough (dummy) filter for audio */
}
ret = initFilter(filter_ctx[i], ctx.avfCtxInput.streams(i).codec(),
ctx.avfCtxOutput.streams(ctx.streamMap[i].outStreamIndex).codec(),
ctx.avfCtxOutput.streams(ctx.streamMap[i].outStreamIndex), filter_spec,
ctx.streamMap[i].iCodec, ctx.streamMap[i].iDict);
if (ret != 0) {
return ret;
}
}
return 0;
}
private int encodeWriteFrame(AVFrame filt_frame, int stream_index, int got_frame[]) {
int ret = 0;
avcodec.AVPacket enc_pkt = new avcodec.AVPacket();
if (got_frame == null || got_frame.length == 0) {
logger.warn("got_frame will not be able to be used ByRef.");
got_frame = new int[] { 0 };
}
//logger.trace("Encoding frame");
// encode filtered frame
enc_pkt.data(null);
enc_pkt.size(0);
av_init_packet(enc_pkt);
if (ctx.streamMap[stream_index].iCodecType == AVMEDIA_TYPE_VIDEO) {
ret = avcodec_encode_video2(ctx.streamMap[stream_index].oCodecContext, enc_pkt,
filt_frame, got_frame);
} else if (ctx.streamMap[stream_index].iCodecType == AVMEDIA_TYPE_AUDIO) {
ret = avcodec_encode_audio2(ctx.streamMap[stream_index].oCodecContext, enc_pkt,
filt_frame, got_frame);
}
av_frame_free(filt_frame);
if (ret < 0) {
return ret;
}
if (got_frame[0] == 0) {
return 0;
}
//logPacket(ctx.avfCtxOutput, enc_pkt, "trans-enc-in");
// prepare packet for muxing
enc_pkt.stream_index(ctx.streamMap[stream_index].outStreamIndex);
av_packet_rescale_ts(enc_pkt,
ctx.streamMap[stream_index].oCodecRational,
ctx.streamMap[stream_index].oStreamRational);
//logPacket(ctx.avfCtxOutput, enc_pkt, "trans-enc-out");
//logger.trace("Muxing frame");
// mux encoded frame
ret = av_interleaved_write_frame(ctx.avfCtxOutput, enc_pkt);
if (encodedFrames[stream_index].addAndGet(1) == 1000) {
long endTime = System.currentTimeMillis();
if (startTime != endTime) {
logger.debug("FPS: {}", (double)encodedFrames[stream_index].get() / (double)((endTime - startTime) / 1000));
}
encodedFrames[stream_index].set(0);
startTime = endTime;
}
return ret;
}
private int filterEncodeWriteFrame(AVFrame frame, int stream_index) {
int ret;
AVFrame filt_frame;
int got_frame[] = new int[] { 0 };
//logger.trace("Pushing decoded frame to filters");
// push the decoded frame into the filtergraph
ret = av_buffersrc_add_frame_flags(filter_ctx[stream_index].buffersrc_ctx,
frame, 0);
if (ret < 0) {
logger.error("Error while feeding the filtergraph");
return ret;
}
// pull filtered frames from the filtergraph
while (true) {
filt_frame = av_frame_alloc();
if (filt_frame == null) {
ret = ENOMEM;
break;
}
//logger.trace("Pulling filtered frame from filters");
ret = av_buffersink_get_frame(filter_ctx[stream_index].buffersink_ctx,
filt_frame);
if (ret < 0) {
/* if no more frames for output - returns AVERROR(EAGAIN)
* if flushed and no more frames for output - returns AVERROR_EOF
* rewrite retcode to 0 to show it as normal procedure completion
*/
if (ret == AVERROR_EOF || ret == EAGAIN) {
ret = 0;
}
av_frame_free(filt_frame);
break;
}
filt_frame.pict_type(AV_PICTURE_TYPE_NONE);
ret = encodeWriteFrame(filt_frame, stream_index, got_frame);
if (ret < 0) {
break;
}
}
return ret;
}
private int flushEncoder(int stream_index) {
if (ctx.streamMap == null ||
ctx.streamMap.length <= stream_index ||
ctx.streamMap[stream_index].outStreamIndex == NO_STREAM_IDX) {
return 0;
}
int ret;
int got_frame[] = new int[] { 0 };
if ((ctx.avfCtxOutput.streams(ctx.streamMap[stream_index].outStreamIndex).codec().codec().capabilities() &
AV_CODEC_CAP_DELAY) == 0) {
return 0;
}
while (true) {
logger.debug("Flushing stream #{} encoder", stream_index);
ret = encodeWriteFrame(null, stream_index, got_frame);
if (ret < 0) {
break;
}
if (got_frame[0] == 0) {
return 0;
}
}
return ret;
}
}
|
src/main/java/opendct/video/ffmpeg/FFmpegTranscoder.java
|
/*
* Copyright 2016 The OpenDCT Authors. All Rights Reserved
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Copyright (c) 2010 Nicolas George
* Copyright (c) 2011 Stefano Sabatini
* Copyright (c) 2014 Andrey Utkin
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package opendct.video.ffmpeg;
import opendct.config.Config;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.bytedeco.javacpp.*;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import static opendct.video.ffmpeg.FFmpegUtil.*;
import static org.bytedeco.javacpp.avcodec.*;
import static org.bytedeco.javacpp.avfilter.*;
import static org.bytedeco.javacpp.avformat.*;
import static org.bytedeco.javacpp.avutil.*;
public class FFmpegTranscoder implements FFmpegStreamProcessor {
private static final Logger logger = LogManager.getLogger(FFmpegTranscoder.class);
private static final long WRAP_0 = 8589934592L;
private static final long WRAP_1 = 8589934592L / 2L;
private static final long WRAP_0_LOW = WRAP_0 - 90000;
private static final long WRAP_0_HIGH = WRAP_0 + 90000;
private static final long WRAP_1_LOW = WRAP_1 - 90000;
private static final long WRAP_1_HIGH = WRAP_1 + 90000;
private static final int TS_TIME_BASE = 90000;
private boolean switching = false;
private long switchTimeout = 0;
private FFmpegWriter newWriter = null;
private FFmpegWriter newWriter2 = null;
private String newFilename = null;
private final Object switchLock = new Object();
private static Map<Pointer, Integer> permissionMap = new HashMap<>();
private static int permissionWeight = 0;
private static int transcodeLimit =
Config.getInteger("consumer.ffmpeg.transcode_limit",
(Runtime.getRuntime().availableProcessors() - 1) * 2);
private static final float dts_delta_threshold = 10;
private long firstDtsByStreamIndex[] = new long[0];
private long firstPtsByStreamIndex[] = new long[0];
private long lastDtsByStreamIndex[] = new long[0];
private long lastPtsByStreamIndex[] = new long[0];
boolean mpegTsCbrEnabled = false;
private AtomicInteger encodedFrames[] = new AtomicInteger[0];
private long startTime = 0;
private AVDictionary muxerDict;
private FFmpegContext ctx = null;
private boolean interlaced = false;
private FilteringContext filter_ctx[] = new FilteringContext[0];
private class FilteringContext {
private AVFilterContext buffersink_ctx;
private AVFilterContext buffersrc_ctx;
private AVFilterGraph filter_graph;
}
/**
* Request permission to use transcoding resources.
*
* @param opaque A unique pointer for the requesting FFmpeg context.
* @param weight The assigned weight to the transcoding job.
* @return <i>true</i> if the transcoding is allowed to proceed.
*/
public static synchronized boolean getTranscodePermission(Pointer opaque, int weight) {
if (opaque == null) {
return false;
}
Integer checkWeight = permissionMap.get(opaque);
// The capture device is asking for permission, but the transcode limit has been reached.
// The stream can only be remuxed now.
if (checkWeight == null && permissionWeight + weight > transcodeLimit) {
return false;
} else if (checkWeight != null) {
returnTranscodePermission(opaque);
if (permissionWeight + weight > transcodeLimit) {
return false;
}
}
permissionWeight += weight;
permissionMap.put(opaque, weight);
return true;
}
public static synchronized void returnTranscodePermission(Pointer opaque) {
if (opaque == null) {
return;
}
Integer weight = permissionMap.get(opaque);
if (weight != null) {
permissionWeight -= weight;
permissionMap.remove(opaque);
}
}
@Override
public boolean switchOutput(String newFilename, FFmpegWriter writer, FFmpegWriter writer2) {
synchronized (switchLock) {
logger.info("SWITCH started.");
this.newFilename = newFilename;
newWriter = writer;
newWriter2 = writer2;
switchTimeout = System.currentTimeMillis() + 10000;
switching = true;
while (switching && !ctx.isInterrupted()) {
try {
switchLock.wait(500);
// The timeout will also manage a situation whereby this is called and
// the other end is not currently running.
if (switching && System.currentTimeMillis() > switchTimeout) {
logger.warn("SWITCH timed out.");
return false;
}
} catch (InterruptedException e) {
logger.info("SWITCH wait was interrupted.");
}
}
}
return true;
}
@Override
public void initStreamOutput(FFmpegContext ctx, String outputFilename,
FFmpegWriter writer, FFmpegWriter writer2)
throws FFmpegException, InterruptedException {
initStreamOutput(ctx, outputFilename, writer, writer2, true);
}
/**
* Initialize output stream after detection has already been performed.
*
* @param ctx The FFmpeg context with inputs populated from input stream detection.
* @param outputFilename The name of the file that will be written. This is only a hint to the
* muxer about what file format is desired.
* @param writer The writer to be used for output.
* @param firstRun If <i>false</i> this will skip various things such as interlace detection and
* displaying the input stream information.
* @throws FFmpegException This is thrown if there are any problems that cannot be handled.
* @throws InterruptedException This is thrown if initialization is interrupted.
*/
public void initStreamOutput(FFmpegContext ctx, String outputFilename,
FFmpegWriter writer, FFmpegWriter writer2, boolean firstRun)
throws FFmpegException, InterruptedException {
mpegTsCbrEnabled = FFmpegConfig.getUseMpegTsCBR();
long muxRate = 0;
int ret;
this.ctx = ctx;
logger.info("Initializing FFmpeg transcoder stream output.");
if (ctx.isInterrupted()) {
throw new InterruptedException(FFMPEG_INIT_INTERRUPTED);
}
if (firstRun) {
ctx.dumpInputFormat();
}
ctx.secondaryStream = writer2 != null;
int numInputStreams = ctx.avfCtxInput.nb_streams();
// Creates the output container and AVFormatContext.
ctx.allocAvfContainerOutputContext(outputFilename);
if (ctx.secondaryStream) {
ctx.allocAvfContainerOutputContext2(outputFilename);
ctx.streamMap2 = new OutputStreamMap[numInputStreams];
for (int i = 0; i < ctx.streamMap2.length; i++) {
ctx.streamMap2[i] = new OutputStreamMap();
ctx.streamMap2[i].iStream = ctx.avfCtxInput.streams(i);
ctx.streamMap2[i].iCodecContext = ctx.streamMap2[i].iStream.codec();
ctx.streamMap2[i].iCodecType = ctx.streamMap2[i].iCodecContext.codec_type();
ctx.streamMap2[i].iCodecRational = ctx.streamMap2[i].iCodecContext.time_base();
ctx.streamMap2[i].iStreamRational = ctx.streamMap2[i].iStream.time_base();
}
}
lastDtsByStreamIndex = new long[numInputStreams];
Arrays.fill(lastDtsByStreamIndex, Integer.MIN_VALUE);
lastPtsByStreamIndex = new long[numInputStreams];
Arrays.fill(lastPtsByStreamIndex, Integer.MIN_VALUE);
firstDtsByStreamIndex = new long[numInputStreams];
Arrays.fill(firstDtsByStreamIndex, Integer.MIN_VALUE);
firstPtsByStreamIndex = new long[numInputStreams];
Arrays.fill(firstPtsByStreamIndex, Integer.MIN_VALUE);
ctx.streamMap = new OutputStreamMap[numInputStreams];
for (int i = 0; i < ctx.streamMap.length; i++) {
ctx.streamMap[i] = new OutputStreamMap();
ctx.streamMap[i].iStream = ctx.avfCtxInput.streams(i);
ctx.streamMap[i].iCodecContext = ctx.streamMap[i].iStream.codec();
ctx.streamMap[i].iCodecType = ctx.streamMap[i].iCodecContext.codec_type();
ctx.streamMap[i].iCodecRational = ctx.streamMap[i].iCodecContext.time_base();
ctx.streamMap[i].iStreamRational = ctx.streamMap[i].iStream.time_base();
}
if (firstRun) {
encodedFrames = new AtomicInteger[numInputStreams];
for (int i = 0; i < encodedFrames.length; i++) {
encodedFrames[i] = new AtomicInteger(0);
}
}
if (ctx.isInterrupted()) {
throw new InterruptedException(FFMPEG_INIT_INTERRUPTED);
}
int videoHeight;
int videoWidth;
if (ctx.videoInCodecCtx != null) {
videoHeight = ctx.videoInCodecCtx.height();
videoWidth = ctx.videoInCodecCtx.width();
} else {
videoHeight = 0;
videoWidth = 0;
}
if (firstRun) {
AVCodec videoCodec = null;
if (ctx.videoInCodecCtx != null) {
videoCodec = avcodec_find_decoder(ctx.videoInCodecCtx.codec_id());
}
if (ctx.encodeProfile != null && ctx.videoInCodecCtx != null && ctx.preferredVideo > NO_STREAM_IDX) {
ctx.videoEncodeSettings = ctx.encodeProfile.getVideoEncoderMap(
videoWidth,
videoHeight,
ctx.encodeProfile.getVideoEncoderCodec(
videoCodec));
// Remove the encoder profile if we cannot get permission to transcode. This will
// prevent any possible future attempts.
String weightStr = ctx.videoEncodeSettings.get("encode_weight");
int weight = 2;
if (weightStr != null) {
try {
weight = Integer.parseInt(weightStr);
} catch (NumberFormatException e) {
logger.error("Unable to parse '{}' into an integer, using the default {}.",
weightStr, weight);
}
} else {
logger.warn("encode_weight is not set. Using default {}.", weight);
}
if (!getTranscodePermission(ctx.OPAQUE, weight)) {
ctx.encodeProfile = null;
}
} else {
if (ctx.encodeProfile != null) {
// Everything needed to make a correct decision is not available. Remux only.
logger.warn("ctx.videoCodecCtx was null or there was no preferred video when" +
" trying to get permission to transcode. Remuxing instead.");
ctx.encodeProfile = null;
}
}
interlaced = ctx.encodeProfile != null &&
ctx.encodeProfile.canInterlaceDetect(videoHeight, videoWidth) &&
fastDeinterlaceDetection();
}
if (ctx.isInterrupted()) {
throw new InterruptedException(FFMPEG_INIT_INTERRUPTED);
}
if (ctx.encodeProfile != null &&
ctx.encodeProfile.canTranscodeVideo(
interlaced,
avcodec_get_name(ctx.videoInCodecCtx.codec_id()).getString(),
videoHeight,
videoWidth)) {
ret = avcodec_open2(ctx.videoInCodecCtx,
avcodec_find_decoder(ctx.videoInCodecCtx.codec_id()), (PointerPointer<AVDictionary>) null);
if (ret < 0) {
throw new FFmpegException("Failed to open decoder for stream #" + ctx.preferredVideo, ret);
}
if ((ctx.videoOutStream = addTranscodeVideoStreamToContext(ctx, ctx.preferredVideo, ctx.encodeProfile)) == null) {
// If transcoding is not possible, we will just copy it.
logger.warn("Unable to set up transcoding. The stream will be copied.");
if ((ctx.videoOutStream = addCopyStreamToContext(ctx.avfCtxOutput, ctx.avfCtxInput.streams(ctx.preferredVideo))) == null) {
throw new FFmpegException("Could not find a video stream", -1);
}
} else {
ctx.streamMap[ctx.preferredVideo].transcode = true;
}
} else {
if ((ctx.videoOutStream = addCopyStreamToContext(ctx.avfCtxOutput, ctx.avfCtxInput.streams(ctx.preferredVideo))) == null) {
throw new FFmpegException("Could not find a video stream", -1);
}
}
if (ctx.secondaryStream) {
if ((ctx.videoOutStream2 = addCopyStreamToContext(ctx.avfCtxOutput2, ctx.avfCtxInput.streams(ctx.preferredVideo))) == null) {
throw new FFmpegException("Could not find a video stream for output 2", -1);
}
ctx.streamMap2[ctx.preferredVideo].outStreamIndex = ctx.videoOutStream2.id();
ctx.streamMap2[ctx.preferredVideo].oCodecRational = ctx.videoOutStream2.codec().time_base();
ctx.streamMap2[ctx.preferredVideo].oStreamRational = ctx.videoOutStream2.time_base();
ctx.streamMap2[ctx.preferredVideo].oCodecContext = ctx.videoOutStream2.codec();
ctx.streamMap2[ctx.preferredVideo].oStream = ctx.videoOutStream2;
}
if ((ctx.audioOutStream = addCopyStreamToContext(ctx.avfCtxOutput, ctx.avfCtxInput.streams(ctx.preferredAudio))) == null) {
throw new FFmpegException("Could not find a audio stream", -1);
}
ctx.streamMap[ctx.preferredVideo].outStreamIndex = ctx.videoOutStream.id();
ctx.streamMap[ctx.preferredVideo].oCodecRational = ctx.videoOutStream.codec().time_base();
ctx.streamMap[ctx.preferredVideo].oStreamRational = ctx.videoOutStream.time_base();
ctx.streamMap[ctx.preferredVideo].oCodecContext = ctx.videoOutStream.codec();
ctx.streamMap[ctx.preferredVideo].oStream = ctx.videoOutStream;
ctx.streamMap[ctx.preferredAudio].outStreamIndex = ctx.audioOutStream.id();
ctx.streamMap[ctx.preferredAudio].oCodecRational = ctx.audioOutStream.codec().time_base();
ctx.streamMap[ctx.preferredAudio].oStreamRational = ctx.audioOutStream.time_base();
ctx.streamMap[ctx.preferredAudio].oCodecContext = ctx.audioOutStream.codec();
ctx.streamMap[ctx.preferredAudio].oStream = ctx.audioOutStream;
for (int i = 0; i < numInputStreams; ++i) {
if (ctx.streamMap[i].outStreamIndex != NO_STREAM_IDX) {
continue;
}
AVCodecContext codecCtx = getCodecContext(ctx.avfCtxInput.streams(i));
if (codecCtx != null) {
AVStream avsOutput = addCopyStreamToContext(ctx.avfCtxOutput, ctx.avfCtxInput.streams(i));
if (avsOutput != null) {
ctx.streamMap[i].outStreamIndex = avsOutput.id();
ctx.streamMap[i].oCodecRational = avsOutput.codec().time_base();
ctx.streamMap[i].oStreamRational = avsOutput.time_base();
ctx.streamMap[i].oCodecContext = avsOutput.codec();
ctx.streamMap[i].oStream = avsOutput;
}
}
}
if (ctx.isInterrupted()) {
throw new InterruptedException(FFMPEG_INIT_INTERRUPTED);
}
filter_ctx = new FilteringContext[numInputStreams];
if ((ret = initFilters()) < 0) {
throw new FFmpegException("initFilters: Unable to allocate filters.", ret);
}
ctx.dumpOutputFormat();
ctx.allocIoOutputContext(writer);
if (ctx.secondaryStream) {
ctx.dumpOutputFormat2("(CCExtractor)");
ctx.allocIoOutputContext2(writer2);
ret = avformat_write_header(ctx.avfCtxOutput2, (PointerPointer<avutil.AVDictionary>) null);
if (ret < 0) {
deallocFilterGraphs();
throw new FFmpegException("Error while writing header to file 2 '" + outputFilename + "'", ret);
}
}
if (ctx.isInterrupted()) {
deallocFilterGraphs();
throw new InterruptedException(FFMPEG_INIT_INTERRUPTED);
}
logger.debug("Writing header");
if (mpegTsCbrEnabled && !ctx.streamMap[ctx.preferredVideo].transcode && outputFilename.endsWith(".ts")) {
// This value will generally be the safest guess 99% of the time. If the value is guessed
// too low, there will be issues during remuxing and we can't go back and fix a live
// stream.
muxRate = ctx.avfCtxInput.streams(ctx.preferredVideo).codec().rc_max_rate();
// No normal broadcast content should be over 20mb/s.
if (muxRate == 0 || muxRate > 20000000) {
muxRate = 20000000;
}
// Audio + margin of error.
muxRate += 2000000;
logger.debug("Using MPEG-TS CBR {} kb/s.", muxRate / 1000);
muxerDict = new AVDictionary(null);
av_dict_set_int(muxerDict, "muxrate", muxRate, 0);
av_dict_set_int(muxerDict, "pat_period", 1, 0);
av_dict_set_int(muxerDict, "sdt_period", 10, 0);
ret = avformat_write_header(ctx.avfCtxOutput, muxerDict);
} else {
muxerDict = new AVDictionary(null);
av_dict_set_int(muxerDict, "pat_period", 1, 0);
av_dict_set_int(muxerDict, "sdt_period", 10, 0);
ret = avformat_write_header(ctx.avfCtxOutput, muxerDict);
}
if (ret < 0) {
deallocFilterGraphs();
throw new FFmpegException("Error while writing header to file '" + outputFilename + "'", ret);
}
logger.info("Initialized FFmpeg transcoder stream output.");
firstRun = false;
}
private void deallocFilterGraphs() {
if (filter_ctx == null) {
return;
}
for (int i = 0; i < filter_ctx.length; i++) {
if (filter_ctx[i].buffersink_ctx != null) {
avfilter_free(filter_ctx[i].buffersink_ctx);
filter_ctx[i].buffersink_ctx = null;
}
if (filter_ctx[i].buffersrc_ctx != null) {
avfilter_free(filter_ctx[i].buffersrc_ctx);
filter_ctx[i].buffersrc_ctx = null;
}
if (filter_ctx[i].filter_graph != null) {
avfilter_graph_free(filter_ctx[i].filter_graph);
filter_ctx[i].filter_graph = null;
}
}
filter_ctx = null;
}
private boolean fastDeinterlaceDetection() throws FFmpegException {
int ret = avcodec_open2(ctx.videoInCodecCtx,
avcodec_find_decoder(ctx.videoInCodecCtx.codec_id()), (PointerPointer<AVDictionary>) null);
if (ret < 0) {
throw new FFmpegException("Failed to open decoder for stream #" + ctx.preferredVideo, ret);
}
AVPacket packet = new AVPacket();
packet.data(null);
packet.size(0);
int got_frame[] = new int[] { 0 };
AVFrame frame;
// This number will increase as interlaced flags are found. If no interlaced frames are
// found before the frame limit is reached, give up.
int frameLimit = 90;
// This is the absolute frame limit. Once this number is reached the method will return
// that this is not interlaced content.
int absFrameLimit = frameLimit * 2;
int totalFrames = 0;
int interThresh = 3;
int interFrames = 0;
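// interFrames counts frames flagged as interlaced; once it reaches interThresh the content is treated as interlaced.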
try {
if (ctx.SEEK_BUFFER != null) {
ctx.SEEK_BUFFER.setNoWrap(true);
}
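// Bound the scan to roughly one second of wall-clock time so stream startup is not delayed.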
long stopTime = System.currentTimeMillis() + 1000;
while(!ctx.isInterrupted()) {
if (System.currentTimeMillis() > stopTime) {
break;
}
ret = av_read_frame(ctx.avfCtxInput, packet);
if (ret < 0) {
if (ret != AVERROR_EOF) {
logger.error("Error reading frame during interlaced detection: {}", ret);
}
break;
}
int inputStreamIndex = packet.stream_index();
AVStream stream = ctx.avfCtxInput.streams(inputStreamIndex);
if (inputStreamIndex >= ctx.streamMap.length ||
inputStreamIndex != ctx.preferredVideo) {
// The packet is diverted to a queue to be processed after detection. If it is
// determined that re-muxing is preferred over transcoding, these packets will
// be fed to the re-muxer. If re-muxing isn't preferred, this queue can be
// de-allocated later.
av_packet_unref(packet);
continue;
}
frame = av_frame_alloc();
if (frame == null) {
throw new FFmpegException("av_frame_alloc: Unable to allocate frame.", -1);
}
av_packet_rescale_ts(packet,
stream.time_base(),
stream.codec().time_base());
logger.debug("Decoding video frame {} for interlace detection. {} frames interlaced.", totalFrames, interFrames);
ret = avcodec_decode_video2(stream.codec(), frame,
got_frame, packet);
av_packet_rescale_ts(packet,
stream.codec().time_base(),
stream.time_base());
if (ret < 0) {
av_frame_free(frame);
//av_packet_unref(packet);
logger.error("Decoding failed");
continue;
}
if (got_frame[0] != 0) {
int interlaced = frame.interlaced_frame();
interFrames += interlaced;
frameLimit += interlaced;
// Do not retain decoded packets. The RAM usage will get insane very quickly.
}
av_frame_free(frame);
av_packet_unref(packet);
if (interFrames >= interThresh) {
logger.info("Content is interlaced.");
return true;
} else if (totalFrames++ >= frameLimit || totalFrames >= absFrameLimit) {
break;
}
}
} catch (FFmpegException e) {
logger.error("Deinterlace detection exception => ", e);
} finally {
avcodec_close(ctx.videoInCodecCtx);
/*if (interFrames < interThresh) {
// Return to the start.
avio_seek(ctx.avfCtxInput.pb(), 0, 0);
}*/
if (ctx.SEEK_BUFFER != null) {
ctx.SEEK_BUFFER.setNoWrap(false);
}
}
return false;
}
@Override
public synchronized void streamOutput() throws FFmpegException {
int ret = 0;
long lastPreOffsetDts[] = new long[firstDtsByStreamIndex.length];
Arrays.fill(lastPreOffsetDts, 0);
// This value will be adjusted as needed to keep the entire stream on the same time code.
//long tsOffset = 0;
long tsOffsets[] = new long[firstDtsByStreamIndex.length];
Arrays.fill(tsOffsets, 0);
// This value indicates what streams are currently in use and are desirable to be in
// agreement with the other streams.
boolean tsActiveOffsets[] = new boolean[firstDtsByStreamIndex.length];
Arrays.fill(tsActiveOffsets, false);
// This is set when the offset for any stream has changed, so we know it needs to be synced.
boolean tsOffsetChanged = false;
// This value is used to determine when to force the offsets to sync up. Currently the loop
// will try to sync up for up to 120 frames that are not out of sync before everything is
// forced to sync to the offset of the last stream that was adjusted.
int tsOffsetAttempts = 0;
int tsOffsetAttemptLimit = 120;
// This is incremented by frame duration. It does not distinguish between streams, so if
// multiple streams are being corrected, this value will increase faster than the assumed
// duration. This is an acceptable compromise vs iterating an array or calculating the most
// accurate value on each update.
long lastErrorTime = 0;
long lastErrorTimeLimit = 5 * TS_TIME_BASE;
// This is the number of times the error time has increased due to an error.
int errorCounter = 0;
// This is the maximum number of errors within the error time limit allowed before the
// discontinuity tolerance is increased.
int errorLimit = 50;
// This is the number of times the discontinuity tolerance has been increased.
int adjustments = 0;
// This is the last stream that adjusted its offset. This is used as the correct offset
// value after everything is synced up.
int lastToAdjust = 0;
// This is the number of ticks off +/- from the expected timestamp allowed before corrective
// action is taken. This number will increase automatically if it is determined to be too
// low.
int discontinuityTolerance = 3500000;
long expectedDts;
// This is the most frames to skip while waiting for a key frame to arrive before starting
// without one.
int maxFramesToStart = 768;
final boolean fixingEnabled = FFmpegConfig.getFixStream();
final boolean useCodecTimebase = FFmpegConfig.getUseCompatiblityTimebase();
int switchFlag;
long dts;
long pts;
long preOffsetDts;
long preOffsetPts;
long increment;
long diff = 0;
// Used when streaming first starts to keep the first frame from being a known bad frame.
// This helps with some players that otherwise would just assume it must be a bad video.
boolean firstFrame = true;
AVPacket packet = new AVPacket();
packet.data(null);
packet.size(0);
AVPacket copyPacket = new AVPacket();
copyPacket.data(null);
copyPacket.size(0);
//AVStream iavStream;
//AVCodecContext iavCodecContext;
int inputStreamIndex;
int outputStreamIndex;
int codecType;
int got_frame[] = new int[] { 0 };
// This needs to start out null or Java complains about the cleanup.
AVFrame frame = null;
try {
startTime = System.currentTimeMillis();
while (true) {
ret = av_read_frame(ctx.avfCtxInput, packet);
if (ret < 0) {
break;
}
inputStreamIndex = packet.stream_index();
if (inputStreamIndex >= ctx.streamMap.length ||
(outputStreamIndex = ctx.streamMap[inputStreamIndex].outStreamIndex)
== NO_STREAM_IDX) {
av_packet_unref(packet);
continue;
}
preOffsetDts = packet.dts();
preOffsetPts = packet.pts();
// Discard all frames that don't have any timestamps since especially without a
// presentation timestamp, the frame will never be displayed anyway.
if (preOffsetDts == AV_NOPTS_VALUE || preOffsetPts == AV_NOPTS_VALUE) {
/*logger.debug("stream {}, dts == AV_NOPTS_VALUE || pts == AV_NOPTS_VALUE," +
" discarding frame.", inputStreamIndex);*/
av_packet_unref(packet);
continue;
}
lastPreOffsetDts[inputStreamIndex] = preOffsetDts;
if (firstFrame) {
boolean keyPacket = (packet.flags() & AV_PKT_FLAG_KEY) > 0;
boolean corruptPacket = (packet.flags() & AV_PKT_FLAG_CORRUPT) > 0;
if ((!corruptPacket && keyPacket && outputStreamIndex == 0) || maxFramesToStart <= 0) {
firstFrame = false;
} else {
maxFramesToStart -= 1;
av_packet_unref(packet);
continue;
}
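// Baseline the timeline: offset every stream by the smallest DTS observed so the output timestamps start near zero.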
long minDts = packet.dts();
for (int i = 0; i < lastPreOffsetDts.length; i++) {
if (lastPreOffsetDts[i] > 0) {
minDts = Math.min(minDts, lastPreOffsetDts[i]);
}
}
Arrays.fill(lastDtsByStreamIndex, 0);
Arrays.fill(lastPtsByStreamIndex, 0);
Arrays.fill(tsOffsets, -minDts);
}
if (switching && outputStreamIndex == 0) {
switchFlag = packet.flags() & AV_PKT_FLAG_KEY;
// Check if we are at least on a flagged video key frame. Then switch before the
// frame is actually processed. This ensures that if we are muxing, we are
// starting with hopefully an I frame and if we are transcoding this is likely a
// good transition point.
if (switchFlag > 0 || System.currentTimeMillis() >= switchTimeout) {
logger.debug("Video key frame flag: {}", switchFlag);
synchronized (switchLock) {
try {
switchStreamOutput();
if (mpegTsCbrEnabled && ctx.outputFilename.endsWith(".ts")) {
long minDts = packet.dts();
for (int i = 0; i < lastPreOffsetDts.length; i++) {
if (lastPreOffsetDts[i] > 0) {
minDts = Math.min(minDts, lastPreOffsetDts[i]);
}
}
Arrays.fill(lastDtsByStreamIndex, 0);
Arrays.fill(lastPtsByStreamIndex, 0);
Arrays.fill(tsOffsets, -minDts);
}
errorCounter = 0;
} catch (InterruptedException e) {
logger.debug("Switching was interrupted.");
av_packet_unref(packet);
break;
}
switching = false;
switchLock.notifyAll();
}
logger.info("SWITCH successful: {}ms.",
System.currentTimeMillis() - (switchTimeout - 10000));
}
}
if (lastErrorTime > lastErrorTimeLimit) {
errorCounter = 0;
adjustments = 0;
lastErrorTime = 0;
}
if (errorCounter > errorLimit) {
discontinuityTolerance *= 2;
logger.info("adjusting tolerance to {}. errors = {}, adjustments = {}",
discontinuityTolerance, errorCounter, adjustments);
errorCounter = 0;
adjustments += 1;
lastErrorTime = 0;
}
dts = preOffsetDts + tsOffsets[inputStreamIndex];
pts = preOffsetPts + tsOffsets[inputStreamIndex];
// These are referenced several times. This keeps these variables from constantly
// being copied into the JVM.
//iavStream = ctx.streamMap[inputStreamIndex].iStream;
//iavCodecContext = ctx.streamMap[inputStreamIndex].iCodecContext;
codecType = ctx.streamMap[inputStreamIndex].iCodecType;
if ((codecType == AVMEDIA_TYPE_VIDEO ||
codecType == AVMEDIA_TYPE_AUDIO) &&
lastDtsByStreamIndex[inputStreamIndex] > 0) {
tsActiveOffsets[inputStreamIndex] = true;
// There are probably many other situations that could come up making this value
// incorrect, but this is only optimizing for typical MPEG-TS input and
// MPEG-TS/PS output. This has the potential to introduce a rounding error since
// it is not based on the stream time base rational.
increment = Math.max(packet.duration(), 0);
expectedDts = (lastDtsByStreamIndex[inputStreamIndex] + increment); // & 0x1ffffffffL;
diff = dts - expectedDts;
if (fixingEnabled &&
(diff > discontinuityTolerance || diff < -increment * 4)) {
errorCounter += 1;
lastErrorTime += increment;
long oldDts = dts;
long oldPts = pts;
long oldOffset = tsOffsets[inputStreamIndex];
dts -= diff;
pts -= diff;
tsOffsets[inputStreamIndex] -= diff;
tsOffsetChanged = true;
lastToAdjust = inputStreamIndex;
logger.debug("fixing stream {} timestamp discontinuity diff = {}," +
" offset = {}, new offset = {}," +
" preoff dts = {}, dts = {}, new dts {}, last dts = {}," +
" preoff pts = {}, pts = {}, new pts = {}, last pts = {}",
inputStreamIndex, diff,
oldOffset, tsOffsets[inputStreamIndex],
preOffsetDts, oldDts, dts, lastDtsByStreamIndex[inputStreamIndex],
preOffsetPts, oldPts, pts, lastPtsByStreamIndex[inputStreamIndex]);
} else if (tsOffsetChanged) {
// If the offset is changed for any one of the streams, we need to make sure
// they are all using the same offset once the event is over or they may
// slowly get out of sync.
long maxOffset = Long.MIN_VALUE;
long minOffset = Long.MAX_VALUE;
for (int i = 0; i < tsOffsets.length; i++) {
if (!tsActiveOffsets[i]) {
continue;
}
if (minOffset == Long.MAX_VALUE || tsOffsets[i] < minOffset) {
minOffset = tsOffsets[i];
}
if (maxOffset == Long.MIN_VALUE || tsOffsets[i] > maxOffset) {
maxOffset = tsOffsets[i];
}
}
long offsetDiff = maxOffset - minOffset;
if ((offsetDiff > -discontinuityTolerance &&
offsetDiff < discontinuityTolerance) ||
tsOffsetAttempts > tsOffsetAttemptLimit) {
// This is a good spot to correct the offset before it reaches the long
// wrap around limit. If this is going backwards so much that it wraps
// around backwards you likely would have noticed just based on the poor
// playback of the stream. Subtracting this value will effectively
// result in the exact same offset.
if (tsOffsetAttempts > tsOffsetAttemptLimit) {
logger.debug("force sync offsets {} to {}, diff = {}, attempts = {}",
tsOffsets, tsOffsets[lastToAdjust], offsetDiff, tsOffsetAttempts);
} else {
logger.debug("sync offsets {} to {}, diff = {}, attempts = {}",
tsOffsets, tsOffsets[lastToAdjust], offsetDiff, tsOffsetAttempts);
}
tsOffsetChanged = false;
tsOffsetAttempts = 0;
for (int i = 0; i < tsOffsets.length; i++) {
tsOffsets[i] = tsOffsets[lastToAdjust];
}
lastToAdjust = ctx.preferredVideo;
} else {
tsOffsetAttempts += 1;
}
} else if (!fixingEnabled && diff > 162000000) {
// If the stream is more than 30 minutes ahead, discard it. Leaving it alone
// will do nothing but break things since we are not trying to fix errors.
logger.debug("discarding frame stream {}, dts {} - last dts {}" +
" > 162000000, pts {}, last pts {}",
inputStreamIndex, dts, lastDtsByStreamIndex[inputStreamIndex],
pts, lastPtsByStreamIndex[inputStreamIndex]);
errorCounter += 1;
lastErrorTime += increment;
av_packet_unref(packet);
continue;
}
if (dts <= lastDtsByStreamIndex[inputStreamIndex]) {
// If the decode time stamp is equal to the last one, discard the frame.
// There isn't a simple way to know if the frame can be put in the assumed
// correct place without putting a ripple in the timeline.
if (lastDtsByStreamIndex[inputStreamIndex] == dts) {
logger.debug("discarding frame stream {}, dts {} == last dts {}," +
" pts {}, last pts {}",
inputStreamIndex, dts, lastDtsByStreamIndex[inputStreamIndex],
pts, lastPtsByStreamIndex[inputStreamIndex]);
av_packet_unref(packet);
continue;
}
// If the pts is still greater than the last pts, fix the dts so it can be
// muxed. This helps retain H.264 B frames when the decode timestamps are
// out of order.
if (pts > lastPtsByStreamIndex[inputStreamIndex] &&
pts > lastDtsByStreamIndex[inputStreamIndex]) {
long oldDts = dts;
dts = lastDtsByStreamIndex[inputStreamIndex] + 1;
logger.debug("re-ordering stream {}, diff = {}, offset = {}" +
" preoff dts = {}, dts = {}, new dts = {}," +
" last dts = {}," +
" preoff pts = {}, pts = {} > last pts = {}",
inputStreamIndex, diff, tsOffsets[inputStreamIndex],
preOffsetDts, oldDts, dts,
lastDtsByStreamIndex[inputStreamIndex],
preOffsetPts, pts, lastPtsByStreamIndex[inputStreamIndex]);
} else {
logger.debug("discarding packet stream {}," +
" dts {} < last dts {}," +
" pts {} <= last pts {}",
inputStreamIndex,
dts, lastDtsByStreamIndex[inputStreamIndex],
pts, lastPtsByStreamIndex[inputStreamIndex]);
av_packet_unref(packet);
continue;
}
}
}
packet.dts(dts);
packet.pts(pts);
lastDtsByStreamIndex[inputStreamIndex] = dts;
lastPtsByStreamIndex[inputStreamIndex] = pts;
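// Remember the corrected timestamps; the next packet on this stream is validated against them.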
if (ctx.secondaryStream && inputStreamIndex == ctx.preferredVideo) {
av_copy_packet(copyPacket, packet);
av_packet_copy_props(copyPacket, packet);
//logPacket(ctx.avfCtxInput, copyPacket, "copy2-in");
// remux this frame without re-encoding
av_packet_rescale_ts(copyPacket,
ctx.streamMap2[inputStreamIndex].iStreamRational,
ctx.streamMap2[inputStreamIndex].oStreamRational);
//logPacket(ctx.avfCtxInput, copyPacket, "copy2-out");
copyPacket.stream_index(ctx.streamMap2[inputStreamIndex].outStreamIndex);
ret = av_interleaved_write_frame(ctx.avfCtxOutput2, copyPacket);
if (ret < 0) {
logger.error("Error from av_interleaved_write_frame output 2: {}", ret);
}
}
//logger.trace("Demuxer gave frame of streamIndex {}", inputStreamIndex);
if (filter_ctx[inputStreamIndex].filter_graph != null) {
//logger.trace("Going to re-encode & filter the frame");
frame = av_frame_alloc();
if (frame == null) {
throw new FFmpegException("av_frame_alloc: Unable to allocate frame.",
ENOMEM);
}
//logPacket(ctx.avfCtxInput, packet, "trans-dec-in");
av_packet_rescale_ts(packet,
ctx.streamMap[inputStreamIndex].iStreamRational,
ctx.streamMap[inputStreamIndex].iCodecRational);
//logPacket(ctx.avfCtxInput, packet, "trans-dec-out");
if (codecType == AVMEDIA_TYPE_VIDEO) {
ret = avcodec_decode_video2(
ctx.streamMap[inputStreamIndex].iCodecContext, frame,
got_frame, packet);
} else {
ret = avcodec_decode_audio4(
ctx.streamMap[inputStreamIndex].iCodecContext, frame,
got_frame, packet);
}
if (ret < 0) {
av_frame_free(frame);
av_packet_unref(packet);
logger.error("Decoding failed");
continue;
}
if (got_frame[0] != 0) {
frame.pts(av_frame_get_best_effort_timestamp(frame));
ret = filterEncodeWriteFrame(frame, inputStreamIndex);
av_frame_free(frame);
if (ret < 0) {
logger.error("Error from filterEncodeWriteFrame: {}", ret);
//throw new FFmpegException("Error from filterEncodeWriteFrame.", ret);
}
} else {
av_frame_free(frame);
}
} else {
//logPacket(ctx.avfCtxInput, packet, "copy-in");
// remux this frame without re-encoding
av_packet_rescale_ts(packet,
ctx.streamMap[inputStreamIndex].iStreamRational,
ctx.streamMap[inputStreamIndex].oStreamRational);
packet.pos(-1);
//logPacket(ctx.avfCtxInput, packet, "copy-out");
packet.stream_index(ctx.streamMap[inputStreamIndex].outStreamIndex);
ret = av_interleaved_write_frame(ctx.avfCtxOutput, packet);
if (ret < 0) {
logger.error("Error from av_interleaved_write_frame: {}", ret);
}
}
av_packet_unref(packet);
}
int numInputStreams = ctx.avfCtxInput.nb_streams();
// flush filters and encoders
for (int i = 0; i < numInputStreams; i++) {
if (filter_ctx != null && i < filter_ctx.length) {
// flush filter
if (filter_ctx[i].filter_graph == null)
continue;
ret = filterEncodeWriteFrame(null, i);
if (ret < 0) {
logger.error("Flushing filter failed: {}", ret);
}
}
// flush encoder
ret = flushEncoder(i);
if (ret < 0) {
logger.error("Flushing encoder failed: {}", ret);
}
}
av_write_trailer(ctx.avfCtxOutput);
} finally {
returnTranscodePermission(ctx.OPAQUE);
// Cleanup.
endStreamOutput(packet, frame);
logger.info("FFmpeg transcoder ended with code {}", ret);
}
}
private void switchStreamOutput() throws FFmpegException, InterruptedException {
int ret;
int numInputStreams = ctx.avfCtxInput.nb_streams();
// flush filters and encoders
for (int i = 0; i < numInputStreams; i++) {
if (filter_ctx != null && i < filter_ctx.length) {
// flush filter
if (filter_ctx[i].filter_graph == null)
continue;
ret = filterEncodeWriteFrame(null, i);
if (ret < 0) {
logger.error("Flushing filter failed: {}", ret);
}
}
/* flush encoder */
ret = flushEncoder(i);
if (ret < 0) {
logger.error("Flushing encoder failed: {}", ret);
}
}
av_write_trailer(ctx.avfCtxOutput);
deallocFilterGraphs();
ctx.deallocOutputContext();
if (ctx.secondaryStream) {
av_write_trailer(ctx.avfCtxOutput2);
ctx.deallocOutputContext2();
}
if (ctx.isInterrupted()) {
return;
}
initStreamOutput(ctx, newFilename, newWriter, newWriter2, false);
}
private void endStreamOutput(AVPacket packet, AVFrame frame) {
av_packet_unref(packet);
av_frame_free(frame);
deallocFilterGraphs();
}
private int initFilter(FilteringContext fctx, AVCodecContext dec_ctx,
AVCodecContext enc_ctx, AVStream out_stream, String filter_spec,
AVCodec encoder, AVDictionary dict) throws FFmpegException {
int ret = 0;
int decCodecType;
avfilter.AVFilter buffersrc;
avfilter.AVFilter buffersink;
avfilter.AVFilterContext buffersrc_ctx;
avfilter.AVFilterContext buffersink_ctx;
avfilter.AVFilterInOut outputs = avfilter_inout_alloc();
avfilter.AVFilterInOut inputs = avfilter_inout_alloc();
avfilter.AVFilterGraph filter_graph = avfilter_graph_alloc();
try {
decCodecType = dec_ctx.codec_type();
if (outputs == null || inputs == null || filter_graph == null) {
throw new FFmpegException("Not enough memory available", ENOMEM);
}
if (decCodecType == AVMEDIA_TYPE_VIDEO) {
buffersrc = avfilter_get_by_name("buffer");
buffersink = avfilter_get_by_name("buffersink");
if (buffersrc == null || buffersink == null) {
throw new FFmpegException("Filtering source or sink element not found",
AVERROR_UNKNOWN);
}
String parameters = String.format(
"video_size=%dx%d:pix_fmt=%d:time_base=%d/%d:frame_rate=%d/%d:pixel_aspect=%d/%d",
dec_ctx.width(), dec_ctx.height(), dec_ctx.pix_fmt(),
dec_ctx.time_base().num(), dec_ctx.time_base().den(),
dec_ctx.framerate().num(), dec_ctx.framerate().den(),
dec_ctx.sample_aspect_ratio().num(), dec_ctx.sample_aspect_ratio().den());
/*String parameters = String.format(
"video_size=%dx%d:pix_fmt=%d:time_base=%d/%d:pixel_aspect=%d/%d",
dec_ctx.width(), dec_ctx.height(), dec_ctx.pix_fmt(),
dec_ctx.time_base().num(), dec_ctx.time_base().den(),
dec_ctx.sample_aspect_ratio().num(), dec_ctx.sample_aspect_ratio().den());*/
ret = avfilter_graph_create_filter(buffersrc_ctx = new AVFilterContext(null),
buffersrc, "in", parameters, null, filter_graph);
if (ret < 0) {
throw new FFmpegException("Cannot create buffer source", ret);
}
ret = avfilter_graph_create_filter(buffersink_ctx = new AVFilterContext(null),
buffersink, "out", null, null, filter_graph);
if (ret < 0) {
throw new FFmpegException("Cannot create buffer sink", ret);
}
BytePointer setBin = new BytePointer(4);
setBin.asByteBuffer().putInt(enc_ctx.pix_fmt());
ret = av_opt_set_bin(buffersink_ctx, "pix_fmts", setBin, 4, AV_OPT_SEARCH_CHILDREN);
if (ret < 0) {
throw new FFmpegException("Cannot set pixel format", ret);
}
} else if (decCodecType == AVMEDIA_TYPE_AUDIO) {
buffersrc = avfilter_get_by_name("abuffer");
buffersink = avfilter_get_by_name("abuffersink");
if (buffersrc == null || buffersink == null) {
throw new FFmpegException("filtering source or sink element not found", AVERROR_UNKNOWN);
}
if (dec_ctx.channel_layout() == 0) {
dec_ctx.channel_layout(av_get_default_channel_layout(dec_ctx.channels()));
}
String parameters = String.format(
"time_base=%d/%d:sample_rate=%d:sample_fmt=%s:channel_layout=0x%x",
dec_ctx.time_base().num(), dec_ctx.time_base().den(), dec_ctx.sample_rate(),
av_get_sample_fmt_name(dec_ctx.sample_fmt()).getString(),
dec_ctx.channel_layout());
ret = avfilter_graph_create_filter(buffersrc_ctx = new AVFilterContext(), buffersrc, "in",
parameters, null, filter_graph);
if (ret < 0) {
throw new FFmpegException("Cannot create audio buffer source", ret);
}
ret = avfilter_graph_create_filter(buffersink_ctx = new AVFilterContext(), buffersink, "out",
null, null, filter_graph);
if (ret < 0) {
throw new FFmpegException("Cannot create audio buffer sink", ret);
}
BytePointer setBin = new BytePointer(4);
setBin.asByteBuffer().putInt(enc_ctx.sample_fmt());
av_opt_set_bin(buffersink_ctx, "sample_fmts", setBin, 4, AV_OPT_SEARCH_CHILDREN);
if (ret < 0) {
throw new FFmpegException("Cannot set output sample format", ret);
}
setBin = new BytePointer(8);
setBin.asByteBuffer().putLong(enc_ctx.channel_layout());
av_opt_set_bin(buffersink_ctx, "channel_layouts", setBin, 8, AV_OPT_SEARCH_CHILDREN);
if (ret < 0) {
throw new FFmpegException("Cannot set output channel layout", ret);
}
setBin = new BytePointer(4);
setBin.asByteBuffer().putInt(enc_ctx.sample_rate());
av_opt_set_bin(buffersink_ctx, "sample_rates", setBin, 4, AV_OPT_SEARCH_CHILDREN);
if (ret < 0) {
throw new FFmpegException("Cannot set output sample rate", ret);
}
} else {
throw new FFmpegException("initFilter: Not audio or video.", AVERROR_UNKNOWN);
}
// Endpoints for the filter graph.
outputs.name(av_strdup(new BytePointer("in")));
outputs.filter_ctx(buffersrc_ctx);
outputs.pad_idx(0);
outputs.next(null);
inputs.name(av_strdup(new BytePointer("out")));
inputs.filter_ctx(buffersink_ctx);
inputs.pad_idx(0);
inputs.next(null);
if (outputs.name() == null || inputs.name() == null) {
throw new FFmpegException("av_strdup: Not enough memory.", ENOMEM);
}
ret = avfilter_graph_parse_ptr(filter_graph, filter_spec,
inputs, outputs, null);
if (ret < 0) {
throw new FFmpegException("avfilter_graph_parse_ptr: Unable to create.", ret);
}
ret = avfilter_graph_config(filter_graph, null);
if (ret < 0) {
throw new FFmpegException("avfilter_graph_config: Unable to create.", ret);
}
/* Fill FilteringContext */
fctx.buffersrc_ctx = buffersrc_ctx;
fctx.buffersink_ctx = buffersink_ctx;
fctx.filter_graph = filter_graph;
AVFilterContext outFilterContext = avfilter_graph_get_filter(filter_graph, "out");
if (outFilterContext == null) {
throw new FFmpegException("avfilter_graph_get_filter: Unable to get 'out' filter.", AVERROR_UNKNOWN);
}
int outFilterInputs = outFilterContext.nb_inputs();
if (outFilterInputs == 1) {
if (decCodecType == AVMEDIA_TYPE_VIDEO) {
int height;
int width;
int format;
AVRational ar;
AVRational fr;
AVRational tb;
if (logger.isDebugEnabled()) {
height = enc_ctx.height();
width = enc_ctx.width();
format = enc_ctx.pix_fmt();
ar = enc_ctx.sample_aspect_ratio();
fr = enc_ctx.framerate();
tb = enc_ctx.time_base();
logger.debug("Before filter: h:{} w:{} fmt:{} ar:{}/{} fr:{}/{} tb:{}/{}",
height, width, format, ar.num(), ar.den(), fr.num(), fr.den(), tb.num(), tb.den());
}
AVFilterLink input = outFilterContext.inputs(0);
height = input.h();
width = input.w();
format = input.format();
ar = input.sample_aspect_ratio();
fr = input.frame_rate();
tb = input.time_base();
if (logger.isDebugEnabled()) {
logger.debug("After filter: h:{} w:{} fmt:{} ar:{}/{} fr:{}/{} tb: {}/{}",
height, width, format, ar.num(), ar.den(), fr.num(), fr.den(), tb.num(), tb.den());
}
enc_ctx.height(height);
enc_ctx.width(width);
enc_ctx.pix_fmt(format);
enc_ctx.sample_aspect_ratio(ar);
enc_ctx.framerate(fr);
enc_ctx.time_base(tb);
ret = avcodec_open2(enc_ctx, encoder, dict);
av_dict_free(dict);
if (ret < 0) {
logger.error("Cannot open video encoder. Error {}.", ret);
}
}
} else {
throw new FFmpegException("nb_inputs: 'out' filter has " + outFilterInputs + " inputs.", AVERROR_UNKNOWN);
}
} catch (FFmpegException e) {
if (filter_graph != null) {
avfilter_graph_free(filter_graph);
}
throw e;
} finally {
avfilter_inout_free(inputs);
avfilter_inout_free(outputs);
}
return ret;
}
private int initFilters() throws FFmpegException {
String filter_spec;
int ret;
int codecType;
int nbStreams = ctx.avfCtxInput.nb_streams();
filter_ctx = new FilteringContext[nbStreams];
for (int i = 0; i < nbStreams; i++) {
filter_ctx[i] = new FilteringContext();
codecType = ctx.avfCtxInput.streams(i).codec().codec_type();
if ( !ctx.streamMap[i].transcode || !(
codecType == AVMEDIA_TYPE_AUDIO ||
codecType == AVMEDIA_TYPE_VIDEO)) {
continue;
}
filter_ctx[i].buffersrc_ctx = new AVFilterContext();
filter_ctx[i].buffersink_ctx = new AVFilterContext();
filter_ctx[i].filter_graph = new AVFilterGraph();
if (codecType == AVMEDIA_TYPE_VIDEO) {
if (interlaced) {
filter_spec = ctx.videoEncodeSettings.get("deinterlace_filter");
} else {
filter_spec = ctx.videoEncodeSettings.get("progressive_filter");
}
if (filter_spec == null) {
filter_spec = "fps=fps=opendct_fps:round=near";
logger.warn("No filter was specified. Using 'fps=fps=opendct_fps:round=near'." +
" To avoid this message, set 'deinterlace_filter' and" +
" 'progressive_filter' to 'null' or 'fps=fps=opendct_fps:round=near'" +
" in the profile.");
} else {
if (filter_spec.contains("opendct_")) {
AVRational fullRate = ctx.avfCtxInput.streams(i).codec().framerate();
AVRational halfRate = av_mul_q(fullRate, av_make_q(1, 2));
AVRational doubleRate = av_mul_q(fullRate, av_make_q(2, 1));
filter_spec = filter_spec.replace("opendct_hfps", halfRate.num() + "/" + halfRate.den());
filter_spec = filter_spec.replace("opendct_fps", fullRate.num() + "/" + fullRate.den());
filter_spec = filter_spec.replace("opendct_dfps", doubleRate.num() + "/" + doubleRate.den());
}
}
} else {
filter_spec = "anull"; /* passthrough (dummy) filter for audio */
}
ret = initFilter(filter_ctx[i], ctx.avfCtxInput.streams(i).codec(),
ctx.avfCtxOutput.streams(ctx.streamMap[i].outStreamIndex).codec(),
ctx.avfCtxOutput.streams(ctx.streamMap[i].outStreamIndex), filter_spec,
ctx.streamMap[i].iCodec, ctx.streamMap[i].iDict);
if (ret != 0) {
return ret;
}
}
return 0;
}
private int encodeWriteFrame(AVFrame filt_frame, int stream_index, int got_frame[]) {
int ret = 0;
avcodec.AVPacket enc_pkt = new avcodec.AVPacket();
if (got_frame == null || got_frame.length == 0) {
logger.warn("got_frame will not be able to be used ByRef.");
got_frame = new int[] { 0 };
}
//logger.trace("Encoding frame");
// encode filtered frame
enc_pkt.data(null);
enc_pkt.size(0);
av_init_packet(enc_pkt);
if (ctx.streamMap[stream_index].iCodecType == AVMEDIA_TYPE_VIDEO) {
ret = avcodec_encode_video2(ctx.streamMap[stream_index].oCodecContext, enc_pkt,
filt_frame, got_frame);
} else if (ctx.streamMap[stream_index].iCodecType == AVMEDIA_TYPE_AUDIO) {
ret = avcodec_encode_audio2(ctx.streamMap[stream_index].oCodecContext, enc_pkt,
filt_frame, got_frame);
}
av_frame_free(filt_frame);
if (ret < 0) {
return ret;
}
if (got_frame[0] == 0) {
return 0;
}
//logPacket(ctx.avfCtxOutput, enc_pkt, "trans-enc-in");
// prepare packet for muxing
enc_pkt.stream_index(ctx.streamMap[stream_index].outStreamIndex);
av_packet_rescale_ts(enc_pkt,
ctx.streamMap[stream_index].oCodecRational,
ctx.streamMap[stream_index].oStreamRational);
//logPacket(ctx.avfCtxOutput, enc_pkt, "trans-enc-out");
//logger.trace("Muxing frame");
// mux encoded frame
ret = av_interleaved_write_frame(ctx.avfCtxOutput, enc_pkt);
if (encodedFrames[stream_index].addAndGet(1) == 1000) {
long endTime = System.currentTimeMillis();
if (startTime != endTime) {
logger.debug("FPS: {}", (double)encodedFrames[stream_index].get() / (double)((endTime - startTime) / 1000));
}
encodedFrames[stream_index].set(0);
startTime = endTime;
}
return ret;
}
private int filterEncodeWriteFrame(AVFrame frame, int stream_index) {
int ret;
AVFrame filt_frame;
int got_frame[] = new int[] { 0 };
//logger.trace("Pushing decoded frame to filters");
// push the decoded frame into the filtergraph
ret = av_buffersrc_add_frame_flags(filter_ctx[stream_index].buffersrc_ctx,
frame, 0);
if (ret < 0) {
logger.error("Error while feeding the filtergraph");
return ret;
}
// pull filtered frames from the filtergraph
while (true) {
filt_frame = av_frame_alloc();
if (filt_frame == null) {
ret = ENOMEM;
break;
}
//logger.trace("Pulling filtered frame from filters");
ret = av_buffersink_get_frame(filter_ctx[stream_index].buffersink_ctx,
filt_frame);
if (ret < 0) {
/* if no more frames for output - returns AVERROR(EAGAIN)
* if flushed and no more frames for output - returns AVERROR_EOF
* rewrite retcode to 0 to show it as normal procedure completion
*/
if (ret == AVERROR_EOF || ret == EAGAIN) {
ret = 0;
}
av_frame_free(filt_frame);
break;
}
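// Clear the picture type so the encoder is free to choose frame types for the filtered output.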
filt_frame.pict_type(AV_PICTURE_TYPE_NONE);
ret = encodeWriteFrame(filt_frame, stream_index, got_frame);
if (ret < 0) {
break;
}
}
return ret;
}
private int flushEncoder(int stream_index) {
if (ctx.streamMap == null ||
ctx.streamMap.length <= stream_index ||
ctx.streamMap[stream_index].outStreamIndex == NO_STREAM_IDX) {
return 0;
}
int ret;
int got_frame[] = new int[] { 0 };
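// Only encoders that buffer frames (AV_CODEC_CAP_DELAY) need an explicit flush; others have nothing pending.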
if ((ctx.avfCtxOutput.streams(ctx.streamMap[stream_index].outStreamIndex).codec().codec().capabilities() &
AV_CODEC_CAP_DELAY) == 0) {
return 0;
}
while (true) {
logger.debug("Flushing stream #{} encoder", stream_index);
ret = encodeWriteFrame(null, stream_index, got_frame);
if (ret < 0) {
break;
}
if (got_frame[0] == 0) {
return 0;
}
}
return ret;
}
}
|
Re-added TS offset reset.
|
src/main/java/opendct/video/ffmpeg/FFmpegTranscoder.java
|
Re-added TS offset reset.
|
|
Java
|
apache-2.0
|
6efb68724199ae209f403da2cd205b171ecc9f1e
| 0
|
bozimmerman/CoffeeMud,oriontribunal/CoffeeMud,bozimmerman/CoffeeMud,bozimmerman/CoffeeMud,oriontribunal/CoffeeMud,oriontribunal/CoffeeMud,oriontribunal/CoffeeMud,bozimmerman/CoffeeMud
|
package com.planet_ink.coffee_mud.core;
import com.planet_ink.coffee_mud.WebMacros.interfaces.*;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.Clan.Function;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
import java.io.File;
import java.lang.ref.WeakReference;
import java.lang.reflect.Modifier;
import java.net.URL;
import org.mozilla.javascript.*;
import org.mozilla.javascript.optimizer.*;
/*
Copyright 2001-2015 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
* The core class loader, but more importantly, the core object template manager
* for the whole mud. Classes are grouped by their core interfaces, allowing them
* to have short "ID" names as referents. Classes are loaded and initialized from the
* class loader and then kept as template objects, with newInstances created on demand (or
* simply returned as the template, in cases where the objects are shared).
* @author Bo Zimmerman
*/
@SuppressWarnings({"unchecked","rawtypes"})
public class CMClass extends ClassLoader
{
protected static boolean debugging=false;
protected static volatile long lastUpdateTime=System.currentTimeMillis();
protected static final Map<String,Class<?>> classes=new Hashtable<String,Class<?>>();
private static CMClass[] clss=new CMClass[256];
/**
* Creates a new instance of the class loader, updating the thread-group ref if necessary.
*/
public CMClass()
{
super();
final char c=Thread.currentThread().getThreadGroup().getName().charAt(0);
if(clss==null)
clss=new CMClass[256];
if(clss[c]==null)
clss[c]=this;
}
/**
* Creates and returns a new CMClass object for the current calling thread
* @return a new CMClass object for the current calling thread
*/
public static final CMClass initialize()
{
return new CMClass();
}
/**
* Returns the CMClass instance tied to this particular thread group, or null if not yet created.
* @return the CMClass instance tied to this particular thread group, or null if not yet created.
*/
private static CMClass c()
{
return clss[Thread.currentThread().getThreadGroup().getName().charAt(0)];
}
/**
* Returns the CMClass instance tied to the given thread group, or null if not yet created.
* @param c the code for the thread group to return (0-255)
* @return the CMClass instance tied to the given thread group, or null if not yet created.
*/
public static CMClass c(byte c)
{
return clss[c];
}
/**
* Returns the CMClass instance tied to this particular thread group, or null if not yet created.
* @return the CMClass instance tied to this particular thread group, or null if not yet created.
*/
public static CMClass instance()
{
return c();
}
private static boolean[] classLoaderSync={false};
public static enum CMObjectType
{
/** stat constant for race type objects */
RACE("com.planet_ink.coffee_mud.Races.interfaces.Race"),
/** stat constant for char class type objects */
CHARCLASS("com.planet_ink.coffee_mud.CharClasses.interfaces.CharClass"),
/** stat constant for mob type objects */
MOB("com.planet_ink.coffee_mud.MOBS.interfaces.MOB"),
/** stat constant for ability type objects */
ABILITY("com.planet_ink.coffee_mud.Abilities.interfaces.Ability"),
/** stat constant for locale/room type objects */
LOCALE("com.planet_ink.coffee_mud.Locales.interfaces.Room"),
/** stat constant for exit type objects */
EXIT("com.planet_ink.coffee_mud.Exits.interfaces.Exit"),
/** stat constant for item type objects */
ITEM("com.planet_ink.coffee_mud.Items.interfaces.Item"),
/** stat constant for behavior type objects */
BEHAVIOR("com.planet_ink.coffee_mud.Behaviors.interfaces.Behavior"),
/** stat constant for clan type objects */
CLAN("com.planet_ink.coffee_mud.core.interfaces.Clan"),
/** stat constant for weapon type objects */
WEAPON("com.planet_ink.coffee_mud.Items.interfaces.Weapon"),
/** stat constant for armor type objects */
ARMOR("com.planet_ink.coffee_mud.Items.interfaces.Armor"),
/** stat constant for misc magic type objects */
MISCMAGIC("com.planet_ink.coffee_mud.Items.interfaces.MiscMagic"),
/** stat constant for area type objects */
AREA("com.planet_ink.coffee_mud.Areas.interfaces.Area"),
/** stat constant for command type objects */
COMMAND("com.planet_ink.coffee_mud.Commands.interfaces.Command"),
/** stat constant for clan items type objects */
CLANITEM("com.planet_ink.coffee_mud.Items.interfaces.ClanItem"),
/** stat constant for misc tech type objects */
TECH("com.planet_ink.coffee_mud.Items.interfaces.Electronics"),
/** stat constant for misc tech type objects */
SHIPTECH("com.planet_ink.coffee_mud.Items.interfaces.ShipComponent"),
/** stat constant for misc tech type objects */
SOFTWARE("com.planet_ink.coffee_mud.Items.interfaces.Software"),
/** stat constant for webmacros type objects */
WEBMACRO("com.planet_ink.coffee_mud.WebMacros.interfaces.WebMacro"),
/** stat constant for common type objects */
COMMON("com.planet_ink.coffee_mud.Common.interfaces.CMCommon"),
/** stat constant for library type objects */
LIBRARY("com.planet_ink.coffee_mud.Libraries.interfaces.CMLibrary");
public final String ancestorName; // the fully qualified name of the type's ancestor interface
CMObjectType(String ancestorName)
{
this.ancestorName = ancestorName;
}
}
/** collection of all object types that are classified as "items" of one sort or another */
public static final CMObjectType[] OBJECTS_ITEMTYPES = new CMObjectType[]{
CMObjectType.MISCMAGIC,
CMObjectType.ITEM,
CMObjectType.ARMOR,
CMObjectType.CLANITEM,
CMObjectType.TECH,
CMObjectType.SHIPTECH,
CMObjectType.SOFTWARE,
CMObjectType.WEAPON
};
/** static int for the web macro object with the longest name, used for web optimization */
public static int longestWebMacro=-1;
protected Hashtable<String,CMCommon> common=new Hashtable<String,CMCommon>();
protected XVector<Race> races=new XVector<Race>();
protected XVector<CharClass> charClasses=new XVector<CharClass>();
protected XVector<MOB> MOBs=new XVector<MOB>();
protected XVector<Ability> abilities=new XVector<Ability>();
protected XVector<Room> locales=new XVector<Room>();
protected XVector<Exit> exits=new XVector<Exit>();
protected XVector<Item> items=new XVector<Item>();
protected XVector<Behavior> behaviors=new XVector<Behavior>();
protected XVector<Weapon> weapons=new XVector<Weapon>();
protected XVector<Armor> armor=new XVector<Armor>();
protected XVector<MiscMagic> miscMagic=new XVector<MiscMagic>();
protected XVector<Electronics> tech=new XVector<Electronics>();
protected XVector<ClanItem> clanItems=new XVector<ClanItem>();
protected XVector<Area> areaTypes=new XVector<Area>();
protected XVector<Command> commands=new XVector<Command>();
protected XVector<CMLibrary> libraries=new XVector<CMLibrary>();
protected Hashtable<String,WebMacro> webMacros=new Hashtable<String,WebMacro>();
protected Hashtable<String,Command> commandWords=new Hashtable<String,Command>();
protected static final LinkedList<CMMsg> MSGS_CACHE=new LinkedList<CMMsg>();
protected static final LinkedList<MOB> MOB_CACHE=new LinkedList<MOB>();
protected static final int MAX_MSGS=10000+((Runtime.getRuntime().maxMemory()==Integer.MAX_VALUE)?10000:(int)(Runtime.getRuntime().maxMemory()/10000));
protected static final int MAX_MOBS=50+(MAX_MSGS/200);
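// Worked example (hypothetical 1GB max heap): MAX_MSGS = 10000 + 1073741824/10000 = 117374 and MAX_MOBS = 50 + 117374/200 = 636.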
/*
* removed to save memory and processing time -- but left for future use
protected static final long[] OBJECT_CREATIONS=new long[OBJECT_TOTAL];
protected static final long[] OBJECT_DESTRUCTIONS=new long[OBJECT_TOTAL];
protected static final Map<CMObject,Object>[] OBJECT_CACHE=new WeakHashMap[OBJECT_TOTAL];
protected static final boolean KEEP_OBJECT_CACHE=false;
static
{
if(KEEP_OBJECT_CACHE)
for(int i=0;i<OBJECT_TOTAL;i++)
OBJECT_CACHE[i]=new WeakHashMap<CMObject,Object>();
}
public final static void bumpCounter(final CMObject O, final int which)
{
if(KEEP_OBJECT_CACHE)
{
if(OBJECT_CACHE[which].containsKey(O))
{
Log.errOut("Duplicate!",new Exception("Duplicate Found!"));
return;
}
OBJECT_CACHE[which].put(O,OBJECT_CACHE);
}
OBJECT_CREATIONS[which]++;
}
public final static void unbumpCounter(final CMObject O, final int which)
{
if(KEEP_OBJECT_CACHE)
{
if(OBJECT_CACHE[which].containsKey(O)) // yes, if its in there, its bad
{
OBJECT_CACHE[which].remove(O);
Log.errOut("bumped!",O.getClass().getName());
return;
}
}
OBJECT_DESTRUCTIONS[which]++;
}
public static final String getCounterReport()
{
StringBuffer str=new StringBuffer("");
for(int i=0;i<OBJECT_TOTAL;i++)
if(OBJECT_CREATIONS[i]>0)
str.append(L("@x1: Created: @x2, Destroyed: @x3, Remaining: @x4\n\r",CMStrings.padRight(OBJECT_DESCS[i],12),OBJECT_CREATIONS[i],OBJECT_DESTRUCTIONS[i],(OBJECT_CREATIONS[i]-OBJECT_DESTRUCTIONS[i])));
return str.toString();
}
public static final long numRemainingObjectCounts(final int type)
{
return OBJECT_CREATIONS[type] - OBJECT_DESTRUCTIONS[type];
}
*/
/**
* Returns whether the given class exists in the vm,
* not necessarily any given classloader.
* Requires a fully qualified java class name.
* @param className a fully qualified java class name.
* @return whether the given class exists in the vm
*/
public final static boolean exists(String className)
{
try
{
Class.forName (className);
return true;
}
catch (final ClassNotFoundException exception)
{
return false;
}
}
/**
* Checks the given object against the given object type
* @see com.planet_ink.coffee_mud.core.CMClass.CMObjectType
* @param O the object to inspect
* @param type the type to compare against
* @return true if there's a match, and false otherwise
*/
public final static boolean isType(final Object O, final CMObjectType type)
{
switch(type)
{
case RACE:
return O instanceof Race;
case CHARCLASS:
return O instanceof CharClass;
case MOB:
return O instanceof MOB;
case ABILITY:
return O instanceof Ability;
case LOCALE:
return O instanceof Room;
case EXIT:
return O instanceof Exit;
case ITEM:
return O instanceof Item;
case BEHAVIOR:
return O instanceof Behavior;
case CLAN:
return O instanceof Clan;
case WEAPON:
return O instanceof Weapon;
case ARMOR:
return O instanceof Armor;
case MISCMAGIC:
return O instanceof MiscMagic;
case AREA:
return O instanceof Area;
case COMMAND:
return O instanceof Command;
case CLANITEM:
return O instanceof ClanItem;
case TECH:
return O instanceof Electronics;
case WEBMACRO:
return O instanceof WebMacro;
case COMMON:
return O instanceof CMCommon;
case LIBRARY:
return O instanceof CMLibrary;
case SOFTWARE:
return O instanceof Software;
case SHIPTECH:
return O instanceof ShipComponent;
}
return false;
}
/**
* Returns a newInstance of an object of the given type and ID. NULL if not found.
* @see com.planet_ink.coffee_mud.core.CMClass.CMObjectType
* @param ID the ID of the object to look for
* @param type the type of object to check
* @return a newInstance of an object of the given type and ID.
*/
public final static CMObject getByType(final String ID, final CMObjectType type)
{
switch(type)
{
case RACE:
return CMClass.getRace(ID);
case CHARCLASS:
return CMClass.getCharClass(ID);
case MOB:
return CMClass.getMOB(ID);
case ABILITY:
return CMClass.getAbility(ID);
case LOCALE:
return CMClass.getLocale(ID);
case EXIT:
return CMClass.getExit(ID);
case ITEM:
return CMClass.getBasicItem(ID);
case BEHAVIOR:
return CMClass.getBehavior(ID);
case CLAN:
return CMClass.getCommon(ID);
case WEAPON:
return CMClass.getWeapon(ID);
case ARMOR:
return CMClass.getArmor(ID);
case MISCMAGIC:
return CMClass.getMiscMagic(ID);
case AREA:
return CMClass.getAreaType(ID);
case COMMAND:
return CMClass.getCommand(ID);
case CLANITEM:
return CMClass.getClanItem(ID);
case TECH:
return CMClass.getTech(ID);
case WEBMACRO:
return CMClass.getWebMacro(ID);
case COMMON:
return CMClass.getCommon(ID);
case LIBRARY:
return CMClass.getLibrary(ID);
case SHIPTECH:
return CMClass.getTech(ID);
case SOFTWARE:
return CMClass.getTech(ID);
}
return null;
}
/**
* Returns the object type of the given object
* @see com.planet_ink.coffee_mud.core.CMClass.CMObjectType
* @param O the object to inspect
* @return the cmobjecttype of the given object
*/
public final static CMObjectType getType(final Object O)
{
if(O instanceof Race)
return CMObjectType.RACE;
if(O instanceof CharClass)
return CMObjectType.CHARCLASS;
if(O instanceof Ability)
return CMObjectType.ABILITY;
if(O instanceof Room)
return CMObjectType.LOCALE;
if(O instanceof MOB)
return CMObjectType.MOB;
if(O instanceof Exit)
return CMObjectType.EXIT;
if(O instanceof Behavior)
return CMObjectType.BEHAVIOR;
if(O instanceof WebMacro)
return CMObjectType.WEBMACRO;
if(O instanceof Area)
return CMObjectType.AREA;
if(O instanceof CMLibrary)
return CMObjectType.LIBRARY;
if(O instanceof CMCommon)
return CMObjectType.COMMON;
if(O instanceof Command)
return CMObjectType.COMMAND;
if(O instanceof Clan)
return CMObjectType.CLAN;
if(O instanceof ClanItem)
return CMObjectType.CLANITEM;
if(O instanceof MiscMagic)
return CMObjectType.MISCMAGIC;
if(O instanceof Armor)
return CMObjectType.ARMOR;
if(O instanceof Weapon)
return CMObjectType.WEAPON;
if(O instanceof Item)
return CMObjectType.ITEM;
if(O instanceof Software)
return CMObjectType.SOFTWARE;
if(O instanceof ShipComponent)
return CMObjectType.SHIPTECH;
if(O instanceof Electronics)
return CMObjectType.TECH;
return null;
}
/**
* Given a string, Integer, or some other stringable object, this will return the
* cmobjecttype based on its name or ordinal relationship.
* @see com.planet_ink.coffee_mud.core.CMClass.CMObjectType
* @param nameOrOrdinal the string, integer, or whatever object
* @return the cmobjecttype it refers to
*/
public static CMObjectType getTypeByNameOrOrdinal(final Object nameOrOrdinal)
{
if(nameOrOrdinal==null)
return null;
if(nameOrOrdinal instanceof Integer)
{
final int itemtypeord = ((Integer)nameOrOrdinal).intValue();
if((itemtypeord>=0)&&(itemtypeord<CMObjectType.values().length))
return CMClass.CMObjectType.values()[itemtypeord];
}
if(nameOrOrdinal instanceof Long)
{
final int itemtypeord = ((Long)nameOrOrdinal).intValue();
if((itemtypeord>=0)&&(itemtypeord<CMObjectType.values().length))
return CMClass.CMObjectType.values()[itemtypeord];
}
final String s=nameOrOrdinal.toString();
if(s.length()==0)
return null;
if(CMath.isInteger(s))
{
final int itemtypeord=CMath.s_int(s);
if((itemtypeord>=0)&&(itemtypeord<CMObjectType.values().length))
return CMClass.CMObjectType.values()[itemtypeord];
}
try
{
return CMClass.CMObjectType.valueOf(s);
}
catch(final Exception e)
{
return (CMClass.CMObjectType)CMath.s_valueOf(CMClass.CMObjectType.values(), s.toUpperCase().trim());
}
}
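// Example uses (hypothetical inputs): getTypeByNameOrOrdinal("RACE") and getTypeByNameOrOrdinal(Integer.valueOf(3)) both
// resolve to a CMObjectType; lowercase names such as "ability" are matched case-insensitively via CMath.s_valueOf.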
protected static final Object getClassSet(final String type)
{
return getClassSet(findObjectType(type));
}
protected static final Object getClassSet(final CMObjectType code)
{
switch(code)
{
case RACE:
return c().races;
case CHARCLASS:
return c().charClasses;
case MOB:
return c().MOBs;
case ABILITY:
return c().abilities;
case LOCALE:
return c().locales;
case EXIT:
return c().exits;
case ITEM:
return c().items;
case BEHAVIOR:
return c().behaviors;
case CLAN:
return null;
case WEAPON:
return c().weapons;
case ARMOR:
return c().armor;
case MISCMAGIC:
return c().miscMagic;
case AREA:
return c().areaTypes;
case COMMAND:
return c().commands;
case CLANITEM:
return c().clanItems;
case TECH:
return c().tech;
case WEBMACRO:
return c().webMacros;
case COMMON:
return c().common;
case LIBRARY:
return c().libraries;
case SHIPTECH:
return c().tech;
case SOFTWARE:
return c().tech;
}
return null;
}
/**
* Returns the total number of template/prototypes of the given type stored by
* this CMClass instance.
* @see com.planet_ink.coffee_mud.core.CMClass.CMObjectType
* @param type the type of object to count
* @return the number stored
*/
public static final int numPrototypes(final CMObjectType type)
{
final Object o = getClassSet(type);
if(o instanceof Set)
return ((Set)o).size();
if(o instanceof List)
return ((List)o).size();
if(o instanceof Collection)
return ((Collection)o).size();
if(o instanceof HashSet)
return ((HashSet)o).size();
if(o instanceof Hashtable)
return ((Hashtable)o).size();
if(o instanceof Vector)
return ((Vector)o).size();
return 0;
}
/**
* An enumeration of all the stored races in this classloader for this thread
* @return an enumeration of all the stored races in this classloader for this thread
*/
public static final Enumeration<Race> races(){return c().races.elements();}
/**
* An enumeration of all the stored common Objects in this classloader for this thread
* @return an enumeration of all the stored common Objects in this classloader for this thread
*/
public static final Enumeration<CMCommon> commonObjects(){return c().common.elements();}
/**
* An enumeration of all the stored char Classes in this classloader for this thread
* @return an enumeration of all the stored char Classes in this classloader for this thread
*/
public static final Enumeration<CharClass> charClasses(){return c().charClasses.elements();}
/**
* An enumeration of all the stored mob Types in this classloader for this thread
* @return an enumeration of all the stored mob Types in this classloader for this thread
*/
public static final Enumeration<MOB> mobTypes(){return c().MOBs.elements();}
/**
* An enumeration of all the stored races in this classloader for this thread
* @return an enumeration of all the stored races in this classloader for this thread
*/
public static final Enumeration<CMLibrary> libraries(){return c().libraries.elements();}
/**
* An enumeration of all the stored locales in this classloader for this thread
* @return an enumeration of all the stored locales in this classloader for this thread
*/
public static final Enumeration<Room> locales(){return c().locales.elements();}
/**
* An enumeration of all the stored exits in this classloader for this thread
* @return an enumeration of all the stored exits in this classloader for this thread
*/
public static final Enumeration<Exit> exits(){return c().exits.elements();}
/**
* An enumeration of all the stored behaviors in this classloader for this thread
* @return an enumeration of all the stored behaviors in this classloader for this thread
*/
public static final Enumeration<Behavior> behaviors(){return c().behaviors.elements();}
/**
* An enumeration of all the stored basic Items in this classloader for this thread
* @return an enumeration of all the stored basic Items in this classloader for this thread
*/
public static final Enumeration<Item> basicItems(){return c().items.elements();}
/**
* An enumeration of all the stored weapons in this classloader for this thread
* @return an enumeration of all the stored weapons in this classloader for this thread
*/
public static final Enumeration<Weapon> weapons(){return c().weapons.elements();}
/**
* An enumeration of all the stored armor in this classloader for this thread
* @return an enumeration of all the stored armor in this classloader for this thread
*/
public static final Enumeration<Armor> armor(){return c().armor.elements();}
/**
* An enumeration of all the stored misc Magic in this classloader for this thread
* @return an enumeration of all the stored misc Magic in this classloader for this thread
*/
public static final Enumeration<MiscMagic> miscMagic(){return c().miscMagic.elements();}
/**
* An enumeration of all the stored misc Magic in this classloader for this thread
* @param f the filterer to help select which ones you want
* @return an enumeration of all the stored misc Magic in this classloader for this thread
*/
public static final Enumeration<MiscMagic> miscMagic(Filterer<MiscMagic> f){return new FilteredEnumeration<MiscMagic>(c().miscMagic.elements(),f);}
/**
* An enumeration of all the stored misc Tech in this classloader for this thread
* @return an enumeration of all the stored misc Tech in this classloader for this thread
*/
	public static final Enumeration<Electronics> tech(){return c().tech.elements();}
/**
* An enumeration of all the stored misc Tech in this classloader for this thread
* @param f the filterer to help select which ones you want
* @return an enumeration of all the stored misc Tech in this classloader for this thread
*/
	public static final Enumeration<Electronics> tech(Filterer<Electronics> f){return new FilteredEnumeration<Electronics>(c().tech.elements(),f);}
/**
* An enumeration of all the stored clan Items in this classloader for this thread
* @return an enumeration of all the stored clan Items in this classloader for this thread
*/
public static final Enumeration<ClanItem> clanItems(){return c().clanItems.elements();}
/**
* An enumeration of all the stored area Types in this classloader for this thread
* @return an enumeration of all the stored area Types in this classloader for this thread
*/
public static final Enumeration<Area> areaTypes(){return c().areaTypes.elements();}
/**
* An enumeration of all the stored commands in this classloader for this thread
* @return an enumeration of all the stored commands in this classloader for this thread
*/
public static final Enumeration<Command> commands(){return c().commands.elements();}
/**
* An enumeration of all the stored abilities in this classloader for this thread
* @return an enumeration of all the stored abilities in this classloader for this thread
*/
public static final Enumeration<Ability> abilities(){return c().abilities.elements();}
/**
* An enumeration of all the stored abilities in this classloader for this thread
* @param f the filterer to help select which ones you want
* @return an enumeration of all the stored abilities in this classloader for this thread
*/
public static final Enumeration<Ability> abilities(Filterer<Ability> f){return new FilteredEnumeration<Ability>(c().abilities.elements(),f);}
/**
* An enumeration of all the stored webmacros in this classloader for this thread
* @return an enumeration of all the stored webmacros in this classloader for this thread
*/
public static final Enumeration<WebMacro> webmacros(){return c().webMacros.elements();}
/**
* Returns a random available race prototype from your classloader
* @return a random available race prototype
*/
public static final Race randomRace(){return c().races.elementAt((int)Math.round(Math.floor(Math.random()*(c().races.size()))));}
/**
* Returns a random available char class prototype from your classloader
* @return a random available char class prototype
*/
public static final CharClass randomCharClass(){return c().charClasses.elementAt((int)Math.round(Math.floor(Math.random()*(c().charClasses.size()))));}
/**
* Returns a random available ability prototype from your classloader
* @return a random available ability prototype
*/
public static final Ability randomAbility(){ return c().abilities.elementAt((int)Math.round(Math.floor(Math.random()*(c().abilities.size()))));}
/**
* Returns a random available area prototype from your classloader
* @return a random available area prototype
*/
public static final Area randomArea(){return c().areaTypes.elementAt((int)Math.round(Math.floor(Math.random()*(c().areaTypes.size()))));}
/**
* Returns a new instance of a locale object of the given ID from your classloader
* @param calledThis the ID() of the object to return
* @return a new instance of a locale object of the given ID
*/
public static final Room getLocale(final String calledThis){ return (Room)getNewGlobal(c().locales,calledThis); }
/**
* Returns a reference to the prototype for the library of the given ID from your classloader
* @param calledThis the ID() of the object to return
* @return a reference to the prototype for the library of the given ID
*/
public static final CMLibrary getLibrary(final String calledThis) { return (CMLibrary)getGlobal(c().libraries,calledThis); }
/**
	 * Returns a new instance of an area object of the given ID from your classloader
	 * @param calledThis the ID() of the object to return
	 * @return a new instance of an area object of the given ID
*/
public static final Area getAreaType(final String calledThis) { return (Area)getNewGlobal(c().areaTypes,calledThis); }
/**
	 * Returns a new instance of an exit object of the given ID from your classloader
	 * @param calledThis the ID() of the object to return
	 * @return a new instance of an exit object of the given ID
*/
public static final Exit getExit(final String calledThis) { return (Exit)getNewGlobal(c().exits,calledThis);}
/**
* Returns a new instance of a MOB object of the given ID from your classloader
* @param calledThis the ID() of the object to return
* @return a new instance of a MOB object of the given ID
*/
public static final MOB getMOB(final String calledThis) { return (MOB)getNewGlobal(c().MOBs,calledThis); }
/**
* Returns a new instance of a weapon object of the given ID from your classloader
* @param calledThis the ID() of the object to return
* @return a new instance of a weapon object of the given ID
*/
public static final Weapon getWeapon(final String calledThis) { return (Weapon)getNewGlobal(c().weapons,calledThis); }
/**
* Returns a new instance of a clan item object of the given ID from your classloader
* @param calledThis the ID() of the object to return
* @return a new instance of a clan item object of the given ID
*/
public static final ClanItem getClanItem(final String calledThis) { return (ClanItem)getNewGlobal(c().clanItems,calledThis); }
/**
* Returns a new instance of a misc magic object of the given ID from your classloader
* @param calledThis the ID() of the object to return
* @return a new instance of a misc magic object of the given ID
*/
public static final Item getMiscMagic(final String calledThis) { return (Item)getNewGlobal(c().miscMagic,calledThis); }
/**
* Returns a new instance of a misc tech object of the given ID from your classloader
* @param calledThis the ID() of the object to return
* @return a new instance of a misc tech object of the given ID
*/
public static final Item getTech(final String calledThis) { return (Item)getNewGlobal(c().tech,calledThis);}
/**
	 * Returns a new instance of an armor object of the given ID from your classloader
	 * @param calledThis the ID() of the object to return
	 * @return a new instance of an armor object of the given ID
*/
public static final Armor getArmor(final String calledThis) { return (Armor)getNewGlobal(c().armor,calledThis); }
/**
* Returns a new instance of a basic item object of the given ID from your classloader
* @param calledThis the ID() of the object to return
* @return a new instance of a basic item object of the given ID
*/
public static final Item getBasicItem(final String calledThis) { return (Item)getNewGlobal(c().items,calledThis); }
/**
* Returns a new instance of a behavior object of the given ID from your classloader
* @param calledThis the ID() of the object to return
* @return a new instance of a behavior object of the given ID
*/
public static final Behavior getBehavior(final String calledThis) { return (Behavior)getNewGlobal(c().behaviors,calledThis); }
/**
	 * Returns a new instance of an ability object of the given ID from your classloader
	 * @param calledThis the ID() of the object to return
	 * @return a new instance of an ability object of the given ID
*/
public static final Ability getAbility(final String calledThis) { return (Ability)getNewGlobal(c().abilities,calledThis); }
/**
* Returns the prototype instance of the ability object of the given ID from your classloader
* @param calledThis the ID() of the object to return
	 * @return the prototype instance of an ability object of the given ID
*/
public static final Ability getAbilityPrototype(final String calledThis) { return (Ability)getGlobal(c().abilities,calledThis); }
/**
* Returns a reference to the prototype for the char class of the given ID from your classloader
* @param calledThis the ID() of the object to return
* @return a reference to the prototype for the char class of the given ID
*/
public static final CharClass getCharClass(final String calledThis){ return (CharClass)getGlobal(c().charClasses,calledThis);}
/**
* Returns a new instance of a common object of the given ID from your classloader
* @param calledThis the ID() of the object to return
* @return a new instance of a common object of the given ID
*/
public static final CMCommon getCommon(final String calledThis){return (CMCommon)getNewGlobal(c().common,calledThis);}
/**
* Returns a reference to the prototype for the command of the given ID from your classloader
* @param word the ID() of the object to return
* @return a reference to the prototype for the command of the given ID
*/
public static final Command getCommand(final String word){return (Command)getGlobal(c().commands,word);}
/**
* Returns a reference to the prototype for the web macro of the given ID from your classloader
* @param macroName the ID() of the object to return
* @return a reference to the prototype for the web macro of the given ID
*/
public static final WebMacro getWebMacro(final String macroName){return c().webMacros.get(macroName);}
/**
* Returns a reference to the prototype for the race of the given ID from your classloader
* @param calledThis the ID() of the object to return
* @return a reference to the prototype for the race of the given ID
*/
public static final Race getRace(final String calledThis){return (Race)getGlobal(c().races,calledThis);}
/**
* Returns the number of prototypes in the classloader of the given set of types
* @param types the types to count
* @return the number of prototypes in the classloader of the given set of types
*/
public static final int numPrototypes(final CMObjectType[] types)
{
int total=0;
for (final CMObjectType type : types)
total+=numPrototypes(type);
return total;
}
/**
* Fills the given list with the IDs of the various Item types, subject to the given filters
* @param namesList the list to populate with IDs
* @param NonArchon true to not include Archon items
* @param NonGeneric true to not include Gen items
* @param NonStandard true to not include Standard items
	 * @param themeCode the theme mask to respect (only loosely enforced)
*/
public static final void addAllItemClassNames(final List<String> namesList, final boolean NonArchon,
final boolean NonGeneric, final boolean NonStandard,
final int themeCode)
{
namesList.addAll(getAllItemClassNames(basicItems(),NonArchon,NonGeneric,NonStandard));
namesList.addAll(getAllItemClassNames(weapons(),NonArchon,NonGeneric,NonStandard));
if(CMath.bset(themeCode,Area.THEME_FANTASY))
{
namesList.addAll(getAllItemClassNames(armor(),NonArchon,NonGeneric,NonStandard));
namesList.addAll(getAllItemClassNames(miscMagic(),NonArchon,NonGeneric,NonStandard));
}
if(CMath.bset(themeCode,Area.THEME_TECHNOLOGY))
namesList.addAll(getAllItemClassNames(tech(),NonArchon,NonGeneric,NonStandard));
namesList.addAll(getAllItemClassNames(clanItems(),NonArchon,NonGeneric,NonStandard));
}
private static List<String> getAllItemClassNames(final Enumeration<? extends Item> i,
final boolean NonArchon, final boolean NonGeneric, final boolean NonStandard)
{
final Vector<String> V=new Vector<String>();
for(;i.hasMoreElements();)
{
final Item I=i.nextElement();
if(((!NonArchon)||(!(I instanceof ArchonOnly)))
&&((!NonStandard)||(I.isGeneric()))
&&((!NonGeneric)||(!I.isGeneric())))
V.addElement(CMClass.classID(I));
}
return V;
}
/**
* Returns a new instance of an item object of the given ID from your classloader
* Will search basic, armor, weapons, misc magic, clan items, and misc tech respectively
* @param calledThis the ID() of the object to return
* @return a new instance of an item object of the given ID
*/
public static Item getItem(final String calledThis)
{
Item thisItem=(Item)getNewGlobal(c().items,calledThis);
if(thisItem==null)
thisItem=(Item)getNewGlobal(c().armor,calledThis);
if(thisItem==null)
thisItem=(Item)getNewGlobal(c().weapons,calledThis);
if(thisItem==null)
thisItem=(Item)getNewGlobal(c().miscMagic,calledThis);
if(thisItem==null)
thisItem=(Item)getNewGlobal(c().clanItems,calledThis);
if(thisItem==null)
thisItem=(Item)getNewGlobal(c().tech,calledThis);
return thisItem;
}
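	/*
	 * A minimal usage sketch for getItem (hedged: "GenWeapon" is a hypothetical
	 * class ID and may not exist in every install). getItem falls back through
	 * the item sub-sets in the order documented above and returns a brand new
	 * instance rather than the prototype:
	 *
	 *   final Item newI = CMClass.getItem("GenWeapon"); // hypothetical ID
	 *   if(newI != null)
	 *       Log.sysOut("CMClass", "Created item "+newI.ID());
	 */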
protected Item sampleItem=null;
/**
* Returns the saved copy of the first basic item prototype
* @return the saved copy of the first basic item prototype
*/
public static final Item sampleItem()
{
final CMClass myC=c();
if((myC.sampleItem==null)&&(myC.items.size()>0))
myC.sampleItem= (Item)myC.items.firstElement().copyOf();
return myC.sampleItem;
}
/**
* Returns a reference to the prototype of an item object of the given ID from your classloader
* Will search basic, armor, weapons, misc magic, clan items, and misc tech respectively
* @param itemID the ID() of the object to return
* @return a reference to the prototype of an item object of the given ID
*/
public static final Item getItemPrototype(final String itemID)
{
Item thisItem=(Item)getGlobal(c().items,itemID);
if(thisItem==null)
thisItem=(Item)getGlobal(c().armor,itemID);
if(thisItem==null)
thisItem=(Item)getGlobal(c().weapons,itemID);
if(thisItem==null)
thisItem=(Item)getGlobal(c().miscMagic,itemID);
if(thisItem==null)
thisItem=(Item)getGlobal(c().clanItems,itemID);
if(thisItem==null)
thisItem=(Item)getGlobal(c().tech,itemID);
return thisItem;
}
/**
* Returns a reference to the prototype of a mob object of the given ID from your classloader
* @param mobID the ID() of the object to return
	 * @return a reference to the prototype of a mob object of the given ID
*/
public static final MOB getMOBPrototype(final String mobID)
{
return (MOB)CMClass.getGlobal(c().MOBs,mobID);
}
protected MOB sampleMOB=null;
/**
* Returns the saved copy of the first mob prototype
* @return the saved copy of the first mob prototype
*/
public static final MOB sampleMOB()
{
final CMClass myC=c();
if((myC.sampleMOB==null)&&(myC.MOBs.size()>0))
{
myC.sampleMOB=(MOB)myC.MOBs.firstElement().copyOf();
myC.sampleMOB.basePhyStats().setDisposition(PhyStats.IS_NOT_SEEN);
myC.sampleMOB.phyStats().setDisposition(PhyStats.IS_NOT_SEEN);
}
		if((myC.sampleMOB!=null)&&(myC.sampleMOB.location()==null))
			myC.sampleMOB.setLocation(CMLib.map().getRandomRoom());
		return myC.sampleMOB;
}
protected MOB samplePlayer=null;
/**
* Returns the saved copy of the first mob prototype as a player
* @return the saved copy of the first mob prototype as a player
*/
public static final MOB samplePlayer()
{
final CMClass myC=c();
if((myC.samplePlayer==null)&&(myC.MOBs.size()>0))
{
myC.samplePlayer=(MOB)myC.MOBs.firstElement().copyOf();
myC.samplePlayer.basePhyStats().setDisposition(PhyStats.IS_NOT_SEEN);
myC.samplePlayer.phyStats().setDisposition(PhyStats.IS_NOT_SEEN);
final PlayerStats playerStats = (PlayerStats)getCommon("DefaultPlayerStats");
if(playerStats != null)
{
if(CMProps.isUsingAccountSystem())
{
final PlayerAccount account = (PlayerAccount)getCommon("DefaultPlayerAccount");
if(account != null)
playerStats.setAccount(account);
}
myC.samplePlayer.setPlayerStats(playerStats);
}
}
		if((myC.samplePlayer!=null)&&(myC.samplePlayer.location()==null))
			myC.samplePlayer.setLocation(CMLib.map().getRandomRoom());
		return myC.samplePlayer;
}
/**
* Searches the command prototypes for a trigger word match and returns the command.
* @param word the command word to search for
* @param exactOnly true for a whole word match, false for a startsWith match
	 * @return the matching command prototype, or null if none was found
*/
public static final Command findCommandByTrigger(final String word, final boolean exactOnly)
{
final CMClass myC=c();
final Command C=myC.commandWords.get(word.trim().toUpperCase());
if((exactOnly)||(C!=null))
return C;
final String upword=word.toUpperCase();
String key;
for(final Enumeration<String> e=myC.commandWords.keys();e.hasMoreElements();)
{
key=e.nextElement();
if(key.toUpperCase().startsWith(upword))
return myC.commandWords.get(key);
}
return null;
}
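	/*
	 * A minimal usage sketch (hedged: "GO" is only an illustrative trigger word).
	 * With exactOnly=true only a whole-word match is returned; with exactOnly=false
	 * a prefix resolves to whichever stored trigger word happens to start with it:
	 *
	 *   final Command goC = CMClass.findCommandByTrigger("GO", true);
	 *   if(goC != null)
	 *       Log.sysOut("CMClass", "Matched command "+CMClass.classID(goC));
	 */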
protected final int totalLocalClasses()
{
return races.size()+charClasses.size()+MOBs.size()+abilities.size()+locales.size()+exits.size()
+items.size()+behaviors.size()+weapons.size()+armor.size()+miscMagic.size()+clanItems.size()
+tech.size()+areaTypes.size()+common.size()+libraries.size()+commands.size()
+webMacros.size();
}
/**
* Returns the total number of prototypes of all classes in your classloader
* @return the total number of prototypes of all classes in your classloader
*/
public static final int totalClasses(){ return c().totalLocalClasses();}
/**
* Deletes the class of the given object type from your classloader
* @param type the type of object that the given object belongs to
* @param O the specific prototype class to remove
	 * @return true if the prototype was removed, false otherwise
*/
public static final boolean delClass(final CMObjectType type, final CMObject O)
{
if(O==null)
return false;
if(classes.containsKey(O.getClass().getName()))
classes.remove(O.getClass().getName());
final Object set=getClassSet(type);
if(set==null)
return false;
CMClass.lastUpdateTime=System.currentTimeMillis();
if(set instanceof List)
{
((List)set).remove(O);
if(set instanceof XVector)
((XVector)set).sort();
}
else
if(set instanceof Hashtable)
((Hashtable)set).remove(O.ID().trim());
else
if(set instanceof HashSet)
((HashSet)set).remove(O);
else
return false;
if(set==c().commands)
reloadCommandWords();
//if(set==libraries) CMLib.registerLibraries(libraries.elements());
return true;
}
/**
* Adds a new prototype of the given object type from your classloader
* @param type the type of object that the given object belongs to
* @param O the specific prototype class to add
	 * @return true if the prototype was added, false otherwise
*/
public static final boolean addClass(final CMObjectType type, final CMObject O)
{
final Object set=getClassSet(type);
if(set==null)
return false;
CMClass.lastUpdateTime=System.currentTimeMillis();
if(set instanceof List)
{
((List)set).add(O);
if(set instanceof XVector)
((XVector)set).sort();
}
else
if(set instanceof Hashtable)
((Hashtable)set).put(O.ID().trim().toUpperCase(), O);
else
if(set instanceof HashSet)
((HashSet)set).add(O);
else
return false;
if(set==c().commands)
reloadCommandWords();
if(set==c().libraries)
CMLib.registerLibraries(c().libraries.elements());
return true;
}
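	/*
	 * A minimal sketch of the add/del pairing (hedged assumption: newRace is some
	 * fully constructed Race prototype created elsewhere). Both calls keep the
	 * backing XVector sorted and refresh the command-word map when commands change:
	 *
	 *   CMClass.addClass(CMObjectType.RACE, newRace);
	 *   // ... later ...
	 *   CMClass.delClass(CMObjectType.RACE, newRace);
	 */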
/**
* Searches for a match to the given object type name,
* preferring exact, but accepting prefixes.
* @param name the object type name to search for
* @return the matching object type or NULL
*/
public final static CMObjectType findObjectType(final String name)
{
for(final CMObjectType o : CMObjectType.values())
{
if(o.toString().equalsIgnoreCase(name))
return o;
}
final String upperName=name.toUpperCase();
for(final CMObjectType o : CMObjectType.values())
{
if(o.toString().toUpperCase().startsWith(upperName))
return o;
}
for(final CMObjectType o : CMObjectType.values())
{
if(upperName.startsWith(o.toString().toUpperCase()))
return o;
}
return null;
}
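	/*
	 * Illustrative example: because findObjectType accepts prefixes in both
	 * directions, "RACE", "RAC" and "RACES" would all resolve to the same type
	 * (hedged: the exact enum constant names are assumed from their use elsewhere
	 * in this file):
	 *
	 *   final CMObjectType t = CMClass.findObjectType("RAC");
	 */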
/**
* Searches for a match to the given object type name,
* preferring exact, but accepting prefixes. Returns
* the ancestor java class type
* @param code the object type name to search for
* @return the matching object type interface/ancestor or NULL
*/
public final static String findTypeAncestor(final String code)
{
final CMObjectType typ=findObjectType(code);
if(typ!=null)
return typ.ancestorName;
return "";
}
/**
* Returns the internal object type to which the given object example
* belongs by checking its interface implementations/ancestry
* @param O the object to find the type of
* @return the type of object this is, or NULL
*/
public final static CMObjectType getObjectType(final Object O)
{
for(final CMObjectType o : CMObjectType.values())
{
try
{
final Class<?> ancestorCl = instance().loadClass(o.ancestorName);
if(CMClass.checkAncestry(O.getClass(),ancestorCl))
return o;
}catch(final Exception e){}
}
return null;
}
/**
* Loads the class with the given coffeemud or java path to your classloader.
* @param classType the type of object to load
* @param path the file or java path of the class to load
* @param quiet true to not report errors to the log, false otherwise
* @return true if the prototype was loaded
*/
public static final boolean loadClass(final CMObjectType classType, final String path, final boolean quiet)
{
debugging=CMSecurity.isDebugging(CMSecurity.DbgFlag.CLASSLOADER);
final Object set=getClassSet(classType);
if(set==null)
return false;
CMClass.lastUpdateTime=System.currentTimeMillis();
if(!loadListToObj(set,path,classType.ancestorName,quiet))
return false;
if(set instanceof List)
{
if(set instanceof XVector)
((XVector)set).sort();
if(set==c().commands)
reloadCommandWords();
if(set==c().libraries)
CMLib.registerLibraries(c().libraries.elements());
}
return true;
}
protected static String makeDotClassPath(final String path)
{
String pathLess=path;
final String upperPathLess=pathLess.toUpperCase();
if(upperPathLess.endsWith(".CLASS"))
pathLess=pathLess.substring(0,pathLess.length()-6);
else
if(upperPathLess.endsWith(".JAVA"))
pathLess=pathLess.substring(0,pathLess.length()-5);
else
if(upperPathLess.endsWith(".JS"))
pathLess=pathLess.substring(0,pathLess.length()-3);
pathLess=pathLess.replace('/','.');
pathLess=pathLess.replace('\\','.');
return pathLess;
}
protected static String makeFilePath(final String path)
{
final String upperPath=path.toUpperCase();
if((!upperPath.endsWith(".CLASS"))
&&(!upperPath.endsWith(".JAVA"))
&&(!upperPath.endsWith(".JS")))
return path.replace('.','/')+".class";
return path;
}
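	/*
	 * Illustrative example (hypothetical path): makeDotClassPath turns
	 * "com/planet_ink/coffee_mud/Races/StdRace.class" into
	 * "com.planet_ink.coffee_mud.Races.StdRace", while makeFilePath performs the
	 * reverse mapping, appending ".class" to any bare dotted name it is given.
	 */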
/**
* If the given class exists in the classloader, a new instance will be returned.
* If it does not, it will be loaded, and then a new instance of it will be returned.
* @param classType the type of class as a filter
* @param path the path of some sort to get a new instance of
* @param quiet true to not post errors to the log, false otherwise
* @return a new instance of the given class
*/
public static final Object getLoadNewClassInstance(final CMObjectType classType, final String path, final boolean quiet)
{
if((path==null)||(path.length()==0))
return null;
try
{
final String pathLess=makeDotClassPath(path);
if(classes.containsKey(pathLess))
return (classes.get(pathLess)).newInstance();
}catch(final Exception e){}
final Vector<Object> V=new Vector<Object>(1);
if(!loadListToObj(V,makeFilePath(path),classType.ancestorName,quiet))
return null;
if(V.size()==0)
return null;
final Object o = V.firstElement();
try
{
return o.getClass().newInstance();
}
catch(final Exception e)
{
return o;
}
}
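	/*
	 * A minimal sketch (hedged: the path below is purely illustrative, and the
	 * CMObjectType.ABILITY constant is assumed to exist alongside the other type
	 * constants used in this file). A class already in the cache is instantiated
	 * directly; otherwise the path is first loaded through loadListToObj:
	 *
	 *   final Object o = CMClass.getLoadNewClassInstance(CMObjectType.ABILITY,
	 *       "com/planet_ink/coffee_mud/Abilities/ExampleAbility.class", true);
	 */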
/**
* Returns true if the given class has been loaded into the classloader, or if it is loadable
* through the cm class loading system.
* @param classType the type of class to check for (for ancestry confirmation)
* @param path the path of the class to check for
* @return true if it is loaded or loadable, false otherwise
*/
public final static boolean checkForCMClass(final CMObjectType classType, final String path)
{
if((path==null)||(path.length()==0))
return false;
try
{
final String pathLess=makeDotClassPath(path);
if(classes.containsKey(pathLess))
return true;
}catch(final Exception e){}
final Vector<Object> V=new Vector<Object>(1);
if(!loadListToObj(V,makeFilePath(path),classType.ancestorName,true))
return false;
if(V.size()==0)
return false;
return true;
}
/**
* Returns the base prototype of the given type, by id
* @param type the cmobjecttype to return
* @param calledThis the ID of the cmobjecttype
* @return the base prototype of the given type, by id
*/
public static final CMObject getPrototypeByID(final CMObjectType type, final String calledThis)
{
final Object set=getClassSet(type);
if(set==null)
return null;
CMObject thisItem;
if(set instanceof List)
thisItem=getGlobal((List)set,calledThis);
else
if(set instanceof Map)
thisItem=getGlobal((Map)set,calledThis);
else
return null;
return thisItem;
}
/**
* Returns either a new instance of the class of the given full java name,
* or the coffeemud prototype of the class with the given id. Checks all
* cmobjecttypes.
* @param calledThis the ID or the given full java name.
* @return a new instance of the class, or the prototype
*/
public static final Object getObjectOrPrototype(final String calledThis)
{
String shortThis=calledThis;
final int x=shortThis.lastIndexOf('.');
if(x>0)
{
shortThis=shortThis.substring(x+1);
try{
return classes.get(calledThis).newInstance();
}catch(final Exception e){}
}
for(final CMObjectType o : CMObjectType.values())
{
final Object thisItem=getPrototypeByID(o,shortThis);
if(thisItem!=null)
return thisItem;
}
return null;
}
/**
	 * Returns a new instance of an Environmental of the given id, preferring the
	 * item sets, but also checking mobs and abilities as well.
	 * @param calledThis the id of the cmobject
	 * @return a new instance of an Environmental
*/
public static final Environmental getUnknown(final String calledThis)
{
Environmental thisItem=(Environmental)getNewGlobal(c().items,calledThis);
if(thisItem==null)
thisItem=(Environmental)getNewGlobal(c().armor,calledThis);
if(thisItem==null)
thisItem=(Environmental)getNewGlobal(c().weapons,calledThis);
if(thisItem==null)
thisItem=(Environmental)getNewGlobal(c().miscMagic,calledThis);
if(thisItem==null)
thisItem=(Environmental)getNewGlobal(c().tech,calledThis);
if(thisItem==null)
thisItem=(Environmental)getNewGlobal(c().MOBs,calledThis);
if(thisItem==null)
thisItem=(Environmental)getNewGlobal(c().abilities,calledThis);
if(thisItem==null)
thisItem=(Environmental)getNewGlobal(c().clanItems,calledThis);
if((thisItem==null)&&(c().charClasses.size()>0)&&(calledThis.length()>0))
Log.sysOut("CMClass","Unknown Unknown '"+calledThis+"'.");
return thisItem;
}
/**
* Does a search for a race of the given name, first checking
* for identical matches, then case insensitive name matches.
* @param calledThis the name or id
* @return the race object
*/
public static final Race findRace(final String calledThis)
{
final Race thisItem=getRace(calledThis);
if(thisItem!=null)
return thisItem;
Race R;
final CMClass c=c();
for(int i=0;i<c.races.size();i++)
{
R=c.races.elementAt(i);
if(R.name().equalsIgnoreCase(calledThis))
return R;
}
return null;
}
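	/*
	 * A minimal usage sketch (hedged: "Dwarf" is a hypothetical race ID/name).
	 * findRace tries the exact-ID binary search first and only then scans the
	 * race set by display name, case-insensitively:
	 *
	 *   final Race dwarfR = CMClass.findRace("Dwarf");
	 *   if(dwarfR != null)
	 *       Log.sysOut("CMClass", "Matched race "+dwarfR.ID());
	 */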
/**
* Does a search for a Char Class of the given name, first checking
* for identical matches, then case insensitive name matches.
* @param calledThis the name or id
* @return the Char Class object
*/
public static final CharClass findCharClass(final String calledThis)
{
final CharClass thisItem=getCharClass(calledThis);
if(thisItem!=null)
return thisItem;
CharClass C;
final CMClass c=c();
for(int i=0;i<c.charClasses.size();i++)
{
C=c.charClasses.elementAt(i);
for(int n=0;n<C.nameSet().length;n++)
if(C.nameSet()[n].equalsIgnoreCase(calledThis))
return C;
}
return null;
}
/**
* Returns a new instance of the cmobject of the given id from the given list
* @param list the list to search, must be alphabetized
* @param ID the perfect cmobject ID of the object
* @return a new instance of the cmobject of the given id from the given list
*/
public static final CMObject getNewGlobal(final List<? extends CMObject> list, final String ID)
{
final CMObject O=getGlobal(list,ID);
if(O!=null)
return O.newInstance();
return null;
}
/**
* Returns the prototype of the cmobject of the given id from the given list
* @param list the list to search, must be alphabetized
* @param ID the perfect cmobject ID of the object
* @return the prototype of the cmobject of the given id from the given list
*/
public static final CMObject getGlobal(final List<? extends CMObject> list, final String ID)
{
if(list.size()==0)
return null;
int start=0;
int end=list.size()-1;
while(start<=end)
{
final int mid=(end+start)/2;
final int comp=classID(list.get(mid)).compareToIgnoreCase(ID);
if(comp==0)
return list.get(mid);
else
if(comp>0)
end=mid-1;
else
start=mid+1;
}
return null;
}
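	/*
	 * Illustrative note on the binary search above: the list must already be
	 * sorted case-insensitively by class ID (the XVector sets kept by this class
	 * are re-sorted on add/load), otherwise lookups can silently miss entries.
	 * A minimal sketch, assuming "StdRace" is an ID present in the races set:
	 *
	 *   final CMObject raceProto = getGlobal(c().races, "StdRace");
	 */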
/**
* Searches for an Ability object using the given search term.
* This "finder" matches the ID, and searches the name and display text.
* @param calledThis the search term to use
* @return the first ability found matching the search term
*/
public static final Ability findAbility(final String calledThis)
{
return findAbility(calledThis,-1,-1,false);
}
/**
* Searches for an Ability object using the given search term and filters.
* This "finder" matches the ID, and searches the name and display text.
* @param calledThis the search term to use
* @param ofClassDomain a class/domain filter, or -1 to skip
* @param ofFlags an ability flag filter, or -1 to skip
* @param exactOnly true to match only case-insensitive whole strings, false otherwise
* @return the first ability found matching the search term
*/
public static final Ability findAbility(final String calledThis, final int ofClassDomain, final long ofFlags, final boolean exactOnly)
{
final Vector<Ability> ableV;
Ability A;
if((ofClassDomain>=0)||(ofFlags>=0))
{
ableV = new Vector<Ability>();
for(final Enumeration<Ability> e=c().abilities.elements();e.hasMoreElements();)
{
A=e.nextElement();
if((ofClassDomain<0)
||((A.classificationCode() & Ability.ALL_ACODES)==ofClassDomain)
||((A.classificationCode() & Ability.ALL_DOMAINS)==ofClassDomain))
{
if((ofFlags<0)
||(CMath.bset(A.flags(),ofFlags)))
ableV.addElement(A);
}
}
}
else
ableV = c().abilities;
A=(Ability)getGlobal(ableV,calledThis);
if(A==null)
A=(Ability)CMLib.english().fetchEnvironmental(ableV,calledThis,true);
if((A==null)&&(!exactOnly))
A=(Ability)CMLib.english().fetchEnvironmental(ableV,calledThis,false);
if(A!=null)
A=(Ability)A.newInstance();
return A;
}
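	/*
	 * A minimal usage sketch (hedged: "heal" is only an illustrative search term).
	 * Passing -1 for both filters searches every ability, and any match is
	 * returned as a newInstance() rather than the prototype:
	 *
	 *   final Ability healA = CMClass.findAbility("heal", -1, -1, false);
	 */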
/**
* Searches for a Behavior object using the given search term.
* This "finder" matches the ID, and searches the name.
* @param calledThis the search term to use
* @return the first behavior found matching the search term
*/
public static final Behavior findBehavior(final String calledThis)
{
Behavior B=(Behavior)getGlobal(c().behaviors,calledThis);
if(B==null)
B=getBehaviorByName(calledThis,true);
if(B==null)
B=getBehaviorByName(calledThis,false);
if(B!=null)
B=(Behavior)B.copyOf();
return B;
}
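	/*
	 * A minimal usage sketch (hedged: "Mobile" is a hypothetical behavior ID).
	 * Note that findBehavior returns a copyOf() of the matched prototype, never
	 * the prototype itself:
	 *
	 *   final Behavior wanderB = CMClass.findBehavior("Mobile");
	 */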
/**
* Searches for a Behavior object using the given search term and filters.
* This "finder" matches the name only, no ID.
* @param calledThis the search term to use
* @param exact true for whole string match, false otherwise
* @return the first behavior found matching the search term
*/
public static final Behavior getBehaviorByName(final String calledThis, final boolean exact)
{
if(calledThis==null)
return null;
Behavior B=null;
for(final Enumeration<Behavior> e=behaviors();e.hasMoreElements();)
{
B=e.nextElement();
if(B.name().equalsIgnoreCase(calledThis))
return (Behavior)B.copyOf();
}
if(exact)
return null;
for(final Enumeration<Behavior> e=behaviors();e.hasMoreElements();)
{
B=e.nextElement();
if(CMLib.english().containsString(B.name(),calledThis))
return (Behavior)B.copyOf();
}
return null;
}
/**
* Searches for an Ability object using the given search term.
* This "finder" matches the name only, no ID
* @param calledThis the search term to use
* @param exact true for whole string match, false otherwise
* @return the first ability found matching the search term
*/
public static final Ability getAbilityByName(final String calledThis, final boolean exact)
{
if(calledThis==null)
return null;
Ability A=null;
for(final Enumeration<Ability> e=abilities();e.hasMoreElements();)
{
A=e.nextElement();
if(A.name().equalsIgnoreCase(calledThis))
return A;
}
if(exact)
return null;
for(final Enumeration<Ability> e=abilities();e.hasMoreElements();)
{
A=e.nextElement();
if(CMLib.english().containsString(A.name(),calledThis))
return A;
}
return null;
}
/**
* Searches for an Ability object using the given search term and filters.
* This "finder" searches the name and display text, and finally the ID.
* The filter here is to allow you to filter only abilities that a given
* mob qualifies for by sending their charstats as a "character class" set.
* @param calledThis the search term to use
* @param charStats only the abilities qualified for by the classes herein
* @return the first ability found matching the search term
*/
public static final Ability findAbility(final String calledThis, final CharStats charStats)
{
Ability A=null;
final List<Ability> As=new LinkedList<Ability>();
for(final Enumeration<Ability> e=abilities();e.hasMoreElements();)
{
A=e.nextElement();
for(int c=0;c<charStats.numClasses();c++)
{
final CharClass C=charStats.getMyClass(c);
if(CMLib.ableMapper().getQualifyingLevel(C.ID(),true,A.ID())>=0)
{ As.add(A); break;}
}
}
A=(Ability)CMLib.english().fetchEnvironmental(As,calledThis,true);
if(A==null)
A=(Ability)CMLib.english().fetchEnvironmental(As,calledThis,false);
if(A==null)
A=(Ability)getGlobal(c().abilities,calledThis);
if(A!=null)
A=(Ability)A.newInstance();
return A;
}
/**
* Searches for an Ability object using the given search term and filters.
* This "finder" searches the name and display text, and finally the ID.
* The filter here is to allow you to filter only abilities that a given
* mob actually has.
* @param calledThis the search term to use
* @param mob the dude to search
* @return the first ability found matching the search term
*/
public static final Ability findAbility(final String calledThis, final MOB mob)
{
final List<Ability> As=new LinkedList<Ability>();
Ability A=null;
for(final Enumeration<Ability> a=mob.allAbilities();a.hasMoreElements();)
{
A=a.nextElement();
if(A!=null)
As.add(A);
}
A=(Ability)CMLib.english().fetchEnvironmental(As,calledThis,true);
if(A==null)
A=(Ability)CMLib.english().fetchEnvironmental(As,calledThis,false);
if(A==null)
A=(Ability)getGlobal(c().abilities,calledThis);
if(A!=null)
A=(Ability)A.newInstance();
return A;
}
/**
* Given a map of CMObjects with ID()s defined, this will return the one matched by the given ID.
* If the ID is not found in the map, it will iterate and look for a case-insensitive match before
* giving up. It returns a brand new object.
* @param list the map of IDs to objects
* @param ID the ID to search for
* @return the CMObject that the ID belongs to, after newInstance is called.
*/
public static final CMObject getNewGlobal(final Map<String,? extends CMObject> list, final String ID)
{
final CMObject O=getGlobal(list,ID);
if(O!=null)
return O.newInstance();
return null;
}
/**
* Given a map of CMObjects with ID()s defined, this will return the one matched by the given class name.
* If the name is not found in the map, it will iterate and look for a case-insensitive match before
	 * giving up. It returns the actual map reference.
* @param list the map of IDs to objects
* @param ID the ID to search for
* @return the CMObject that the ID belongs to, straight from the map.
*/
public static final CMObject getGlobal(final Map<String,? extends CMObject> list, final String ID)
{
CMObject o=list.get(ID);
if(o==null)
{
for(final String s : list.keySet())
{
o=list.get(s);
if(classID(o).equalsIgnoreCase(ID))
return o;
}
return null;
}
return o;
}
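	/*
	 * A minimal sketch (hedged: "DefaultPlayerStats" is assumed here only because
	 * it is already used earlier in this file). Map-based lookups fall back to a
	 * case-insensitive scan of the values when the exact key is absent:
	 *
	 *   final CMObject statsProto = getGlobal(c().common, "DefaultPlayerStats");
	 */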
/**
* Adds a new Race to the class sets.
* @param GR the race to add
*/
public static final void addRace(final Race GR)
{
Race R;
for(int i=0;i<c().races.size();i++)
{
R=c().races.elementAt(i);
if(R.ID().compareToIgnoreCase(GR.ID())>=0)
{
if(R.ID().compareToIgnoreCase(GR.ID())==0)
c().races.setElementAt(GR,i);
else
c().races.insertElementAt(GR,i);
return;
}
}
c().races.addElement(GR);
}
/**
* Adds a new character class to the set
* @param CR the character class to add
*/
public static final void addCharClass(final CharClass CR)
{
for(int i=0;i<c().charClasses.size();i++)
{
final CharClass C=c().charClasses.elementAt(i);
if(C.ID().compareToIgnoreCase(CR.ID())>=0)
{
if(C.ID().compareToIgnoreCase(CR.ID())==0)
c().charClasses.setElementAt(CR,i);
else
c().charClasses.insertElementAt(CR,i);
return;
}
}
c().charClasses.addElement(CR);
}
/**
	 * Removes the given character class from this set
* @param C the character class to remove
*/
public static final void delCharClass(final CharClass C)
{
c().charClasses.removeElement(C);
}
/**
* Removes the given race from this set
* @param R the race to remove
*/
public static final void delRace(final Race R)
{
c().races.removeElement(R);
}
/**
	 * Given a list of CMObjects, this will sort them by {@link CMObject#ID()}
* @param V the list of objects to sort.
*/
public static final void sortCMObjectsByID(final List<CMObject> V)
{
Collections.sort(V,new Comparator<CMObject>()
{
@Override
public int compare(CMObject o1, CMObject o2)
{
if(o1 == null)
{
if (o2 == null)
return 0;
return -1;
}
else
if(o2 == null)
return 1;
return o1.ID().compareTo(o2.ID());
}
});
}
/**
	 * Given a list of environmentals, this will sort them, case-insensitively, by {@link Environmental#name()}
* @param V the list of environmentals
*/
public static final void sortEnvironmentalsByName(final List<Environmental> V)
{
Collections.sort(V,new Comparator<Environmental>()
{
@Override
public int compare(Environmental o1, Environmental o2)
{
if(o1 == null)
{
if (o2 == null)
return 0;
return -1;
}
else
if(o2 == null)
return 1;
return o1.name().compareToIgnoreCase(o2.name());
}
});
}
/**
* Calls "initializeclass" on all the given CMObjects.
* @param V the list of CMObjects to initialize.
*/
private final void initializeClassGroup(final List<? extends CMObject> V)
{
for(int v=0;v<V.size();v++)
((CMObject)V.get(v)).initializeClass();
}
/**
* Calls "initializeclass" on all the given CMObjects.
* @param H the set of CMObjects to initialize.
*/
private final void initializeClassGroup(final Map<String,? extends CMObject> H)
{
for(final Object o : H.keySet())
((CMObject)H.get(o)).initializeClass();
}
/**
* Initializes ALL the internal classes in these sets. All of them. All types.
*/
public final void intializeClasses()
{
final char tCode=Thread.currentThread().getThreadGroup().getName().charAt(0);
for(final CMObjectType o : CMObjectType.values())
if((tCode==MudHost.MAIN_HOST)||(CMProps.isPrivateToMe(o.toString())))
{
final Object set = CMClass.getClassSet(o);
if(set instanceof List)
initializeClassGroup((List)set);
else
if(set instanceof Hashtable)
initializeClassGroup((Map)set);
}
}
/**
* Given the "stock" default path to a set of coffeemud classes, and a potential set of requested paths, this
* method will follow requested paths (observing the default if default is listed in the request paths), and
* load all the coffeemud classes therein, making sure they respect the given interface/ancestor name.
* @param defaultPath the path to use when default is a requested path
* @param requestedPathList the ; separated list of paths to look for classes in
	 * @param ancestor the full class name of an ancestor/interface
	 * @return a hashtable mapping the IDs of the classes to a prototype instance of each class
*/
public static Hashtable loadHashListToObj(final String defaultPath, String requestedPathList, final String ancestor)
{
final Hashtable<String,Object> h=new Hashtable<String,Object>();
int x=requestedPathList.indexOf(';');
String path;
while(x>=0)
{
path=requestedPathList.substring(0,x).trim();
requestedPathList=requestedPathList.substring(x+1).trim();
loadObjectListToObj(h,defaultPath,path,ancestor);
x=requestedPathList.indexOf(';');
}
loadObjectListToObj(h,defaultPath,requestedPathList,ancestor);
return h;
}
/**
* Given the "stock" default path to a set of coffeemud classes, and a potential set of requested paths, this
* method will follow requested paths (observing the default if default is listed in the request paths), and
* load all the coffeemud classes therein, making sure they respect the given interface/ancestor name.
* @param defaultPath the path to use when default is a requested path
* @param requestedPathList the ; separated list of paths to look for classes in
	 * @param ancestor the full class name of an ancestor/interface
	 * @return a vector of all the prototype instances of the classes
*/
public static final XVector loadVectorListToObj(final String defaultPath, String requestedPathList, final String ancestor)
{
final Vector<Object> v=new Vector<Object>();
int x=requestedPathList.indexOf(';');
String path;
while(x>=0)
{
path=requestedPathList.substring(0,x).trim();
requestedPathList=requestedPathList.substring(x+1).trim();
loadObjectListToObj(v,defaultPath,path,ancestor);
x=requestedPathList.indexOf(';');
}
loadObjectListToObj(v,defaultPath,requestedPathList,ancestor);
return new XVector(new TreeSet(v));
}
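	/*
	 * A minimal sketch (hedged: the path and the "%DEFAULT%" token simply mirror
	 * the ones used by loadAllCoffeeMudClasses at the bottom of this file):
	 *
	 *   final XVector racesV = loadVectorListToObj(
	 *       "com/planet_ink/coffee_mud/Races/", "%DEFAULT%", CMObjectType.RACE.ancestorName);
	 */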
/**
* Given the "stock" default path to a set of coffeemud classes, and a potential set of requested paths, this
* method will follow requested paths (observing the default if default is listed in the request paths), and
* load all the coffeemud classes therein, making sure they respect the given interface/ancestor class.
* @param defaultPath the path to use when default is a requested path
* @param requestedPathList the ; separated list of paths to look for classes in
	 * @param ancestorC1 the full class of an ancestor/interface
* @param subDir if given, this will be appended to all requested paths except default
* @param quiet true to not report errors to the log, false otherwise
	 * @return a vector of all the prototype instances of the classes
*/
public static final Vector<Object> loadClassList(final String defaultPath, String requestedPathList, final String subDir, final Class<?> ancestorC1, final boolean quiet)
{
final Vector<Object> v=new Vector<Object>();
int x=requestedPathList.indexOf(';');
while(x>=0)
{
String path=requestedPathList.substring(0,x).trim();
requestedPathList=requestedPathList.substring(x+1).trim();
if(path.equalsIgnoreCase("%default%"))
loadListToObj(v,defaultPath, ancestorC1, quiet);
else
{
if((subDir!=null)&&(subDir.length()>0))
path+=subDir;
loadListToObj(v,path,ancestorC1, quiet);
}
x=requestedPathList.indexOf(';');
}
if(requestedPathList.equalsIgnoreCase("%default%"))
loadListToObj(v,defaultPath, ancestorC1, quiet);
else
{
if((subDir!=null)&&(subDir.length()>0))
requestedPathList+=subDir;
loadListToObj(v,requestedPathList,ancestorC1, quiet);
}
return v;
}
/**
* Given a java collection type of some sort (hashtable, vector, etc), a default path, a requested path,
* and the name of an interface/ancestor that classes must implement, this method will load all classes
* in the appropriate path into the given collection.
* @param collection the collection type to use (map, list, set, etc, etc)
* @param defaultPath the path to use if the given path requests the default path
* @param path the requested path to use
* @param ancestor the full java class name of an interface ancestor to force classes to respect
* @return true if classes were loaded without errors, false otherwise
*/
public static final boolean loadObjectListToObj(final Object collection, final String defaultPath, final String path, final String ancestor)
{
if(path.length()>0)
{
final boolean success;
if(path.equalsIgnoreCase("%default%"))
success=loadListToObj(collection,defaultPath, ancestor, false);
else
success=loadListToObj(collection,path,ancestor, false);
return success;
}
return false;
}
/**
* Given a java collection type of some sort (hashtable, vector, etc), a file path,
* and the name of an interface/ancestor that classes must implement, this method will load all classes
* in the appropriate path into the given collection.
* @param collection the collection type to use (map, list, set, etc, etc)
* @param filePath the path to look for classes in
* @param ancestor the full java class name of an interface ancestor to force classes to respect
* @param quiet true to not report errors, false otherwise
* @return true if classes were loaded successfully, false otherwise
*/
public static final boolean loadListToObj(final Object collection, final String filePath, final String ancestor, final boolean quiet)
{
final CMClass loader=new CMClass();
Class<?> ancestorCl=null;
if (ancestor != null && ancestor.length() != 0)
{
try
{
ancestorCl = loader.loadClass(ancestor);
}
catch (final ClassNotFoundException e)
{
if(!quiet)
Log.sysOut("CMClass","WARNING: Couldn't load ancestor class: "+ancestor);
}
}
return loadListToObj(collection, filePath, ancestorCl, quiet);
}
/**
* Given a java collection type of some sort (hashtable, vector, etc), a file path,
* and the class of an interface/ancestor that classes must implement, this method will load all classes
* in the appropriate path into the given collection.
* @param collection the collection type to use (map, list, set, etc, etc)
* @param filePath the path to look for classes in
* @param ancestorCl the full java class of an interface ancestor to force classes to respect
* @param quiet true to not report errors, false otherwise
* @return true if classes were loaded successfully, false otherwise
*/
public static final boolean loadListToObj(final Object collection, final String filePath, final Class<?> ancestorCl, final boolean quiet)
{
final CMClass loader=new CMClass();
final CMFile file=new CMFile(filePath,null,CMFile.FLAG_LOGERRORS);
final Vector<String> fileList=new Vector<String>();
if(file.canRead())
{
if(file.isDirectory())
{
final CMFile[] list=file.listFiles();
for (final CMFile element : list)
if((element.getName().indexOf('$')<0)&&(element.getName().toUpperCase().endsWith(".CLASS")))
fileList.addElement(element.getVFSPathAndName());
for (final CMFile element : list)
if(element.getName().toUpperCase().endsWith(".JS"))
fileList.addElement(element.getVFSPathAndName());
}
else
{
fileList.addElement(file.getVFSPathAndName());
}
}
else
{
if(!quiet)
Log.errOut("CMClass","Unable to access path "+file.getVFSPathAndName());
return false;
}
String item;
for(int l=0;l<fileList.size();l++)
{
item=fileList.elementAt(l);
if(item.startsWith("/"))
item=item.substring(1);
try
{
Object O=null;
String packageName=item.replace('/','.');
if(packageName.toUpperCase().endsWith(".CLASS"))
packageName=packageName.substring(0,packageName.length()-6);
final Class<?> C=loader.loadClass(packageName,true);
if(C!=null)
{
if(!checkAncestry(C,ancestorCl))
{
if(!quiet)
Log.sysOut("CMClass","WARNING: class failed ancestral check: "+packageName);
}
else
O=C.newInstance();
}
if(O==null)
{
if(!quiet)
Log.sysOut("CMClass","Unable to create class '"+packageName+"'");
}
else
{
String itemName=O.getClass().getName();
final int x=itemName.lastIndexOf('.');
if(x>=0)
itemName=itemName.substring(x+1);
if(collection instanceof Map)
{
final Map H=(Map)collection;
if(H.containsKey(itemName.trim().toUpperCase()))
H.remove(itemName.trim().toUpperCase());
H.put(itemName.trim().toUpperCase(),O);
}
else
if(collection instanceof List)
{
final List V=(List)collection;
boolean doNotAdd=false;
for(int v=0;v<V.size();v++)
if(getSimpleClassName(V.get(v)).equals(itemName))
{
V.set(v,O);
doNotAdd=true;
break;
}
if(!doNotAdd)
V.add(O);
}
else
if(collection instanceof Collection)
{
final Collection V=(Collection)collection;
for(final Object o : V)
if(getSimpleClassName(o).equals(itemName))
{
V.remove(o);
break;
}
V.add(O);
}
}
}
catch(final Exception e)
{
if(!quiet)
Log.errOut("CMClass",e);
return false;
}
}
return true;
}
/**
	 * This strange method returns an environmental's name,
	 * plus a string of instance hex digits, which makes
	 * the name more unique per instance.
	 * @param E the environmental to make a unique name for
* @return the unique name
*/
public static final String getObjInstanceStr(Environmental E)
{
if(E==null)
return "NULL";
final int x=E.toString().indexOf('@');
if(x<0)
return E.Name()+E.toString();
return E.Name()+E.toString().substring(x);
}
/**
* Returns the simple class name of an object -- basically the name that comes
* after the final "." in a classpath.
* @param O the object to get the name for
* @return the simple name
*/
public static final String getSimpleClassName(final Object O)
{
if(O==null)
return "";
return getSimpleClassName(O.getClass());
}
/**
* Returns the simple class name of a class -- basically the name that comes
* after the final "." in a classpath.
* @param C the class to get the name for
* @return the simple name
*/
public static final String getSimpleClassName(final Class<?> C)
{
if(C==null)
return "";
final String name=C.getName();
final int lastDot=name.lastIndexOf('.');
if(lastDot>=0)
return name.substring(lastDot+1);
return name;
}
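	/*
	 * Illustrative example (hypothetical class name): for
	 * "com.planet_ink.coffee_mud.Races.StdRace" getSimpleClassName returns
	 * "StdRace", while a class name with no package is returned unchanged.
	 */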
/**
* Given a class, this method will return a CMFile object for the directory containing
* that class.
* @param C the class to get a directory for
* @return the CMFile containing that class
*/
public static final CMFile getClassDir(final Class<?> C)
{
final URL location = C.getProtectionDomain().getCodeSource().getLocation();
String loc;
if(location == null)
{
return null;
}
loc=location.getPath();
loc=loc.replace('/',File.separatorChar);
String floc=new java.io.File(".").getAbsolutePath();
if(floc.endsWith("."))
floc=floc.substring(0,floc.length()-1);
if(floc.endsWith(File.separator))
floc=floc.substring(0,floc.length()-File.separator.length());
int x=floc.indexOf(File.separator);
if(x>=0)
floc=floc.substring(File.separator.length());
x=loc.indexOf(floc);
loc=loc.substring(x+floc.length());
loc=loc.replace(File.separatorChar,'/');
return new CMFile("/"+loc,null);
}
/**
* Returns true if the given class implements the given ancestor/interface
* @param cl the class to check
* @param ancestorCl the ancestor/interface
* @return true if one comes from the second
*/
public static final boolean checkAncestry(final Class<?> cl, final Class<?> ancestorCl)
{
if (cl == null) return false;
if (cl.isPrimitive() || cl.isInterface()) return false;
if ( Modifier.isAbstract( cl.getModifiers()) || !Modifier.isPublic( cl.getModifiers()) ) return false;
if (ancestorCl == null) return true;
return (ancestorCl.isAssignableFrom(cl)) ;
}
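	/*
	 * Illustrative note: checkAncestry deliberately rejects null, primitive,
	 * interface, abstract and non-public classes before the assignability test,
	 * so passing an interface such as the ancestor itself as cl returns false;
	 * only concrete, public implementations of ancestorCl pass the check.
	 */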
/**
	 * Returns the address part of an instance object's "default string name", which
* looks like com.planet_ink.coffee_mud.blah.ClassName{@literal @}ab476d87e
* where the part after the at sign is the address
* @param e the object to get an address for
* @return the address
*/
public static final String classPtrStr(final Object e)
{
final String ptr=""+e;
final int x=ptr.lastIndexOf('@');
if(x>0)
return ptr.substring(x+1);
return ptr;
}
/**
* Returns the ID() if the object is a CMObject, and otherwise
* the simple class name, which is the class name after the final
* dot in a class path
* @param e the object to get a simple class name for.
* @return the simple class name, or ID
*/
public static final String classID(final Object e)
{
		if(e!=null)
		{
			if(e instanceof CMObject)
				return ((CMObject)e).ID();
			return getSimpleClassName(e);
		}
return "";
}
/**
	 * Attempts to load the given class, by fully qualified name.
	 * This is a simple version for external clients, since they will
	 * always want the class resolved before it is returned to them.
* @param className the class name
* @return the class loaded
* @throws ClassNotFoundException something went wrong
*/
@Override
public final Class<?> loadClass(final String className) throws ClassNotFoundException
{
return (loadClass(className, true));
}
/**
* Finishes loading the class into the underlying classloader by handing the byte data to
* the classloader, after building a proper full class name.
* @param className the class name
* @param classData the byte data of the class to load
* @param overPackage the package the class belongs to
* @param resolveIt true to link the class, false if this is a drill
* @return the class defined
* @throws ClassFormatError something went wrong
*/
public final Class<?> finishDefineClass(String className, final byte[] classData, final String overPackage, final boolean resolveIt)
throws ClassFormatError
{
Class<?> result=null;
if(overPackage!=null)
{
final int x=className.lastIndexOf('.');
if(x>=0)
className=overPackage+className.substring(x);
else
className=overPackage+"."+className;
}
try{result=defineClass(className, classData, 0, classData.length);}
catch(final NoClassDefFoundError e)
{
if(e.getMessage().toLowerCase().indexOf("(wrong name:")>=0)
{
final int x=className.lastIndexOf('.');
if(x>=0)
{
final String notherName=className.substring(x+1);
result=defineClass(notherName, classData, 0, classData.length);
}
else
throw e;
}
else
throw e;
}
if (result==null)
{
throw new ClassFormatError();
}
if (resolveIt)
{
resolveClass(result);
}
if(debugging)
Log.debugOut("CMClass","Loaded: "+result.getName());
classes.put(className, result);
return result;
}
/**
* Attempts to load the given class, by fully qualified name. This is fun
* because it will also load javascript classes, if the className ends with
* .js instead of .class
* This is the required version of loadClass<?> which is called
* both from loadClass<?> above and from the internal function
* FindClassFromClass.
* @param className the class name
* @param resolveIt true to link the class, false if this is a drill
* @return the class loaded
* @throws ClassNotFoundException something went wrong
*/
@Override
public synchronized final Class<?> loadClass(String className, final boolean resolveIt)
throws ClassNotFoundException
{
String pathName=null;
if(className.endsWith(".class"))
className=className.substring(0,className.length()-6);
if(className.toUpperCase().endsWith(".JS"))
{
pathName=className.substring(0,className.length()-3).replace('.','/')+className.substring(className.length()-3);
className=className.substring(0,className.length()-3);
}
else
pathName=className.replace('.','/')+".class";
Class<?> result = classes.get(className);
if (result!=null)
{
if(debugging)
Log.debugOut("CMClass","Loaded: "+result.getName());
return result;
}
if((super.findLoadedClass(className)!=null)
||(className.indexOf("com.planet_ink.coffee_mud.")<0)
||(className.startsWith("com.planet_ink.coffee_mud.core."))
||(className.startsWith("com.planet_ink.coffee_mud.application."))
||(className.indexOf(".interfaces.")>=0))
{
try
{
result=super.findSystemClass(className);
if(result!=null)
{
if(debugging)
Log.debugOut("CMClass","Loaded: "+result.getName());
return result;
}
}
catch(final Exception t){}
}
/* Try to load it from our repository */
final CMFile CF=new CMFile(pathName,null);
final byte[] classData=CF.raw();
if((classData==null)||(classData.length==0))
{
throw new ClassNotFoundException("File "+pathName+" not readable!");
}
if(CF.getName().toUpperCase().endsWith(".JS"))
{
final String name=CF.getName().substring(0,CF.getName().length()-3);
final StringBuffer str=CF.textVersion(classData);
if((str==null)||(str.length()==0))
throw new ClassNotFoundException("JavaScript file "+pathName+" not readable!");
final List<String> V=Resources.getFileLineVector(str);
Class<?> extendsClass=null;
final Vector<Class<?>> implementsClasses=new Vector<Class<?>>();
String overPackage=null;
for(int v=0;v<V.size();v++)
{
if((extendsClass==null)&&V.get(v).trim().toUpperCase().startsWith("//EXTENDS "))
{
final String extendName=V.get(v).trim().substring(10).trim();
try
{
extendsClass=loadClass(extendName);
}
catch(final ClassNotFoundException e)
{
Log.errOut("CMClass","Could not load "+CF.getName()+" from "+className+" because "+extendName+" is an invalid extension.");
throw e;
}
}
if((overPackage==null)&&V.get(v).trim().toUpperCase().startsWith("//PACKAGE "))
overPackage=V.get(v).trim().substring(10).trim();
if(V.get(v).toUpperCase().startsWith("//IMPLEMENTS "))
{
final String extendName=V.get(v).substring(13).trim();
Class<?> C=null;
try
{
C=loadClass(extendName);
}
catch(final ClassNotFoundException e)
{
continue;
}
implementsClasses.addElement(C);
}
}
final Context X=Context.enter();
final JScriptLib jlib=new JScriptLib();
X.initStandardObjects(jlib);
jlib.defineFunctionProperties(JScriptLib.functions, JScriptLib.class, ScriptableObject.DONTENUM);
final CompilerEnvirons ce = new CompilerEnvirons();
ce.initFromContext(X);
final ClassCompiler cc = new ClassCompiler(ce);
if(extendsClass==null)
Log.errOut("CMClass","Warning: "+CF.getVFSPathAndName()+" does not extend any class!");
else
cc.setTargetExtends(extendsClass);
Class<?> mainClass=null;
if(implementsClasses.size()>0)
{
final Class[] CS=new Class[implementsClasses.size()];
for(int i=0;i<implementsClasses.size();i++)
CS[i]=implementsClasses.elementAt(i);
cc.setTargetImplements(CS);
}
final Object[] objs = cc.compileToClassFiles(str.toString(), "script", 1, name);
for (int i=0;i<objs.length;i+=2)
{
final Class<?> C=finishDefineClass((String)objs[i],(byte[])objs[i+1],overPackage,resolveIt);
if(mainClass==null)
mainClass=C;
}
Context.exit();
if((debugging)&&(mainClass!=null))
Log.debugOut("CMClass","Loaded: "+mainClass.getName());
return mainClass;
}
result=finishDefineClass(className,classData,null,resolveIt);
return result;
}
/**
* Causes the map of command words associated with command objects
* to be re-mapped, so that users can use them.
*/
protected static final void reloadCommandWords()
{
c().commandWords.clear();
Command C;
String[] wordList;
for(int c=0;c<c().commands.size();c++)
{
C=c().commands.elementAt(c);
wordList=C.getAccessWords();
if(wordList!=null)
for (final String element : wordList)
c().commandWords.put(element.trim().toUpperCase(),C);
}
}
/**
* Making good use of the class path directories from the INI file, this will load
* all the damn classes in coffeemud, being nice enough to report them to the log
* as it does so
* @param page the coffeemud.ini file
* @return true if success happened, and false otherwise
*/
public static final boolean loadAllCoffeeMudClasses(final CMProps page)
{
CMClass c=c();
if(c==null)
c=new CMClass();
final CMClass baseC=clss[MudHost.MAIN_HOST];
final char tCode=Thread.currentThread().getThreadGroup().getName().charAt(0);
// wait for baseC
while((tCode!=MudHost.MAIN_HOST)&&(!classLoaderSync[0]))
{
try
{
Thread.sleep(500);
}
catch(final Exception e)
{
break;
}
}
try
{
final String prefix="com/planet_ink/coffee_mud/";
debugging=CMSecurity.isDebugging(CMSecurity.DbgFlag.CLASSLOADER);
c.libraries=loadVectorListToObj(prefix+"Libraries/",page.getStr("LIBRARY"),CMObjectType.LIBRARY.ancestorName);
if(c.libraries.size()==0)
return false;
CMLib.registerLibraries(c.libraries.elements());
if(CMLib.unregistered().length()>0)
{
Log.errOut("CMClass","Fatal Error: libraries are unregistered: "+CMLib.unregistered().substring(0,CMLib.unregistered().length()-2));
return false;
}
CMLib.propertiesLoaded(); // cause props loaded on libraries, necc for some stuff
if((tCode!=MudHost.MAIN_HOST)&&(!CMProps.isPrivateToMe("COMMON")))
c.common=baseC.common;
else
c.common=loadHashListToObj(prefix+"Common/",page.getStr("COMMON"),CMObjectType.COMMON.ancestorName);
if(c.common.size()==0)
return false;
if((tCode!=MudHost.MAIN_HOST)&&(!CMProps.isPrivateToMe("WEBMACROS")))
c.webMacros=baseC.webMacros;
else
{
c.webMacros=CMClass.loadHashListToObj(prefix+"WebMacros/", "%DEFAULT%",CMObjectType.WEBMACRO.ancestorName);
Log.sysOut(Thread.currentThread().getName(),"WebMacros loaded : "+c.webMacros.size());
for(final Enumeration e=c.webMacros.keys();e.hasMoreElements();)
{
final String key=(String)e.nextElement();
if(key.length()>longestWebMacro)
longestWebMacro=key.length();
}
}
if((tCode!=MudHost.MAIN_HOST)&&(!CMProps.isPrivateToMe("RACE")))
c.races=baseC.races;
else
{
c.races=loadVectorListToObj(prefix+"Races/",page.getStr("RACES"),CMObjectType.RACE.ancestorName);
//Log.sysOut(Thread.currentThread().getName(),"Races loaded : "+c.races.size());
}
if(c.races.size()==0)
return false;
if((tCode!=MudHost.MAIN_HOST)&&(!CMProps.isPrivateToMe("CHARCLASS")))
c.charClasses=baseC.charClasses;
else
{
c.charClasses=loadVectorListToObj(prefix+"CharClasses/",page.getStr("CHARCLASSES"),CMObjectType.CHARCLASS.ancestorName);
//Log.sysOut(Thread.currentThread().getName(),"Classes loaded : "+c.charClasses.size());
}
if(c.charClasses.size()==0)
return false;
if((tCode!=MudHost.MAIN_HOST)&&(!CMProps.isPrivateToMe("MOB")))
c.MOBs=baseC.MOBs;
else
{
c.MOBs=loadVectorListToObj(prefix+"MOBS/",page.getStr("MOBS"),CMObjectType.MOB.ancestorName);
Log.sysOut(Thread.currentThread().getName(),"MOB Types loaded : "+c.MOBs.size());
}
if(c.MOBs.size()==0)
return false;
if((tCode!=MudHost.MAIN_HOST)&&(!CMProps.isPrivateToMe("EXIT")))
c.exits=baseC.exits;
else
{
c.exits=loadVectorListToObj(prefix+"Exits/",page.getStr("EXITS"),CMObjectType.EXIT.ancestorName);
Log.sysOut(Thread.currentThread().getName(),"Exit Types loaded : "+c.exits.size());
}
if(c.exits.size()==0)
return false;
if((tCode!=MudHost.MAIN_HOST)&&(!CMProps.isPrivateToMe("AREA")))
c.areaTypes=baseC.areaTypes;
else
{
c.areaTypes=loadVectorListToObj(prefix+"Areas/",page.getStr("AREAS"),CMObjectType.AREA.ancestorName);
Log.sysOut(Thread.currentThread().getName(),"Area Types loaded : "+c.areaTypes.size());
}
if(c.areaTypes.size()==0)
return false;
if((tCode!=MudHost.MAIN_HOST)&&(!CMProps.isPrivateToMe("LOCALE")))
c.locales=baseC.locales;
else
{
c.locales=loadVectorListToObj(prefix+"Locales/",page.getStr("LOCALES"),CMObjectType.LOCALE.ancestorName);
Log.sysOut(Thread.currentThread().getName(),"Locales loaded : "+c.locales.size());
}
if(c.locales.size()==0)
return false;
if((tCode!=MudHost.MAIN_HOST)&&(!CMProps.isPrivateToMe("ABILITY")))
c.abilities=baseC.abilities;
else
{
c.abilities=loadVectorListToObj(prefix+"Abilities/",page.getStr("ABILITIES"),CMObjectType.ABILITY.ancestorName);
if(c.abilities.size()==0)
return false;
if((page.getStr("ABILITIES")!=null)
&&(page.getStr("ABILITIES").toUpperCase().indexOf("%DEFAULT%")>=0))
{
Vector<Ability> tempV;
int size=0;
tempV=loadVectorListToObj(prefix+"Abilities/Fighter/","%DEFAULT%",CMObjectType.ABILITY.ancestorName);
size=tempV.size();
c.abilities.addAll(tempV);
tempV=loadVectorListToObj(prefix+"Abilities/Ranger/","%DEFAULT%",CMObjectType.ABILITY.ancestorName);
size+=tempV.size();
c.abilities.addAll(tempV);
tempV=loadVectorListToObj(prefix+"Abilities/Paladin/","%DEFAULT%",CMObjectType.ABILITY.ancestorName);
size+=tempV.size();
c.abilities.addAll(tempV);
if(size>0)
Log.sysOut(Thread.currentThread().getName(),"Fighter Skills : "+size);
tempV=loadVectorListToObj(prefix+"Abilities/Druid/","%DEFAULT%",CMObjectType.ABILITY.ancestorName);
if(tempV.size()>0)
Log.sysOut(Thread.currentThread().getName(),"Chants loaded : "+tempV.size());
c.abilities.addAll(tempV);
tempV=loadVectorListToObj(prefix+"Abilities/Languages/","%DEFAULT%",CMObjectType.ABILITY.ancestorName);
if(tempV.size()>0)
Log.sysOut(Thread.currentThread().getName(),"Languages loaded : "+tempV.size());
c.abilities.addAll(tempV);
tempV=loadVectorListToObj(prefix+"Abilities/Properties/","%DEFAULT%",CMObjectType.ABILITY.ancestorName);
size=tempV.size();
c.abilities.addAll(tempV);
tempV=loadVectorListToObj(prefix+"Abilities/Diseases/","%DEFAULT%",CMObjectType.ABILITY.ancestorName);
size+=tempV.size();
c.abilities.addAll(tempV);
tempV=loadVectorListToObj(prefix+"Abilities/Poisons/","%DEFAULT%",CMObjectType.ABILITY.ancestorName);
size+=tempV.size();
c.abilities.addAll(tempV);
tempV=loadVectorListToObj(prefix+"Abilities/Misc/","%DEFAULT%",CMObjectType.ABILITY.ancestorName);
size+=tempV.size();
Log.sysOut(Thread.currentThread().getName(),"Properties loaded : "+size);
c.abilities.addAll(tempV);
tempV=loadVectorListToObj(prefix+"Abilities/Prayers/","%DEFAULT%",CMObjectType.ABILITY.ancestorName);
Log.sysOut(Thread.currentThread().getName(),"Prayers loaded : "+tempV.size());
c.abilities.addAll(tempV);
tempV=loadVectorListToObj(prefix+"Abilities/Archon/","%DEFAULT%",CMObjectType.ABILITY.ancestorName);
size+=tempV.size();
c.abilities.addAll(tempV);
tempV=loadVectorListToObj(prefix+"Abilities/Skills/","%DEFAULT%",CMObjectType.ABILITY.ancestorName);
size=tempV.size();
c.abilities.addAll(tempV);
tempV=loadVectorListToObj(prefix+"Abilities/Thief/","%DEFAULT%",CMObjectType.ABILITY.ancestorName);
size+=tempV.size();
c.abilities.addAll(tempV);
tempV=loadVectorListToObj(prefix+"Abilities/Common/","%DEFAULT%",CMObjectType.ABILITY.ancestorName);
size+=tempV.size();
c.abilities.addAll(tempV);
tempV=loadVectorListToObj(prefix+"Abilities/Specializations/","%DEFAULT%",CMObjectType.ABILITY.ancestorName);
size+=tempV.size();
c.abilities.addAll(tempV);
if(size>0)
Log.sysOut(Thread.currentThread().getName(),"Skills loaded : "+size);
tempV=loadVectorListToObj(prefix+"Abilities/Songs/","%DEFAULT%",CMObjectType.ABILITY.ancestorName);
if(tempV.size()>0)
Log.sysOut(Thread.currentThread().getName(),"Songs loaded : "+tempV.size());
c.abilities.addAll(tempV);
tempV=loadVectorListToObj(prefix+"Abilities/Spells/","%DEFAULT%",CMObjectType.ABILITY.ancestorName);
if(tempV.size()>0)
Log.sysOut(Thread.currentThread().getName(),"Spells loaded : "+tempV.size());
c.abilities.addAll(tempV);
tempV=loadVectorListToObj(prefix+"Abilities/SuperPowers/","%DEFAULT%",CMObjectType.ABILITY.ancestorName);
size=tempV.size();
c.abilities.addAll(tempV);
if(size>0)
Log.sysOut(Thread.currentThread().getName(),"Heroics loaded : "+size);
tempV=loadVectorListToObj(prefix+"Abilities/Tech/","%DEFAULT%",CMObjectType.ABILITY.ancestorName);
size=tempV.size();
c.abilities.addAll(tempV);
if(size>0)
Log.sysOut(Thread.currentThread().getName(),"Tech Skills loaded: "+size);
tempV=loadVectorListToObj(prefix+"Abilities/Traps/","%DEFAULT%",CMObjectType.ABILITY.ancestorName);
if(tempV.size()>0)
Log.sysOut(Thread.currentThread().getName(),"Traps loaded : "+tempV.size());
c.abilities.addAll(tempV);
c.abilities.sort();
CMProps.setUpLowVar(CMProps.Str.MUDSTATUS,"Booting: reading generic abilities");
final List<DatabaseEngine.AckRecord> genAbilities=CMLib.database().DBReadAbilities();
if(genAbilities.size()>0)
{
int loaded=0;
for(final DatabaseEngine.AckRecord rec : genAbilities)
{
String type=rec.typeClass();
if((type==null)||(type.trim().length()==0))
type="GenAbility";
final Ability A=(Ability)(CMClass.getAbility(type).copyOf());
A.setStat("ALLXML",rec.data());
if((!A.ID().equals("GenAbility"))&&(!A.ID().equals(type)))
{
c.abilities.addElement(A);
loaded++;
}
}
if(loaded>0)
{
Log.sysOut(Thread.currentThread().getName(),"GenAbles loaded : "+loaded);
c.abilities.sort();
}
}
}
}
if((tCode!=MudHost.MAIN_HOST)&&(!CMProps.isPrivateToMe("ITEM")))
c.items=baseC.items;
else
{
c.items=loadVectorListToObj(prefix+"Items/Basic/",page.getStr("ITEMS"),CMObjectType.ITEM.ancestorName);
if(c.items.size()>0)
Log.sysOut(Thread.currentThread().getName(),"Basic Items loaded: "+c.items.size());
}
if((tCode!=MudHost.MAIN_HOST)&&(!CMProps.isPrivateToMe("WEAPON")))
c.weapons=baseC.weapons;
else
{
c.weapons=loadVectorListToObj(prefix+"Items/Weapons/",page.getStr("WEAPONS"),CMObjectType.WEAPON.ancestorName);
if(c.weapons.size()>0)
Log.sysOut(Thread.currentThread().getName(),"Weapons loaded : "+c.weapons.size());
}
if((tCode!=MudHost.MAIN_HOST)&&(!CMProps.isPrivateToMe("ARMOR")))
c.armor=baseC.armor;
else
{
c.armor=loadVectorListToObj(prefix+"Items/Armor/",page.getStr("ARMOR"),CMObjectType.ARMOR.ancestorName);
if(c.armor.size()>0)
Log.sysOut(Thread.currentThread().getName(),"Armor loaded : "+c.armor.size());
}
if((tCode!=MudHost.MAIN_HOST)&&(!CMProps.isPrivateToMe("MISCMAGIC")))
c.miscMagic=baseC.miscMagic;
else
{
c.miscMagic=loadVectorListToObj(prefix+"Items/MiscMagic/",page.getStr("MISCMAGIC"),CMObjectType.MISCMAGIC.ancestorName);
if(c.miscMagic.size()>0)
Log.sysOut(Thread.currentThread().getName(),"Magic Items loaded: "+c.miscMagic.size());
}
if((tCode!=MudHost.MAIN_HOST)&&(!CMProps.isPrivateToMe("CLANITEMS")))
c.clanItems=baseC.clanItems;
else
{
c.clanItems=loadVectorListToObj(prefix+"Items/ClanItems/",page.getStr("CLANITEMS"),CMObjectType.CLANITEM.ancestorName);
if(c.clanItems.size()>0)
Log.sysOut(Thread.currentThread().getName(),"Clan Items loaded : "+c.clanItems.size());
}
if((tCode!=MudHost.MAIN_HOST)&&(!CMProps.isPrivateToMe("TECH")))
c.tech=baseC.tech;
else
{
Vector<Electronics> tempV;
c.tech=loadVectorListToObj(prefix+"Items/BasicTech/",page.getStr("TECH"),CMObjectType.TECH.ancestorName);
tempV=loadVectorListToObj(prefix+"Items/ShipTech/",page.getStr("SHIPTECH"),CMObjectType.SHIPTECH.ancestorName);
if(tempV.size()>0)
c.tech.addAll(tempV);
tempV=loadVectorListToObj(prefix+"Items/Software/",page.getStr("SOFTWARE"),CMObjectType.SOFTWARE.ancestorName);
if(tempV.size()>0)
c.tech.addAll(tempV);
if(c.tech.size()>0)
Log.sysOut(Thread.currentThread().getName(),"Electronics loaded: "+c.tech.size());
c.tech.sort();
}
if((c.items.size()+c.weapons.size()+c.armor.size()+c.tech.size()+c.miscMagic.size()+c.clanItems.size())==0)
return false;
if((tCode!=MudHost.MAIN_HOST)&&(!CMProps.isPrivateToMe("BEHAVIOR")))
c.behaviors=baseC.behaviors;
else
{
c.behaviors=loadVectorListToObj(prefix+"Behaviors/",page.getStr("BEHAVIORS"),CMObjectType.BEHAVIOR.ancestorName);
Log.sysOut(Thread.currentThread().getName(),"Behaviors loaded : "+c.behaviors.size());
}
if(c.behaviors.size()==0)
return false;
if((tCode!=MudHost.MAIN_HOST)&&(!CMProps.isPrivateToMe("COMMAND")))
{
c.commands=baseC.commands;
c.commandWords=baseC.commandWords;
}
else
{
c.commands=loadVectorListToObj(prefix+"Commands/",page.getStr("COMMANDS"),CMObjectType.COMMAND.ancestorName);
Log.sysOut(Thread.currentThread().getName(),"Commands loaded : "+c.commands.size());
}
if(c.commands.size()==0)
return false;
}
catch(final Exception t)
{
t.printStackTrace();
return false;
}
reloadCommandWords();
// misc startup stuff
if((tCode==MudHost.MAIN_HOST)||(CMProps.isPrivateToMe("CHARCLASS")))
for(int i=0;i<c.charClasses.size();i++)
{
final CharClass C=c.charClasses.elementAt(i);
C.copyOf();
}
if((tCode==MudHost.MAIN_HOST)||(CMProps.isPrivateToMe("RACE")))
{
int numRaces=c.races.size();
for(int r=0;r<c.races.size();r++)
{
final Race R=c.races.elementAt(r);
R.copyOf();
}
CMProps.setUpLowVar(CMProps.Str.MUDSTATUS,"Booting: reading genRaces");
final List<DatabaseEngine.AckRecord> genRaces=CMLib.database().DBReadRaces();
if(genRaces.size()>0)
{
for(int r=0;r<genRaces.size();r++)
{
final Race GR=(Race)getRace("GenRace").copyOf();
GR.setRacialParms(genRaces.get(r).data());
if(!GR.ID().equals("GenRace"))
{
addRace(GR);
numRaces++;
}
}
}
Log.sysOut(Thread.currentThread().getName(),"Races loaded : "+numRaces);
}
if((tCode==MudHost.MAIN_HOST)||(CMProps.isPrivateToMe("CHARCLASS")))
{
int numCharClasses=c.charClasses.size();
CMProps.setUpLowVar(CMProps.Str.MUDSTATUS,"Booting: reading genClasses");
final List<DatabaseEngine.AckRecord> genClasses=CMLib.database().DBReadClasses();
if(genClasses.size()>0)
{
for(int r=0;r<genClasses.size();r++)
{
final CharClass CR=(CharClass)(CMClass.getCharClass("GenCharClass").copyOf());
CR.setClassParms(genClasses.get(r).data());
if(!CR.ID().equals("GenCharClass"))
{
addCharClass(CR);
numCharClasses++;
}
}
}
Log.sysOut(Thread.currentThread().getName(),"Classes loaded : "+numCharClasses);
}
CMProps.setUpLowVar(CMProps.Str.MUDSTATUS,"Booting: initializing classes");
c.intializeClasses();
if((tCode==MudHost.MAIN_HOST)||(CMProps.isPrivateToMe("EXPERTISES")))
{
CMLib.expertises().recompileExpertises();
Log.sysOut(Thread.currentThread().getName(),"Expertises defined: "+CMLib.expertises().numExpertises());
}
if(tCode==MudHost.MAIN_HOST)
classLoaderSync[0]=true;
CMClass.lastUpdateTime=System.currentTimeMillis();
return true;
}
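/*
 * A minimal boot-time sketch, assuming the parsed coffeemud.ini is already in
 * hand as a CMProps object named "page" and that this runs inside the host's
 * own thread group, as the normal MUD startup does:
 *
 *   CMClass.initialize();                        // bind a CMClass to this thread group
 *   if(!CMClass.loadAllCoffeeMudClasses(page))   // load libraries, races, items, commands, etc.
 *       Log.errOut("Boot","CoffeeMud class loading failed.");
 */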
/**
* Returns a timestamp of the last time there was a change in the full set of classes.
* @return the last time there was a change
*/
public static long getLastClassUpdatedTime(){ return lastUpdateTime; }
/**
* The helper class for full blown JavaScript objects.
* @author Bo Zimmerman
*
*/
protected static final class JScriptLib extends ScriptableObject
{
@Override public String getClassName(){ return "JScriptLib";}
static final long serialVersionUID=47;
public static String[] functions = {"toJavaString"};
public String toJavaString(Object O){return Context.toString(O);}
}
/**
* CMMsg objects are normally re-used, and this method is the recycle bin.
* If the msg were to have been garbage collected, it would get returned here
* anyway, but this is the nice way to get it done.
* @param msg the CMMsg we are done using
* @return true if it was returned to the bin, and false if it was allowed to die
*/
public static final boolean returnMsg(final CMMsg msg)
{
if(MSGS_CACHE.size()<MAX_MSGS)
{
synchronized(CMClass.MSGS_CACHE)
{
if(MSGS_CACHE.size()<MAX_MSGS)
{
MSGS_CACHE.addLast(msg);
return true;
}
}
}
return false;
}
/**
* Returns either a CMMsg object from the cache, if one is available, or makes
* a new one.
* @return a CMMsg object, ready to use.
*/
public final static CMMsg getMsg()
{
try
{
synchronized(MSGS_CACHE)
{
return MSGS_CACHE.removeFirst();
}
}
catch(final Exception e)
{
return (CMMsg)getCommon("DefaultMessage");
}
}
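/*
 * A minimal recycle-bin sketch, assuming a valid MOB named "mob" is in scope and
 * that CMMsg.MSG_SPEAK is the desired message code:
 *
 *   final CMMsg msg = CMClass.getMsg();           // pulled from the cache, or a fresh DefaultMessage
 *   msg.modify(mob, CMMsg.MSG_SPEAK, "Hello there!");
 *   // ... send the message through the usual okMessage/executeMsg machinery ...
 *   CMClass.returnMsg(msg);                       // hand it back to the cache when done
 */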
/**
* Creates and configures a CMMsg object for use in the game
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#source()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#sourceCode()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#sourceMessage()
* @param source the agent source of the action
* @param newAllCode the source, target, and others code to use
* @param allMessage the source, target, and others string msg to send
* @return the CMMsg Object
*/
public static final CMMsg getMsg(final MOB source, final int newAllCode, final String allMessage)
{
final CMMsg M = getMsg();
M.modify(source, newAllCode, allMessage);
return M;
}
/**
* Creates and configures a CMMsg object for use in the game
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#source()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#sourceCode()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#sourceMessage()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#value()
* @param source the agent source of the action
* @param newAllCode the source, target, and others code to use
* @param allMessage the source, target, and others string msg to send
* @param newValue the value to set on the message
* @return the CMMsg Object
*/
public static final CMMsg getMsg(final MOB source, final int newAllCode, final String allMessage, final int newValue)
{
final CMMsg M = getMsg();
M.modify(source, newAllCode, allMessage, newValue);
return M;
}
/**
* Creates and configures a CMMsg object for use in the game
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#source()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#target()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#sourceCode()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#sourceMessage()
* @param source the agent source of the action
* @param target the target of the action
* @param newAllCode the source, target, and others code to use
* @param allMessage the source, target, and others string msg to send
* @return the CMMsg Object
*/
public static final CMMsg getMsg(final MOB source, final Environmental target, final int newAllCode, final String allMessage)
{
final CMMsg M = getMsg();
M.modify(source, target, newAllCode, allMessage);
return M;
}
/**
* Creates and configures a CMMsg object for use in the game
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#source()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#target()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#tool()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#sourceCode()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#sourceMessage()
* @param source the agent source of the action
* @param target the target of the action
* @param tool the tool used by the source to do the action
* @param newAllCode the source, target, and others code to use
* @param allMessage the source, target, and others string msg to send
* @return the CMMsg Object
*/
public static final CMMsg getMsg(final MOB source, final Environmental target, final Environmental tool, final int newAllCode, final String allMessage)
{
final CMMsg M = getMsg();
M.modify(source, target, tool, newAllCode, allMessage);
return M;
}
/**
* Creates and configures a CMMsg object for use in the game
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#source()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#target()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#tool()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#sourceCode()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#targetCode()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#othersCode()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#sourceMessage()
* @param source the agent source of the action
* @param target the target of the action
* @param tool the tool used by the source to do the action
* @param newSourceCode the source code for this action
* @param newTargetCode the target code for this action
* @param newOthersCode the others/observed code for this action
* @param allMessage the source, target, and others string msg to send
* @return the CMMsg Object
*/
public static final CMMsg getMsg(final MOB source, final Environmental target, final Environmental tool, final int newSourceCode, final int newTargetCode,
final int newOthersCode, final String allMessage)
{
final CMMsg M = getMsg();
M.modify(source, target, tool, newSourceCode, newTargetCode, newOthersCode, allMessage);
return M;
}
/**
* Creates and configures a CMMsg object for use in the game
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#source()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#target()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#tool()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#sourceCode()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#sourceMessage()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#targetMessage()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#othersMessage()
* @param source the agent source of the action
* @param target the target of the action
* @param tool the tool used by the source to do the action
* @param newAllCode the source, target, and others code to use
* @param sourceMessage the action/message as seen by the source
* @param targetMessage the action/message as seen by the target
* @param othersMessage the action/message as seen by everyone else
* @return the CMMsg Object
*/
public static final CMMsg getMsg(final MOB source, final Environmental target, final Environmental tool, final int newAllCode, final String sourceMessage,
final String targetMessage, final String othersMessage)
{
final CMMsg M = getMsg();
M.modify(source, target, tool, newAllCode, sourceMessage, newAllCode, targetMessage, newAllCode, othersMessage);
return M;
}
/**
* Creates and configures a CMMsg object for use in the game
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#source()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#target()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#tool()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#sourceCode()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#targetCode()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#othersCode()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#sourceMessage()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#targetMessage()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#othersMessage()
* @param source the agent source of the action
* @param target the target of the action
* @param tool the tool used by the source to do the action
* @param newSourceCode the source code for this action
* @param sourceMessage the action/message as seen by the source
* @param newTargetCode the target code for this action
* @param targetMessage the action/message as seen by the target
* @param newOthersCode the others/observed code for this action
* @param othersMessage the action/message as seen by everyone else
* @return the CMMsg Object
*/
public static final CMMsg getMsg(final MOB source, final Environmental target, final Environmental tool, final int newSourceCode, final String sourceMessage,
final int newTargetCode, final String targetMessage, final int newOthersCode, final String othersMessage)
{
final CMMsg M = getMsg();
M.modify(source, target, tool, newSourceCode, sourceMessage, newTargetCode, targetMessage, newOthersCode, othersMessage);
return M;
}
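/*
 * A minimal sketch of the fully-specified overload, assuming "viewer" and
 * "doorExit" are valid references in scope and srcCode/tgtCode/othCode hold
 * whichever CMMsg message codes the caller needs:
 *
 *   final CMMsg peek = CMClass.getMsg(viewer, doorExit, null,
 *       srcCode, "You peer through <T-NAME>.",
 *       tgtCode, null,
 *       othCode, "<S-NAME> peers through <T-NAME>.");
 */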
/**
* Factory mob objects are normally re-used, and this method is the recycle bin.
* If the mob were to have been garbage collected, it would get returned here
* anyway, but this is the nice way to get it done.
* @param mob the mob we are done using
* @return true if it was returned to the bin, and false if it was allowed to die
*/
public static final boolean returnMob(final MOB mob)
{
if(MOB_CACHE.size()<MAX_MOBS)
{
synchronized(CMClass.MOB_CACHE)
{
MOB_CACHE.addLast(mob);
return true;
}
}
return false;
}
/**
* Returns either a StdFactoryMOB object from the cache, if one is available, or makes
* a new one.
* @return a StdFactoryMOB object, ready to use.
*/
public final static MOB getFactoryMOB()
{
try
{
synchronized(MOB_CACHE)
{
return MOB_CACHE.removeFirst();
}
}
catch(final Exception e)
{
return getMOB("StdFactoryMOB");
}
}
/**
* Returns either a StdFactoryMOB object from the cache, if one is available, or makes
* a new one, giving him the name, level, and room location given.
* @param name the name to give the mob
* @param level the level to give the mob
* @param room the room to set the mobs location at
* @return a StdFactoryMOB object, ready to use.
*/
public final static MOB getFactoryMOB(final String name, final int level, final Room room)
{
final MOB mob2=CMClass.getFactoryMOB();
mob2.setName(name);
mob2.basePhyStats().setLevel(level);
mob2.phyStats().setLevel(level);
mob2.setLocation(room);
return mob2;
}
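/*
 * A minimal sketch, assuming "room" is a valid Room reference; the temporary mob
 * is recycled through returnMob() when the caller is finished with it:
 *
 *   final MOB fake = CMClass.getFactoryMOB("a town crier", 1, room);
 *   // ... use it as a throwaway message source, shopkeeper stand-in, etc ...
 *   CMClass.returnMob(fake);
 */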
/**
* Unloads all the classes in this system.
* Why, I do not know.
*/
public static final void shutdown()
{
for (final CMClass cls : clss)
if(cls!=null)
cls.unload();
classLoaderSync[0]=false;
}
/**
* Clears all the class sets in this loader.
* I don't know why.
*/
public final void unload()
{
common.clear();
races.clear();
charClasses.clear();
MOBs.clear();
abilities.clear();
locales.clear();
exits.clear();
items.clear();
behaviors.clear();
weapons.clear();
armor.clear();
miscMagic.clear();
tech.clear();
areaTypes.clear();
clanItems.clear();
commands.clear();
webMacros.clear();
commandWords.clear();
}
}
|
com/planet_ink/coffee_mud/core/CMClass.java
|
package com.planet_ink.coffee_mud.core;
import com.planet_ink.coffee_mud.WebMacros.interfaces.*;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.Clan.Function;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
import java.io.File;
import java.lang.ref.WeakReference;
import java.lang.reflect.Modifier;
import java.net.URL;
import org.mozilla.javascript.*;
import org.mozilla.javascript.optimizer.*;
/*
Copyright 2001-2015 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
* The core class loader, but more importantly, the core object template manager
* for the whole mud. Classes are grouped by their core interfaces, allowing them
* to have short "ID" names as referents. Classes are loaded and initialized from the
* class loader and then kept as template objects, with newInstances created on demand (or
* simply returned as the template, in cases where the objects are shared).
* @author Bo Zimmerman
*/
@SuppressWarnings({"unchecked","rawtypes"})
public class CMClass extends ClassLoader
{
protected static boolean debugging=false;
protected static volatile long lastUpdateTime=System.currentTimeMillis();
protected static final Map<String,Class<?>> classes=new Hashtable<String,Class<?>>();
private static CMClass[] clss=new CMClass[256];
/**
* Creates a new instance of the class loader, updating the thread-group ref if necessary.
*/
public CMClass()
{
super();
final char c=Thread.currentThread().getThreadGroup().getName().charAt(0);
if(clss==null)
clss=new CMClass[256];
if(clss[c]==null)
clss[c]=this;
}
/**
* Creates and returns a new CMClass object for the current calling thread
* @return a new CMClass object for the current calling thread
*/
public static final CMClass initialize()
{
return new CMClass();
}
/**
* Returns the CMClass instance tied to this particular thread group, or null if not yet created.
* @return the CMClass instance tied to this particular thread group, or null if not yet created.
*/
private static CMClass c()
{
return clss[Thread.currentThread().getThreadGroup().getName().charAt(0)];
}
/**
* Returns the CMClass instance tied to the given thread group, or null if not yet created.
* @param c the code for the thread group to return (0-255)
* @return the CMClass instance tied to the given thread group, or null if not yet created.
*/
public static CMClass c(byte c)
{
return clss[c];
}
/**
* Returns the CMClass instance tied to this particular thread group, or null if not yet created.
* @return the CMClass instance tied to this particular thread group, or null if not yet created.
*/
public static CMClass instance()
{
return c();
}
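/*
 * A minimal sketch: each MUD host runs in a thread group whose name begins with
 * its host code, so the same static calls resolve to that host's own class sets:
 *
 *   final CMClass mine = CMClass.instance();                 // this thread group's loader, or null pre-boot
 *   final CMClass main = CMClass.c((byte)MudHost.MAIN_HOST); // the primary host's loader
 */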
private static boolean[] classLoaderSync={false};
public static enum CMObjectType
{
/** stat constant for race type objects */
RACE("com.planet_ink.coffee_mud.Races.interfaces.Race"),
/** stat constant for char class type objects */
CHARCLASS("com.planet_ink.coffee_mud.CharClasses.interfaces.CharClass"),
/** stat constant for mob type objects */
MOB("com.planet_ink.coffee_mud.MOBS.interfaces.MOB"),
/** stat constant for ability type objects */
ABILITY("com.planet_ink.coffee_mud.Abilities.interfaces.Ability"),
/** stat constant for locale/room type objects */
LOCALE("com.planet_ink.coffee_mud.Locales.interfaces.Room"),
/** stat constant for exit type objects */
EXIT("com.planet_ink.coffee_mud.Exits.interfaces.Exit"),
/** stat constant for item type objects */
ITEM("com.planet_ink.coffee_mud.Items.interfaces.Item"),
/** stat constant for behavior type objects */
BEHAVIOR("com.planet_ink.coffee_mud.Behaviors.interfaces.Behavior"),
/** stat constant for clan type objects */
CLAN("com.planet_ink.coffee_mud.core.interfaces.Clan"),
/** stat constant for weapon type objects */
WEAPON("com.planet_ink.coffee_mud.Items.interfaces.Weapon"),
/** stat constant for armor type objects */
ARMOR("com.planet_ink.coffee_mud.Items.interfaces.Armor"),
/** stat constant for misc magic type objects */
MISCMAGIC("com.planet_ink.coffee_mud.Items.interfaces.MiscMagic"),
/** stat constant for area type objects */
AREA("com.planet_ink.coffee_mud.Areas.interfaces.Area"),
/** stat constant for command type objects */
COMMAND("com.planet_ink.coffee_mud.Commands.interfaces.Command"),
/** stat constant for clan items type objects */
CLANITEM("com.planet_ink.coffee_mud.Items.interfaces.ClanItem"),
/** stat constant for misc tech type objects */
TECH("com.planet_ink.coffee_mud.Items.interfaces.Electronics"),
/** stat constant for misc tech type objects */
SHIPTECH("com.planet_ink.coffee_mud.Items.interfaces.ShipComponent"),
/** stat constant for misc tech type objects */
SOFTWARE("com.planet_ink.coffee_mud.Items.interfaces.Software"),
/** stat constant for webmacros type objects */
WEBMACRO("com.planet_ink.coffee_mud.WebMacros.interfaces.WebMacro"),
/** stat constant for common type objects */
COMMON("com.planet_ink.coffee_mud.Common.interfaces.CMCommon"),
/** stat constant for library type objects */
LIBRARY("com.planet_ink.coffee_mud.Libraries.interfaces.CMLibrary");
public final String ancestorName; // fully qualified name of the ancestor interface this type's classes must implement
CMObjectType(String ancestorName)
{
this.ancestorName = ancestorName;
}
}
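/*
 * A minimal sketch: the ancestorName of each type is the fully qualified
 * interface that every loaded class of that type must implement, and it is what
 * the loadVectorListToObj()/loadHashListToObj() calls pass as their filter:
 *
 *   final String mustImplement = CMObjectType.RACE.ancestorName;
 *   // "com.planet_ink.coffee_mud.Races.interfaces.Race"
 */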
/** collection of all object types that are classified as "items" of one sort or another */
public static final CMObjectType[] OBJECTS_ITEMTYPES = new CMObjectType[]{
CMObjectType.MISCMAGIC,
CMObjectType.ITEM,
CMObjectType.ARMOR,
CMObjectType.CLANITEM,
CMObjectType.TECH,
CMObjectType.SHIPTECH,
CMObjectType.SOFTWARE,
CMObjectType.WEAPON
};
/** static int for the web macro object with the longest name, used for web optimization */
public static int longestWebMacro=-1;
protected Hashtable<String,CMCommon> common=new Hashtable<String,CMCommon>();
protected XVector<Race> races=new XVector<Race>();
protected XVector<CharClass> charClasses=new XVector<CharClass>();
protected XVector<MOB> MOBs=new XVector<MOB>();
protected XVector<Ability> abilities=new XVector<Ability>();
protected XVector<Room> locales=new XVector<Room>();
protected XVector<Exit> exits=new XVector<Exit>();
protected XVector<Item> items=new XVector<Item>();
protected XVector<Behavior> behaviors=new XVector<Behavior>();
protected XVector<Weapon> weapons=new XVector<Weapon>();
protected XVector<Armor> armor=new XVector<Armor>();
protected XVector<MiscMagic> miscMagic=new XVector<MiscMagic>();
protected XVector<Electronics> tech=new XVector<Electronics>();
protected XVector<ClanItem> clanItems=new XVector<ClanItem>();
protected XVector<Area> areaTypes=new XVector<Area>();
protected XVector<Command> commands=new XVector<Command>();
protected XVector<CMLibrary> libraries=new XVector<CMLibrary>();
protected Hashtable<String,WebMacro> webMacros=new Hashtable<String,WebMacro>();
protected Hashtable<String,Command> commandWords=new Hashtable<String,Command>();
protected static final LinkedList<CMMsg> MSGS_CACHE=new LinkedList<CMMsg>();
protected static final LinkedList<MOB> MOB_CACHE=new LinkedList<MOB>();
protected static final int MAX_MSGS=10000+((Runtime.getRuntime().maxMemory()==Integer.MAX_VALUE)?10000:(int)(Runtime.getRuntime().maxMemory()/10000));
protected static final int MAX_MOBS=50+(MAX_MSGS/200);
/*
* removed to save memory and processing time -- but left for future use
protected static final long[] OBJECT_CREATIONS=new long[OBJECT_TOTAL];
protected static final long[] OBJECT_DESTRUCTIONS=new long[OBJECT_TOTAL];
protected static final Map<CMObject,Object>[] OBJECT_CACHE=new WeakHashMap[OBJECT_TOTAL];
protected static final boolean KEEP_OBJECT_CACHE=false;
static
{
if(KEEP_OBJECT_CACHE)
for(int i=0;i<OBJECT_TOTAL;i++)
OBJECT_CACHE[i]=new WeakHashMap<CMObject,Object>();
}
public final static void bumpCounter(final CMObject O, final int which)
{
if(KEEP_OBJECT_CACHE)
{
if(OBJECT_CACHE[which].containsKey(O))
{
Log.errOut("Duplicate!",new Exception("Duplicate Found!"));
return;
}
OBJECT_CACHE[which].put(O,OBJECT_CACHE);
}
OBJECT_CREATIONS[which]++;
}
public final static void unbumpCounter(final CMObject O, final int which)
{
if(KEEP_OBJECT_CACHE)
{
if(OBJECT_CACHE[which].containsKey(O)) // yes, if its in there, its bad
{
OBJECT_CACHE[which].remove(O);
Log.errOut("bumped!",O.getClass().getName());
return;
}
}
OBJECT_DESTRUCTIONS[which]++;
}
public static final String getCounterReport()
{
StringBuffer str=new StringBuffer("");
for(int i=0;i<OBJECT_TOTAL;i++)
if(OBJECT_CREATIONS[i]>0)
str.append(L("@x1: Created: @x2, Destroyed: @x3, Remaining: @x4\n\r",CMStrings.padRight(OBJECT_DESCS[i],12),OBJECT_CREATIONS[i],OBJECT_DESTRUCTIONS[i],(OBJECT_CREATIONS[i]-OBJECT_DESTRUCTIONS[i])));
return str.toString();
}
public static final long numRemainingObjectCounts(final int type)
{
return OBJECT_CREATIONS[type] - OBJECT_DESTRUCTIONS[type];
}
*/
/**
* Returns whether the given class exists in the vm,
* not necessarily any given classloader.
* Requires a fully qualified java class name.
* @param className a fully qualified java class name.
* @return whether the given class exists in the vm
*/
public final static boolean exists(String className)
{
try
{
Class.forName (className);
return true;
}
catch (final ClassNotFoundException exception)
{
return false;
}
}
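/*
 * A minimal sketch:
 *
 *   if(CMClass.exists("org.mozilla.javascript.Context"))
 *       Log.sysOut("CMClass","Rhino scripting support is available.");
 */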
/**
* Checks the given object against the given object type
* @see com.planet_ink.coffee_mud.core.CMClass.CMObjectType
* @param O the object to inspect
* @param type the type to compare against
* @return true if there's a match, and false otherwise
*/
public final static boolean isType(final Object O, final CMObjectType type)
{
switch(type)
{
case RACE:
return O instanceof Race;
case CHARCLASS:
return O instanceof CharClass;
case MOB:
return O instanceof MOB;
case ABILITY:
return O instanceof Ability;
case LOCALE:
return O instanceof Room;
case EXIT:
return O instanceof Exit;
case ITEM:
return O instanceof Item;
case BEHAVIOR:
return O instanceof Behavior;
case CLAN:
return O instanceof Clan;
case WEAPON:
return O instanceof Weapon;
case ARMOR:
return O instanceof Armor;
case MISCMAGIC:
return O instanceof MiscMagic;
case AREA:
return O instanceof Area;
case COMMAND:
return O instanceof Command;
case CLANITEM:
return O instanceof ClanItem;
case TECH:
return O instanceof Electronics;
case WEBMACRO:
return O instanceof WebMacro;
case COMMON:
return O instanceof CMCommon;
case LIBRARY:
return O instanceof CMLibrary;
case SOFTWARE:
return O instanceof Software;
case SHIPTECH:
return O instanceof ShipComponent;
}
return false;
}
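/*
 * A minimal sketch, assuming "obj" is some object whose category is unknown:
 *
 *   if(CMClass.isType(obj, CMObjectType.WEAPON))
 *       Log.debugOut("CMClass", "Got a weapon: "+obj.getClass().getName());
 */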
/**
* Returns a newInstance of an object of the given type and ID. NULL if not found.
* @see com.planet_ink.coffee_mud.core.CMClass.CMObjectType
* @param ID the ID of the object to look for
* @param type the type of object to check
* @return a newInstance of an object of the given type and ID.
*/
public final static CMObject getByType(final String ID, final CMObjectType type)
{
switch(type)
{
case RACE:
return CMClass.getRace(ID);
case CHARCLASS:
return CMClass.getCharClass(ID);
case MOB:
return CMClass.getMOB(ID);
case ABILITY:
return CMClass.getAbility(ID);
case LOCALE:
return CMClass.getLocale(ID);
case EXIT:
return CMClass.getExit(ID);
case ITEM:
return CMClass.getBasicItem(ID);
case BEHAVIOR:
return CMClass.getBehavior(ID);
case CLAN:
return CMClass.getCommon(ID);
case WEAPON:
return CMClass.getWeapon(ID);
case ARMOR:
return CMClass.getArmor(ID);
case MISCMAGIC:
return CMClass.getMiscMagic(ID);
case AREA:
return CMClass.getAreaType(ID);
case COMMAND:
return CMClass.getCommand(ID);
case CLANITEM:
return CMClass.getClanItem(ID);
case TECH:
return CMClass.getTech(ID);
case WEBMACRO:
return CMClass.getWebMacro(ID);
case COMMON:
return CMClass.getCommon(ID);
case LIBRARY:
return CMClass.getLibrary(ID);
case SHIPTECH:
return CMClass.getTech(ID);
case SOFTWARE:
return CMClass.getTech(ID);
}
return null;
}
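/*
 * A minimal sketch; "StdMOB" and "GenRace" are assumed to be loaded stock IDs:
 *
 *   final CMObject mob  = CMClass.getByType("StdMOB",  CMObjectType.MOB);
 *   final CMObject race = CMClass.getByType("GenRace", CMObjectType.RACE);
 */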
/**
* Returns the object type of the given object
* @see com.planet_ink.coffee_mud.core.CMClass.CMObjectType
* @param O the object to inspect
* @return the cmobjecttype of the given object
*/
public final static CMObjectType getType(final Object O)
{
if(O instanceof Race)
return CMObjectType.RACE;
if(O instanceof CharClass)
return CMObjectType.CHARCLASS;
if(O instanceof Ability)
return CMObjectType.ABILITY;
if(O instanceof Room)
return CMObjectType.LOCALE;
if(O instanceof MOB)
return CMObjectType.MOB;
if(O instanceof Exit)
return CMObjectType.EXIT;
if(O instanceof Behavior)
return CMObjectType.BEHAVIOR;
if(O instanceof WebMacro)
return CMObjectType.WEBMACRO;
if(O instanceof Area)
return CMObjectType.AREA;
if(O instanceof CMLibrary)
return CMObjectType.LIBRARY;
if(O instanceof CMCommon)
return CMObjectType.COMMON;
if(O instanceof Command)
return CMObjectType.COMMAND;
if(O instanceof Clan)
return CMObjectType.CLAN;
if(O instanceof ClanItem)
return CMObjectType.CLANITEM;
if(O instanceof MiscMagic)
return CMObjectType.MISCMAGIC;
if(O instanceof Armor)
return CMObjectType.ARMOR;
if(O instanceof Weapon)
return CMObjectType.WEAPON;
if(O instanceof Item)
return CMObjectType.ITEM;
if(O instanceof Software)
return CMObjectType.SOFTWARE;
if(O instanceof ShipComponent)
return CMObjectType.SHIPTECH;
if(O instanceof Electronics)
return CMObjectType.TECH;
return null;
}
/**
* Given a string, Integer, or some other stringable object, this will return the
* cmobjecttype based on its name or ordinal relationship.
* @see com.planet_ink.coffee_mud.core.CMClass.CMObjectType
* @param nameOrOrdinal the string, integer, or whatever object
* @return the cmobjecttype it refers to
*/
public static CMObjectType getTypeByNameOrOrdinal(final Object nameOrOrdinal)
{
if(nameOrOrdinal==null)
return null;
if(nameOrOrdinal instanceof Integer)
{
final int itemtypeord = ((Integer)nameOrOrdinal).intValue();
if((itemtypeord>=0)&&(itemtypeord<CMObjectType.values().length))
return CMClass.CMObjectType.values()[itemtypeord];
}
if(nameOrOrdinal instanceof Long)
{
final int itemtypeord = ((Long)nameOrOrdinal).intValue();
if((itemtypeord>=0)&&(itemtypeord<CMObjectType.values().length))
return CMClass.CMObjectType.values()[itemtypeord];
}
final String s=nameOrOrdinal.toString();
if(s.length()==0)
return null;
if(CMath.isInteger(s))
{
final int itemtypeord=CMath.s_int(s);
if((itemtypeord>=0)&&(itemtypeord<CMObjectType.values().length))
return CMClass.CMObjectType.values()[itemtypeord];
}
try
{
return CMClass.CMObjectType.valueOf(s);
}
catch(final Exception e)
{
return (CMClass.CMObjectType)CMath.s_valueOf(CMClass.CMObjectType.values(), s.toUpperCase().trim());
}
}
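/*
 * A minimal sketch: names, ordinal strings, and boxed numbers all resolve to the
 * same constant (RACE is ordinal 0 in this enum):
 *
 *   final CMObjectType t1 = CMClass.getTypeByNameOrOrdinal("RACE");
 *   final CMObjectType t2 = CMClass.getTypeByNameOrOrdinal(Integer.valueOf(0));
 *   final CMObjectType t3 = CMClass.getTypeByNameOrOrdinal("0");
 *   // t1 == t2 && t2 == t3
 */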
protected static final Object getClassSet(final String type)
{
return getClassSet(findObjectType(type));
}
protected static final Object getClassSet(final CMObjectType code)
{
switch(code)
{
case RACE:
return c().races;
case CHARCLASS:
return c().charClasses;
case MOB:
return c().MOBs;
case ABILITY:
return c().abilities;
case LOCALE:
return c().locales;
case EXIT:
return c().exits;
case ITEM:
return c().items;
case BEHAVIOR:
return c().behaviors;
case CLAN:
return null;
case WEAPON:
return c().weapons;
case ARMOR:
return c().armor;
case MISCMAGIC:
return c().miscMagic;
case AREA:
return c().areaTypes;
case COMMAND:
return c().commands;
case CLANITEM:
return c().clanItems;
case TECH:
return c().tech;
case WEBMACRO:
return c().webMacros;
case COMMON:
return c().common;
case LIBRARY:
return c().libraries;
case SHIPTECH:
return c().tech;
case SOFTWARE:
return c().tech;
}
return null;
}
/**
* Returns the total number of template/prototypes of the given type stored by
* this CMClass instance.
* @see com.planet_ink.coffee_mud.core.CMClass.CMObjectType
* @param type the type of object to count
* @return the number stored
*/
public static final int numPrototypes(final CMObjectType type)
{
final Object o = getClassSet(type);
if(o instanceof Set)
return ((Set)o).size();
if(o instanceof List)
return ((List)o).size();
if(o instanceof Collection)
return ((Collection)o).size();
if(o instanceof HashSet)
return ((HashSet)o).size();
if(o instanceof Hashtable)
return ((Hashtable)o).size();
if(o instanceof Vector)
return ((Vector)o).size();
return 0;
}
/**
* An enumeration of all the stored races in this classloader for this thread
* @return an enumeration of all the stored races in this classloader for this thread
*/
public static final Enumeration<Race> races(){return c().races.elements();}
/**
* An enumeration of all the stored common Objects in this classloader for this thread
* @return an enumeration of all the stored common Objects in this classloader for this thread
*/
public static final Enumeration<CMCommon> commonObjects(){return c().common.elements();}
/**
* An enumeration of all the stored char Classes in this classloader for this thread
* @return an enumeration of all the stored char Classes in this classloader for this thread
*/
public static final Enumeration<CharClass> charClasses(){return c().charClasses.elements();}
/**
* An enumeration of all the stored mob Types in this classloader for this thread
* @return an enumeration of all the stored mob Types in this classloader for this thread
*/
public static final Enumeration<MOB> mobTypes(){return c().MOBs.elements();}
/**
* An enumeration of all the stored races in this classloader for this thread
* @return an enumeration of all the stored races in this classloader for this thread
*/
public static final Enumeration<CMLibrary> libraries(){return c().libraries.elements();}
/**
* An enumeration of all the stored locales in this classloader for this thread
* @return an enumeration of all the stored locales in this classloader for this thread
*/
public static final Enumeration<Room> locales(){return c().locales.elements();}
/**
* An enumeration of all the stored exits in this classloader for this thread
* @return an enumeration of all the stored exits in this classloader for this thread
*/
public static final Enumeration<Exit> exits(){return c().exits.elements();}
/**
* An enumeration of all the stored behaviors in this classloader for this thread
* @return an enumeration of all the stored behaviors in this classloader for this thread
*/
public static final Enumeration<Behavior> behaviors(){return c().behaviors.elements();}
/**
* An enumeration of all the stored basic Items in this classloader for this thread
* @return an enumeration of all the stored basic Items in this classloader for this thread
*/
public static final Enumeration<Item> basicItems(){return c().items.elements();}
/**
* An enumeration of all the stored weapons in this classloader for this thread
* @return an enumeration of all the stored weapons in this classloader for this thread
*/
public static final Enumeration<Weapon> weapons(){return c().weapons.elements();}
/**
* An enumeration of all the stored armor in this classloader for this thread
* @return an enumeration of all the stored armor in this classloader for this thread
*/
public static final Enumeration<Armor> armor(){return c().armor.elements();}
/**
* An enumeration of all the stored misc Magic in this classloader for this thread
* @return an enumeration of all the stored misc Magic in this classloader for this thread
*/
public static final Enumeration<MiscMagic> miscMagic(){return c().miscMagic.elements();}
/**
* An enumeration of all the stored misc Magic in this classloader for this thread
* @param f the filterer to help select which ones you want
* @return an enumeration of all the stored misc Magic in this classloader for this thread
*/
public static final Enumeration<MiscMagic> miscMagic(Filterer<MiscMagic> f){return new FilteredEnumeration<MiscMagic>(c().miscMagic.elements(),f);}
/**
* An enumeration of all the stored misc Tech in this classloader for this thread
* @return an enumeration of all the stored misc Tech in this classloader for this thread
*/
public static final Enumeration<Electronics>tech(){return c().tech.elements();}
/**
* An enumeration of all the stored misc Tech in this classloader for this thread
* @param f the filterer to help select which ones you want
* @return an enumeration of all the stored misc Tech in this classloader for this thread
*/
public static final Enumeration<Electronics>tech(Filterer<Electronics> f){return new FilteredEnumeration<Electronics>(c().tech.elements(),f);}
/**
* An enumeration of all the stored clan Items in this classloader for this thread
* @return an enumeration of all the stored clan Items in this classloader for this thread
*/
public static final Enumeration<ClanItem> clanItems(){return c().clanItems.elements();}
/**
* An enumeration of all the stored area Types in this classloader for this thread
* @return an enumeration of all the stored area Types in this classloader for this thread
*/
public static final Enumeration<Area> areaTypes(){return c().areaTypes.elements();}
/**
* An enumeration of all the stored commands in this classloader for this thread
* @return an enumeration of all the stored commands in this classloader for this thread
*/
public static final Enumeration<Command> commands(){return c().commands.elements();}
/**
* An enumeration of all the stored abilities in this classloader for this thread
* @return an enumeration of all the stored abilities in this classloader for this thread
*/
public static final Enumeration<Ability> abilities(){return c().abilities.elements();}
/**
* An enumeration of all the stored abilities in this classloader for this thread
* @param f the filterer to help select which ones you want
* @return an enumeration of all the stored abilities in this classloader for this thread
*/
public static final Enumeration<Ability> abilities(Filterer<Ability> f){return new FilteredEnumeration<Ability>(c().abilities.elements(),f);}
/**
* An enumeration of all the stored webmacros in this classloader for this thread
* @return an enumeration of all the stored webmacros in this classloader for this thread
*/
public static final Enumeration<WebMacro> webmacros(){return c().webMacros.elements();}
/**
* Returns a random available race prototype from your classloader
* @return a random available race prototype
*/
public static final Race randomRace(){return c().races.elementAt((int)Math.round(Math.floor(Math.random()*(c().races.size()))));}
/**
* Returns a random available char class prototype from your classloader
* @return a random available char class prototype
*/
public static final CharClass randomCharClass(){return c().charClasses.elementAt((int)Math.round(Math.floor(Math.random()*(c().charClasses.size()))));}
/**
* Returns a random available ability prototype from your classloader
* @return a random available ability prototype
*/
public static final Ability randomAbility(){ return c().abilities.elementAt((int)Math.round(Math.floor(Math.random()*(c().abilities.size()))));}
/**
* Returns a random available area prototype from your classloader
* @return a random available area prototype
*/
public static final Area randomArea(){return c().areaTypes.elementAt((int)Math.round(Math.floor(Math.random()*(c().areaTypes.size()))));}
/**
* Returns a new instance of a locale object of the given ID from your classloader
* @param calledThis the ID() of the object to return
* @return a new instance of a locale object of the given ID
*/
public static final Room getLocale(final String calledThis){ return (Room)getNewGlobal(c().locales,calledThis); }
/**
* Returns a reference to the prototype for the library of the given ID from your classloader
* @param calledThis the ID() of the object to return
* @return a reference to the prototype for the library of the given ID
*/
public static final CMLibrary getLibrary(final String calledThis) { return (CMLibrary)getGlobal(c().libraries,calledThis); }
/**
* Returns a new instance of an area object of the given ID from your classloader
* @param calledThis the ID() of the object to return
* @return a new instance of an area object of the given ID
*/
public static final Area getAreaType(final String calledThis) { return (Area)getNewGlobal(c().areaTypes,calledThis); }
/**
* Returns a new instance of an exit object of the given ID from your classloader
* @param calledThis the ID() of the object to return
* @return a new instance of an exit object of the given ID
*/
public static final Exit getExit(final String calledThis) { return (Exit)getNewGlobal(c().exits,calledThis);}
/**
* Returns a new instance of a MOB object of the given ID from your classloader
* @param calledThis the ID() of the object to return
* @return a new instance of a MOB object of the given ID
*/
public static final MOB getMOB(final String calledThis) { return (MOB)getNewGlobal(c().MOBs,calledThis); }
/**
* Returns a new instance of a weapon object of the given ID from your classloader
* @param calledThis the ID() of the object to return
* @return a new instance of a weapon object of the given ID
*/
public static final Weapon getWeapon(final String calledThis) { return (Weapon)getNewGlobal(c().weapons,calledThis); }
/**
* Returns a new instance of a clan item object of the given ID from your classloader
* @param calledThis the ID() of the object to return
* @return a new instance of a clan item object of the given ID
*/
public static final ClanItem getClanItem(final String calledThis) { return (ClanItem)getNewGlobal(c().clanItems,calledThis); }
/**
* Returns a new instance of a misc magic object of the given ID from your classloader
* @param calledThis the ID() of the object to return
* @return a new instance of a misc magic object of the given ID
*/
public static final Item getMiscMagic(final String calledThis) { return (Item)getNewGlobal(c().miscMagic,calledThis); }
/**
* Returns a new instance of a misc tech object of the given ID from your classloader
* @param calledThis the ID() of the object to return
* @return a new instance of a misc tech object of the given ID
*/
public static final Item getTech(final String calledThis) { return (Item)getNewGlobal(c().tech,calledThis);}
/**
* Returns a new instance of an armor object of the given ID from your classloader
* @param calledThis the ID() of the object to return
* @return a new instance of an armor object of the given ID
*/
public static final Armor getArmor(final String calledThis) { return (Armor)getNewGlobal(c().armor,calledThis); }
/**
* Returns a new instance of a basic item object of the given ID from your classloader
* @param calledThis the ID() of the object to return
* @return a new instance of a basic item object of the given ID
*/
public static final Item getBasicItem(final String calledThis) { return (Item)getNewGlobal(c().items,calledThis); }
/**
* Returns a new instance of a behavior object of the given ID from your classloader
* @param calledThis the ID() of the object to return
* @return a new instance of a behavior object of the given ID
*/
public static final Behavior getBehavior(final String calledThis) { return (Behavior)getNewGlobal(c().behaviors,calledThis); }
/**
* Returns a new instance of an ability object of the given ID from your classloader
* @param calledThis the ID() of the object to return
* @return a new instance of an ability object of the given ID
*/
public static final Ability getAbility(final String calledThis) { return (Ability)getNewGlobal(c().abilities,calledThis); }
/**
* Returns the prototype instance of the ability object of the given ID from your classloader
* @param calledThis the ID() of the object to return
* @return the prototype instance of an ability object of the given ID
*/
public static final Ability getAbilityPrototype(final String calledThis) { return (Ability)getGlobal(c().abilities,calledThis); }
/**
* Returns a reference to the prototype for the char class of the given ID from your classloader
* @param calledThis the ID() of the object to return
* @return a reference to the prototype for the char class of the given ID
*/
public static final CharClass getCharClass(final String calledThis){ return (CharClass)getGlobal(c().charClasses,calledThis);}
/**
* Returns a new instance of a common object of the given ID from your classloader
* @param calledThis the ID() of the object to return
* @return a new instance of a common object of the given ID
*/
public static final CMCommon getCommon(final String calledThis){return (CMCommon)getNewGlobal(c().common,calledThis);}
/**
* Returns a reference to the prototype for the command of the given ID from your classloader
* @param word the ID() of the object to return
* @return a reference to the prototype for the command of the given ID
*/
public static final Command getCommand(final String word){return (Command)getGlobal(c().commands,word);}
/**
* Returns a reference to the prototype for the web macro of the given ID from your classloader
* @param macroName the ID() of the object to return
* @return a reference to the prototype for the web macro of the given ID
*/
public static final WebMacro getWebMacro(final String macroName){return c().webMacros.get(macroName);}
/**
* Returns a reference to the prototype for the race of the given ID from your classloader
* @param calledThis the ID() of the object to return
* @return a reference to the prototype for the race of the given ID
*/
public static final Race getRace(final String calledThis){return (Race)getGlobal(c().races,calledThis);}
/**
* Returns the number of prototypes in the classloader of the given set of types
* @param types the types to count
* @return the number of prototypes in the classloader of the given set of types
*/
public static final int numPrototypes(final CMObjectType[] types)
{
int total=0;
for (final CMObjectType type : types)
total+=numPrototypes(type);
return total;
}
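/*
 * A minimal sketch: counting every loaded item prototype in one call:
 *
 *   final int itemPrototypes = CMClass.numPrototypes(CMClass.OBJECTS_ITEMTYPES);
 */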
/**
* Fills the given list with the IDs of the various Item types, subject to the given filters
* @param namesList the list to populate with IDs
* @param NonArchon true to not include Archon items
* @param NonGeneric true to not include Gen items
* @param NonStandard true to not include Standard items
* @param themeCode the theme mask to respect, sort of
*/
public static final void addAllItemClassNames(final List<String> namesList, final boolean NonArchon,
final boolean NonGeneric, final boolean NonStandard,
final int themeCode)
{
namesList.addAll(getAllItemClassNames(basicItems(),NonArchon,NonGeneric,NonStandard));
namesList.addAll(getAllItemClassNames(weapons(),NonArchon,NonGeneric,NonStandard));
if(CMath.bset(themeCode,Area.THEME_FANTASY))
{
namesList.addAll(getAllItemClassNames(armor(),NonArchon,NonGeneric,NonStandard));
namesList.addAll(getAllItemClassNames(miscMagic(),NonArchon,NonGeneric,NonStandard));
}
if(CMath.bset(themeCode,Area.THEME_TECHNOLOGY))
namesList.addAll(getAllItemClassNames(tech(),NonArchon,NonGeneric,NonStandard));
namesList.addAll(getAllItemClassNames(clanItems(),NonArchon,NonGeneric,NonStandard));
}
private static List<String> getAllItemClassNames(final Enumeration<? extends Item> i,
final boolean NonArchon, final boolean NonGeneric, final boolean NonStandard)
{
final Vector<String> V=new Vector<String>();
for(;i.hasMoreElements();)
{
final Item I=i.nextElement();
if(((!NonArchon)||(!(I instanceof ArchonOnly)))
&&((!NonStandard)||(I.isGeneric()))
&&((!NonGeneric)||(!I.isGeneric())))
V.addElement(CMClass.classID(I));
}
return V;
}
/**
* Returns a new instance of an item object of the given ID from your classloader
* Will search basic, armor, weapons, misc magic, clan items, and misc tech respectively
* @param calledThis the ID() of the object to return
* @return a new instance of an item object of the given ID
*/
public static Item getItem(final String calledThis)
{
Item thisItem=(Item)getNewGlobal(c().items,calledThis);
if(thisItem==null)
thisItem=(Item)getNewGlobal(c().armor,calledThis);
if(thisItem==null)
thisItem=(Item)getNewGlobal(c().weapons,calledThis);
if(thisItem==null)
thisItem=(Item)getNewGlobal(c().miscMagic,calledThis);
if(thisItem==null)
thisItem=(Item)getNewGlobal(c().clanItems,calledThis);
if(thisItem==null)
thisItem=(Item)getNewGlobal(c().tech,calledThis);
return thisItem;
}
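	/*
	 * Usage sketch: getItem falls through basic items, armor, weapons, misc
	 * magic, clan items, and tech until an ID matches. "StdItem" is assumed
	 * here to be a loaded basic item class; any loaded item ID works.
	 *
	 *   final Item newItem = CMClass.getItem("StdItem");
	 *   if(newItem != null)
	 *       Log.sysOut("CMClassDemo","Created a new "+CMClass.classID(newItem));
	 */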
protected Item sampleItem=null;
/**
* Returns the saved copy of the first basic item prototype
* @return the saved copy of the first basic item prototype
*/
public static final Item sampleItem()
{
final CMClass myC=c();
if((myC.sampleItem==null)&&(myC.items.size()>0))
myC.sampleItem= (Item)myC.items.firstElement().copyOf();
return myC.sampleItem;
}
/**
* Returns a reference to the prototype of an item object of the given ID from your classloader
* Will search basic, armor, weapons, misc magic, clan items, and misc tech respectively
* @param itemID the ID() of the object to return
* @return a reference to the prototype of an item object of the given ID
*/
public static final Item getItemPrototype(final String itemID)
{
Item thisItem=(Item)getGlobal(c().items,itemID);
if(thisItem==null)
thisItem=(Item)getGlobal(c().armor,itemID);
if(thisItem==null)
thisItem=(Item)getGlobal(c().weapons,itemID);
if(thisItem==null)
thisItem=(Item)getGlobal(c().miscMagic,itemID);
if(thisItem==null)
thisItem=(Item)getGlobal(c().clanItems,itemID);
if(thisItem==null)
thisItem=(Item)getGlobal(c().tech,itemID);
return thisItem;
}
/**
* Returns a reference to the prototype of a mob object of the given ID from your classloader
* @param mobID the ID() of the object to return
	 * @return a reference to the prototype of a mob object of the given ID
*/
public static final MOB getMOBPrototype(final String mobID)
{
return (MOB)CMClass.getGlobal(c().MOBs,mobID);
}
protected MOB sampleMOB=null;
/**
* Returns the saved copy of the first mob prototype
* @return the saved copy of the first mob prototype
*/
public static final MOB sampleMOB()
{
final CMClass myC=c();
if((myC.sampleMOB==null)&&(myC.MOBs.size()>0))
{
myC.sampleMOB=(MOB)myC.MOBs.firstElement().copyOf();
myC.sampleMOB.basePhyStats().setDisposition(PhyStats.IS_NOT_SEEN);
myC.sampleMOB.phyStats().setDisposition(PhyStats.IS_NOT_SEEN);
}
		if((myC.sampleMOB!=null)&&(myC.sampleMOB.location()==null))
myC.sampleMOB.setLocation(CMLib.map().getRandomRoom());
return myC.sampleMOB;
}
protected MOB samplePlayer=null;
/**
* Returns the saved copy of the first mob prototype as a player
* @return the saved copy of the first mob prototype as a player
*/
public static final MOB samplePlayer()
{
final CMClass myC=c();
if((myC.samplePlayer==null)&&(myC.MOBs.size()>0))
{
myC.samplePlayer=(MOB)myC.MOBs.firstElement().copyOf();
myC.samplePlayer.basePhyStats().setDisposition(PhyStats.IS_NOT_SEEN);
myC.samplePlayer.phyStats().setDisposition(PhyStats.IS_NOT_SEEN);
final PlayerStats playerStats = (PlayerStats)getCommon("DefaultPlayerStats");
if(playerStats != null)
{
if(CMProps.isUsingAccountSystem())
{
final PlayerAccount account = (PlayerAccount)getCommon("DefaultPlayerAccount");
if(account != null)
playerStats.setAccount(account);
}
myC.samplePlayer.setPlayerStats(playerStats);
}
}
		if((myC.samplePlayer!=null)&&(myC.samplePlayer.location()==null))
myC.samplePlayer.setLocation(CMLib.map().getRandomRoom());
return myC.samplePlayer;
}
/**
* Searches the command prototypes for a trigger word match and returns the command.
* @param word the command word to search for
* @param exactOnly true for a whole word match, false for a startsWith match
	 * @return the command prototype whose trigger word matches, or null if none is found
*/
public static final Command findCommandByTrigger(final String word, final boolean exactOnly)
{
final CMClass myC=c();
final Command C=myC.commandWords.get(word.trim().toUpperCase());
if((exactOnly)||(C!=null))
return C;
final String upword=word.toUpperCase();
String key;
for(final Enumeration<String> e=myC.commandWords.keys();e.hasMoreElements();)
{
key=e.nextElement();
if(key.toUpperCase().startsWith(upword))
return myC.commandWords.get(key);
}
return null;
}
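	/*
	 * Usage sketch: resolving a user-typed word to a Command prototype.
	 * An exact trigger match is tried first; with exactOnly=false a prefix
	 * such as "lo" may resolve to a command triggered by "LOOK", assuming
	 * such a command is loaded.
	 *
	 *   final Command C = CMClass.findCommandByTrigger("look", false);
	 *   if(C != null)
	 *       Log.sysOut("CMClassDemo","Matched command: "+CMClass.classID(C));
	 */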
protected final int totalLocalClasses()
{
return races.size()+charClasses.size()+MOBs.size()+abilities.size()+locales.size()+exits.size()
+items.size()+behaviors.size()+weapons.size()+armor.size()+miscMagic.size()+clanItems.size()
+tech.size()+areaTypes.size()+common.size()+libraries.size()+commands.size()
+webMacros.size();
}
/**
* Returns the total number of prototypes of all classes in your classloader
* @return the total number of prototypes of all classes in your classloader
*/
public static final int totalClasses(){ return c().totalLocalClasses();}
/**
* Deletes the class of the given object type from your classloader
* @param type the type of object that the given object belongs to
* @param O the specific prototype class to remove
	 * @return true if the prototype was removed, false otherwise
*/
public static final boolean delClass(final CMObjectType type, final CMObject O)
{
if(O==null)
return false;
if(classes.containsKey(O.getClass().getName()))
classes.remove(O.getClass().getName());
final Object set=getClassSet(type);
if(set==null)
return false;
CMClass.lastUpdateTime=System.currentTimeMillis();
if(set instanceof List)
{
((List)set).remove(O);
if(set instanceof XVector)
((XVector)set).sort();
}
else
if(set instanceof Hashtable)
((Hashtable)set).remove(O.ID().trim());
else
if(set instanceof HashSet)
((HashSet)set).remove(O);
else
return false;
if(set==c().commands)
reloadCommandWords();
//if(set==libraries) CMLib.registerLibraries(libraries.elements());
return true;
}
/**
	 * Adds a new prototype of the given object type to your classloader
	 * @param type the type of object that the given object belongs to
	 * @param O the specific prototype class to add
	 * @return true if the prototype was added, false otherwise
*/
public static final boolean addClass(final CMObjectType type, final CMObject O)
{
final Object set=getClassSet(type);
if(set==null)
return false;
CMClass.lastUpdateTime=System.currentTimeMillis();
if(set instanceof List)
{
((List)set).add(O);
if(set instanceof XVector)
((XVector)set).sort();
}
else
if(set instanceof Hashtable)
((Hashtable)set).put(O.ID().trim().toUpperCase(), O);
else
if(set instanceof HashSet)
((HashSet)set).add(O);
else
return false;
if(set==c().commands)
reloadCommandWords();
if(set==c().libraries)
CMLib.registerLibraries(c().libraries.elements());
return true;
}
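	/*
	 * Usage sketch: registering and later removing a prototype at runtime.
	 * The new race below is assumed to be a fully-built copy of GenRace;
	 * addClass/delClass keep the backing set sorted and refresh command
	 * words when the command set changes.
	 *
	 *   final Race newRace = (Race)CMClass.getRace("GenRace").copyOf();
	 *   CMClass.addClass(CMObjectType.RACE, newRace);
	 *   // ... later ...
	 *   CMClass.delClass(CMObjectType.RACE, newRace);
	 */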
/**
* Searches for a match to the given object type name,
* preferring exact, but accepting prefixes.
* @param name the object type name to search for
* @return the matching object type or NULL
*/
public final static CMObjectType findObjectType(final String name)
{
for(final CMObjectType o : CMObjectType.values())
{
if(o.toString().equalsIgnoreCase(name))
return o;
}
final String upperName=name.toUpperCase();
for(final CMObjectType o : CMObjectType.values())
{
if(o.toString().toUpperCase().startsWith(upperName))
return o;
}
for(final CMObjectType o : CMObjectType.values())
{
if(upperName.startsWith(o.toString().toUpperCase()))
return o;
}
return null;
}
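	/*
	 * Usage sketch: findObjectType prefers an exact (case-insensitive) name
	 * match and then falls back to prefix matching in either direction, so
	 * the abbreviation below is assumed to be unambiguous.
	 *
	 *   final CMObjectType typ = CMClass.findObjectType("ABIL");
	 *   if(typ != null)
	 *       Log.sysOut("CMClassDemo",typ+" -> "+typ.ancestorName);
	 */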
/**
* Searches for a match to the given object type name,
* preferring exact, but accepting prefixes. Returns
* the ancestor java class type
* @param code the object type name to search for
	 * @return the ancestor/interface class name of the matching object type, or an empty string if not found
*/
public final static String findTypeAncestor(final String code)
{
final CMObjectType typ=findObjectType(code);
if(typ!=null)
return typ.ancestorName;
return "";
}
/**
* Returns the internal object type to which the given object example
* belongs by checking its interface implementations/ancestry
* @param O the object to find the type of
* @return the type of object this is, or NULL
*/
public final static CMObjectType getObjectType(final Object O)
{
for(final CMObjectType o : CMObjectType.values())
{
try
{
final Class<?> ancestorCl = instance().loadClass(o.ancestorName);
if(CMClass.checkAncestry(O.getClass(),ancestorCl))
return o;
}catch(final Exception e){}
}
return null;
}
/**
* Loads the class with the given coffeemud or java path to your classloader.
* @param classType the type of object to load
* @param path the file or java path of the class to load
* @param quiet true to not report errors to the log, false otherwise
* @return true if the prototype was loaded
*/
public static final boolean loadClass(final CMObjectType classType, final String path, final boolean quiet)
{
debugging=CMSecurity.isDebugging(CMSecurity.DbgFlag.CLASSLOADER);
final Object set=getClassSet(classType);
if(set==null)
return false;
CMClass.lastUpdateTime=System.currentTimeMillis();
if(!loadListToObj(set,path,classType.ancestorName,quiet))
return false;
if(set instanceof List)
{
if(set instanceof XVector)
((XVector)set).sort();
if(set==c().commands)
reloadCommandWords();
if(set==c().libraries)
CMLib.registerLibraries(c().libraries.elements());
}
return true;
}
protected static String makeDotClassPath(final String path)
{
String pathLess=path;
final String upperPathLess=pathLess.toUpperCase();
if(upperPathLess.endsWith(".CLASS"))
pathLess=pathLess.substring(0,pathLess.length()-6);
else
if(upperPathLess.endsWith(".JAVA"))
pathLess=pathLess.substring(0,pathLess.length()-5);
else
if(upperPathLess.endsWith(".JS"))
pathLess=pathLess.substring(0,pathLess.length()-3);
pathLess=pathLess.replace('/','.');
pathLess=pathLess.replace('\\','.');
return pathLess;
}
protected static String makeFilePath(final String path)
{
final String upperPath=path.toUpperCase();
if((!upperPath.endsWith(".CLASS"))
&&(!upperPath.endsWith(".JAVA"))
&&(!upperPath.endsWith(".JS")))
return path.replace('.','/')+".class";
return path;
}
/**
* If the given class exists in the classloader, a new instance will be returned.
* If it does not, it will be loaded, and then a new instance of it will be returned.
* @param classType the type of class as a filter
* @param path the path of some sort to get a new instance of
* @param quiet true to not post errors to the log, false otherwise
* @return a new instance of the given class
*/
public static final Object getLoadNewClassInstance(final CMObjectType classType, final String path, final boolean quiet)
{
if((path==null)||(path.length()==0))
return null;
try
{
final String pathLess=makeDotClassPath(path);
if(classes.containsKey(pathLess))
return (classes.get(pathLess)).newInstance();
}catch(final Exception e){}
final Vector<Object> V=new Vector<Object>(1);
if(!loadListToObj(V,makeFilePath(path),classType.ancestorName,quiet))
return null;
if(V.size()==0)
return null;
final Object o = V.firstElement();
try
{
return o.getClass().newInstance();
}
catch(final Exception e)
{
return o;
}
}
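	/*
	 * Usage sketch: loading (if necessary) and instantiating a class by
	 * path. The path below names a hypothetical library class and is only
	 * an assumption; the caller is responsible for casting the result.
	 *
	 *   final Object o = CMClass.getLoadNewClassInstance(CMObjectType.LIBRARY,
	 *       "com/planet_ink/coffee_mud/Libraries/MyLibrary.class", true);
	 *   if(o != null)
	 *       Log.sysOut("CMClassDemo","Instantiated "+CMClass.getSimpleClassName(o));
	 */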
/**
* Returns true if the given class has been loaded into the classloader, or if it is loadable
	 * through the CoffeeMud class loading system.
* @param classType the type of class to check for (for ancestry confirmation)
* @param path the path of the class to check for
* @return true if it is loaded or loadable, false otherwise
*/
public final static boolean checkForCMClass(final CMObjectType classType, final String path)
{
if((path==null)||(path.length()==0))
return false;
try
{
final String pathLess=makeDotClassPath(path);
if(classes.containsKey(pathLess))
return true;
}catch(final Exception e){}
final Vector<Object> V=new Vector<Object>(1);
if(!loadListToObj(V,makeFilePath(path),classType.ancestorName,true))
return false;
if(V.size()==0)
return false;
return true;
}
/**
* Returns the base prototype of the given type, by id
* @param type the cmobjecttype to return
* @param calledThis the ID of the cmobjecttype
* @return the base prototype of the given type, by id
*/
public static final CMObject getPrototypeByID(final CMObjectType type, final String calledThis)
{
final Object set=getClassSet(type);
if(set==null)
return null;
CMObject thisItem;
if(set instanceof List)
thisItem=getGlobal((List)set,calledThis);
else
if(set instanceof Map)
thisItem=getGlobal((Map)set,calledThis);
else
return null;
return thisItem;
}
/**
* Returns either a new instance of the class of the given full java name,
* or the coffeemud prototype of the class with the given id. Checks all
* cmobjecttypes.
* @param calledThis the ID or the given full java name.
* @return a new instance of the class, or the prototype
*/
public static final Object getObjectOrPrototype(final String calledThis)
{
String shortThis=calledThis;
final int x=shortThis.lastIndexOf('.');
if(x>0)
{
shortThis=shortThis.substring(x+1);
try{
return classes.get(calledThis).newInstance();
}catch(final Exception e){}
}
for(final CMObjectType o : CMObjectType.values())
{
final Object thisItem=getPrototypeByID(o,shortThis);
if(thisItem!=null)
return thisItem;
}
return null;
}
/**
	 * Returns a new instance of an Environmental of the given id, preferring items,
	 * but also checking mobs and abilities.
	 * @param calledThis the id of the cmobject
	 * @return a new instance of an Environmental, or null if none was found
*/
public static final Environmental getUnknown(final String calledThis)
{
Environmental thisItem=(Environmental)getNewGlobal(c().items,calledThis);
if(thisItem==null)
thisItem=(Environmental)getNewGlobal(c().armor,calledThis);
if(thisItem==null)
thisItem=(Environmental)getNewGlobal(c().weapons,calledThis);
if(thisItem==null)
thisItem=(Environmental)getNewGlobal(c().miscMagic,calledThis);
if(thisItem==null)
thisItem=(Environmental)getNewGlobal(c().tech,calledThis);
if(thisItem==null)
thisItem=(Environmental)getNewGlobal(c().MOBs,calledThis);
if(thisItem==null)
thisItem=(Environmental)getNewGlobal(c().abilities,calledThis);
if(thisItem==null)
thisItem=(Environmental)getNewGlobal(c().clanItems,calledThis);
if((thisItem==null)&&(c().charClasses.size()>0)&&(calledThis.length()>0))
Log.sysOut("CMClass","Unknown Unknown '"+calledThis+"'.");
return thisItem;
}
/**
* Does a search for a race of the given name, first checking
* for identical matches, then case insensitive name matches.
* @param calledThis the name or id
* @return the race object
*/
public static final Race findRace(final String calledThis)
{
final Race thisItem=getRace(calledThis);
if(thisItem!=null)
return thisItem;
Race R;
final CMClass c=c();
for(int i=0;i<c.races.size();i++)
{
R=c.races.elementAt(i);
if(R.name().equalsIgnoreCase(calledThis))
return R;
}
return null;
}
/**
* Does a search for a Char Class of the given name, first checking
* for identical matches, then case insensitive name matches.
* @param calledThis the name or id
* @return the Char Class object
*/
public static final CharClass findCharClass(final String calledThis)
{
final CharClass thisItem=getCharClass(calledThis);
if(thisItem!=null)
return thisItem;
CharClass C;
final CMClass c=c();
for(int i=0;i<c.charClasses.size();i++)
{
C=c.charClasses.elementAt(i);
for(int n=0;n<C.nameSet().length;n++)
if(C.nameSet()[n].equalsIgnoreCase(calledThis))
return C;
}
return null;
}
/**
* Returns a new instance of the cmobject of the given id from the given list
* @param list the list to search, must be alphabetized
	 * @param ID the exact cmobject ID of the object
* @return a new instance of the cmobject of the given id from the given list
*/
public static final CMObject getNewGlobal(final List<? extends CMObject> list, final String ID)
{
final CMObject O=getGlobal(list,ID);
if(O!=null)
return O.newInstance();
return null;
}
/**
* Returns the prototype of the cmobject of the given id from the given list
* @param list the list to search, must be alphabetized
	 * @param ID the exact cmobject ID of the object
* @return the prototype of the cmobject of the given id from the given list
*/
public static final CMObject getGlobal(final List<? extends CMObject> list, final String ID)
{
if(list.size()==0)
return null;
int start=0;
int end=list.size()-1;
while(start<=end)
{
final int mid=(end+start)/2;
final int comp=classID(list.get(mid)).compareToIgnoreCase(ID);
if(comp==0)
return list.get(mid);
else
if(comp>0)
end=mid-1;
else
start=mid+1;
}
return null;
}
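	/*
	 * Note: this lookup is a binary search on classID(), so the list passed
	 * in must already be sorted by ID (case-insensitively), as the class
	 * sets kept here are. A minimal internal-style sketch ("StdRace" is an
	 * assumed ID):
	 *
	 *   final Race R = (Race)getGlobal(c().races, "StdRace");
	 */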
/**
* Searches for an Ability object using the given search term.
* This "finder" matches the ID, and searches the name and display text.
* @param calledThis the search term to use
* @return the first ability found matching the search term
*/
public static final Ability findAbility(final String calledThis)
{
return findAbility(calledThis,-1,-1,false);
}
/**
* Searches for an Ability object using the given search term and filters.
* This "finder" matches the ID, and searches the name and display text.
* @param calledThis the search term to use
* @param ofClassDomain a class/domain filter, or -1 to skip
* @param ofFlags an ability flag filter, or -1 to skip
* @param exactOnly true to match only case-insensitive whole strings, false otherwise
* @return the first ability found matching the search term
*/
public static final Ability findAbility(final String calledThis, final int ofClassDomain, final long ofFlags, final boolean exactOnly)
{
final Vector<Ability> ableV;
Ability A;
if((ofClassDomain>=0)||(ofFlags>=0))
{
ableV = new Vector<Ability>();
for(final Enumeration<Ability> e=c().abilities.elements();e.hasMoreElements();)
{
A=e.nextElement();
if((ofClassDomain<0)
||((A.classificationCode() & Ability.ALL_ACODES)==ofClassDomain)
||((A.classificationCode() & Ability.ALL_DOMAINS)==ofClassDomain))
{
if((ofFlags<0)
||(CMath.bset(A.flags(),ofFlags)))
ableV.addElement(A);
}
}
}
else
ableV = c().abilities;
A=(Ability)getGlobal(ableV,calledThis);
if(A==null)
A=(Ability)CMLib.english().fetchEnvironmental(ableV,calledThis,true);
if((A==null)&&(!exactOnly))
A=(Ability)CMLib.english().fetchEnvironmental(ableV,calledThis,false);
if(A!=null)
A=(Ability)A.newInstance();
return A;
}
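	/*
	 * Usage sketch: finding a new instance of a spell by partial name,
	 * restricted to the spell class code. Both the Ability.ACODE_SPELL
	 * filter and the search term are assumptions for illustration.
	 *
	 *   final Ability A = CMClass.findAbility("magic missile", Ability.ACODE_SPELL, -1, false);
	 *   if(A != null)
	 *       Log.sysOut("CMClassDemo","Found "+A.ID());
	 */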
/**
* Searches for a Behavior object using the given search term.
* This "finder" matches the ID, and searches the name.
* @param calledThis the search term to use
* @return the first behavior found matching the search term
*/
public static final Behavior findBehavior(final String calledThis)
{
Behavior B=(Behavior)getGlobal(c().behaviors,calledThis);
if(B==null)
B=getBehaviorByName(calledThis,true);
if(B==null)
B=getBehaviorByName(calledThis,false);
if(B!=null)
B=(Behavior)B.copyOf();
return B;
}
/**
* Searches for a Behavior object using the given search term and filters.
* This "finder" matches the name only, no ID.
* @param calledThis the search term to use
* @param exact true for whole string match, false otherwise
* @return the first behavior found matching the search term
*/
public static final Behavior getBehaviorByName(final String calledThis, final boolean exact)
{
if(calledThis==null)
return null;
Behavior B=null;
for(final Enumeration<Behavior> e=behaviors();e.hasMoreElements();)
{
B=e.nextElement();
if(B.name().equalsIgnoreCase(calledThis))
return (Behavior)B.copyOf();
}
if(exact)
return null;
for(final Enumeration<Behavior> e=behaviors();e.hasMoreElements();)
{
B=e.nextElement();
if(CMLib.english().containsString(B.name(),calledThis))
return (Behavior)B.copyOf();
}
return null;
}
/**
* Searches for an Ability object using the given search term.
* This "finder" matches the name only, no ID
* @param calledThis the search term to use
* @param exact true for whole string match, false otherwise
* @return the first ability found matching the search term
*/
public static final Ability getAbilityByName(final String calledThis, final boolean exact)
{
if(calledThis==null)
return null;
Ability A=null;
for(final Enumeration<Ability> e=abilities();e.hasMoreElements();)
{
A=e.nextElement();
if(A.name().equalsIgnoreCase(calledThis))
return A;
}
if(exact)
return null;
for(final Enumeration<Ability> e=abilities();e.hasMoreElements();)
{
A=e.nextElement();
if(CMLib.english().containsString(A.name(),calledThis))
return A;
}
return null;
}
/**
* Searches for an Ability object using the given search term and filters.
* This "finder" searches the name and display text, and finally the ID.
	 * The filter here allows you to limit results to abilities that a given
	 * mob qualifies for, by passing its charstats as a "character class" set.
* @param calledThis the search term to use
* @param charStats only the abilities qualified for by the classes herein
* @return the first ability found matching the search term
*/
public static final Ability findAbility(final String calledThis, final CharStats charStats)
{
Ability A=null;
final List<Ability> As=new LinkedList<Ability>();
for(final Enumeration<Ability> e=abilities();e.hasMoreElements();)
{
A=e.nextElement();
for(int c=0;c<charStats.numClasses();c++)
{
final CharClass C=charStats.getMyClass(c);
if(CMLib.ableMapper().getQualifyingLevel(C.ID(),true,A.ID())>=0)
{ As.add(A); break;}
}
}
A=(Ability)CMLib.english().fetchEnvironmental(As,calledThis,true);
if(A==null)
A=(Ability)CMLib.english().fetchEnvironmental(As,calledThis,false);
if(A==null)
A=(Ability)getGlobal(c().abilities,calledThis);
if(A!=null)
A=(Ability)A.newInstance();
return A;
}
/**
* Searches for an Ability object using the given search term and filters.
* This "finder" searches the name and display text, and finally the ID.
	 * The filter here limits results to abilities that the given
	 * mob actually has.
* @param calledThis the search term to use
* @param mob the dude to search
* @return the first ability found matching the search term
*/
public static final Ability findAbility(final String calledThis, final MOB mob)
{
final List<Ability> As=new LinkedList<Ability>();
Ability A=null;
for(final Enumeration<Ability> a=mob.allAbilities();a.hasMoreElements();)
{
A=a.nextElement();
if(A!=null)
As.add(A);
}
A=(Ability)CMLib.english().fetchEnvironmental(As,calledThis,true);
if(A==null)
A=(Ability)CMLib.english().fetchEnvironmental(As,calledThis,false);
if(A==null)
A=(Ability)getGlobal(c().abilities,calledThis);
if(A!=null)
A=(Ability)A.newInstance();
return A;
}
/**
* Given a map of CMObjects with ID()s defined, this will return the one matched by the given ID.
* If the ID is not found in the map, it will iterate and look for a case-insensitive match before
* giving up. It returns a brand new object.
* @param list the map of IDs to objects
* @param ID the ID to search for
* @return the CMObject that the ID belongs to, after newInstance is called.
*/
public static final CMObject getNewGlobal(final Map<String,? extends CMObject> list, final String ID)
{
final CMObject O=getGlobal(list,ID);
if(O!=null)
return O.newInstance();
return null;
}
/**
	 * Given a map of CMObjects with ID()s defined, this will return the one matched by the given ID.
	 * If the ID is not found as a key in the map, it will iterate and look for a case-insensitive match before
	 * giving up. It returns the actual map reference.
* @param list the map of IDs to objects
* @param ID the ID to search for
* @return the CMObject that the ID belongs to, straight from the map.
*/
public static final CMObject getGlobal(final Map<String,? extends CMObject> list, final String ID)
{
CMObject o=list.get(ID);
if(o==null)
{
for(final String s : list.keySet())
{
o=list.get(s);
if(classID(o).equalsIgnoreCase(ID))
return o;
}
return null;
}
return o;
}
/**
* Adds a new Race to the class sets.
* @param GR the race to add
*/
public static final void addRace(final Race GR)
{
Race R;
for(int i=0;i<c().races.size();i++)
{
R=c().races.elementAt(i);
if(R.ID().compareToIgnoreCase(GR.ID())>=0)
{
if(R.ID().compareToIgnoreCase(GR.ID())==0)
c().races.setElementAt(GR,i);
else
c().races.insertElementAt(GR,i);
return;
}
}
c().races.addElement(GR);
}
/**
* Adds a new character class to the set
* @param CR the character class to add
*/
public static final void addCharClass(final CharClass CR)
{
for(int i=0;i<c().charClasses.size();i++)
{
final CharClass C=c().charClasses.elementAt(i);
if(C.ID().compareToIgnoreCase(CR.ID())>=0)
{
if(C.ID().compareToIgnoreCase(CR.ID())==0)
c().charClasses.setElementAt(CR,i);
else
c().charClasses.insertElementAt(CR,i);
return;
}
}
c().charClasses.addElement(CR);
}
/**
	 * Removes the given character class from this set
* @param C the character class to remove
*/
public static final void delCharClass(final CharClass C)
{
c().charClasses.removeElement(C);
}
/**
* Removes the given race from this set
* @param R the race to remove
*/
public static final void delRace(final Race R)
{
c().races.removeElement(R);
}
/**
	 * Given a list of CMObjects, this will sort them by {@link CMObject#ID()}
* @param V the list of objects to sort.
*/
public static final void sortCMObjectsByID(final List<CMObject> V)
{
Collections.sort(V,new Comparator<CMObject>()
{
@Override
public int compare(CMObject o1, CMObject o2)
{
if(o1 == null)
{
if (o2 == null)
return 0;
return -1;
}
else
if(o2 == null)
return 1;
return o1.ID().compareTo(o2.ID());
}
});
}
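	/*
	 * Usage sketch: sorting an arbitrary list of prototypes by ID before
	 * display. The two prototypes added are just examples.
	 *
	 *   final List<CMObject> objs = new Vector<CMObject>();
	 *   objs.add(CMClass.getRace("GenRace"));
	 *   objs.add(CMClass.getCharClass("GenCharClass"));
	 *   CMClass.sortCMObjectsByID(objs);
	 */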
/**
	 * Given a list of environmentals, this will sort them by {@link Environmental#name()}
* @param V the list of environmentals
*/
public static final void sortEnvironmentalsByName(final List<Environmental> V)
{
Collections.sort(V,new Comparator<Environmental>()
{
@Override
public int compare(Environmental o1, Environmental o2)
{
if(o1 == null)
{
if (o2 == null)
return 0;
return -1;
}
else
if(o2 == null)
return 1;
return o1.name().compareToIgnoreCase(o2.name());
}
});
}
/**
* Calls "initializeclass" on all the given CMObjects.
* @param V the list of CMObjects to initialize.
*/
private final void initializeClassGroup(final List<? extends CMObject> V)
{
for(int v=0;v<V.size();v++)
((CMObject)V.get(v)).initializeClass();
}
/**
* Calls "initializeclass" on all the given CMObjects.
* @param H the set of CMObjects to initialize.
*/
private final void initializeClassGroup(final Map<String,? extends CMObject> H)
{
for(final Object o : H.keySet())
((CMObject)H.get(o)).initializeClass();
}
/**
* Initializes ALL the internal classes in these sets. All of them. All types.
*/
public final void intializeClasses()
{
final char tCode=Thread.currentThread().getThreadGroup().getName().charAt(0);
for(final CMObjectType o : CMObjectType.values())
if((tCode==MudHost.MAIN_HOST)||(CMProps.isPrivateToMe(o.toString())))
{
final Object set = CMClass.getClassSet(o);
if(set instanceof List)
initializeClassGroup((List)set);
else
if(set instanceof Hashtable)
initializeClassGroup((Map)set);
}
}
/**
* Given the "stock" default path to a set of coffeemud classes, and a potential set of requested paths, this
* method will follow requested paths (observing the default if default is listed in the request paths), and
* load all the coffeemud classes therein, making sure they respect the given interface/ancestor name.
* @param defaultPath the path to use when default is a requested path
* @param requestedPathList the ; separated list of paths to look for classes in
	 * @param ancestor the full class name of an ancestor/interface
	 * @return a hashtable mapping the IDs of the classes to a prototype instance of each class
*/
public static Hashtable loadHashListToObj(final String defaultPath, String requestedPathList, final String ancestor)
{
final Hashtable<String,Object> h=new Hashtable<String,Object>();
int x=requestedPathList.indexOf(';');
String path;
while(x>=0)
{
path=requestedPathList.substring(0,x).trim();
requestedPathList=requestedPathList.substring(x+1).trim();
loadObjectListToObj(h,defaultPath,path,ancestor);
x=requestedPathList.indexOf(';');
}
loadObjectListToObj(h,defaultPath,requestedPathList,ancestor);
return h;
}
/**
* Given the "stock" default path to a set of coffeemud classes, and a potential set of requested paths, this
* method will follow requested paths (observing the default if default is listed in the request paths), and
* load all the coffeemud classes therein, making sure they respect the given interface/ancestor name.
* @param defaultPath the path to use when default is a requested path
* @param requestedPathList the ; separated list of paths to look for classes in
	 * @param ancestor the full class name of an ancestor/interface
	 * @return a vector of all the prototype instances of the classes
*/
public static final XVector loadVectorListToObj(final String defaultPath, String requestedPathList, final String ancestor)
{
final Vector<Object> v=new Vector<Object>();
int x=requestedPathList.indexOf(';');
String path;
while(x>=0)
{
path=requestedPathList.substring(0,x).trim();
requestedPathList=requestedPathList.substring(x+1).trim();
loadObjectListToObj(v,defaultPath,path,ancestor);
x=requestedPathList.indexOf(';');
}
loadObjectListToObj(v,defaultPath,requestedPathList,ancestor);
return new XVector(new TreeSet(v));
}
/**
* Given the "stock" default path to a set of coffeemud classes, and a potential set of requested paths, this
* method will follow requested paths (observing the default if default is listed in the request paths), and
* load all the coffeemud classes therein, making sure they respect the given interface/ancestor class.
* @param defaultPath the path to use when default is a requested path
* @param requestedPathList the ; separated list of paths to look for classes in
	 * @param ancestorC1 the full class of an ancestor/interface
	 * @param subDir if given, this will be appended to all requested paths except default
	 * @param quiet true to not report errors to the log, false otherwise
	 * @return a vector of all the prototype instances of the classes
*/
public static final Vector<Object> loadClassList(final String defaultPath, String requestedPathList, final String subDir, final Class<?> ancestorC1, final boolean quiet)
{
final Vector<Object> v=new Vector<Object>();
int x=requestedPathList.indexOf(';');
while(x>=0)
{
String path=requestedPathList.substring(0,x).trim();
requestedPathList=requestedPathList.substring(x+1).trim();
if(path.equalsIgnoreCase("%default%"))
loadListToObj(v,defaultPath, ancestorC1, quiet);
else
{
if((subDir!=null)&&(subDir.length()>0))
path+=subDir;
loadListToObj(v,path,ancestorC1, quiet);
}
x=requestedPathList.indexOf(';');
}
if(requestedPathList.equalsIgnoreCase("%default%"))
loadListToObj(v,defaultPath, ancestorC1, quiet);
else
{
if((subDir!=null)&&(subDir.length()>0))
requestedPathList+=subDir;
loadListToObj(v,requestedPathList,ancestorC1, quiet);
}
return v;
}
/**
* Given a java collection type of some sort (hashtable, vector, etc), a default path, a requested path,
* and the name of an interface/ancestor that classes must implement, this method will load all classes
* in the appropriate path into the given collection.
* @param collection the collection type to use (map, list, set, etc, etc)
* @param defaultPath the path to use if the given path requests the default path
* @param path the requested path to use
* @param ancestor the full java class name of an interface ancestor to force classes to respect
* @return true if classes were loaded without errors, false otherwise
*/
public static final boolean loadObjectListToObj(final Object collection, final String defaultPath, final String path, final String ancestor)
{
if(path.length()>0)
{
final boolean success;
if(path.equalsIgnoreCase("%default%"))
success=loadListToObj(collection,defaultPath, ancestor, false);
else
success=loadListToObj(collection,path,ancestor, false);
return success;
}
return false;
}
/**
* Given a java collection type of some sort (hashtable, vector, etc), a file path,
* and the name of an interface/ancestor that classes must implement, this method will load all classes
* in the appropriate path into the given collection.
* @param collection the collection type to use (map, list, set, etc, etc)
* @param filePath the path to look for classes in
* @param ancestor the full java class name of an interface ancestor to force classes to respect
* @param quiet true to not report errors, false otherwise
* @return true if classes were loaded successfully, false otherwise
*/
public static final boolean loadListToObj(final Object collection, final String filePath, final String ancestor, final boolean quiet)
{
final CMClass loader=new CMClass();
Class<?> ancestorCl=null;
if (ancestor != null && ancestor.length() != 0)
{
try
{
ancestorCl = loader.loadClass(ancestor);
}
catch (final ClassNotFoundException e)
{
if(!quiet)
Log.sysOut("CMClass","WARNING: Couldn't load ancestor class: "+ancestor);
}
}
return loadListToObj(collection, filePath, ancestorCl, quiet);
}
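	/*
	 * Usage sketch: loading every class under a VFS path into a collection,
	 * constrained to a given ancestor. The path and ancestor below mirror
	 * the boot-time calls later in this class; the target Vector is just an
	 * example container.
	 *
	 *   final Vector<Object> loaded = new Vector<Object>();
	 *   CMClass.loadListToObj(loaded,
	 *       "com/planet_ink/coffee_mud/Behaviors/",
	 *       CMObjectType.BEHAVIOR.ancestorName, false);
	 */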
/**
* Given a java collection type of some sort (hashtable, vector, etc), a file path,
* and the class of an interface/ancestor that classes must implement, this method will load all classes
* in the appropriate path into the given collection.
* @param collection the collection type to use (map, list, set, etc, etc)
* @param filePath the path to look for classes in
* @param ancestorCl the full java class of an interface ancestor to force classes to respect
* @param quiet true to not report errors, false otherwise
* @return true if classes were loaded successfully, false otherwise
*/
public static final boolean loadListToObj(final Object collection, final String filePath, final Class<?> ancestorCl, final boolean quiet)
{
final CMClass loader=new CMClass();
final CMFile file=new CMFile(filePath,null,CMFile.FLAG_LOGERRORS);
final Vector<String> fileList=new Vector<String>();
if(file.canRead())
{
if(file.isDirectory())
{
final CMFile[] list=file.listFiles();
for (final CMFile element : list)
if((element.getName().indexOf('$')<0)&&(element.getName().toUpperCase().endsWith(".CLASS")))
fileList.addElement(element.getVFSPathAndName());
for (final CMFile element : list)
if(element.getName().toUpperCase().endsWith(".JS"))
fileList.addElement(element.getVFSPathAndName());
}
else
{
fileList.addElement(file.getVFSPathAndName());
}
}
else
{
if(!quiet)
Log.errOut("CMClass","Unable to access path "+file.getVFSPathAndName());
return false;
}
String item;
for(int l=0;l<fileList.size();l++)
{
item=fileList.elementAt(l);
if(item.startsWith("/"))
item=item.substring(1);
try
{
Object O=null;
String packageName=item.replace('/','.');
if(packageName.toUpperCase().endsWith(".CLASS"))
packageName=packageName.substring(0,packageName.length()-6);
final Class<?> C=loader.loadClass(packageName,true);
if(C!=null)
{
if(!checkAncestry(C,ancestorCl))
{
if(!quiet)
Log.sysOut("CMClass","WARNING: class failed ancestral check: "+packageName);
}
else
O=C.newInstance();
}
if(O==null)
{
if(!quiet)
Log.sysOut("CMClass","Unable to create class '"+packageName+"'");
}
else
{
String itemName=O.getClass().getName();
final int x=itemName.lastIndexOf('.');
if(x>=0)
itemName=itemName.substring(x+1);
if(collection instanceof Map)
{
final Map H=(Map)collection;
if(H.containsKey(itemName.trim().toUpperCase()))
H.remove(itemName.trim().toUpperCase());
H.put(itemName.trim().toUpperCase(),O);
}
else
if(collection instanceof List)
{
final List V=(List)collection;
boolean doNotAdd=false;
for(int v=0;v<V.size();v++)
if(getSimpleClassName(V.get(v)).equals(itemName))
{
V.set(v,O);
doNotAdd=true;
break;
}
if(!doNotAdd)
V.add(O);
}
else
if(collection instanceof Collection)
{
final Collection V=(Collection)collection;
for(final Object o : V)
if(getSimpleClassName(o).equals(itemName))
{
V.remove(o);
break;
}
V.add(O);
}
}
}
catch(final Exception e)
{
if(!quiet)
Log.errOut("CMClass",e);
return false;
}
}
return true;
}
/**
	 * This strange method returns an environmental's name,
	 * plus a string of instance hex digits, which makes
	 * the name more unique.
	 * @param E the environmental to make a unique name for
* @return the unique name
*/
public static final String getObjInstanceStr(Environmental E)
{
if(E==null)
return "NULL";
final int x=E.toString().indexOf('@');
if(x<0)
return E.Name()+E.toString();
return E.Name()+E.toString().substring(x);
}
/**
* Returns the simple class name of an object -- basically the name that comes
* after the final "." in a classpath.
* @param O the object to get the name for
* @return the simple name
*/
public static final String getSimpleClassName(final Object O)
{
if(O==null)
return "";
return getSimpleClassName(O.getClass());
}
/**
* Returns the simple class name of a class -- basically the name that comes
* after the final "." in a classpath.
* @param C the class to get the name for
* @return the simple name
*/
public static final String getSimpleClassName(final Class<?> C)
{
if(C==null)
return "";
final String name=C.getName();
final int lastDot=name.lastIndexOf('.');
if(lastDot>=0)
return name.substring(lastDot+1);
return name;
}
/**
* Given a class, this method will return a CMFile object for the directory containing
* that class.
* @param C the class to get a directory for
* @return the CMFile containing that class
*/
public static final CMFile getClassDir(final Class<?> C)
{
final URL location = C.getProtectionDomain().getCodeSource().getLocation();
String loc;
if(location == null)
{
return null;
}
loc=location.getPath();
loc=loc.replace('/',File.separatorChar);
String floc=new java.io.File(".").getAbsolutePath();
if(floc.endsWith("."))
floc=floc.substring(0,floc.length()-1);
if(floc.endsWith(File.separator))
floc=floc.substring(0,floc.length()-File.separator.length());
int x=floc.indexOf(File.separator);
if(x>=0)
floc=floc.substring(File.separator.length());
x=loc.indexOf(floc);
loc=loc.substring(x+floc.length());
loc=loc.replace(File.separatorChar,'/');
return new CMFile("/"+loc,null);
}
/**
* Returns true if the given class implements the given ancestor/interface
* @param cl the class to check
* @param ancestorCl the ancestor/interface
	 * @return true if the first class is a concrete public class assignable to the second
*/
public static final boolean checkAncestry(final Class<?> cl, final Class<?> ancestorCl)
{
if (cl == null) return false;
if (cl.isPrimitive() || cl.isInterface()) return false;
if ( Modifier.isAbstract( cl.getModifiers()) || !Modifier.isPublic( cl.getModifiers()) ) return false;
if (ancestorCl == null) return true;
return (ancestorCl.isAssignableFrom(cl)) ;
}
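	/*
	 * Usage sketch: checkAncestry rejects interfaces, abstract classes, and
	 * non-public classes before testing assignability. The candidate class
	 * name below is an assumption, and exception handling is omitted.
	 *
	 *   final Class<?> ancestorCl = instance().loadClass(CMObjectType.RACE.ancestorName);
	 *   final Class<?> candidateCl = instance().loadClass("com.planet_ink.coffee_mud.Races.StdRace");
	 *   final boolean ok = CMClass.checkAncestry(candidateCl, ancestorCl);
	 */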
/**
	 * Returns the address part of an instance object's "default string name", which
* looks like com.planet_ink.coffee_mud.blah.ClassName{@literal @}ab476d87e
* where the part after the at sign is the address
* @param e the object to get an address for
* @return the address
*/
public static final String classPtrStr(final Object e)
{
final String ptr=""+e;
final int x=ptr.lastIndexOf('@');
if(x>0)
return ptr.substring(x+1);
return ptr;
}
/**
* Returns the ID() if the object is a CMObject, and otherwise
* the simple class name, which is the class name after the final
* dot in a class path
* @param e the object to get a simple class name for.
* @return the simple class name, or ID
*/
public static final String classID(final Object e)
{
if(e!=null)
{
if(e instanceof CMObject)
return ((CMObject)e).ID();
			return getSimpleClassName(e);
}
return "";
}
/**
* Attempts to load the given class, by fully qualified name
* @param className the class name
* @return the class loaded
* @throws ClassNotFoundException something went wrong
*/
@Override
public final Class<?> loadClass(final String className) throws ClassNotFoundException
{
return (loadClass(className, true));
}
/**
* Finishes loading the class into the underlying classloader by handing the byte data to
* the classloader, after building a proper full class name.
* @param className the class name
* @param classData the byte data of the class to load
* @param overPackage the package the class belongs to
* @param resolveIt true to link the class, false if this is a drill
* @return the class defined
* @throws ClassFormatError something went wrong
*/
public final Class<?> finishDefineClass(String className, final byte[] classData, final String overPackage, final boolean resolveIt)
throws ClassFormatError
{
Class<?> result=null;
if(overPackage!=null)
{
final int x=className.lastIndexOf('.');
if(x>=0)
className=overPackage+className.substring(x);
else
className=overPackage+"."+className;
}
try{result=defineClass(className, classData, 0, classData.length);}
catch(final NoClassDefFoundError e)
{
if(e.getMessage().toLowerCase().indexOf("(wrong name:")>=0)
{
final int x=className.lastIndexOf('.');
if(x>=0)
{
final String notherName=className.substring(x+1);
result=defineClass(notherName, classData, 0, classData.length);
}
else
throw e;
}
else
throw e;
}
if (result==null)
{
throw new ClassFormatError();
}
if (resolveIt)
{
resolveClass(result);
}
if(debugging)
Log.debugOut("CMClass","Loaded: "+result.getName());
classes.put(className, result);
return result;
}
/**
* Attempts to load the given class, by fully qualified name. This is fun
* because it will also load javascript classes, if the className ends with
* .js instead of .class
	 * This is the required version of loadClass() which is called
	 * both from loadClass(String) above and from the internal function
	 * FindClassFromClass.
* @param className the class name
* @param resolveIt true to link the class, false if this is a drill
* @return the class loaded
* @throws ClassNotFoundException something went wrong
*/
@Override
public synchronized final Class<?> loadClass(String className, final boolean resolveIt)
throws ClassNotFoundException
{
String pathName=null;
if(className.endsWith(".class"))
className=className.substring(0,className.length()-6);
if(className.toUpperCase().endsWith(".JS"))
{
pathName=className.substring(0,className.length()-3).replace('.','/')+className.substring(className.length()-3);
className=className.substring(0,className.length()-3);
}
else
pathName=className.replace('.','/')+".class";
Class<?> result = classes.get(className);
if (result!=null)
{
if(debugging)
Log.debugOut("CMClass","Loaded: "+result.getName());
return result;
}
if((super.findLoadedClass(className)!=null)
||(className.indexOf("com.planet_ink.coffee_mud.")<0)
||(className.startsWith("com.planet_ink.coffee_mud.core."))
||(className.startsWith("com.planet_ink.coffee_mud.application."))
||(className.indexOf(".interfaces.")>=0))
{
try
{
result=super.findSystemClass(className);
if(result!=null)
{
if(debugging)
Log.debugOut("CMClass","Loaded: "+result.getName());
return result;
}
}
catch(final Exception t){}
}
/* Try to load it from our repository */
final CMFile CF=new CMFile(pathName,null);
final byte[] classData=CF.raw();
if((classData==null)||(classData.length==0))
{
throw new ClassNotFoundException("File "+pathName+" not readable!");
}
if(CF.getName().toUpperCase().endsWith(".JS"))
{
final String name=CF.getName().substring(0,CF.getName().length()-3);
final StringBuffer str=CF.textVersion(classData);
if((str==null)||(str.length()==0))
throw new ClassNotFoundException("JavaScript file "+pathName+" not readable!");
final List<String> V=Resources.getFileLineVector(str);
Class<?> extendsClass=null;
final Vector<Class<?>> implementsClasses=new Vector<Class<?>>();
String overPackage=null;
for(int v=0;v<V.size();v++)
{
if((extendsClass==null)&&V.get(v).trim().toUpperCase().startsWith("//EXTENDS "))
{
final String extendName=V.get(v).trim().substring(10).trim();
try
{
extendsClass=loadClass(extendName);
}
catch(final ClassNotFoundException e)
{
Log.errOut("CMClass","Could not load "+CF.getName()+" from "+className+" because "+extendName+" is an invalid extension.");
throw e;
}
}
if((overPackage==null)&&V.get(v).trim().toUpperCase().startsWith("//PACKAGE "))
overPackage=V.get(v).trim().substring(10).trim();
if(V.get(v).toUpperCase().startsWith("//IMPLEMENTS "))
{
final String extendName=V.get(v).substring(13).trim();
Class<?> C=null;
try
{
C=loadClass(extendName);
}
catch(final ClassNotFoundException e)
{
continue;
}
implementsClasses.addElement(C);
}
}
final Context X=Context.enter();
final JScriptLib jlib=new JScriptLib();
X.initStandardObjects(jlib);
jlib.defineFunctionProperties(JScriptLib.functions, JScriptLib.class, ScriptableObject.DONTENUM);
final CompilerEnvirons ce = new CompilerEnvirons();
ce.initFromContext(X);
final ClassCompiler cc = new ClassCompiler(ce);
if(extendsClass==null)
Log.errOut("CMClass","Warning: "+CF.getVFSPathAndName()+" does not extend any class!");
else
cc.setTargetExtends(extendsClass);
Class<?> mainClass=null;
if(implementsClasses.size()>0)
{
final Class[] CS=new Class[implementsClasses.size()];
for(int i=0;i<implementsClasses.size();i++)
CS[i]=implementsClasses.elementAt(i);
cc.setTargetImplements(CS);
}
final Object[] objs = cc.compileToClassFiles(str.toString(), "script", 1, name);
for (int i=0;i<objs.length;i+=2)
{
final Class<?> C=finishDefineClass((String)objs[i],(byte[])objs[i+1],overPackage,resolveIt);
if(mainClass==null)
mainClass=C;
}
Context.exit();
if((debugging)&&(mainClass!=null))
Log.debugOut("CMClass","Loaded: "+mainClass.getName());
return mainClass;
}
result=finishDefineClass(className,classData,null,resolveIt);
return result;
}
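	/*
	 * Usage sketch: because this loader also compiles JavaScript sources, a
	 * VFS .js path can be loaded like a class. The path and its //EXTENDS
	 * header describe a hypothetical script; exception handling is omitted.
	 *
	 *   // resources/examples/MyBehavior.js begins with:
	 *   //   //EXTENDS com.planet_ink.coffee_mud.Behaviors.StdBehavior
	 *   final Class<?> C = instance().loadClass("resources/examples/MyBehavior.js", true);
	 *   final Object behaviorObj = C.newInstance();
	 */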
/**
* Causes the map of command words associated with command objects
* to be re-mapped, so that users can use them.
*/
protected static final void reloadCommandWords()
{
c().commandWords.clear();
Command C;
String[] wordList;
for(int c=0;c<c().commands.size();c++)
{
C=c().commands.elementAt(c);
wordList=C.getAccessWords();
if(wordList!=null)
for (final String element : wordList)
c().commandWords.put(element.trim().toUpperCase(),C);
}
}
/**
	 * Making good use of the class path directories from the INI file, this will load
	 * all the classes in CoffeeMud, reporting them to the log as it does so.
	 * @param page the coffeemud.ini file
	 * @return true on success, false otherwise
*/
public static final boolean loadAllCoffeeMudClasses(final CMProps page)
{
CMClass c=c();
if(c==null)
c=new CMClass();
final CMClass baseC=clss[MudHost.MAIN_HOST];
final char tCode=Thread.currentThread().getThreadGroup().getName().charAt(0);
// wait for baseC
while((tCode!=MudHost.MAIN_HOST)&&(!classLoaderSync[0]))
{
try
{
Thread.sleep(500);
}
catch(final Exception e)
{
break;
}
}
try
{
final String prefix="com/planet_ink/coffee_mud/";
debugging=CMSecurity.isDebugging(CMSecurity.DbgFlag.CLASSLOADER);
c.libraries=loadVectorListToObj(prefix+"Libraries/",page.getStr("LIBRARY"),CMObjectType.LIBRARY.ancestorName);
if(c.libraries.size()==0)
return false;
CMLib.registerLibraries(c.libraries.elements());
if(CMLib.unregistered().length()>0)
{
Log.errOut("CMClass","Fatal Error: libraries are unregistered: "+CMLib.unregistered().substring(0,CMLib.unregistered().length()-2));
return false;
}
			CMLib.propertiesLoaded(); // fires propertiesLoaded on the libraries; necessary for some initialization
if((tCode!=MudHost.MAIN_HOST)&&(!CMProps.isPrivateToMe("COMMON")))
c.common=baseC.common;
else
c.common=loadHashListToObj(prefix+"Common/",page.getStr("COMMON"),CMObjectType.COMMON.ancestorName);
if(c.common.size()==0)
return false;
if((tCode!=MudHost.MAIN_HOST)&&(!CMProps.isPrivateToMe("WEBMACROS")))
c.webMacros=baseC.webMacros;
else
{
c.webMacros=CMClass.loadHashListToObj(prefix+"WebMacros/", "%DEFAULT%",CMObjectType.WEBMACRO.ancestorName);
Log.sysOut(Thread.currentThread().getName(),"WebMacros loaded : "+c.webMacros.size());
for(final Enumeration e=c.webMacros.keys();e.hasMoreElements();)
{
final String key=(String)e.nextElement();
if(key.length()>longestWebMacro)
longestWebMacro=key.length();
}
}
if((tCode!=MudHost.MAIN_HOST)&&(!CMProps.isPrivateToMe("RACE")))
c.races=baseC.races;
else
{
c.races=loadVectorListToObj(prefix+"Races/",page.getStr("RACES"),CMObjectType.RACE.ancestorName);
//Log.sysOut(Thread.currentThread().getName(),"Races loaded : "+c.races.size());
}
if(c.races.size()==0)
return false;
if((tCode!=MudHost.MAIN_HOST)&&(!CMProps.isPrivateToMe("CHARCLASS")))
c.charClasses=baseC.charClasses;
else
{
c.charClasses=loadVectorListToObj(prefix+"CharClasses/",page.getStr("CHARCLASSES"),CMObjectType.CHARCLASS.ancestorName);
//Log.sysOut(Thread.currentThread().getName(),"Classes loaded : "+c.charClasses.size());
}
if(c.charClasses.size()==0)
return false;
if((tCode!=MudHost.MAIN_HOST)&&(!CMProps.isPrivateToMe("MOB")))
c.MOBs=baseC.MOBs;
else
{
c.MOBs=loadVectorListToObj(prefix+"MOBS/",page.getStr("MOBS"),CMObjectType.MOB.ancestorName);
Log.sysOut(Thread.currentThread().getName(),"MOB Types loaded : "+c.MOBs.size());
}
if(c.MOBs.size()==0)
return false;
if((tCode!=MudHost.MAIN_HOST)&&(!CMProps.isPrivateToMe("EXIT")))
c.exits=baseC.exits;
else
{
c.exits=loadVectorListToObj(prefix+"Exits/",page.getStr("EXITS"),CMObjectType.EXIT.ancestorName);
Log.sysOut(Thread.currentThread().getName(),"Exit Types loaded : "+c.exits.size());
}
if(c.exits.size()==0)
return false;
if((tCode!=MudHost.MAIN_HOST)&&(!CMProps.isPrivateToMe("AREA")))
c.areaTypes=baseC.areaTypes;
else
{
c.areaTypes=loadVectorListToObj(prefix+"Areas/",page.getStr("AREAS"),CMObjectType.AREA.ancestorName);
Log.sysOut(Thread.currentThread().getName(),"Area Types loaded : "+c.areaTypes.size());
}
if(c.areaTypes.size()==0)
return false;
if((tCode!=MudHost.MAIN_HOST)&&(!CMProps.isPrivateToMe("LOCALE")))
c.locales=baseC.locales;
else
{
c.locales=loadVectorListToObj(prefix+"Locales/",page.getStr("LOCALES"),CMObjectType.LOCALE.ancestorName);
Log.sysOut(Thread.currentThread().getName(),"Locales loaded : "+c.locales.size());
}
if(c.locales.size()==0)
return false;
if((tCode!=MudHost.MAIN_HOST)&&(!CMProps.isPrivateToMe("ABILITY")))
c.abilities=baseC.abilities;
else
{
c.abilities=loadVectorListToObj(prefix+"Abilities/",page.getStr("ABILITIES"),CMObjectType.ABILITY.ancestorName);
if(c.abilities.size()==0)
return false;
if((page.getStr("ABILITIES")!=null)
&&(page.getStr("ABILITIES").toUpperCase().indexOf("%DEFAULT%")>=0))
{
Vector<Ability> tempV;
int size=0;
tempV=loadVectorListToObj(prefix+"Abilities/Fighter/","%DEFAULT%",CMObjectType.ABILITY.ancestorName);
size=tempV.size();
c.abilities.addAll(tempV);
tempV=loadVectorListToObj(prefix+"Abilities/Ranger/","%DEFAULT%",CMObjectType.ABILITY.ancestorName);
size+=tempV.size();
c.abilities.addAll(tempV);
tempV=loadVectorListToObj(prefix+"Abilities/Paladin/","%DEFAULT%",CMObjectType.ABILITY.ancestorName);
size+=tempV.size();
c.abilities.addAll(tempV);
if(size>0)
Log.sysOut(Thread.currentThread().getName(),"Fighter Skills : "+size);
tempV=loadVectorListToObj(prefix+"Abilities/Druid/","%DEFAULT%",CMObjectType.ABILITY.ancestorName);
if(tempV.size()>0)
Log.sysOut(Thread.currentThread().getName(),"Chants loaded : "+tempV.size());
c.abilities.addAll(tempV);
tempV=loadVectorListToObj(prefix+"Abilities/Languages/","%DEFAULT%",CMObjectType.ABILITY.ancestorName);
if(tempV.size()>0)
Log.sysOut(Thread.currentThread().getName(),"Languages loaded : "+tempV.size());
c.abilities.addAll(tempV);
tempV=loadVectorListToObj(prefix+"Abilities/Properties/","%DEFAULT%",CMObjectType.ABILITY.ancestorName);
size=tempV.size();
c.abilities.addAll(tempV);
tempV=loadVectorListToObj(prefix+"Abilities/Diseases/","%DEFAULT%",CMObjectType.ABILITY.ancestorName);
size+=tempV.size();
c.abilities.addAll(tempV);
tempV=loadVectorListToObj(prefix+"Abilities/Poisons/","%DEFAULT%",CMObjectType.ABILITY.ancestorName);
size+=tempV.size();
c.abilities.addAll(tempV);
tempV=loadVectorListToObj(prefix+"Abilities/Misc/","%DEFAULT%",CMObjectType.ABILITY.ancestorName);
size+=tempV.size();
Log.sysOut(Thread.currentThread().getName(),"Properties loaded : "+size);
c.abilities.addAll(tempV);
tempV=loadVectorListToObj(prefix+"Abilities/Prayers/","%DEFAULT%",CMObjectType.ABILITY.ancestorName);
Log.sysOut(Thread.currentThread().getName(),"Prayers loaded : "+tempV.size());
c.abilities.addAll(tempV);
tempV=loadVectorListToObj(prefix+"Abilities/Archon/","%DEFAULT%",CMObjectType.ABILITY.ancestorName);
size+=tempV.size();
c.abilities.addAll(tempV);
tempV=loadVectorListToObj(prefix+"Abilities/Skills/","%DEFAULT%",CMObjectType.ABILITY.ancestorName);
size=tempV.size();
c.abilities.addAll(tempV);
tempV=loadVectorListToObj(prefix+"Abilities/Thief/","%DEFAULT%",CMObjectType.ABILITY.ancestorName);
size+=tempV.size();
c.abilities.addAll(tempV);
tempV=loadVectorListToObj(prefix+"Abilities/Common/","%DEFAULT%",CMObjectType.ABILITY.ancestorName);
size+=tempV.size();
c.abilities.addAll(tempV);
tempV=loadVectorListToObj(prefix+"Abilities/Specializations/","%DEFAULT%",CMObjectType.ABILITY.ancestorName);
size+=tempV.size();
c.abilities.addAll(tempV);
if(size>0)
Log.sysOut(Thread.currentThread().getName(),"Skills loaded : "+size);
tempV=loadVectorListToObj(prefix+"Abilities/Songs/","%DEFAULT%",CMObjectType.ABILITY.ancestorName);
if(tempV.size()>0)
Log.sysOut(Thread.currentThread().getName(),"Songs loaded : "+tempV.size());
c.abilities.addAll(tempV);
tempV=loadVectorListToObj(prefix+"Abilities/Spells/","%DEFAULT%",CMObjectType.ABILITY.ancestorName);
if(tempV.size()>0)
Log.sysOut(Thread.currentThread().getName(),"Spells loaded : "+tempV.size());
c.abilities.addAll(tempV);
tempV=loadVectorListToObj(prefix+"Abilities/SuperPowers/","%DEFAULT%",CMObjectType.ABILITY.ancestorName);
size=tempV.size();
c.abilities.addAll(tempV);
if(size>0)
Log.sysOut(Thread.currentThread().getName(),"Heroics loaded : "+size);
tempV=loadVectorListToObj(prefix+"Abilities/Tech/","%DEFAULT%",CMObjectType.ABILITY.ancestorName);
size=tempV.size();
c.abilities.addAll(tempV);
if(size>0)
Log.sysOut(Thread.currentThread().getName(),"Tech Skills loaded: "+size);
tempV=loadVectorListToObj(prefix+"Abilities/Traps/","%DEFAULT%",CMObjectType.ABILITY.ancestorName);
if(tempV.size()>0)
Log.sysOut(Thread.currentThread().getName(),"Traps loaded : "+tempV.size());
c.abilities.addAll(tempV);
c.abilities.sort();
CMProps.setUpLowVar(CMProps.Str.MUDSTATUS,"Booting: reading generic abilities");
final List<DatabaseEngine.AckRecord> genAbilities=CMLib.database().DBReadAbilities();
if(genAbilities.size()>0)
{
int loaded=0;
for(final DatabaseEngine.AckRecord rec : genAbilities)
{
String type=rec.typeClass();
if((type==null)||(type.trim().length()==0))
type="GenAbility";
final Ability A=(Ability)(CMClass.getAbility(type).copyOf());
A.setStat("ALLXML",rec.data());
if((!A.ID().equals("GenAbility"))&&(!A.ID().equals(type)))
{
c.abilities.addElement(A);
loaded++;
}
}
if(loaded>0)
{
Log.sysOut(Thread.currentThread().getName(),"GenAbles loaded : "+loaded);
c.abilities.sort();
}
}
}
}
if((tCode!=MudHost.MAIN_HOST)&&(!CMProps.isPrivateToMe("ITEM")))
c.items=baseC.items;
else
{
c.items=loadVectorListToObj(prefix+"Items/Basic/",page.getStr("ITEMS"),CMObjectType.ITEM.ancestorName);
if(c.items.size()>0)
Log.sysOut(Thread.currentThread().getName(),"Basic Items loaded: "+c.items.size());
}
if((tCode!=MudHost.MAIN_HOST)&&(!CMProps.isPrivateToMe("WEAPON")))
c.weapons=baseC.weapons;
else
{
c.weapons=loadVectorListToObj(prefix+"Items/Weapons/",page.getStr("WEAPONS"),CMObjectType.WEAPON.ancestorName);
if(c.weapons.size()>0)
Log.sysOut(Thread.currentThread().getName(),"Weapons loaded : "+c.weapons.size());
}
if((tCode!=MudHost.MAIN_HOST)&&(!CMProps.isPrivateToMe("ARMOR")))
c.armor=baseC.armor;
else
{
c.armor=loadVectorListToObj(prefix+"Items/Armor/",page.getStr("ARMOR"),CMObjectType.ARMOR.ancestorName);
if(c.armor.size()>0)
Log.sysOut(Thread.currentThread().getName(),"Armor loaded : "+c.armor.size());
}
if((tCode!=MudHost.MAIN_HOST)&&(!CMProps.isPrivateToMe("MISCMAGIC")))
c.miscMagic=baseC.miscMagic;
else
{
c.miscMagic=loadVectorListToObj(prefix+"Items/MiscMagic/",page.getStr("MISCMAGIC"),CMObjectType.MISCMAGIC.ancestorName);
if(c.miscMagic.size()>0)
Log.sysOut(Thread.currentThread().getName(),"Magic Items loaded: "+c.miscMagic.size());
}
if((tCode!=MudHost.MAIN_HOST)&&(!CMProps.isPrivateToMe("CLANITEMS")))
c.clanItems=baseC.clanItems;
else
{
c.clanItems=loadVectorListToObj(prefix+"Items/ClanItems/",page.getStr("CLANITEMS"),CMObjectType.CLANITEM.ancestorName);
if(c.clanItems.size()>0)
Log.sysOut(Thread.currentThread().getName(),"Clan Items loaded : "+c.clanItems.size());
}
if((tCode!=MudHost.MAIN_HOST)&&(!CMProps.isPrivateToMe("TECH")))
c.tech=baseC.tech;
else
{
Vector<Electronics> tempV;
c.tech=loadVectorListToObj(prefix+"Items/BasicTech/",page.getStr("TECH"),CMObjectType.TECH.ancestorName);
tempV=loadVectorListToObj(prefix+"Items/ShipTech/",page.getStr("SHIPTECH"),CMObjectType.SHIPTECH.ancestorName);
if(tempV.size()>0)
c.tech.addAll(tempV);
tempV=loadVectorListToObj(prefix+"Items/Software/",page.getStr("SOFTWARE"),CMObjectType.SOFTWARE.ancestorName);
if(tempV.size()>0)
c.tech.addAll(tempV);
if(c.tech.size()>0)
Log.sysOut(Thread.currentThread().getName(),"Electronics loaded: "+c.tech.size());
c.tech.sort();
}
if((c.items.size()+c.weapons.size()+c.armor.size()+c.tech.size()+c.miscMagic.size()+c.clanItems.size())==0)
return false;
if((tCode!=MudHost.MAIN_HOST)&&(!CMProps.isPrivateToMe("BEHAVIOR")))
c.behaviors=baseC.behaviors;
else
{
c.behaviors=loadVectorListToObj(prefix+"Behaviors/",page.getStr("BEHAVIORS"),CMObjectType.BEHAVIOR.ancestorName);
Log.sysOut(Thread.currentThread().getName(),"Behaviors loaded : "+c.behaviors.size());
}
if(c.behaviors.size()==0)
return false;
if((tCode!=MudHost.MAIN_HOST)&&(!CMProps.isPrivateToMe("COMMAND")))
{
c.commands=baseC.commands;
c.commandWords=baseC.commandWords;
}
else
{
c.commands=loadVectorListToObj(prefix+"Commands/",page.getStr("COMMANDS"),CMObjectType.COMMAND.ancestorName);
Log.sysOut(Thread.currentThread().getName(),"Commands loaded : "+c.commands.size());
}
if(c.commands.size()==0)
return false;
}
catch(final Exception t)
{
t.printStackTrace();
return false;
}
reloadCommandWords();
// misc startup stuff
if((tCode==MudHost.MAIN_HOST)||(CMProps.isPrivateToMe("CHARCLASS")))
for(int i=0;i<c.charClasses.size();i++)
{
final CharClass C=c.charClasses.elementAt(i);
C.copyOf();
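// NOTE (editor): the copy is discarded; in the original source this call appears to be made only for its boot-time side effects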
}
if((tCode==MudHost.MAIN_HOST)||(CMProps.isPrivateToMe("RACE")))
{
int numRaces=c.races.size();
for(int r=0;r<c.races.size();r++)
{
final Race R=c.races.elementAt(r);
R.copyOf();
}
CMProps.setUpLowVar(CMProps.Str.MUDSTATUS,"Booting: reading genRaces");
final List<DatabaseEngine.AckRecord> genRaces=CMLib.database().DBReadRaces();
if(genRaces.size()>0)
{
for(int r=0;r<genRaces.size();r++)
{
final Race GR=(Race)getRace("GenRace").copyOf();
GR.setRacialParms(genRaces.get(r).data());
if(!GR.ID().equals("GenRace"))
{
addRace(GR);
numRaces++;
}
}
}
Log.sysOut(Thread.currentThread().getName(),"Races loaded : "+numRaces);
}
if((tCode==MudHost.MAIN_HOST)||(CMProps.isPrivateToMe("CHARCLASS")))
{
int numCharClasses=c.charClasses.size();
CMProps.setUpLowVar(CMProps.Str.MUDSTATUS,"Booting: reading genClasses");
final List<DatabaseEngine.AckRecord> genClasses=CMLib.database().DBReadClasses();
if(genClasses.size()>0)
{
for(int r=0;r<genClasses.size();r++)
{
final CharClass CR=(CharClass)(CMClass.getCharClass("GenCharClass").copyOf());
CR.setClassParms(genClasses.get(r).data());
if(!CR.ID().equals("GenCharClass"))
{
addCharClass(CR);
numCharClasses++;
}
}
}
Log.sysOut(Thread.currentThread().getName(),"Classes loaded : "+numCharClasses);
}
CMProps.setUpLowVar(CMProps.Str.MUDSTATUS,"Booting: initializing classes");
c.intializeClasses();
if((tCode==MudHost.MAIN_HOST)||(CMProps.isPrivateToMe("EXPERTISES")))
{
CMLib.expertises().recompileExpertises();
Log.sysOut(Thread.currentThread().getName(),"Expertises defined: "+CMLib.expertises().numExpertises());
}
if(tCode==MudHost.MAIN_HOST)
classLoaderSync[0]=true;
CMClass.lastUpdateTime=System.currentTimeMillis();
return true;
}
/**
* Returns a timestamp of the last time there was a change in the full set of classes.
* @return the last time there was a change
*/
public static long getLastClassUpdatedTime(){ return lastUpdateTime; }
/**
* The helper class for full-blown JavaScript objects.
* @author Bo Zimmerman
*
*/
protected static final class JScriptLib extends ScriptableObject
{
@Override public String getClassName(){ return "JScriptLib";}
static final long serialVersionUID=47;
public static String[] functions = {"toJavaString"};
public String toJavaString(Object O){return Context.toString(O);}
}
/**
* CMMsg objects are normally re-used, and this method is the recycle bin.
* If the msg were simply garbage collected it would get returned here anyway,
* but calling this method is the tidier way to get it done.
* @param msg the CMMsg we are done using
* @return true if it was returned to the bin, and false if it was allowed to die
*/
public static final boolean returnMsg(final CMMsg msg)
{
if(MSGS_CACHE.size()<MAX_MSGS)
{
synchronized(CMClass.MSGS_CACHE)
{
if(MSGS_CACHE.size()<MAX_MSGS)
{
MSGS_CACHE.addLast(msg);
return true;
}
}
}
return false;
}
/**
* Returns either a CMMsg object from the cache, if one is available, or makes
* a new one.
* @return a CMMsg object, ready to use.
*/
public final static CMMsg getMsg()
{
try
{
synchronized(MSGS_CACHE)
{
return MSGS_CACHE.removeFirst();
}
}
catch(final Exception e)
{
return (CMMsg)getCommon("DefaultMessage");
}
}
/**
* Creates and configures a CMMsg object for use in the game
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#source()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#sourceCode()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#sourceMessage()
* @param source the agent source of the action
* @param newAllCode the source, target, and others code to use
* @param allMessage the source, target, and others string msg to send
* @return the CMMsg Object
*/
public static final CMMsg getMsg(final MOB source, final int newAllCode, final String allMessage)
{ final CMMsg M=getMsg(); M.modify(source,newAllCode,allMessage); return M;}
/**
* Creates and configures a CMMsg object for use in the game
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#source()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#sourceCode()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#sourceMessage()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#value()
* @param source the agent source of the action
* @param newAllCode the source, target, and others code to use
* @param allMessage the source, target, and others string msg to send
* @param newValue the value to set on the message
* @return the CMMsg Object
*/
public static final CMMsg getMsg(final MOB source, final int newAllCode, final String allMessage, final int newValue)
{ final CMMsg M=getMsg(); M.modify(source,newAllCode,allMessage,newValue); return M;}
/**
* Creates and configures a CMMsg object for use in the game
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#source()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#target()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#sourceCode()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#sourceMessage()
* @param source the agent source of the action
* @param target the target of the action
* @param newAllCode the source, target, and others code to use
* @param allMessage the source, target, and others string msg to send
* @return the CMMsg Object
*/
public static final CMMsg getMsg(final MOB source, final Environmental target, final int newAllCode, final String allMessage)
{ final CMMsg M=getMsg(); M.modify(source,target,newAllCode,allMessage); return M;}
/**
* Creates and configures a CMMsg object for use in the game
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#source()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#target()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#tool()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#sourceCode()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#sourceMessage()
* @param source the agent source of the action
* @param target the target of the action
* @param tool the tool used by the source to do the action
* @param newAllCode the source, target, and others code to use
* @param allMessage the source, target, and others string msg to send
* @return the CMMsg Object
*/
public static final CMMsg getMsg(final MOB source, final Environmental target, final Environmental tool, final int newAllCode, final String allMessage)
{ final CMMsg M=getMsg(); M.modify(source,target,tool,newAllCode,allMessage); return M;}
/**
* Creates and configures a CMMsg object for use in the game
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#source()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#target()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#tool()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#sourceCode()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#targetCode()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#othersCode()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#sourceMessage()
* @param source the agent source of the action
* @param target the target of the action
* @param tool the tool used by the source to do the action
* @param newSourceCode the source code for this action
* @param newTargetCode the target code for this action
* @param newOthersCode the others/observed code for this action
* @param allMessage the source, target, and others string msg to send
* @return the CMMsg Object
*/
public static final CMMsg getMsg(final MOB source, final Environmental target, final Environmental tool, final int newSourceCode, final int newTargetCode,
final int newOthersCode, final String allMessage)
{ final CMMsg M=getMsg(); M.modify(source,target,tool,newSourceCode,newTargetCode,newOthersCode,allMessage); return M;}
/**
* Creates and configures a CMMsg object for use in the game
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#source()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#target()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#tool()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#sourceCode()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#sourceMessage()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#targetMessage()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#othersMessage()
* @param source the agent source of the action
* @param target the target of the action
* @param tool the tool used by the source to do the action
* @param newAllCode the source, target, and others code to use
* @param sourceMessage the action/message as seen by the source
* @param targetMessage the action/message as seen by the target
* @param othersMessage the action/message as seen by everyone else
* @return the CMMsg Object
*/
public static final CMMsg getMsg(final MOB source, final Environmental target, final Environmental tool, final int newAllCode, final String sourceMessage,
final String targetMessage, final String othersMessage)
{ final CMMsg M=getMsg(); M.modify(source,target,tool,newAllCode,sourceMessage,newAllCode,targetMessage,newAllCode,othersMessage); return M;}
/**
* Creates and configures a CMMsg object for use in the game
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#source()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#target()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#tool()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#sourceCode()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#targetCode()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#othersCode()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#sourceMessage()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#targetMessage()
* @see com.planet_ink.coffee_mud.Common.interfaces.CMMsg#othersMessage()
* @param source the agent source of the action
* @param target the target of the action
* @param tool the tool used by the source to do the action
* @param newSourceCode the source code for this action
* @param sourceMessage the action/message as seen by the source
* @param newTargetCode the target code for this action
* @param targetMessage the action/message as seen by the target
* @param newOthersCode the others/observed code for this action
* @param othersMessage the action/message as seen by everyone else
* @return the CMMsg Object
*/
public static final CMMsg getMsg(final MOB source, final Environmental target, final Environmental tool, final int newSourceCode, final String sourceMessage,
final int newTargetCode, final String targetMessage, final int newOthersCode, final String othersMessage)
{ final CMMsg M=getMsg(); M.modify(source,target,tool,newSourceCode,sourceMessage,newTargetCode,targetMessage,newOthersCode,othersMessage); return M;}
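/**
* Editor's illustrative sketch, not part of the original source: a minimal round
* trip through the CMMsg cache documented above. The method name, message code
* parameter, and message text are hypothetical placeholders; a real caller would
* supply its own and dispatch the message through the room machinery.
* @param source the agent source of the hypothetical action
* @param msgCode a source/target/others message code to demonstrate with
*/
private static void exampleMsgRoundTrip(final MOB source, final int msgCode)
{
final CMMsg msg=getMsg(source,msgCode,"<S-NAME> wave(s).");
try
{
// the message would normally be previewed and executed by the source's room here
}
finally
{
returnMsg(msg); // hand the object back to the cache for re-use
}
}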
/**
* Factory mob objects are normally re-used, and this method is the recycle bin.
* If the mob were simply garbage collected it would get returned here anyway,
* but calling this method is the tidier way to get it done.
* @param mob the mob we are done using
* @return true if it was returned to the bin, and false if it was allowed to die
*/
public static final boolean returnMob(final MOB mob)
{
if(MOB_CACHE.size()<MAX_MOBS)
{
synchronized(CMClass.MOB_CACHE)
{
MOB_CACHE.addLast(mob);
return true;
}
}
return false;
}
/**
* Returns either a StdFactoryMOB object from the cache, if one is available, or makes
* a new one.
* @return a StdFactoryMOB object, ready to use.
*/
public final static MOB getFactoryMOB()
{
try
{
synchronized(MOB_CACHE)
{
return MOB_CACHE.removeFirst();
}
}
catch(final Exception e)
{
return getMOB("StdFactoryMOB");
}
}
/**
* Returns either a StdFactoryMOB object from the cache, if one is available, or makes
* a new one, giving him the name, level, and room location given.
* @param name the name to give the mob
* @param level the level to give the mob
* @param room the room to set the mobs location at
* @return a StdFactoryMOB object, ready to use.
*/
public final static MOB getFactoryMOB(final String name, final int level, final Room room)
{
final MOB mob2=CMClass.getFactoryMOB();
mob2.setName(name);
mob2.basePhyStats().setLevel(level);
mob2.phyStats().setLevel(level);
mob2.setLocation(room);
return mob2;
}
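/**
* Editor's illustrative sketch, not part of the original source: the expected
* lifecycle of a pooled factory mob using the helpers above. The method name,
* mob name, and level are hypothetical placeholders.
* @param room the room to place the temporary mob into
*/
private static void exampleFactoryMobUse(final Room room)
{
final MOB mob=getFactoryMOB("a temporary helper",1,room);
try
{
// use the mob for whatever short-lived work is needed
}
finally
{
returnMob(mob); // return it to the cache instead of letting it be collected
}
}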
/**
* Unloads all the classes in this system.
* Why, I do not know.
*/
public static final void shutdown()
{
for (final CMClass cls : clss)
if(cls!=null)
cls.unload();
classLoaderSync[0]=false;
}
/**
* Clears all the class sets in this loader.
* I don't know why.
*/
public final void unload()
{
common.clear();
races.clear();
charClasses.clear();
MOBs.clear();
abilities.clear();
locales.clear();
exits.clear();
items.clear();
behaviors.clear();
weapons.clear();
armor.clear();
miscMagic.clear();
tech.clear();
areaTypes.clear();
clanItems.clear();
commands.clear();
webMacros.clear();
commandWords.clear();
}
}
|
git-svn-id: svn://192.168.1.10/public/CoffeeMud@13409 0d6f1817-ed0e-0410-87c9-987e46238f29
|
com/planet_ink/coffee_mud/core/CMClass.java
| ||
Java
|
apache-2.0
|
0824e1a6946e5726de8f13fb4f7904f6fecd0104
| 0
|
sathishk/zols,lokinell/zols,sathishk/zols,sathishk/zols,lokinell/zols,lokinell/zols,lokinell/zols
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.zols.templates;
import java.io.File;
import java.util.List;
import javax.annotation.PostConstruct;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.thymeleaf.ThymeleafProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.thymeleaf.spring4.SpringTemplateEngine;
import org.thymeleaf.spring4.resourceresolver.SpringResourceResourceResolver;
import org.thymeleaf.templateresolver.FileTemplateResolver;
import org.thymeleaf.templateresolver.TemplateResolver;
import org.thymeleaf.templateresolver.UrlTemplateResolver;
import org.zols.templates.domain.TemplateRepository;
import org.zols.templates.service.TemplateRepositoryService;
@Configuration
@EnableConfigurationProperties(ThymeleafProperties.class)
public class TemplateConfiguration {
@Autowired
private ThymeleafProperties properties;
@Autowired
private TemplateRepositoryService templateRepositoryService;
@Autowired
private SpringTemplateEngine templateEngine;
@PostConstruct
public void intializeTemplates() {
TemplateResolver resolver;
File file;
try {
List<TemplateRepository> templateRepositories = templateRepositoryService.list();
if (templateRepositories != null) {
for (TemplateRepository templateRepository : templateRepositories) {
switch (templateRepository.getType()) {
case "file":
resolver = new FileTemplateResolver();
file = new File(templateRepository.getPath());
resolver.setPrefix(file.getAbsolutePath() + File.separator);
intializeResolver(resolver);
templateEngine.addTemplateResolver(resolver);
break;
case "url":
resolver = new UrlTemplateResolver();
resolver.setPrefix(templateRepository.getPath() + "/");
intializeResolver(resolver);
templateEngine.addTemplateResolver(resolver);
break;
}
}
}
} catch (Exception e) {
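// NOTE (editor): any failure while listing template repositories is silently swallowed here in the original source; only the built-in zols templates added below would then be registered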
}
addZolsTemplates();
}
private void addZolsTemplates() {
TemplateResolver resolver = new TemplateResolver();
resolver.setResourceResolver(thymeleafResourceResolver());
resolver.setPrefix("classpath:/zolstemplates/");
intializeResolver(resolver);
templateEngine.addTemplateResolver(resolver);
}
@Bean
public SpringResourceResourceResolver thymeleafResourceResolver() {
return new SpringResourceResourceResolver();
}
private void intializeResolver(TemplateResolver resolver) {
// resolver.setPrefix(this.properties.getPrefix());
resolver.setSuffix(this.properties.getSuffix());
resolver.setTemplateMode(this.properties.getMode());
resolver.setCharacterEncoding(this.properties.getEncoding());
resolver.setCacheable(this.properties.isCache());
}
}
|
zols-cms-plugin/src/main/java/org/zols/templates/TemplateConfiguration.java
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.zols.templates;
import java.io.File;
import java.util.List;
import javax.annotation.PostConstruct;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.thymeleaf.ThymeleafProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.thymeleaf.spring4.SpringTemplateEngine;
import org.thymeleaf.spring4.resourceresolver.SpringResourceResourceResolver;
import org.thymeleaf.templateresolver.FileTemplateResolver;
import org.thymeleaf.templateresolver.TemplateResolver;
import org.thymeleaf.templateresolver.UrlTemplateResolver;
import org.zols.templates.domain.TemplateRepository;
import org.zols.templates.service.TemplateRepositoryService;
@Configuration
@EnableConfigurationProperties(ThymeleafProperties.class)
public class TemplateConfiguration {
@Autowired
private ThymeleafProperties properties;
@Autowired
private TemplateRepositoryService templateRepositoryService;
@Autowired
private SpringTemplateEngine templateEngine;
@PostConstruct
public void intializeTemplates() {
addZolsTemplates();
TemplateResolver resolver;
File file;
try {
List<TemplateRepository> templateRepositories = templateRepositoryService.list();
if (templateRepositories != null) {
for (TemplateRepository templateRepository : templateRepositories) {
switch (templateRepository.getType()) {
case "file":
resolver = new FileTemplateResolver();
file = new File(templateRepository.getPath());
resolver.setPrefix(file.getAbsolutePath() + File.separator);
intializeResolver(resolver);
templateEngine.addTemplateResolver(resolver);
break;
case "url":
resolver = new UrlTemplateResolver();
resolver.setPrefix(templateRepository.getPath() + "/");
intializeResolver(resolver);
templateEngine.addTemplateResolver(resolver);
break;
}
}
}
} catch (Exception e) {
}
}
private void addZolsTemplates() {
TemplateResolver resolver = new TemplateResolver();
resolver.setResourceResolver(thymeleafResourceResolver());
resolver.setPrefix("classpath:/zolstemplates/");
intializeResolver(resolver);
templateEngine.addTemplateResolver(resolver);
}
@Bean
public SpringResourceResourceResolver thymeleafResourceResolver() {
return new SpringResourceResourceResolver();
}
private void intializeResolver(TemplateResolver resolver) {
// resolver.setPrefix(this.properties.getPrefix());
resolver.setSuffix(this.properties.getSuffix());
resolver.setTemplateMode(this.properties.getMode());
resolver.setCharacterEncoding(this.properties.getEncoding());
resolver.setCacheable(this.properties.isCache());
}
}
|
[Template] Overriding index.html #85
|
zols-cms-plugin/src/main/java/org/zols/templates/TemplateConfiguration.java
|
[Template] Overriding index.html #85
|
|
Java
|
apache-2.0
|
29b628b56e14837c7090ff4b86bea3367f6582cc
| 0
|
jcnoir/dmix,joansmith/dmix,0359xiaodong/dmix,hurzl/dmix,joansmith/dmix,abarisain/dmix,hurzl/dmix,0359xiaodong/dmix,abarisain/dmix,jcnoir/dmix
|
/*
* Copyright (C) 2010-2014 The MPDroid Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.namelessdev.mpdroid;
import com.namelessdev.mpdroid.helpers.MPDAsyncHelper.ConnectionListener;
import org.a0z.mpd.MPD;
import org.a0z.mpd.MPDStatus;
import org.a0z.mpd.event.StatusChangeListener;
import org.a0z.mpd.exception.MPDServerException;
import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.media.AudioManager;
import android.media.AudioManager.OnAudioFocusChangeListener;
import android.media.MediaPlayer;
import android.media.MediaPlayer.OnCompletionListener;
import android.media.MediaPlayer.OnErrorListener;
import android.media.MediaPlayer.OnPreparedListener;
import android.os.Handler;
import android.os.IBinder;
import android.os.Message;
import android.os.StrictMode;
import android.telephony.PhoneStateListener;
import android.telephony.TelephonyManager;
import android.util.Log;
import android.widget.Toast;
import java.io.IOException;
/**
* StreamingService hooks Android's audio framework to MPD's streaming server to
* allow local audio playback.
*
* @author Arnaud Barisain Monrose (Dream_Team)
* @version $Id: $
*/
public class StreamingService extends Service implements
/**
* OnInfoListener is not used because it is broken (never gets called, ever).
* OnBufferingUpdateListener is not used because it depends on a stream completion time.
*/
ConnectionListener,
OnAudioFocusChangeListener,
OnCompletionListener,
OnErrorListener,
OnPreparedListener,
StatusChangeListener {
public static final String ACTION_DIE = "com.namelessdev.mpdroid.DIE";
public static final String ACTION_START = "com.namelessdev.mpdroid.START_STREAMING";
public static final String ACTION_STOP = "com.namelessdev.mpdroid.STOP_STREAMING";
public static final String ACTION_RESET = "com.namelessdev.mpdroid.RESET_STREAMING";
public static final String CMD_REMOTE = "com.namelessdev.mpdroid.REMOTE_COMMAND";
public static final String CMD_COMMAND = "COMMAND";
public static final String CMD_PAUSE = "PAUSE";
public static final String CMD_STOP = "STOP";
public static final String CMD_PLAY = "PLAY";
public static final String CMD_PLAYPAUSE = "PLAYPAUSE";
public static final String CMD_PREV = "PREV";
public static final String CMD_NEXT = "NEXT";
public static final String CMD_DIE = "DIE"; // Just in case
static final String TAG = "MPDroidStreamingService";
/**
* How long to wait before queuing the message into the current handler
* queue.
*/
private static final int IDLE_DELAY = 60000;
private MPDApplication app;
private MediaPlayer mediaPlayer;
private AudioManager audioManager;
/** This field will contain the URL of the MPD server streaming source */
private String streamSource;
private String prevMpdState;
/** Is MPD playing? */
private boolean isPlaying;
/** Keep track when mediaPlayer is preparing a stream */
private boolean preparingStreaming = false;
/**
* isPaused is required (along with isPlaying) so the service doesn't start
* when it's not wanted.
*/
private boolean isPaused;
/** Set up the message handler. */
private Handler delayedStopHandler = new Handler() {
@Override
public void handleMessage(Message msg) {
if (isPlaying || isPaused) {
return;
}
die();
}
};
/**
* Sets up the listener that allows MPDroid to override streaming behavior during
* phone call events.
*/
private PhoneStateListener phoneStateListener = new PhoneStateListener() {
@Override
public void onCallStateChanged(int state, String incomingNumber) {
if (!app.getApplicationState().streamingMode) {
stopSelf();
return;
}
if (state == TelephonyManager.CALL_STATE_RINGING) {
AudioManager audioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
int ringvolume = audioManager.getStreamVolume(AudioManager.STREAM_RING);
if (ringvolume > 0 && isPlaying) {
isPaused = true;
stopStreaming();
}
} else if (state == TelephonyManager.CALL_STATE_OFFHOOK) {
// pause the music while a conversation is in progress
if (!isPlaying) {
return;
}
isPaused = (isPaused || isPlaying) && (app.getApplicationState().streamingMode);
stopStreaming();
} else if (state == TelephonyManager.CALL_STATE_IDLE) {
// Resume playback only if music was playing when the call was
// answered
if (isPaused) {
// resume play back only if music was playing
// when the call was answered
beginStreaming();
}
}
}
};
/**
* Set up a handler for an Android MediaPlayer bug, for more
* information, see the target in beginStreaming().
*/
private Handler delayedPlayHandler = new Handler() {
@Override
public void handleMessage(Message msg) {
mediaPlayer.prepareAsync();
}
};
/**
* Field containing the ID used to stopSelfResult() which will stop the
* streaming service.
*/
private Integer lastStartID;
private String getState() {
Log.d(TAG, "getState()");
String state = null;
try {
state = app.oMPDAsyncHelper.oMPD.getStatus().getState();
} catch (MPDServerException e) {
Log.w(TAG, "Failed to get the current MPD state.", e);
}
return state;
}
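/**
* Editor's illustrative sketch, not part of the original source: how a client
* might drive this service using only the public actions and extras declared
* above. The method name and the choice of commands are hypothetical, and this
* assumes streaming mode has already been enabled by the application.
* @param context any valid context supplied by the hypothetical caller
*/
private static void exampleClientUsage(final Context context) {
// ask the service to begin streaming
final Intent start = new Intent(context, StreamingService.class);
start.setAction(ACTION_START);
context.startService(start);
// later, send a remote play/pause toggle
final Intent toggle = new Intent(context, StreamingService.class);
toggle.setAction(CMD_REMOTE);
toggle.putExtra(CMD_COMMAND, CMD_PLAYPAUSE);
context.startService(toggle);
}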
/**
* If streaming mode is activated this will set up the Android mediaPlayer
* framework, register the media button events, register the remote control
* client, then set up the framework streaming.
*/
private void beginStreaming() {
Log.d(TAG, "StreamingService.beginStreaming()");
// just to be sure, we do not want to start when we're not supposed to
if (mediaPlayer == null || preparingStreaming || mediaPlayer.isPlaying() ||
!app.getApplicationState().streamingMode) {
Log.d(TAG, "beginStreaming() called while preparation already in progress.");
return;
}
sendIntent(NotificationService.STREAM_BUFFERING_BEGIN, NotificationService.class);
preparingStreaming = true;
mediaPlayer.reset();
mediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
try {
mediaPlayer.setDataSource(streamSource);
} catch (IOException e) {
/**
* TODO: Notify the user
*/
endBuffering();
isPlaying = false;
} catch (IllegalStateException e) {
// wtf what state ?
isPlaying = false;
preparingStreaming = false;
}
/**
* With MediaPlayer, there is a racy bug which affects, minimally, Android KitKat and lower.
* If mediaPlayer.prepareAsync() is called too soon after mediaPlayer.setDataSource(), and
* after the initial mediaPlayer.play(), general and non-specific errors are usually emitted
* for the first few hundred milliseconds.
*
* Sometimes these errors result only in nagging log noise, sometimes they result in
* unrecoverable errors. This handler sets up a 1.5 second delay between
* mediaPlayer.setDataSource() and mediaPlayer.prepareAsync(), whether or not this is the
* first play after service start.
*
* The magic number here can be adjusted if there are any more problems. I have witnessed
* these errors occur at 750ms, but never higher. Doubling that is worthwhile; even in
* optimal conditions, stream buffering is pretty slow anyhow. Adjust if necessary.
*/
Message msg = delayedPlayHandler.obtainMessage();
delayedPlayHandler.sendMessageDelayed(msg, 1500);
}
@Override
public void connectionFailed(String message) {
}
@Override
public void connectionStateChanged(boolean connected, boolean connectionLost) {
}
@Override
public void connectionSucceeded(String message) {
}
/**
* This turns streaming mode off and stops the StreamingService.
*/
private void die() {
Log.d(TAG, "StreamingService.die()");
onDestroy();
stopSelfResult(lastStartID);
}
/** A method to send a quick message to another class. */
private void sendIntent(String msg, Class dest) {
Log.d(TAG, "Sending intent " + msg + " to " + dest + ".");
Intent i = new Intent(this, dest);
i.setAction(msg);
this.startService(i);
}
/**
* Send a message to the NotificationService to let it know to end the buffering banner.
*/
private void endBuffering() {
Log.d(TAG, "StreamingService.endBuffering()");
sendIntent(NotificationService.STREAM_BUFFERING_END, NotificationService.class);
}
/**
* A JMPDComm callback to be invoked during library state changes.
*
* @param updating true when updating, false when not updating.
*/
@Override
public void libraryStateChanged(boolean updating) {
}
/**
* This sends the next command to MPD, stops and resumes streaming.
*/
private void next() {
Log.d(TAG, "StreamingService.next()");
MPD mpd = app.oMPDAsyncHelper.oMPD;
try {
mpd.next();
} catch (MPDServerException e) {
/** Do nothing. */
}
}
/**
* Handle the change of volume if a notification, or any other kind of
* interrupting audio event.
*/
@Override
public void onAudioFocusChange(int focusChange) {
Log.d(TAG, "StreamingService.onAudioFocusChange()");
if (focusChange == AudioManager.AUDIOFOCUS_LOSS_TRANSIENT) {
mediaPlayer.setVolume(0.2f, 0.2f);
} else if (focusChange == AudioManager.AUDIOFOCUS_GAIN) {
mediaPlayer.setVolume(1f, 1f);
} else if (focusChange == AudioManager.AUDIOFOCUS_LOSS) {
stopStreaming();
}
}
@Override
public IBinder onBind(Intent intent) {
return null;
}
/**
* A MediaPlayer callback to be invoked when playback of a media source has completed.
*
* @param mp the MediaPlayer that reached the end of the file
*/
@Override
public void onCompletion(MediaPlayer mp) {
Log.d(TAG, "StreamingService.onCompletion()");
Message msg = delayedStopHandler.obtainMessage();
delayedStopHandler.sendMessageDelayed(msg, IDLE_DELAY);
// Something's happening, like a flaky network or MPD just stopped.
die();
}
public void onCreate() {
Log.d(TAG, "StreamingService.onCreate()");
super.onCreate();
app = (MPDApplication) getApplication();
StrictMode.ThreadPolicy policy = new StrictMode.ThreadPolicy.Builder().permitAll().build();
StrictMode.setThreadPolicy(policy);
/** If streaming mode is not enabled, return */
if (app == null || !app.getApplicationState().streamingMode) {
stopSelf();
return;
}
mediaPlayer = new MediaPlayer();
audioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
prevMpdState = "";
lastStartID = 0;
mediaPlayer.setOnCompletionListener(this);
mediaPlayer.setOnPreparedListener(this);
mediaPlayer.setOnErrorListener(this);
if (audioManager.requestAudioFocus(this, AudioManager.STREAM_MUSIC,
AudioManager.AUDIOFOCUS_GAIN) == AudioManager.AUDIOFOCUS_REQUEST_FAILED) {
Toast.makeText(this, R.string.audioFocusFailed, Toast.LENGTH_LONG).show();
stopStreaming();
}
TelephonyManager tmgr = (TelephonyManager) getSystemService(Context.TELEPHONY_SERVICE);
tmgr.listen(phoneStateListener, PhoneStateListener.LISTEN_CALL_STATE);
app.oMPDAsyncHelper.addStatusChangeListener(this);
app.oMPDAsyncHelper.addConnectionListener(this);
app.setActivity(this);
streamSource = "http://"
+ app.oMPDAsyncHelper.getConnectionSettings().getConnectionStreamingServer() + ":"
+ app.oMPDAsyncHelper.getConnectionSettings().iPortStreaming + "/"
+ app.oMPDAsyncHelper.getConnectionSettings().sSuffixStreaming;
/** Seed the prevMpdState, onStatusUpdate() will keep it up-to-date afterwards. */
prevMpdState = getState();
isPlaying = MPDStatus.MPD_STATE_PLAYING.equals(prevMpdState);
}
@Override
public void onDestroy() {
Log.d(TAG, "StreamingSerice.onDestroy()");
delayedStopHandler.removeCallbacksAndMessages(null);
/** Remove the current MPD listeners */
app.oMPDAsyncHelper.removeStatusChangeListener(this);
app.oMPDAsyncHelper.removeConnectionListener(this);
if (audioManager != null) {
audioManager.abandonAudioFocus(this);
}
if (mediaPlayer != null) {
if (mediaPlayer.isPlaying()) {
stopStreaming();
}
mediaPlayer.reset();
mediaPlayer.release();
mediaPlayer = null;
}
app.unsetActivity(this);
app.getApplicationState().streamingMode = false;
super.onDestroy();
}
/**
* A MediaPlayer callback to be invoked when there has been an error during an asynchronous
* operation (other errors will throw exceptions at method call time).
*
* @param mp the MediaPlayer the error pertains to.
* @param what the type of error that has occurred.
* @param extra an extra code, specific to the error. Typically implementation dependent.
* @return True if the method handled the error, false if it didn't. Returning false, or not
* having an OnErrorListener at all, will cause the OnCompletionListener to be called.
*/
@Override
public boolean onError(MediaPlayer mp, int what, int extra) {
Log.d(TAG, "StreamingService.onError()");
stopStreaming();
beginStreaming();
return true;
}
/**
* A MediaPlayer callback to be invoked when the media source is ready for playback.
*/
@Override
public void onPrepared(MediaPlayer mp) {
Log.d(TAG, "StreamingService.onPrepared()");
sendIntent(NotificationService.STREAM_BUFFERING_END, NotificationService.class);
prevMpdState = "";
mediaPlayer.start();
preparingStreaming = false;
}
/**
* Called by the system every time a client explicitly starts the service
* by calling startService(Intent).
*
* @param intent The Intent supplied to startService(Intent), as given. This may be null if
* the
* service is being restarted after its process has gone away, and it had
* previously returned anything except START_STICKY_COMPATIBILITY.
* @param flags Additional data about this start request. Currently either 0,
* START_FLAG_REDELIVERY, or START_FLAG_RETRY.
* @param startId A unique integer representing this specific request to start. Use with
* stopSelfResult(int).
* @return The return value indicates what semantics the system should use for the service's
* current started state.
*/
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
Log.d(TAG, "StreamingService.onStartCommand()");
lastStartID = startId;
if (!app.getApplicationState().streamingMode) {
stopSelfResult(lastStartID);
return 0;
}
switch (intent.getAction()) {
case ACTION_DIE:
die();
break;
case ACTION_RESET:
stopStreaming();
beginStreaming();
break;
case ACTION_START:
beginStreaming();
break;
case ACTION_STOP:
stopStreaming();
break;
case CMD_REMOTE:
String cmd = intent.getStringExtra(CMD_COMMAND);
switch (cmd) {
case CMD_NEXT:
next();
break;
case CMD_PREV:
prev();
break;
case CMD_PLAYPAUSE:
if (isPaused) {
beginStreaming();
} else {
stopStreaming();
}
break;
case CMD_PAUSE:
stopStreaming();
break;
}
}
/**
* We want this service to continue running until it is explicitly
* stopped, so return sticky.
*/
return START_STICKY;
}
@Override
public void playlistChanged(MPDStatus mpdStatus, int oldPlaylistVersion) {
}
/**
* This sends the previous command to MPD, stops and resumes streaming.
*/
private void prev() {
Log.d(TAG, "StreamingService.prev()");
MPD mpd = app.oMPDAsyncHelper.oMPD;
try {
mpd.previous();
} catch (MPDServerException e) {
}
}
@Override
public void randomChanged(boolean random) {
}
@Override
public void repeatChanged(boolean repeating) {
}
@Override
public void stateChanged(MPDStatus mpdStatus, String oldState) {
Log.d(TAG, "StreamingService.stateChanged()");
Message msg = delayedStopHandler.obtainMessage();
delayedStopHandler.sendMessageDelayed(msg, IDLE_DELAY);
final String state = mpdStatus.getState();
if (state == null || state.equals(prevMpdState)) {
return;
}
isPlaying = MPDStatus.MPD_STATE_PLAYING.equals(state);
prevMpdState = state;
if (isPlaying) {
beginStreaming();
} else {
stopStreaming();
}
}
private void stopStreaming() {
Log.d(TAG, "StreamingService.stopStreaming()");
prevMpdState = "";
if (mediaPlayer == null) {
return;
}
mediaPlayer.stop();
/** Send a message to the NotificationService that streaming is ending */
sendIntent(NotificationService.ACTION_STREAMING_END, NotificationService.class);
}
@Override
public void trackChanged(MPDStatus mpdStatus, int oldTrack) {
Log.d(TAG, "StreamingService.trackChanged()");
prevMpdState = "";
}
@Override
public void volumeChanged(MPDStatus mpdStatus, int oldVolume) {
}
}
|
MPDroid/src/com/namelessdev/mpdroid/StreamingService.java
|
/*
* Copyright (C) 2010-2014 The MPDroid Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.namelessdev.mpdroid;
import com.namelessdev.mpdroid.helpers.MPDAsyncHelper.ConnectionListener;
import org.a0z.mpd.MPD;
import org.a0z.mpd.MPDStatus;
import org.a0z.mpd.event.StatusChangeListener;
import org.a0z.mpd.exception.MPDServerException;
import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.media.AudioManager;
import android.media.AudioManager.OnAudioFocusChangeListener;
import android.media.MediaPlayer;
import android.media.MediaPlayer.OnCompletionListener;
import android.media.MediaPlayer.OnErrorListener;
import android.media.MediaPlayer.OnPreparedListener;
import android.os.Handler;
import android.os.IBinder;
import android.os.Message;
import android.os.StrictMode;
import android.telephony.PhoneStateListener;
import android.telephony.TelephonyManager;
import android.util.Log;
import android.widget.Toast;
import java.io.IOException;
/**
* StreamingService hooks Android's audio framework to MPD's streaming server to
* allow local audio playback.
*
* @author Arnaud Barisain Monrose (Dream_Team)
* @version $Id: $
*/
public class StreamingService extends Service implements
/**
* OnInfoListener is not used because it is broken (never gets called, ever).
* OnBufferingUpdateListener is not used because it depends on a stream completion time.
*/
ConnectionListener,
OnAudioFocusChangeListener,
OnCompletionListener,
OnErrorListener,
OnPreparedListener,
StatusChangeListener {
public static final String ACTION_DIE = "com.namelessdev.mpdroid.DIE";
public static final String ACTION_START = "com.namelessdev.mpdroid.START_STREAMING";
public static final String ACTION_STOP = "com.namelessdev.mpdroid.STOP_STREAMING";
public static final String ACTION_RESET = "com.namelessdev.mpdroid.RESET_STREAMING";
public static final String CMD_REMOTE = "com.namelessdev.mpdroid.REMOTE_COMMAND";
public static final String CMD_COMMAND = "COMMAND";
public static final String CMD_PAUSE = "PAUSE";
public static final String CMD_STOP = "STOP";
public static final String CMD_PLAY = "PLAY";
public static final String CMD_PLAYPAUSE = "PLAYPAUSE";
public static final String CMD_PREV = "PREV";
public static final String CMD_NEXT = "NEXT";
public static final String CMD_DIE = "DIE"; // Just in case
static final String TAG = "MPDroidStreamingService";
/**
* How long to wait before queuing the message into the current handler
* queue.
*/
private static final int IDLE_DELAY = 60000;
private MPDApplication app;
private MediaPlayer mediaPlayer;
private AudioManager audioManager;
/** This field will contain the URL of the MPD server streaming source */
private String streamSource;
private String prevMpdState;
/** Is MPD playing? */
private boolean isPlaying;
/** Keep track when mediaPlayer is preparing a stream */
private boolean preparingStreaming = false;
/**
* isPaused is required (along with isPlaying) so the service doesn't start
* when it's not wanted.
*/
private boolean isPaused;
/** Set up the message handler. */
private Handler delayedStopHandler = new Handler() {
@Override
public void handleMessage(Message msg) {
if (isPlaying || isPaused) {
return;
}
die();
}
};
/**
* Sets up the listener that allows MPDroid to override streaming behavior during
* phone call events.
*/
private PhoneStateListener phoneStateListener = new PhoneStateListener() {
@Override
public void onCallStateChanged(int state, String incomingNumber) {
if (!app.getApplicationState().streamingMode) {
stopSelf();
return;
}
if (state == TelephonyManager.CALL_STATE_RINGING) {
AudioManager audioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
int ringvolume = audioManager.getStreamVolume(AudioManager.STREAM_RING);
if (ringvolume > 0 && isPlaying) {
isPaused = true;
stopStreaming();
}
} else if (state == TelephonyManager.CALL_STATE_OFFHOOK) {
// pause the music while a conversation is in progress
if (!isPlaying) {
return;
}
isPaused = (isPaused || isPlaying) && (app.getApplicationState().streamingMode);
stopStreaming();
} else if (state == TelephonyManager.CALL_STATE_IDLE) {
// Resume playback only if music was playing when the call was
// answered
if (isPaused) {
// resume play back only if music was playing
// when the call was answered
beginStreaming();
}
}
}
};
/**
* Set up a handler for an Android MediaPlayer bug, for more
* information, see the target in beginStreaming().
*/
private Handler delayedPlayHandler = new Handler() {
@Override
public void handleMessage(Message msg) {
mediaPlayer.prepareAsync();
}
};
/**
* Field containing the ID used to stopSelfResult() which will stop the
* streaming service.
*/
private Integer lastStartID;
/**
* If streaming mode is activated this will set up the Android mediaPlayer
* framework, register the media button events, register the remote control
* client, then set up the framework streaming.
*/
private void beginStreaming() {
Log.d(TAG, "StreamingService.beginStreaming()");
// just to be sure, we do not want to start when we're not supposed to
if (mediaPlayer == null || preparingStreaming || mediaPlayer.isPlaying() ||
!app.getApplicationState().streamingMode) {
Log.d(TAG, "beginStreaming() called while preparation already in progress.");
return;
}
sendIntent(NotificationService.STREAM_BUFFERING_BEGIN, NotificationService.class);
preparingStreaming = true;
isPlaying = true;
isPaused = false;
mediaPlayer.reset();
mediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
try {
mediaPlayer.setDataSource(streamSource);
} catch (IOException e) {
/**
* TODO: Notify the user
*/
endBuffering();
isPlaying = false;
} catch (IllegalStateException e) {
// wtf what state ?
isPlaying = false;
preparingStreaming = false;
}
/**
* With MediaPlayer, there is a racy bug which affects, minimally, Android KitKat and lower.
* If mediaPlayer.prepareAsync() is called too soon after mediaPlayer.setDataSource(), and
* after the initial mediaPlayer.play(), general and non-specific errors are usually emitted
* for the first few hundred milliseconds.
*
* Sometimes these errors result only in nagging log noise, sometimes they result in
* unrecoverable errors. This handler sets up a 1.5 second delay between
* mediaPlayer.setDataSource() and mediaPlayer.prepareAsync(), whether or not this is the
* first play after service start.
*
* The magic number here can be adjusted if there are any more problems. I have witnessed
* these errors occur at 750ms, but never higher. Doubling that is worthwhile; even in
* optimal conditions, stream buffering is pretty slow anyhow. Adjust if necessary.
*/
Message msg = delayedPlayHandler.obtainMessage();
delayedPlayHandler.sendMessageDelayed(msg, 1500);
}
@Override
public void connectionFailed(String message) {
}
@Override
public void connectionStateChanged(boolean connected, boolean connectionLost) {
}
@Override
public void connectionSucceeded(String message) {
}
/**
* This turns streaming mode off and stops the StreamingService.
*/
private void die() {
Log.d(TAG, "StreamingService.die()");
onDestroy();
stopSelfResult(lastStartID);
}
/** A method to send a quick message to another class. */
private void sendIntent(String msg, Class dest) {
Log.d(TAG, "Sending intent " + msg + " to " + dest + ".");
Intent i = new Intent(this, dest);
i.setAction(msg);
this.startService(i);
}
/**
* Send a message to the NotificationService to let it know to end the buffering banner.
*/
private void endBuffering() {
Log.d(TAG, "StreamingService.endBuffering()");
sendIntent(NotificationService.STREAM_BUFFERING_END, NotificationService.class);
}
/**
* A JMPDComm callback to be invoked during library state changes.
*
* @param updating true when updating, false when not updating.
*/
@Override
public void libraryStateChanged(boolean updating) {
}
/**
* This sends the next command to MPD, stops and resumes streaming.
*/
private void next() {
Log.d(TAG, "StreamingService.next()");
MPD mpd = app.oMPDAsyncHelper.oMPD;
try {
mpd.next();
} catch (MPDServerException e) {
/** Do nothing. */
}
}
/**
* Handle the change of volume if a notification, or any other kind of
* interrupting audio event.
*/
@Override
public void onAudioFocusChange(int focusChange) {
Log.d(TAG, "StreamingService.onAudioFocusChange()");
if (focusChange == AudioManager.AUDIOFOCUS_LOSS_TRANSIENT) {
mediaPlayer.setVolume(0.2f, 0.2f);
} else if (focusChange == AudioManager.AUDIOFOCUS_GAIN) {
mediaPlayer.setVolume(1f, 1f);
} else if (focusChange == AudioManager.AUDIOFOCUS_LOSS) {
stopStreaming();
}
}
@Override
public IBinder onBind(Intent intent) {
return null;
}
/**
* A MediaPlayer callback to be invoked when playback of a media source has completed.
*
* @param mp the MediaPlayer that reached the end of the file
*/
@Override
public void onCompletion(MediaPlayer mp) {
Log.d(TAG, "StreamingService.onCompletion()");
Message msg = delayedStopHandler.obtainMessage();
delayedStopHandler.sendMessageDelayed(msg, IDLE_DELAY);
// Something's happening, like a flaky network or MPD just stopped.
die();
}
public void onCreate() {
Log.d(TAG, "StreamingService.onCreate()");
super.onCreate();
app = (MPDApplication) getApplication();
StrictMode.ThreadPolicy policy = new StrictMode.ThreadPolicy.Builder().permitAll().build();
StrictMode.setThreadPolicy(policy);
/** If streaming mode is not enabled, return */
if (app == null || !app.getApplicationState().streamingMode) {
stopSelf();
return;
}
mediaPlayer = new MediaPlayer();
audioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
prevMpdState = "";
isPlaying = true;
isPaused = false;
lastStartID = 0;
mediaPlayer.setOnCompletionListener(this);
mediaPlayer.setOnPreparedListener(this);
mediaPlayer.setOnErrorListener(this);
if (audioManager.requestAudioFocus(this, AudioManager.STREAM_MUSIC,
AudioManager.AUDIOFOCUS_GAIN) == AudioManager.AUDIOFOCUS_REQUEST_FAILED) {
Toast.makeText(this, R.string.audioFocusFailed, Toast.LENGTH_LONG).show();
stopStreaming();
}
TelephonyManager tmgr = (TelephonyManager) getSystemService(Context.TELEPHONY_SERVICE);
tmgr.listen(phoneStateListener, PhoneStateListener.LISTEN_CALL_STATE);
app.oMPDAsyncHelper.addStatusChangeListener(this);
app.oMPDAsyncHelper.addConnectionListener(this);
app.setActivity(this);
streamSource = "http://"
+ app.oMPDAsyncHelper.getConnectionSettings().getConnectionStreamingServer() + ":"
+ app.oMPDAsyncHelper.getConnectionSettings().iPortStreaming + "/"
+ app.oMPDAsyncHelper.getConnectionSettings().sSuffixStreaming;
}
@Override
public void onDestroy() {
Log.d(TAG, "StreamingSerice.onDestroy()");
delayedStopHandler.removeCallbacksAndMessages(null);
/** Remove the current MPD listeners */
app.oMPDAsyncHelper.removeStatusChangeListener(this);
app.oMPDAsyncHelper.removeConnectionListener(this);
if (audioManager != null) {
audioManager.abandonAudioFocus(this);
}
if (mediaPlayer != null) {
if (mediaPlayer.isPlaying()) {
stopStreaming();
}
mediaPlayer.reset();
mediaPlayer.release();
mediaPlayer = null;
}
app.unsetActivity(this);
app.getApplicationState().streamingMode = false;
super.onDestroy();
}
/**
* A MediaPlayer callback to be invoked when there has been an error during an asynchronous
* operation (other errors will throw exceptions at method call time).
*
* @param mp the MediaPlayer the error pertains to.
* @param what the type of error that has occurred.
* @param extra an extra code, specific to the error. Typically implementation dependent.
* @return True if the method handled the error, false if it didn't. Returning false, or not
* having an OnErrorListener at all, will cause the OnCompletionListener to be called.
*/
@Override
public boolean onError(MediaPlayer mp, int what, int extra) {
Log.d(TAG, "StreamingService.onError()");
stopStreaming();
beginStreaming();
return true;
}
/**
* A MediaPlayer callback to be invoked when the media source is ready for playback.
*/
@Override
public void onPrepared(MediaPlayer mp) {
Log.d(TAG, "StreamingService.onPrepared()");
sendIntent(NotificationService.STREAM_BUFFERING_END, NotificationService.class);
prevMpdState = "";
mediaPlayer.start();
preparingStreaming = false;
}
/**
* Called by the system every time a client explicitly starts the service
* by calling startService(Intent).
*
* @param intent The Intent supplied to startService(Intent), as given. This may be null if
* the
* service is being restarted after its process has gone away, and it had
* previously returned anything except START_STICKY_COMPATIBILITY.
* @param flags Additional data about this start request. Currently either 0,
* START_FLAG_REDELIVERY, or START_FLAG_RETRY.
* @param startId A unique integer representing this specific request to start. Use with
* stopSelfResult(int).
* @return The return value indicates what semantics the system should use for the service's
* current started state.
*/
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
Log.d(TAG, "StreamingService.onStartCommand()");
lastStartID = startId;
if (!app.getApplicationState().streamingMode) {
stopSelfResult(lastStartID);
return 0;
}
switch (intent.getAction()) {
case ACTION_DIE:
die();
break;
case ACTION_RESET:
stopStreaming();
beginStreaming();
break;
case ACTION_START:
beginStreaming();
break;
case ACTION_STOP:
stopStreaming();
break;
case CMD_REMOTE:
String cmd = intent.getStringExtra(CMD_COMMAND);
switch (cmd) {
case CMD_NEXT:
next();
break;
case CMD_PREV:
prev();
break;
case CMD_PLAYPAUSE:
if (isPaused) {
beginStreaming();
} else {
stopStreaming();
}
break;
case CMD_PAUSE:
stopStreaming();
break;
}
}
/**
* We want this service to continue running until it is explicitly
* stopped, so return sticky.
*/
return START_STICKY;
}
@Override
public void playlistChanged(MPDStatus mpdStatus, int oldPlaylistVersion) {
}
/**
* This sends the previous command to MPD, stops and resumes streaming.
*/
private void prev() {
Log.d(TAG, "StreamingService.prev()");
MPD mpd = app.oMPDAsyncHelper.oMPD;
try {
mpd.previous();
} catch (MPDServerException e) {
}
}
@Override
public void randomChanged(boolean random) {
}
@Override
public void repeatChanged(boolean repeating) {
}
@Override
public void stateChanged(MPDStatus mpdStatus, String oldState) {
Log.d(TAG, "StreamingService.stateChanged()");
Message msg = delayedStopHandler.obtainMessage();
delayedStopHandler.sendMessageDelayed(msg, IDLE_DELAY);
MPDStatus statusMpd = null;
try {
statusMpd = app.oMPDAsyncHelper.oMPD.getStatus();
} catch (MPDServerException e) {
// TODO: Properly handle exception for getStatus() failure.
}
if (statusMpd == null) {
return;
}
String state = statusMpd.getState();
if (state == null || state.equals(prevMpdState)) {
return;
}
if (state.equals(MPDStatus.MPD_STATE_PLAYING)) {
isPaused = false;
beginStreaming();
isPlaying = true;
} else {
prevMpdState = state;
isPlaying = false;
stopStreaming();
}
}
private void stopStreaming() {
Log.d(TAG, "StreamingService.stopStreaming()");
prevMpdState = "";
if (mediaPlayer == null) {
return;
}
mediaPlayer.stop();
/** Send a message to the NotificationService that streaming is ending */
sendIntent(NotificationService.ACTION_STREAMING_END, NotificationService.class);
}
@Override
public void trackChanged(MPDStatus mpdStatus, int oldTrack) {
Log.d(TAG, "StreamingService.trackChanged()");
prevMpdState = "";
}
@Override
public void volumeChanged(MPDStatus mpdStatus, int oldVolume) {
}
}
|
StreamingService: Set isPlaying() dynamically.
|
MPDroid/src/com/namelessdev/mpdroid/StreamingService.java
|
StreamingService: Set isPlaying() dynamically.
|
|
Java
|
apache-2.0
|
41d0ae5fd2a3071688d466eea22ee9b011e45bb3
| 0
|
allotria/intellij-community,akosyakov/intellij-community,gnuhub/intellij-community,allotria/intellij-community,suncycheng/intellij-community,ahb0327/intellij-community,suncycheng/intellij-community,asedunov/intellij-community,gnuhub/intellij-community,asedunov/intellij-community,lucafavatella/intellij-community,nicolargo/intellij-community,allotria/intellij-community,ibinti/intellij-community,slisson/intellij-community,petteyg/intellij-community,gnuhub/intellij-community,Lekanich/intellij-community,MichaelNedzelsky/intellij-community,youdonghai/intellij-community,alphafoobar/intellij-community,supersven/intellij-community,apixandru/intellij-community,petteyg/intellij-community,tmpgit/intellij-community,Distrotech/intellij-community,ivan-fedorov/intellij-community,ol-loginov/intellij-community,fnouama/intellij-community,allotria/intellij-community,youdonghai/intellij-community,vladmm/intellij-community,nicolargo/intellij-community,michaelgallacher/intellij-community,diorcety/intellij-community,jagguli/intellij-community,ThiagoGarciaAlves/intellij-community,ibinti/intellij-community,diorcety/intellij-community,petteyg/intellij-community,wreckJ/intellij-community,supersven/intellij-community,signed/intellij-community,diorcety/intellij-community,kdwink/intellij-community,FHannes/intellij-community,samthor/intellij-community,pwoodworth/intellij-community,FHannes/intellij-community,samthor/intellij-community,allotria/intellij-community,Distrotech/intellij-community,semonte/intellij-community,petteyg/intellij-community,ftomassetti/intellij-community,slisson/intellij-community,apixandru/intellij-community,lucafavatella/intellij-community,caot/intellij-community,allotria/intellij-community,ftomassetti/intellij-community,TangHao1987/intellij-community,caot/intellij-community,ivan-fedorov/intellij-community,holmes/intellij-community,holmes/intellij-community,izonder/intellij-community,alphafoobar/intellij-community,ivan-fedorov/intellij-community,robovm/robovm-studio,muntasirsyed/intellij-community,akosyakov/intellij-community,muntasirsyed/intellij-community,robovm/robovm-studio,fitermay/intellij-community,ol-loginov/intellij-community,youdonghai/intellij-community,ryano144/intellij-community,signed/intellij-community,mglukhikh/intellij-community,MichaelNedzelsky/intellij-community,izonder/intellij-community,SerCeMan/intellij-community,tmpgit/intellij-community,izonder/intellij-community,ol-loginov/intellij-community,vladmm/intellij-community,pwoodworth/intellij-community,holmes/intellij-community,ibinti/intellij-community,vvv1559/intellij-community,blademainer/intellij-community,supersven/intellij-community,ThiagoGarciaAlves/intellij-community,adedayo/intellij-community,akosyakov/intellij-community,Distrotech/intellij-community,tmpgit/intellij-community,ahb0327/intellij-community,izonder/intellij-community,adedayo/intellij-community,holmes/intellij-community,blademainer/intellij-community,xfournet/intellij-community,asedunov/intellij-community,youdonghai/intellij-community,caot/intellij-community,MER-GROUP/intellij-community,signed/intellij-community,apixandru/intellij-community,Lekanich/intellij-community,fengbaicanhe/intellij-community,FHannes/intellij-community,youdonghai/intellij-community,clumsy/intellij-community,apixandru/intellij-community,hurricup/intellij-community,petteyg/intellij-community,pwoodworth/intellij-community,MER-GROUP/intellij-community,dslomov/intellij-community,slisson/intellij-community,fnouama/intellij-community,mglukhikh/intellij-community,signed/intellij-community,alphafo
obar/intellij-community,ahb0327/intellij-community,ryano144/intellij-community,ivan-fedorov/intellij-community,tmpgit/intellij-community,ryano144/intellij-community,MER-GROUP/intellij-community,mglukhikh/intellij-community,jagguli/intellij-community,FHannes/intellij-community,retomerz/intellij-community,semonte/intellij-community,Distrotech/intellij-community,idea4bsd/idea4bsd,nicolargo/intellij-community,salguarnieri/intellij-community,gnuhub/intellij-community,fengbaicanhe/intellij-community,orekyuu/intellij-community,hurricup/intellij-community,signed/intellij-community,vvv1559/intellij-community,ryano144/intellij-community,hurricup/intellij-community,apixandru/intellij-community,gnuhub/intellij-community,petteyg/intellij-community,ivan-fedorov/intellij-community,TangHao1987/intellij-community,FHannes/intellij-community,TangHao1987/intellij-community,Distrotech/intellij-community,michaelgallacher/intellij-community,orekyuu/intellij-community,ivan-fedorov/intellij-community,SerCeMan/intellij-community,orekyuu/intellij-community,retomerz/intellij-community,akosyakov/intellij-community,orekyuu/intellij-community,vladmm/intellij-community,mglukhikh/intellij-community,slisson/intellij-community,michaelgallacher/intellij-community,nicolargo/intellij-community,ryano144/intellij-community,ftomassetti/intellij-community,amith01994/intellij-community,da1z/intellij-community,retomerz/intellij-community,allotria/intellij-community,fnouama/intellij-community,blademainer/intellij-community,robovm/robovm-studio,ryano144/intellij-community,robovm/robovm-studio,TangHao1987/intellij-community,ol-loginov/intellij-community,apixandru/intellij-community,retomerz/intellij-community,xfournet/intellij-community,dslomov/intellij-community,tmpgit/intellij-community,supersven/intellij-community,kool79/intellij-community,da1z/intellij-community,Lekanich/intellij-community,kdwink/intellij-community,retomerz/intellij-community,nicolargo/intellij-community,ftomassetti/intellij-community,ivan-fedorov/intellij-community,caot/intellij-community,dslomov/intellij-community,semonte/intellij-community,kool79/intellij-community,fitermay/intellij-community,samthor/intellij-community,blademainer/intellij-community,da1z/intellij-community,hurricup/intellij-community,jagguli/intellij-community,retomerz/intellij-community,fnouama/intellij-community,da1z/intellij-community,MichaelNedzelsky/intellij-community,izonder/intellij-community,supersven/intellij-community,ol-loginov/intellij-community,amith01994/intellij-community,wreckJ/intellij-community,semonte/intellij-community,alphafoobar/intellij-community,wreckJ/intellij-community,asedunov/intellij-community,ahb0327/intellij-community,Lekanich/intellij-community,ahb0327/intellij-community,apixandru/intellij-community,kool79/intellij-community,vvv1559/intellij-community,ibinti/intellij-community,orekyuu/intellij-community,clumsy/intellij-community,petteyg/intellij-community,allotria/intellij-community,ibinti/intellij-community,xfournet/intellij-community,suncycheng/intellij-community,gnuhub/intellij-community,clumsy/intellij-community,hurricup/intellij-community,diorcety/intellij-community,vladmm/intellij-community,petteyg/intellij-community,lucafavatella/intellij-community,ftomassetti/intellij-community,jagguli/intellij-community,xfournet/intellij-community,muntasirsyed/intellij-community,adedayo/intellij-community,retomerz/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,amith01994/intellij-community,allotria/intellij-community,muntasirsyed/intellij-comm
unity,tmpgit/intellij-community,dslomov/intellij-community,blademainer/intellij-community,hurricup/intellij-community,SerCeMan/intellij-community,fitermay/intellij-community,fengbaicanhe/intellij-community,Distrotech/intellij-community,supersven/intellij-community,ThiagoGarciaAlves/intellij-community,hurricup/intellij-community,retomerz/intellij-community,samthor/intellij-community,slisson/intellij-community,youdonghai/intellij-community,salguarnieri/intellij-community,signed/intellij-community,nicolargo/intellij-community,SerCeMan/intellij-community,blademainer/intellij-community,MichaelNedzelsky/intellij-community,jagguli/intellij-community,amith01994/intellij-community,diorcety/intellij-community,caot/intellij-community,MichaelNedzelsky/intellij-community,fengbaicanhe/intellij-community,asedunov/intellij-community,clumsy/intellij-community,ThiagoGarciaAlves/intellij-community,izonder/intellij-community,ahb0327/intellij-community,dslomov/intellij-community,alphafoobar/intellij-community,fnouama/intellij-community,lucafavatella/intellij-community,hurricup/intellij-community,fitermay/intellij-community,adedayo/intellij-community,clumsy/intellij-community,vvv1559/intellij-community,vladmm/intellij-community,Lekanich/intellij-community,blademainer/intellij-community,diorcety/intellij-community,izonder/intellij-community,akosyakov/intellij-community,ivan-fedorov/intellij-community,MER-GROUP/intellij-community,mglukhikh/intellij-community,fitermay/intellij-community,vladmm/intellij-community,orekyuu/intellij-community,orekyuu/intellij-community,retomerz/intellij-community,semonte/intellij-community,fnouama/intellij-community,blademainer/intellij-community,nicolargo/intellij-community,jagguli/intellij-community,Distrotech/intellij-community,ryano144/intellij-community,semonte/intellij-community,diorcety/intellij-community,da1z/intellij-community,michaelgallacher/intellij-community,SerCeMan/intellij-community,kdwink/intellij-community,vladmm/intellij-community,Distrotech/intellij-community,semonte/intellij-community,holmes/intellij-community,vvv1559/intellij-community,retomerz/intellij-community,fitermay/intellij-community,caot/intellij-community,gnuhub/intellij-community,FHannes/intellij-community,slisson/intellij-community,ahb0327/intellij-community,ibinti/intellij-community,ryano144/intellij-community,MER-GROUP/intellij-community,xfournet/intellij-community,xfournet/intellij-community,diorcety/intellij-community,gnuhub/intellij-community,clumsy/intellij-community,youdonghai/intellij-community,idea4bsd/idea4bsd,semonte/intellij-community,samthor/intellij-community,ryano144/intellij-community,samthor/intellij-community,pwoodworth/intellij-community,ibinti/intellij-community,michaelgallacher/intellij-community,xfournet/intellij-community,suncycheng/intellij-community,Distrotech/intellij-community,idea4bsd/idea4bsd,hurricup/intellij-community,michaelgallacher/intellij-community,dslomov/intellij-community,FHannes/intellij-community,xfournet/intellij-community,MER-GROUP/intellij-community,ryano144/intellij-community,robovm/robovm-studio,petteyg/intellij-community,hurricup/intellij-community,supersven/intellij-community,nicolargo/intellij-community,TangHao1987/intellij-community,retomerz/intellij-community,amith01994/intellij-community,ibinti/intellij-community,orekyuu/intellij-community,signed/intellij-community,vvv1559/intellij-community,kool79/intellij-community,FHannes/intellij-community,dslomov/intellij-community,alphafoobar/intellij-community,MichaelNedzelsky/intellij-community,fitermay/intelli
j-community,ol-loginov/intellij-community,TangHao1987/intellij-community,ThiagoGarciaAlves/intellij-community,idea4bsd/idea4bsd,idea4bsd/idea4bsd,tmpgit/intellij-community,ahb0327/intellij-community,signed/intellij-community,caot/intellij-community,da1z/intellij-community,pwoodworth/intellij-community,clumsy/intellij-community,tmpgit/intellij-community,caot/intellij-community,semonte/intellij-community,adedayo/intellij-community,idea4bsd/idea4bsd,dslomov/intellij-community,ryano144/intellij-community,petteyg/intellij-community,apixandru/intellij-community,youdonghai/intellij-community,kool79/intellij-community,vladmm/intellij-community,blademainer/intellij-community,amith01994/intellij-community,suncycheng/intellij-community,salguarnieri/intellij-community,youdonghai/intellij-community,adedayo/intellij-community,tmpgit/intellij-community,izonder/intellij-community,pwoodworth/intellij-community,FHannes/intellij-community,mglukhikh/intellij-community,slisson/intellij-community,mglukhikh/intellij-community,clumsy/intellij-community,ol-loginov/intellij-community,adedayo/intellij-community,da1z/intellij-community,supersven/intellij-community,pwoodworth/intellij-community,kool79/intellij-community,amith01994/intellij-community,ol-loginov/intellij-community,ahb0327/intellij-community,mglukhikh/intellij-community,ibinti/intellij-community,pwoodworth/intellij-community,Lekanich/intellij-community,jagguli/intellij-community,idea4bsd/idea4bsd,MichaelNedzelsky/intellij-community,ibinti/intellij-community,hurricup/intellij-community,ol-loginov/intellij-community,lucafavatella/intellij-community,Distrotech/intellij-community,ol-loginov/intellij-community,amith01994/intellij-community,TangHao1987/intellij-community,Distrotech/intellij-community,salguarnieri/intellij-community,diorcety/intellij-community,adedayo/intellij-community,alphafoobar/intellij-community,kool79/intellij-community,da1z/intellij-community,fengbaicanhe/intellij-community,holmes/intellij-community,akosyakov/intellij-community,Lekanich/intellij-community,diorcety/intellij-community,vvv1559/intellij-community,kdwink/intellij-community,muntasirsyed/intellij-community,kdwink/intellij-community,diorcety/intellij-community,Lekanich/intellij-community,mglukhikh/intellij-community,da1z/intellij-community,vladmm/intellij-community,SerCeMan/intellij-community,Distrotech/intellij-community,fnouama/intellij-community,ftomassetti/intellij-community,fnouama/intellij-community,muntasirsyed/intellij-community,kdwink/intellij-community,diorcety/intellij-community,dslomov/intellij-community,apixandru/intellij-community,samthor/intellij-community,robovm/robovm-studio,apixandru/intellij-community,gnuhub/intellij-community,suncycheng/intellij-community,kool79/intellij-community,tmpgit/intellij-community,idea4bsd/idea4bsd,xfournet/intellij-community,suncycheng/intellij-community,adedayo/intellij-community,robovm/robovm-studio,clumsy/intellij-community,da1z/intellij-community,semonte/intellij-community,wreckJ/intellij-community,supersven/intellij-community,asedunov/intellij-community,ol-loginov/intellij-community,alphafoobar/intellij-community,SerCeMan/intellij-community,wreckJ/intellij-community,izonder/intellij-community,retomerz/intellij-community,robovm/robovm-studio,slisson/intellij-community,lucafavatella/intellij-community,holmes/intellij-community,holmes/intellij-community,orekyuu/intellij-community,MER-GROUP/intellij-community,michaelgallacher/intellij-community,Lekanich/intellij-community,blademainer/intellij-community,lucafavatella/intellij-communi
ty,vladmm/intellij-community,ibinti/intellij-community,jagguli/intellij-community,vvv1559/intellij-community,asedunov/intellij-community,salguarnieri/intellij-community,MichaelNedzelsky/intellij-community,muntasirsyed/intellij-community,kdwink/intellij-community,SerCeMan/intellij-community,ftomassetti/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,salguarnieri/intellij-community,petteyg/intellij-community,signed/intellij-community,ThiagoGarciaAlves/intellij-community,muntasirsyed/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,kdwink/intellij-community,orekyuu/intellij-community,fitermay/intellij-community,fengbaicanhe/intellij-community,slisson/intellij-community,allotria/intellij-community,dslomov/intellij-community,ibinti/intellij-community,holmes/intellij-community,slisson/intellij-community,allotria/intellij-community,salguarnieri/intellij-community,samthor/intellij-community,FHannes/intellij-community,kool79/intellij-community,kool79/intellij-community,dslomov/intellij-community,fnouama/intellij-community,pwoodworth/intellij-community,wreckJ/intellij-community,semonte/intellij-community,izonder/intellij-community,ivan-fedorov/intellij-community,fengbaicanhe/intellij-community,Lekanich/intellij-community,idea4bsd/idea4bsd,xfournet/intellij-community,lucafavatella/intellij-community,petteyg/intellij-community,allotria/intellij-community,semonte/intellij-community,blademainer/intellij-community,xfournet/intellij-community,lucafavatella/intellij-community,jagguli/intellij-community,jagguli/intellij-community,mglukhikh/intellij-community,jagguli/intellij-community,wreckJ/intellij-community,gnuhub/intellij-community,TangHao1987/intellij-community,wreckJ/intellij-community,clumsy/intellij-community,lucafavatella/intellij-community,robovm/robovm-studio,michaelgallacher/intellij-community,retomerz/intellij-community,kool79/intellij-community,amith01994/intellij-community,vvv1559/intellij-community,salguarnieri/intellij-community,lucafavatella/intellij-community,supersven/intellij-community,slisson/intellij-community,fnouama/intellij-community,MER-GROUP/intellij-community,semonte/intellij-community,asedunov/intellij-community,samthor/intellij-community,MichaelNedzelsky/intellij-community,da1z/intellij-community,mglukhikh/intellij-community,muntasirsyed/intellij-community,michaelgallacher/intellij-community,orekyuu/intellij-community,michaelgallacher/intellij-community,dslomov/intellij-community,holmes/intellij-community,michaelgallacher/intellij-community,suncycheng/intellij-community,fnouama/intellij-community,mglukhikh/intellij-community,caot/intellij-community,signed/intellij-community,gnuhub/intellij-community,wreckJ/intellij-community,ThiagoGarciaAlves/intellij-community,TangHao1987/intellij-community,SerCeMan/intellij-community,fitermay/intellij-community,adedayo/intellij-community,MER-GROUP/intellij-community,akosyakov/intellij-community,vvv1559/intellij-community,fengbaicanhe/intellij-community,samthor/intellij-community,youdonghai/intellij-community,robovm/robovm-studio,clumsy/intellij-community,alphafoobar/intellij-community,ol-loginov/intellij-community,fitermay/intellij-community,supersven/intellij-community,fengbaicanhe/intellij-community,apixandru/intellij-community,salguarnieri/intellij-community,SerCeMan/intellij-community,akosyakov/intellij-community,pwoodworth/intellij-community,ivan-fedorov/intellij-community,Lekanich/intellij-community,salguarnieri/intellij-community,da1z/intellij-community,fitermay/intellij-com
munity,suncycheng/intellij-community,MER-GROUP/intellij-community,alphafoobar/intellij-community,asedunov/intellij-community,amith01994/intellij-community,amith01994/intellij-community,samthor/intellij-community,adedayo/intellij-community,TangHao1987/intellij-community,signed/intellij-community,amith01994/intellij-community,youdonghai/intellij-community,fnouama/intellij-community,salguarnieri/intellij-community,muntasirsyed/intellij-community,MichaelNedzelsky/intellij-community,ivan-fedorov/intellij-community,ftomassetti/intellij-community,caot/intellij-community,robovm/robovm-studio,suncycheng/intellij-community,hurricup/intellij-community,holmes/intellij-community,idea4bsd/idea4bsd,kdwink/intellij-community,signed/intellij-community,ryano144/intellij-community,MichaelNedzelsky/intellij-community,tmpgit/intellij-community,youdonghai/intellij-community,lucafavatella/intellij-community,idea4bsd/idea4bsd,jagguli/intellij-community,lucafavatella/intellij-community,MichaelNedzelsky/intellij-community,nicolargo/intellij-community,slisson/intellij-community,MER-GROUP/intellij-community,nicolargo/intellij-community,kool79/intellij-community,supersven/intellij-community,nicolargo/intellij-community,wreckJ/intellij-community,ftomassetti/intellij-community,ftomassetti/intellij-community,FHannes/intellij-community,salguarnieri/intellij-community,alphafoobar/intellij-community,suncycheng/intellij-community,hurricup/intellij-community,suncycheng/intellij-community,alphafoobar/intellij-community,holmes/intellij-community,ftomassetti/intellij-community,vvv1559/intellij-community,da1z/intellij-community,youdonghai/intellij-community,akosyakov/intellij-community,kdwink/intellij-community,kdwink/intellij-community,vvv1559/intellij-community,ftomassetti/intellij-community,ThiagoGarciaAlves/intellij-community,samthor/intellij-community,robovm/robovm-studio,gnuhub/intellij-community,ivan-fedorov/intellij-community,pwoodworth/intellij-community,apixandru/intellij-community,SerCeMan/intellij-community,izonder/intellij-community,vladmm/intellij-community,clumsy/intellij-community,ahb0327/intellij-community,fengbaicanhe/intellij-community,vladmm/intellij-community,ThiagoGarciaAlves/intellij-community,ahb0327/intellij-community,asedunov/intellij-community,signed/intellij-community,adedayo/intellij-community,tmpgit/intellij-community,akosyakov/intellij-community,SerCeMan/intellij-community,fitermay/intellij-community,idea4bsd/idea4bsd,ahb0327/intellij-community,wreckJ/intellij-community,ibinti/intellij-community,muntasirsyed/intellij-community,Lekanich/intellij-community,pwoodworth/intellij-community,asedunov/intellij-community,muntasirsyed/intellij-community,asedunov/intellij-community,izonder/intellij-community,xfournet/intellij-community,kdwink/intellij-community,wreckJ/intellij-community,asedunov/intellij-community,fitermay/intellij-community,MER-GROUP/intellij-community,michaelgallacher/intellij-community,fengbaicanhe/intellij-community,akosyakov/intellij-community,caot/intellij-community,ThiagoGarciaAlves/intellij-community,ThiagoGarciaAlves/intellij-community,akosyakov/intellij-community,blademainer/intellij-community,caot/intellij-community,fengbaicanhe/intellij-community,idea4bsd/idea4bsd,FHannes/intellij-community,nicolargo/intellij-community,orekyuu/intellij-community,FHannes/intellij-community,xfournet/intellij-community,TangHao1987/intellij-community,TangHao1987/intellij-community
|
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.fileEditor.impl;
import com.intellij.ProjectTopics;
import com.intellij.ide.IdeBundle;
import com.intellij.ide.plugins.PluginManagerCore;
import com.intellij.ide.ui.UISettings;
import com.intellij.ide.ui.UISettingsListener;
import com.intellij.injected.editor.VirtualFileWindow;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.application.ex.ApplicationManagerEx;
import com.intellij.openapi.application.impl.LaterInvocator;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.components.ProjectComponent;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.ScrollType;
import com.intellij.openapi.editor.ex.EditorEx;
import com.intellij.openapi.editor.impl.EditorComponentImpl;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.fileEditor.*;
import com.intellij.openapi.fileEditor.ex.FileEditorManagerEx;
import com.intellij.openapi.fileEditor.ex.FileEditorProviderManager;
import com.intellij.openapi.fileEditor.ex.IdeDocumentHistory;
import com.intellij.openapi.fileEditor.impl.text.TextEditorImpl;
import com.intellij.openapi.fileEditor.impl.text.TextEditorProvider;
import com.intellij.openapi.fileTypes.FileTypeEvent;
import com.intellij.openapi.fileTypes.FileTypeListener;
import com.intellij.openapi.fileTypes.FileTypeManager;
import com.intellij.openapi.project.DumbAwareRunnable;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.PossiblyDumbAware;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.impl.ProjectImpl;
import com.intellij.openapi.roots.ModuleRootAdapter;
import com.intellij.openapi.roots.ModuleRootEvent;
import com.intellij.openapi.startup.StartupManager;
import com.intellij.openapi.util.*;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.vcs.FileStatus;
import com.intellij.openapi.vcs.FileStatusListener;
import com.intellij.openapi.vcs.FileStatusManager;
import com.intellij.openapi.vfs.*;
import com.intellij.openapi.wm.IdeFocusManager;
import com.intellij.openapi.wm.ToolWindowManager;
import com.intellij.openapi.wm.WindowManager;
import com.intellij.openapi.wm.ex.StatusBarEx;
import com.intellij.openapi.wm.impl.IdeFrameImpl;
import com.intellij.ui.FocusTrackback;
import com.intellij.ui.docking.DockContainer;
import com.intellij.ui.docking.DockManager;
import com.intellij.ui.docking.impl.DockManagerImpl;
import com.intellij.ui.tabs.impl.JBTabsImpl;
import com.intellij.util.Function;
import com.intellij.util.SmartList;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.messages.MessageBusConnection;
import com.intellij.util.messages.impl.MessageListenerList;
import com.intellij.util.ui.JBInsets;
import com.intellij.util.ui.UIUtil;
import com.intellij.util.ui.update.MergingUpdateQueue;
import com.intellij.util.ui.update.Update;
import org.jdom.Element;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.border.Border;
import java.awt.*;
import java.awt.event.KeyEvent;
import java.awt.event.MouseEvent;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.lang.ref.WeakReference;
import java.util.*;
import java.util.List;
/**
* @author Anton Katilin
* @author Eugene Belyaev
* @author Vladimir Kondratyev
*/
public class FileEditorManagerImpl extends FileEditorManagerEx implements ProjectComponent, JDOMExternalizable {
private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.fileEditor.impl.FileEditorManagerImpl");
private static final Key<LocalFileSystem.WatchRequest> WATCH_REQUEST_KEY = Key.create("WATCH_REQUEST_KEY");
private static final Key<Boolean> DUMB_AWARE = Key.create("DUMB_AWARE");
private static final FileEditor[] EMPTY_EDITOR_ARRAY = {};
private static final FileEditorProvider[] EMPTY_PROVIDER_ARRAY = {};
public static final Key<Boolean> CLOSING_TO_REOPEN = Key.create("CLOSING_TO_REOPEN");
public static final String FILE_EDITOR_MANAGER = "FileEditorManager";
private volatile JPanel myPanels;
private EditorsSplitters mySplitters;
private final Project myProject;
private final List<Pair<VirtualFile, EditorWindow>> mySelectionHistory = new ArrayList<Pair<VirtualFile, EditorWindow>>();
private WeakReference<EditorComposite> myLastSelectedComposite = new WeakReference<EditorComposite>(null);
private final MergingUpdateQueue myQueue = new MergingUpdateQueue("FileEditorManagerUpdateQueue", 50, true, null);
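  // Note: myQueue is a MergingUpdateQueue with (as far as the constructor arguments suggest) a 50 ms merge
  // window, so bursts of equivalent updates (see updateFileName() below) collapse into a single execution
  // instead of repainting the tab title for every intermediate event.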
private final BusyObject.Impl.Simple myBusyObject = new BusyObject.Impl.Simple();
/**
   * Removes invalid editors and updates the "modified" status.
*/
private final MyEditorPropertyChangeListener myEditorPropertyChangeListener = new MyEditorPropertyChangeListener();
private final DockManager myDockManager;
private DockableEditorContainerFactory myContentFactory;
public FileEditorManagerImpl(final Project project, DockManager dockManager) {
/* ApplicationManager.getApplication().assertIsDispatchThread(); */
myProject = project;
myDockManager = dockManager;
myListenerList =
new MessageListenerList<FileEditorManagerListener>(myProject.getMessageBus(), FileEditorManagerListener.FILE_EDITOR_MANAGER);
if (Extensions.getExtensions(FileEditorAssociateFinder.EP_NAME).length > 0) {
myListenerList.add(new FileEditorManagerAdapter() {
@Override
public void selectionChanged(@NotNull FileEditorManagerEvent event) {
EditorsSplitters splitters = getSplitters();
openAssociatedFile(event.getNewFile(), splitters.getCurrentWindow(), splitters);
}
});
}
myQueue.setTrackUiActivity(true);
}
void initDockableContentFactory() {
if (myContentFactory != null) return;
myContentFactory = new DockableEditorContainerFactory(myProject, this, myDockManager);
myDockManager.register(DockableEditorContainerFactory.TYPE, myContentFactory);
Disposer.register(myProject, myContentFactory);
}
public static boolean isDumbAware(FileEditor editor) {
return Boolean.TRUE.equals(editor.getUserData(DUMB_AWARE)) &&
(!(editor instanceof PossiblyDumbAware) || ((PossiblyDumbAware)editor).isDumbAware());
}
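  // The DUMB_AWARE flag read above is assigned when an editor is created (see openFileImpl4), roughly as
  //   editor.putUserData(DUMB_AWARE, DumbService.isDumbAware(provider));
  // so an editor counts as dumb-aware only if its provider is dumb-aware and, when the editor implements
  // PossiblyDumbAware, the editor itself also reports dumb-awareness.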
//-------------------------------------------------------------------------------
@Override
public JComponent getComponent() {
initUI();
return myPanels;
}
@NotNull
public EditorsSplitters getMainSplitters() {
initUI();
return mySplitters;
}
public Set<EditorsSplitters> getAllSplitters() {
HashSet<EditorsSplitters> all = new HashSet<EditorsSplitters>();
all.add(getMainSplitters());
Set<DockContainer> dockContainers = myDockManager.getContainers();
for (DockContainer each : dockContainers) {
if (each instanceof DockableEditorTabbedContainer) {
all.add(((DockableEditorTabbedContainer)each).getSplitters());
}
}
return Collections.unmodifiableSet(all);
}
private AsyncResult<EditorsSplitters> getActiveSplitters(boolean syncUsage) {
final boolean async = Registry.is("ide.windowSystem.asyncSplitters") && !syncUsage;
final AsyncResult<EditorsSplitters> result = new AsyncResult<EditorsSplitters>();
final IdeFocusManager fm = IdeFocusManager.getInstance(myProject);
Runnable run = new Runnable() {
@Override
public void run() {
if (myProject.isDisposed()) {
result.setRejected();
return;
}
Component focusOwner = fm.getFocusOwner();
if (focusOwner == null && !async) {
focusOwner = KeyboardFocusManager.getCurrentKeyboardFocusManager().getFocusOwner();
}
if (focusOwner == null && !async) {
focusOwner = fm.getLastFocusedFor(fm.getLastFocusedFrame());
}
DockContainer container = myDockManager.getContainerFor(focusOwner);
if (container == null && !async) {
focusOwner = KeyboardFocusManager.getCurrentKeyboardFocusManager().getActiveWindow();
container = myDockManager.getContainerFor(focusOwner);
}
if (container instanceof DockableEditorTabbedContainer) {
result.setDone(((DockableEditorTabbedContainer)container).getSplitters());
}
else {
result.setDone(getMainSplitters());
}
}
};
if (async) {
fm.doWhenFocusSettlesDown(run);
}
else {
run.run();
}
return result;
}
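  // getActiveSplitters(): with the "ide.windowSystem.asyncSplitters" registry flag on and syncUsage == false,
  // the lookup is deferred until focus settles; otherwise it runs immediately on the calling thread.
  // Either way the result falls back to the main splitters when the focused component is not inside
  // a docked editor container.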
private final Object myInitLock = new Object();
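  // initUI() below uses double-checked locking on myInitLock: the unsynchronized null check keeps the
  // common path lock-free, and the second check inside the synchronized block ensures myPanels and
  // mySplitters are created exactly once.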
private void initUI() {
if (myPanels == null) {
synchronized (myInitLock) {
if (myPanels == null) {
myPanels = new JPanel(new BorderLayout());
myPanels.setOpaque(false);
myPanels.setBorder(new MyBorder());
mySplitters = new EditorsSplitters(this, myDockManager, true);
myPanels.add(mySplitters, BorderLayout.CENTER);
}
}
}
}
private static class MyBorder implements Border {
@Override
public void paintBorder(Component c, Graphics g, int x, int y, int width, int height) {
if (UIUtil.isUnderAquaLookAndFeel()) {
g.setColor(JBTabsImpl.MAC_AQUA_BG_COLOR);
final Insets insets = getBorderInsets(c);
if (insets.top > 0) {
g.fillRect(x, y, width, height + insets.top);
}
}
}
@Override
public Insets getBorderInsets(Component c) {
return JBInsets.NONE;
}
@Override
public boolean isBorderOpaque() {
return false;
}
}
@Override
public JComponent getPreferredFocusedComponent() {
assertReadAccess();
final EditorWindow window = getSplitters().getCurrentWindow();
if (window != null) {
final EditorWithProviderComposite editor = window.getSelectedEditor();
if (editor != null) {
return editor.getPreferredFocusedComponent();
}
}
return null;
}
//-------------------------------------------------------
/**
* @return color of the <code>file</code> which corresponds to the
* file's status
*/
public Color getFileColor(@NotNull final VirtualFile file) {
final FileStatusManager fileStatusManager = FileStatusManager.getInstance(myProject);
Color statusColor = fileStatusManager != null ? fileStatusManager.getStatus(file).getColor() : UIUtil.getLabelForeground();
if (statusColor == null) statusColor = UIUtil.getLabelForeground();
return statusColor;
}
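  // getFileColor() falls back to the standard label foreground both when no FileStatusManager is available
  // and when the file's VCS status has no color, so a tab always gets a usable text color.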
public boolean isProblem(@NotNull final VirtualFile file) {
return false;
}
public String getFileTooltipText(VirtualFile file) {
return FileUtil.getLocationRelativeToUserHome(file.getPresentableUrl());
}
@Override
public void updateFilePresentation(@NotNull VirtualFile file) {
if (!isFileOpen(file)) return;
updateFileColor(file);
updateFileIcon(file);
updateFileName(file);
updateFileBackgroundColor(file);
}
/**
* Updates tab color for the specified <code>file</code>. The <code>file</code>
   * should be opened in the editor, otherwise the method throws an assertion.
*/
private void updateFileColor(final VirtualFile file) {
Set<EditorsSplitters> all = getAllSplitters();
for (EditorsSplitters each : all) {
each.updateFileColor(file);
}
}
private void updateFileBackgroundColor(final VirtualFile file) {
Set<EditorsSplitters> all = getAllSplitters();
for (EditorsSplitters each : all) {
each.updateFileBackgroundColor(file);
}
}
/**
* Updates tab icon for the specified <code>file</code>. The <code>file</code>
   * should be opened in the editor, otherwise the method throws an assertion.
*/
protected void updateFileIcon(final VirtualFile file) {
Set<EditorsSplitters> all = getAllSplitters();
for (EditorsSplitters each : all) {
each.updateFileIcon(file);
}
}
/**
* Updates tab title and tab tool tip for the specified <code>file</code>
*/
void updateFileName(@Nullable final VirtualFile file) {
    // The queue prevents title flickering when a tab is being closed and two events arrive:
    // one with component == null and one with component == the next focused tab;
    // only the last event makes sense to handle.
myQueue.queue(new Update("UpdateFileName " + (file == null ? "" : file.getPath())) {
@Override
public boolean isExpired() {
return myProject.isDisposed() || !myProject.isOpen() || (file == null ? super.isExpired() : !file.isValid());
}
@Override
public void run() {
Set<EditorsSplitters> all = getAllSplitters();
for (EditorsSplitters each : all) {
each.updateFileName(file);
}
}
});
}
//-------------------------------------------------------
@Override
public VirtualFile getFile(@NotNull final FileEditor editor) {
final EditorComposite editorComposite = getEditorComposite(editor);
if (editorComposite != null) {
return editorComposite.getFile();
}
return null;
}
@Override
public void unsplitWindow() {
final EditorWindow currentWindow = getActiveSplitters(true).getResult().getCurrentWindow();
if (currentWindow != null) {
currentWindow.unsplit(true);
}
}
@Override
public void unsplitAllWindow() {
final EditorWindow currentWindow = getActiveSplitters(true).getResult().getCurrentWindow();
if (currentWindow != null) {
currentWindow.unsplitAll();
}
}
@Override
public int getWindowSplitCount() {
return getActiveSplitters(true).getResult().getSplitCount();
}
@Override
public boolean hasSplitOrUndockedWindows() {
Set<EditorsSplitters> splitters = getAllSplitters();
if (splitters.size() > 1) return true;
return getWindowSplitCount() > 1;
}
@Override
@NotNull
public EditorWindow[] getWindows() {
ArrayList<EditorWindow> windows = new ArrayList<EditorWindow>();
Set<EditorsSplitters> all = getAllSplitters();
for (EditorsSplitters each : all) {
EditorWindow[] eachList = each.getWindows();
windows.addAll(Arrays.asList(eachList));
}
return windows.toArray(new EditorWindow[windows.size()]);
}
@Override
public EditorWindow getNextWindow(@NotNull final EditorWindow window) {
final EditorWindow[] windows = getSplitters().getOrderedWindows();
for (int i = 0; i != windows.length; ++i) {
if (windows[i].equals(window)) {
return windows[(i + 1) % windows.length];
}
}
LOG.error("Not window found");
return null;
}
@Override
public EditorWindow getPrevWindow(@NotNull final EditorWindow window) {
final EditorWindow[] windows = getSplitters().getOrderedWindows();
for (int i = 0; i != windows.length; ++i) {
if (windows[i].equals(window)) {
return windows[(i + windows.length - 1) % windows.length];
}
}
LOG.error("Not window found");
return null;
}
@Override
public void createSplitter(final int orientation, @Nullable final EditorWindow window) {
    // the window was available from the action event, for example when invoked from the tab menu of an editor that is not the 'current' one
if (window != null) {
window.split(orientation, true, null, false);
}
// otherwise we'll split the current window, if any
else {
final EditorWindow currentWindow = getSplitters().getCurrentWindow();
if (currentWindow != null) {
currentWindow.split(orientation, true, null, false);
}
}
}
@Override
public void changeSplitterOrientation() {
final EditorWindow currentWindow = getSplitters().getCurrentWindow();
if (currentWindow != null) {
currentWindow.changeOrientation();
}
}
@Override
public void flipTabs() {
/*
if (myTabs == null) {
myTabs = new EditorTabs (this, UISettings.getInstance().EDITOR_TAB_PLACEMENT);
remove (mySplitters);
add (myTabs, BorderLayout.CENTER);
initTabs ();
} else {
remove (myTabs);
add (mySplitters, BorderLayout.CENTER);
myTabs.dispose ();
myTabs = null;
}
*/
myPanels.revalidate();
}
@Override
public boolean tabsMode() {
return false;
}
private void setTabsMode(final boolean mode) {
if (tabsMode() != mode) {
flipTabs();
}
//LOG.assertTrue (tabsMode () == mode);
}
@Override
public boolean isInSplitter() {
final EditorWindow currentWindow = getSplitters().getCurrentWindow();
return currentWindow != null && currentWindow.inSplitter();
}
@Override
public boolean hasOpenedFile() {
final EditorWindow currentWindow = getSplitters().getCurrentWindow();
return currentWindow != null && currentWindow.getSelectedEditor() != null;
}
@Override
public VirtualFile getCurrentFile() {
return getActiveSplitters(true).getResult().getCurrentFile();
}
@Override
@NotNull
public AsyncResult<EditorWindow> getActiveWindow() {
return _getActiveWindow(false);
}
@NotNull
private AsyncResult<EditorWindow> _getActiveWindow(boolean now) {
return getActiveSplitters(now).subResult(new Function<EditorsSplitters, EditorWindow>() {
@Override
public EditorWindow fun(EditorsSplitters splitters) {
return splitters.getCurrentWindow();
}
});
}
@Override
public EditorWindow getCurrentWindow() {
return _getActiveWindow(true).getResult();
}
@Override
public void setCurrentWindow(final EditorWindow window) {
getActiveSplitters(true).getResult().setCurrentWindow(window, true);
}
public void closeFile(@NotNull final VirtualFile file, @NotNull final EditorWindow window, final boolean transferFocus) {
assertDispatchThread();
CommandProcessor.getInstance().executeCommand(myProject, new Runnable() {
@Override
public void run() {
if (window.isFileOpen(file)) {
window.closeFile(file, true, transferFocus);
final List<EditorWindow> windows = window.getOwner().findWindows(file);
if (windows.isEmpty()) { // no more windows containing this file left
final LocalFileSystem.WatchRequest request = file.getUserData(WATCH_REQUEST_KEY);
if (request != null) {
LocalFileSystem.getInstance().removeWatchedRoot(request);
}
}
}
}
}, IdeBundle.message("command.close.active.editor"), null);
removeSelectionRecord(file, window);
}
@Override
public void closeFile(@NotNull final VirtualFile file, @NotNull final EditorWindow window) {
closeFile(file, window, true);
}
//============================= EditorManager methods ================================
@Override
public void closeFile(@NotNull final VirtualFile file) {
closeFile(file, true, false);
}
public void closeFile(@NotNull final VirtualFile file, final boolean moveFocus, final boolean closeAllCopies) {
assertDispatchThread();
final LocalFileSystem.WatchRequest request = file.getUserData(WATCH_REQUEST_KEY);
if (request != null) {
LocalFileSystem.getInstance().removeWatchedRoot(request);
}
CommandProcessor.getInstance().executeCommand(myProject, new Runnable() {
@Override
public void run() {
closeFileImpl(file, moveFocus, closeAllCopies);
}
}, "", null);
}
private void closeFileImpl(@NotNull final VirtualFile file, final boolean moveFocus, boolean closeAllCopies) {
assertDispatchThread();
runChange(new FileEditorManagerChange() {
@Override
public void run(EditorsSplitters splitters) {
splitters.closeFile(file, moveFocus);
}
}, closeAllCopies ? null : getActiveSplitters(true).getResult());
}
//-------------------------------------- Open File ----------------------------------------
@Override
@NotNull
public Pair<FileEditor[], FileEditorProvider[]> openFileWithProviders(@NotNull final VirtualFile file,
final boolean focusEditor,
boolean searchForSplitter) {
if (!file.isValid()) {
throw new IllegalArgumentException("file is not valid: " + file);
}
assertDispatchThread();
if (isOpenInNewWindow(EventQueue.getCurrentEvent())) {
return openFileInNewWindow(file);
}
EditorWindow wndToOpenIn = null;
if (searchForSplitter) {
Set<EditorsSplitters> all = getAllSplitters();
EditorsSplitters active = getActiveSplitters(true).getResult();
if (active.getCurrentWindow() != null && active.getCurrentWindow().isFileOpen(file)) {
wndToOpenIn = active.getCurrentWindow();
} else {
for (EditorsSplitters splitters : all) {
final EditorWindow window = splitters.getCurrentWindow();
if (window == null) continue;
if (window.isFileOpen(file)) {
wndToOpenIn = window;
break;
}
}
}
}
else {
wndToOpenIn = getSplitters().getCurrentWindow();
}
EditorsSplitters splitters = getSplitters();
if (wndToOpenIn == null) {
wndToOpenIn = splitters.getOrCreateCurrentWindow(file);
}
openAssociatedFile(file, wndToOpenIn, splitters);
return openFileImpl2(wndToOpenIn, file, focusEditor);
}
public Pair<FileEditor[], FileEditorProvider[]> openFileInNewWindow(VirtualFile file) {
return ((DockManagerImpl)DockManager.getInstance(getProject())).createNewDockContainerFor(file, this);
}
private static boolean isOpenInNewWindow(AWTEvent event) {
// Shift was used while clicking
if (event instanceof MouseEvent && ((MouseEvent)event).isShiftDown()) {
return true;
}
// Shift + Enter
if (event instanceof KeyEvent
&& ((KeyEvent)event).getKeyCode() == KeyEvent.VK_ENTER
&& ((KeyEvent)event).isShiftDown()) {
return true;
}
return false;
}
private void openAssociatedFile(VirtualFile file, EditorWindow wndToOpenIn, EditorsSplitters splitters) {
EditorWindow[] windows = splitters.getWindows();
if (file != null && windows.length == 2) {
for (FileEditorAssociateFinder finder : Extensions.getExtensions(FileEditorAssociateFinder.EP_NAME)) {
VirtualFile associatedFile = finder.getAssociatedFileToOpen(myProject, file);
if (associatedFile != null) {
EditorWindow currentWindow = splitters.getCurrentWindow();
int idx = windows[0] == wndToOpenIn ? 1 : 0;
openFileImpl2(windows[idx], associatedFile, false);
if (currentWindow != null) {
splitters.setCurrentWindow(currentWindow, false);
}
break;
}
}
}
}
@NotNull
@Override
public Pair<FileEditor[], FileEditorProvider[]> openFileWithProviders(@NotNull VirtualFile file,
boolean focusEditor,
@NotNull EditorWindow window) {
if (!file.isValid()) {
throw new IllegalArgumentException("file is not valid: " + file);
}
assertDispatchThread();
return openFileImpl2(window, file, focusEditor);
}
@NotNull
public Pair<FileEditor[], FileEditorProvider[]> openFileImpl2(@NotNull final EditorWindow window,
@NotNull final VirtualFile file,
final boolean focusEditor) {
final Ref<Pair<FileEditor[], FileEditorProvider[]>> result = new Ref<Pair<FileEditor[], FileEditorProvider[]>>();
CommandProcessor.getInstance().executeCommand(myProject, new Runnable() {
@Override
public void run() {
result.set(openFileImpl3(window, file, focusEditor, null, true));
}
}, "", null);
return result.get();
}
/**
   * @param file file to be opened. Unlike the openFile method, the file can be
   *             invalid. For example, all files were invalidated and are being
   *             removed one by one. After one invalid file has been removed, another
   *             invalid file becomes selected. That's why we do not require the
   *             passed file to be valid.
   * @param entry map between FileEditorProvider and FileEditorState. If this parameter
   *              is not null, it is used to restore the editors' states.
*/
@NotNull
Pair<FileEditor[], FileEditorProvider[]> openFileImpl3(@NotNull final EditorWindow window,
@NotNull final VirtualFile file,
final boolean focusEditor,
@Nullable final HistoryEntry entry,
boolean current) {
return openFileImpl4(window, file, focusEditor, entry, current, -1);
}
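  // openFileImpl3() is a thin wrapper over openFileImpl4(); the trailing -1 means "no explicit tab index",
  // since openFileImpl4 only records EditorWindow.INITIAL_INDEX_KEY when index >= 0.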
@NotNull
Pair<FileEditor[], FileEditorProvider[]> openFileImpl4(@NotNull final EditorWindow window,
@NotNull final VirtualFile file,
final boolean focusEditor,
@Nullable final HistoryEntry entry,
boolean current,
int index) {
// Open file
FileEditor[] editors;
FileEditorProvider[] providers;
final EditorWithProviderComposite newSelectedComposite;
boolean newEditorCreated = false;
final boolean open = window.isFileOpen(file);
if (open) {
// File is already opened. In this case we have to just select existing EditorComposite
newSelectedComposite = window.findFileComposite(file);
LOG.assertTrue(newSelectedComposite != null);
editors = newSelectedComposite.getEditors();
providers = newSelectedComposite.getProviders();
}
else {
if (UISettings.getInstance().EDITOR_TAB_PLACEMENT == UISettings.TABS_NONE || UISettings.getInstance().PRESENTATION_MODE) {
for (EditorWithProviderComposite composite : window.getEditors()) {
Disposer.dispose(composite);
}
}
// File is not opened yet. In this case we have to create editors
// and select the created EditorComposite.
final FileEditorProviderManager editorProviderManager = FileEditorProviderManager.getInstance();
providers = editorProviderManager.getProviders(myProject, file);
if (DumbService.getInstance(myProject).isDumb()) {
final List<FileEditorProvider> dumbAware = ContainerUtil.findAll(providers, new Condition<FileEditorProvider>() {
@Override
public boolean value(FileEditorProvider fileEditorProvider) {
return DumbService.isDumbAware(fileEditorProvider);
}
});
providers = dumbAware.toArray(new FileEditorProvider[dumbAware.size()]);
}
if (providers.length == 0) {
return Pair.create(EMPTY_EDITOR_ARRAY, EMPTY_PROVIDER_ARRAY);
}
newEditorCreated = true;
getProject().getMessageBus().syncPublisher(FileEditorManagerListener.Before.FILE_EDITOR_MANAGER).beforeFileOpened(this, file);
editors = new FileEditor[providers.length];
for (int i = 0; i < providers.length; i++) {
try {
final FileEditorProvider provider = providers[i];
LOG.assertTrue(provider != null, "Provider for file "+file+" is null. All providers: "+Arrays.asList(providers));
LOG.assertTrue(provider.accept(myProject, file), "Provider " + provider + " doesn't accept file " + file);
final FileEditor editor = provider.createEditor(myProject, file);
LOG.assertTrue(editor != null);
LOG.assertTrue(editor.isValid());
editors[i] = editor;
// Register PropertyChangeListener into editor
editor.addPropertyChangeListener(myEditorPropertyChangeListener);
editor.putUserData(DUMB_AWARE, DumbService.isDumbAware(provider));
if (current && editor instanceof TextEditorImpl) {
((TextEditorImpl)editor).initFolding();
}
}
catch (Exception e) {
LOG.error(e);
}
catch (AssertionError e) {
LOG.error(e);
}
}
// Now we have to create EditorComposite and insert it into the TabbedEditorComponent.
// After that we have to select opened editor.
newSelectedComposite = new EditorWithProviderComposite(file, editors, providers, this);
if (index >= 0) {
newSelectedComposite.getFile().putUserData(EditorWindow.INITIAL_INDEX_KEY, index);
}
}
window.setEditor(newSelectedComposite, focusEditor);
final EditorHistoryManager editorHistoryManager = EditorHistoryManager.getInstance(myProject);
for (int i = 0; i < editors.length; i++) {
final FileEditor editor = editors[i];
if (editor instanceof TextEditor) {
// hack!!!
// This code prevents "jumping" on next repaint.
((EditorEx)((TextEditor)editor).getEditor()).stopOptimizedScrolling();
}
final FileEditorProvider provider = providers[i];//getProvider(editor);
// Restore editor state
FileEditorState state = null;
if (entry != null) {
state = entry.getState(provider);
}
if (state == null && !open) {
// We have to try to get state from the history only in case
// if editor is not opened. Otherwise history entry might have a state
// out of sync with the current editor state.
state = editorHistoryManager.getState(file, provider);
}
if (state != null) {
if (!isDumbAware(editor)) {
final FileEditorState finalState = state;
DumbService.getInstance(getProject()).runWhenSmart(new Runnable() {
@Override
public void run() {
editor.setState(finalState);
}
});
}
else {
editor.setState(state);
}
}
}
// Restore selected editor
final FileEditorProvider[] _providers = newSelectedComposite.getProviders();
final FileEditorProvider selectedProvider;
if (entry == null) {
selectedProvider = ((FileEditorProviderManagerImpl)FileEditorProviderManager.getInstance())
.getSelectedFileEditorProvider(editorHistoryManager, file, _providers);
}
else {
selectedProvider = entry.mySelectedProvider;
}
if (selectedProvider != null) {
final FileEditor[] _editors = newSelectedComposite.getEditors();
for (int i = _editors.length - 1; i >= 0; i--) {
final FileEditorProvider provider = _providers[i];//getProvider(_editors[i]);
if (provider.equals(selectedProvider)) {
newSelectedComposite.setSelectedEditor(i);
break;
}
}
}
// Notify editors about selection changes
window.getOwner().setCurrentWindow(window, focusEditor);
window.getOwner().afterFileOpen(file);
UIUtil.invokeLaterIfNeeded(new Runnable() {
@Override
public void run() {
newSelectedComposite.getSelectedEditor().selectNotify();
}
});
final IdeFocusManager focusManager = IdeFocusManager.getInstance(myProject);
if (newEditorCreated) {
if (window.isShowing()) {
window.setPaintBlocked(true);
}
notifyPublisher(new Runnable() {
@Override
public void run() {
window.setPaintBlocked(false);
if (isFileOpen(file)) {
getProject().getMessageBus().syncPublisher(FileEditorManagerListener.FILE_EDITOR_MANAGER)
.fileOpened(FileEditorManagerImpl.this, file);
}
}
});
//Add request to watch this editor's virtual file
final VirtualFile parentDir = file.getParent();
if (parentDir != null) {
final LocalFileSystem.WatchRequest request = LocalFileSystem.getInstance().addRootToWatch(parentDir.getPath(), false);
file.putUserData(WATCH_REQUEST_KEY, request);
}
}
    // [jeka] this is a hack to support back/forward navigation
    // previously there was an incorrect call to fireSelectionChanged() here, with a side effect
((IdeDocumentHistoryImpl)IdeDocumentHistory.getInstance(myProject)).onSelectionChanged();
// Transfer focus into editor
if (!ApplicationManagerEx.getApplicationEx().isUnitTestMode()) {
if (focusEditor) {
//myFirstIsActive = myTabbedContainer1.equals(tabbedContainer);
window.setAsCurrentWindow(true);
ToolWindowManager.getInstance(myProject).activateEditorComponent();
focusManager.toFront(window.getOwner());
}
}
// Update frame and tab title
updateFileName(file);
// Make back/forward work
IdeDocumentHistory.getInstance(myProject).includeCurrentCommandAsNavigation();
return Pair.create(editors, providers);
}
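  // Rough flow of openFileImpl4() above: (1) reuse the existing composite when the file is already open in
  // the window, otherwise create editors from all accepting providers (restricted to dumb-aware providers
  // while indexing); (2) restore each editor's state from the supplied HistoryEntry or from
  // EditorHistoryManager; (3) re-select the provider recorded in history; (4) notify listeners and, for
  // newly created editors, add a LocalFileSystem watch on the file's parent directory.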
@NotNull
@Override
public ActionCallback notifyPublisher(@NotNull final Runnable runnable) {
final IdeFocusManager focusManager = IdeFocusManager.getInstance(myProject);
final ActionCallback done = new ActionCallback();
return myBusyObject.execute(new ActiveRunnable() {
@NotNull
@Override
public ActionCallback run() {
focusManager.doWhenFocusSettlesDown(new ExpirableRunnable.ForProject(myProject) {
@Override
public void run() {
runnable.run();
done.setDone();
}
});
return done;
}
});
}
@Override
public void setSelectedEditor(@NotNull VirtualFile file, String fileEditorProviderId) {
EditorWithProviderComposite composite = getCurrentEditorWithProviderComposite(file);
if (composite == null) {
final List<EditorWithProviderComposite> composites = getEditorComposites(file);
if (composites.isEmpty()) return;
composite = composites.get(0);
}
final FileEditorProvider[] editorProviders = composite.getProviders();
final FileEditorProvider selectedProvider = composite.getSelectedEditorWithProvider().getSecond();
for (int i = 0; i < editorProviders.length; i++) {
if (editorProviders[i].getEditorTypeId().equals(fileEditorProviderId) && !selectedProvider.equals(editorProviders[i])) {
composite.setSelectedEditor(i);
composite.getSelectedEditor().selectNotify();
}
}
}
@Nullable
EditorWithProviderComposite newEditorComposite(final VirtualFile file) {
if (file == null) {
return null;
}
final FileEditorProviderManager editorProviderManager = FileEditorProviderManager.getInstance();
final FileEditorProvider[] providers = editorProviderManager.getProviders(myProject, file);
final FileEditor[] editors = new FileEditor[providers.length];
for (int i = 0; i < providers.length; i++) {
final FileEditorProvider provider = providers[i];
LOG.assertTrue(provider != null);
LOG.assertTrue(provider.accept(myProject, file));
final FileEditor editor = provider.createEditor(myProject, file);
editors[i] = editor;
LOG.assertTrue(editor.isValid());
editor.addPropertyChangeListener(myEditorPropertyChangeListener);
}
final EditorWithProviderComposite newComposite = new EditorWithProviderComposite(file, editors, providers, this);
final EditorHistoryManager editorHistoryManager = EditorHistoryManager.getInstance(myProject);
for (int i = 0; i < editors.length; i++) {
final FileEditor editor = editors[i];
if (editor instanceof TextEditor) {
// hack!!!
// This code prevents "jumping" on next repaint.
//((EditorEx)((TextEditor)editor).getEditor()).stopOptimizedScrolling();
}
final FileEditorProvider provider = providers[i];
      // Restore editor state
FileEditorState state = editorHistoryManager.getState(file, provider);
if (state != null) {
editor.setState(state);
}
}
return newComposite;
}
@Override
@NotNull
public List<FileEditor> openEditor(@NotNull final OpenFileDescriptor descriptor, final boolean focusEditor) {
assertDispatchThread();
if (descriptor.getFile() instanceof VirtualFileWindow) {
VirtualFileWindow delegate = (VirtualFileWindow)descriptor.getFile();
int hostOffset = delegate.getDocumentWindow().injectedToHost(descriptor.getOffset());
OpenFileDescriptor realDescriptor = new OpenFileDescriptor(descriptor.getProject(), delegate.getDelegate(), hostOffset);
realDescriptor.setUseCurrentWindow(descriptor.isUseCurrentWindow());
return openEditor(realDescriptor, focusEditor);
}
final List<FileEditor> result = new SmartList<FileEditor>();
CommandProcessor.getInstance().executeCommand(myProject, new Runnable() {
@Override
public void run() {
VirtualFile file = descriptor.getFile();
final FileEditor[] editors = openFile(file, focusEditor, !descriptor.isUseCurrentWindow());
ContainerUtil.addAll(result, editors);
boolean navigated = false;
for (final FileEditor editor : editors) {
if (editor instanceof NavigatableFileEditor &&
getSelectedEditor(descriptor.getFile()) == editor) { // try to navigate opened editor
navigated = navigateAndSelectEditor((NavigatableFileEditor)editor, descriptor);
if (navigated) break;
}
}
if (!navigated) {
for (final FileEditor editor : editors) {
if (editor instanceof NavigatableFileEditor && getSelectedEditor(descriptor.getFile()) != editor) { // try other editors
if (navigateAndSelectEditor((NavigatableFileEditor)editor, descriptor)) {
break;
}
}
}
}
}
}, "", null);
return result;
}
private boolean navigateAndSelectEditor(final NavigatableFileEditor editor, final OpenFileDescriptor descriptor) {
if (editor.canNavigateTo(descriptor)) {
setSelectedEditor(editor);
editor.navigateTo(descriptor);
return true;
}
return false;
}
private void setSelectedEditor(final FileEditor editor) {
final EditorWithProviderComposite composite = getEditorComposite(editor);
if (composite == null) return;
final FileEditor[] editors = composite.getEditors();
for (int i = 0; i < editors.length; i++) {
final FileEditor each = editors[i];
if (editor == each) {
composite.setSelectedEditor(i);
composite.getSelectedEditor().selectNotify();
break;
}
}
}
@Override
@NotNull
public Project getProject() {
return myProject;
}
@Override
@Nullable
public Editor openTextEditor(@NotNull final OpenFileDescriptor descriptor, final boolean focusEditor) {
final Collection<FileEditor> fileEditors = openEditor(descriptor, focusEditor);
for (FileEditor fileEditor : fileEditors) {
if (fileEditor instanceof TextEditor) {
setSelectedEditor(descriptor.getFile(), TextEditorProvider.getInstance().getEditorTypeId());
Editor editor = ((TextEditor)fileEditor).getEditor();
return getOpenedEditor(editor, focusEditor);
}
}
return null;
}
protected Editor getOpenedEditor(final Editor editor, final boolean focusEditor) {
return editor;
}
@Override
public Editor getSelectedTextEditor() {
assertReadAccess();
final EditorWindow currentWindow = getSplitters().getCurrentWindow();
if (currentWindow != null) {
final EditorWithProviderComposite selectedEditor = currentWindow.getSelectedEditor();
if (selectedEditor != null && selectedEditor.getSelectedEditor() instanceof TextEditor) {
return ((TextEditor)selectedEditor.getSelectedEditor()).getEditor();
}
}
return null;
}
@Override
public boolean isFileOpen(@NotNull final VirtualFile file) {
return !getEditorComposites(file).isEmpty();
}
@Override
@NotNull
public VirtualFile[] getOpenFiles() {
HashSet<VirtualFile> openFiles = new HashSet<VirtualFile>();
for (EditorsSplitters each : getAllSplitters()) {
openFiles.addAll(Arrays.asList(each.getOpenFiles()));
}
return VfsUtilCore.toVirtualFileArray(openFiles);
}
@Override
@NotNull
public VirtualFile[] getSelectedFiles() {
HashSet<VirtualFile> selectedFiles = new HashSet<VirtualFile>();
for (EditorsSplitters each : getAllSplitters()) {
selectedFiles.addAll(Arrays.asList(each.getSelectedFiles()));
}
return VfsUtilCore.toVirtualFileArray(selectedFiles);
}
@Override
@NotNull
public FileEditor[] getSelectedEditors() {
HashSet<FileEditor> selectedEditors = new HashSet<FileEditor>();
for (EditorsSplitters each : getAllSplitters()) {
selectedEditors.addAll(Arrays.asList(each.getSelectedEditors()));
}
return selectedEditors.toArray(new FileEditor[selectedEditors.size()]);
}
@Override
@NotNull
public EditorsSplitters getSplitters() {
EditorsSplitters active = getActiveSplitters(true).getResult();
return active == null ? getMainSplitters() : active;
}
@Override
@Nullable
public FileEditor getSelectedEditor(@NotNull final VirtualFile file) {
final Pair<FileEditor, FileEditorProvider> selectedEditorWithProvider = getSelectedEditorWithProvider(file);
return selectedEditorWithProvider == null ? null : selectedEditorWithProvider.getFirst();
}
@Override
@Nullable
public Pair<FileEditor, FileEditorProvider> getSelectedEditorWithProvider(@NotNull VirtualFile file) {
if (file instanceof VirtualFileWindow) file = ((VirtualFileWindow)file).getDelegate();
final EditorWithProviderComposite composite = getCurrentEditorWithProviderComposite(file);
if (composite != null) {
return composite.getSelectedEditorWithProvider();
}
final List<EditorWithProviderComposite> composites = getEditorComposites(file);
return composites.isEmpty() ? null : composites.get(0).getSelectedEditorWithProvider();
}
@Override
@NotNull
public Pair<FileEditor[], FileEditorProvider[]> getEditorsWithProviders(@NotNull final VirtualFile file) {
assertReadAccess();
final EditorWithProviderComposite composite = getCurrentEditorWithProviderComposite(file);
if (composite != null) {
return Pair.create(composite.getEditors(), composite.getProviders());
}
final List<EditorWithProviderComposite> composites = getEditorComposites(file);
if (!composites.isEmpty()) {
return Pair.create(composites.get(0).getEditors(), composites.get(0).getProviders());
}
else {
return Pair.create(EMPTY_EDITOR_ARRAY, EMPTY_PROVIDER_ARRAY);
}
}
@Override
@NotNull
public FileEditor[] getEditors(@NotNull VirtualFile file) {
assertReadAccess();
if (file instanceof VirtualFileWindow) file = ((VirtualFileWindow)file).getDelegate();
final EditorWithProviderComposite composite = getCurrentEditorWithProviderComposite(file);
if (composite != null) {
return composite.getEditors();
}
final List<EditorWithProviderComposite> composites = getEditorComposites(file);
if (!composites.isEmpty()) {
return composites.get(0).getEditors();
}
else {
return EMPTY_EDITOR_ARRAY;
}
}
@NotNull
@Override
public FileEditor[] getAllEditors(@NotNull VirtualFile file) {
List<EditorWithProviderComposite> editorComposites = getEditorComposites(file);
if (editorComposites.isEmpty()) return EMPTY_EDITOR_ARRAY;
List<FileEditor> editors = new ArrayList<FileEditor>();
for (EditorWithProviderComposite composite : editorComposites) {
ContainerUtil.addAll(editors, composite.getEditors());
}
return editors.toArray(new FileEditor[editors.size()]);
}
@Nullable
private EditorWithProviderComposite getCurrentEditorWithProviderComposite(@NotNull final VirtualFile virtualFile) {
final EditorWindow editorWindow = getSplitters().getCurrentWindow();
if (editorWindow != null) {
return editorWindow.findFileComposite(virtualFile);
}
return null;
}
@NotNull
public List<EditorWithProviderComposite> getEditorComposites(final VirtualFile file) {
ArrayList<EditorWithProviderComposite> result = new ArrayList<EditorWithProviderComposite>();
Set<EditorsSplitters> all = getAllSplitters();
for (EditorsSplitters each : all) {
result.addAll(each.findEditorComposites(file));
}
return result;
}
@Override
@NotNull
public FileEditor[] getAllEditors() {
assertReadAccess();
final ArrayList<FileEditor> result = new ArrayList<FileEditor>();
final Set<EditorsSplitters> allSplitters = getAllSplitters();
for (EditorsSplitters splitter : allSplitters) {
final EditorWithProviderComposite[] editorsComposites = splitter.getEditorsComposites();
for (EditorWithProviderComposite editorsComposite : editorsComposites) {
final FileEditor[] editors = editorsComposite.getEditors();
ContainerUtil.addAll(result, editors);
}
}
return result.toArray(new FileEditor[result.size()]);
}
@Override
public void showEditorAnnotation(@NotNull FileEditor editor, @NotNull JComponent annotationComponent) {
addTopComponent(editor, annotationComponent);
}
@Override
public void removeEditorAnnotation(@NotNull FileEditor editor, @NotNull JComponent annotationComponent) {
removeTopComponent(editor, annotationComponent);
}
@Override
public void addTopComponent(@NotNull final FileEditor editor, @NotNull final JComponent component) {
final EditorComposite composite = getEditorComposite(editor);
if (composite != null) {
composite.addTopComponent(editor, component);
}
}
@Override
public void removeTopComponent(@NotNull final FileEditor editor, @NotNull final JComponent component) {
final EditorComposite composite = getEditorComposite(editor);
if (composite != null) {
composite.removeTopComponent(editor, component);
}
}
@Override
public void addBottomComponent(@NotNull final FileEditor editor, @NotNull final JComponent component) {
final EditorComposite composite = getEditorComposite(editor);
if (composite != null) {
composite.addBottomComponent(editor, component);
}
}
@Override
public void removeBottomComponent(@NotNull final FileEditor editor, @NotNull final JComponent component) {
final EditorComposite composite = getEditorComposite(editor);
if (composite != null) {
composite.removeBottomComponent(editor, component);
}
}
private final MessageListenerList<FileEditorManagerListener> myListenerList;
@Override
public void addFileEditorManagerListener(@NotNull final FileEditorManagerListener listener) {
myListenerList.add(listener);
}
@Override
public void addFileEditorManagerListener(@NotNull final FileEditorManagerListener listener, @NotNull final Disposable parentDisposable) {
myListenerList.add(listener, parentDisposable);
}
@Override
public void removeFileEditorManagerListener(@NotNull final FileEditorManagerListener listener) {
myListenerList.remove(listener);
}
// ProjectComponent methods
@Override
public void projectOpened() {
//myFocusWatcher.install(myWindows.getComponent ());
getMainSplitters().startListeningFocus();
MessageBusConnection connection = myProject.getMessageBus().connect(myProject);
final FileStatusManager fileStatusManager = FileStatusManager.getInstance(myProject);
if (fileStatusManager != null) {
/**
* Updates tabs colors
*/
final MyFileStatusListener myFileStatusListener = new MyFileStatusListener();
fileStatusManager.addFileStatusListener(myFileStatusListener, myProject);
}
connection.subscribe(FileTypeManager.TOPIC, new MyFileTypeListener());
connection.subscribe(ProjectTopics.PROJECT_ROOTS, new MyRootsListener());
/**
* Updates tabs names
*/
final MyVirtualFileListener myVirtualFileListener = new MyVirtualFileListener();
VirtualFileManager.getInstance().addVirtualFileListener(myVirtualFileListener, myProject);
/**
     * Extends/cuts the number of opened tabs. Also updates the tab placement.
*/
final MyUISettingsListener myUISettingsListener = new MyUISettingsListener();
UISettings.getInstance().addUISettingsListener(myUISettingsListener, myProject);
StartupManager.getInstance(myProject).registerPostStartupActivity(new DumbAwareRunnable() {
@Override
public void run() {
if (myProject.isDisposed()) return;
setTabsMode(UISettings.getInstance().EDITOR_TAB_PLACEMENT != UISettings.TABS_NONE);
ToolWindowManager.getInstance(myProject).invokeLater(new Runnable() {
@Override
public void run() {
CommandProcessor.getInstance().executeCommand(myProject, new Runnable() {
@Override
public void run() {
LaterInvocator.invokeLater(new Runnable() {
@Override
public void run() {
long currentTime = System.nanoTime();
Long startTime = myProject.getUserData(ProjectImpl.CREATION_TIME);
if (startTime != null) {
LOG.info("Project opening took " + (currentTime - startTime.longValue()) / 1000000 + " ms");
PluginManagerCore.dumpPluginClassStatistics();
}
}
}, myProject.getDisposed());
// group 1
}
}, "", null);
}
});
}
});
}
@Override
public void projectClosed() {
//myFocusWatcher.deinstall(myWindows.getComponent ());
getMainSplitters().dispose();
    // Dispose created editors. We do not use the closeEditor method because
    // it fires events and changes history.
closeAllFiles();
}
  // BaseComponent methods
@Override
@NotNull
public String getComponentName() {
return FILE_EDITOR_MANAGER;
}
@Override
public void initComponent() {
}
@Override
public void disposeComponent() { /* really do nothing */ }
//JDOMExternalizable methods
@Override
public void writeExternal(final Element element) {
getMainSplitters().writeExternal(element);
}
@Override
public void readExternal(final Element element) {
getMainSplitters().readExternal(element);
}
@Nullable
private EditorWithProviderComposite getEditorComposite(@NotNull final FileEditor editor) {
for (EditorsSplitters splitters : getAllSplitters()) {
final EditorWithProviderComposite[] editorsComposites = splitters.getEditorsComposites();
for (int i = editorsComposites.length - 1; i >= 0; i--) {
final EditorWithProviderComposite composite = editorsComposites[i];
final FileEditor[] editors = composite.getEditors();
for (int j = editors.length - 1; j >= 0; j--) {
final FileEditor _editor = editors[j];
LOG.assertTrue(_editor != null);
if (editor.equals(_editor)) {
return composite;
}
}
}
}
return null;
}
//======================= Misc =====================
private static void assertDispatchThread() {
ApplicationManager.getApplication().assertIsDispatchThread();
}
private static void assertReadAccess() {
ApplicationManager.getApplication().assertReadAccessAllowed();
}
public void fireSelectionChanged(final EditorComposite newSelectedComposite) {
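    // Compares the previously selected (file, editor, provider) triple with the new one and notifies listeners only when the file or editor actually changed.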
final Trinity<VirtualFile, FileEditor, FileEditorProvider> oldData = extract(myLastSelectedComposite.get());
final Trinity<VirtualFile, FileEditor, FileEditorProvider> newData = extract(newSelectedComposite);
myLastSelectedComposite = new WeakReference<EditorComposite>(newSelectedComposite);
final boolean filesEqual = oldData.first == null ? newData.first == null : oldData.first.equals(newData.first);
final boolean editorsEqual = oldData.second == null ? newData.second == null : oldData.second.equals(newData.second);
if (!filesEqual || !editorsEqual) {
if (oldData.first != null && newData.first != null) {
for (FileEditorAssociateFinder finder : Extensions.getExtensions(FileEditorAssociateFinder.EP_NAME)) {
VirtualFile associatedFile = finder.getAssociatedFileToOpen(myProject, oldData.first);
if (Comparing.equal(associatedFile, newData.first)) {
return;
}
}
}
final FileEditorManagerEvent event =
new FileEditorManagerEvent(this, oldData.first, oldData.second, oldData.third, newData.first, newData.second, newData.third);
final FileEditorManagerListener publisher = getProject().getMessageBus().syncPublisher(FileEditorManagerListener.FILE_EDITOR_MANAGER);
if (newData.first != null) {
final JComponent component = newData.second.getComponent();
final EditorWindowHolder holder = UIUtil.getParentOfType(EditorWindowHolder.class, component);
if (holder != null) {
addSelectionRecord(newData.first, holder.getEditorWindow());
}
}
notifyPublisher(new Runnable() {
@Override
public void run() {
publisher.selectionChanged(event);
}
});
}
}
@NotNull
private static Trinity<VirtualFile, FileEditor, FileEditorProvider> extract(@Nullable EditorComposite composite) {
final VirtualFile file;
final FileEditor editor;
final FileEditorProvider provider;
if (composite == null || composite.isDisposed()) {
file = null;
editor = null;
provider = null;
}
else {
file = composite.getFile();
final Pair<FileEditor, FileEditorProvider> pair = composite.getSelectedEditorWithProvider();
editor = pair.first;
provider = pair.second;
}
return new Trinity<VirtualFile, FileEditor, FileEditorProvider>(file, editor, provider);
}
@Override
public boolean isChanged(@NotNull final EditorComposite editor) {
final FileStatusManager fileStatusManager = FileStatusManager.getInstance(myProject);
if (fileStatusManager != null) {
VirtualFile file = editor.getFile();
FileStatus status = fileStatusManager.getStatus(file);
if (status == FileStatus.UNKNOWN && !file.isWritable()) {
return false;
}
if (!status.equals(FileStatus.NOT_CHANGED)) {
return true;
}
}
return false;
}
public void disposeComposite(@NotNull EditorWithProviderComposite editor) {
if (getAllEditors().length == 0) {
setCurrentWindow(null);
}
if (editor.equals(getLastSelected())) {
editor.getSelectedEditor().deselectNotify();
getSplitters().setCurrentWindow(null, false);
}
final FileEditor[] editors = editor.getEditors();
final FileEditorProvider[] providers = editor.getProviders();
final FileEditor selectedEditor = editor.getSelectedEditor();
for (int i = editors.length - 1; i >= 0; i--) {
final FileEditor editor1 = editors[i];
final FileEditorProvider provider = providers[i];
      if (!editor.equals(selectedEditor)) { // the selected editor was already notified when the selection-changed event was fired
if (selectedEditor.equals(editor1)) {
editor1.deselectNotify();
}
}
editor1.removePropertyChangeListener(myEditorPropertyChangeListener);
provider.disposeEditor(editor1);
}
Disposer.dispose(editor);
}
@Nullable
EditorComposite getLastSelected() {
final EditorWindow currentWindow = getActiveSplitters(true).getResult().getCurrentWindow();
if (currentWindow != null) {
return currentWindow.getSelectedEditor();
}
return null;
}
public void runChange(FileEditorManagerChange change, EditorsSplitters splitters) {
Set<EditorsSplitters> target = new HashSet<EditorsSplitters>();
if (splitters == null) {
target.addAll(getAllSplitters());
} else {
target.add(splitters);
}
for (EditorsSplitters each : target) {
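      // Mark these splitters as being inside a programmatic change while it runs (see isInsideChange()).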
each.myInsideChange++;
try {
change.run(each);
}
finally {
each.myInsideChange--;
}
}
}
//================== Listeners =====================
/**
   * Closes deleted files and files located in deleted directories.
*/
private final class MyVirtualFileListener extends VirtualFileAdapter {
@Override
public void beforeFileDeletion(VirtualFileEvent e) {
assertDispatchThread();
boolean moveFocus = moveFocusOnDelete();
final VirtualFile file = e.getFile();
final VirtualFile[] openFiles = getOpenFiles();
for (int i = openFiles.length - 1; i >= 0; i--) {
if (VfsUtilCore.isAncestor(file, openFiles[i], false)) {
closeFile(openFiles[i], moveFocus, true);
}
}
}
@Override
public void propertyChanged(VirtualFilePropertyEvent e) {
if (VirtualFile.PROP_NAME.equals(e.getPropertyName())) {
assertDispatchThread();
final VirtualFile file = e.getFile();
if (isFileOpen(file)) {
updateFileName(file);
updateFileIcon(file); // file type can change after renaming
updateFileBackgroundColor(file);
}
}
else if (VirtualFile.PROP_WRITABLE.equals(e.getPropertyName()) || VirtualFile.PROP_ENCODING.equals(e.getPropertyName())) {
// TODO: message bus?
updateIconAndStatusBar(e);
}
}
private void updateIconAndStatusBar(final VirtualFilePropertyEvent e) {
assertDispatchThread();
final VirtualFile file = e.getFile();
if (isFileOpen(file)) {
updateFileIcon(file);
if (file.equals(getSelectedFiles()[0])) { // update "write" status
final StatusBarEx statusBar = (StatusBarEx)WindowManager.getInstance().getStatusBar(myProject);
assert statusBar != null;
statusBar.updateWidgets();
}
}
}
@Override
public void fileMoved(VirtualFileMoveEvent e) {
final VirtualFile file = e.getFile();
final VirtualFile[] openFiles = getOpenFiles();
for (final VirtualFile openFile : openFiles) {
if (VfsUtilCore.isAncestor(file, openFile, false)) {
updateFileName(openFile);
updateFileBackgroundColor(openFile);
}
}
}
}
private static boolean moveFocusOnDelete() {
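    // Move focus after a deletion only when the current focus traces back to an editor component
    // or the focused window is the IDE frame (or nothing is focused at all).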
final Window window = KeyboardFocusManager.getCurrentKeyboardFocusManager().getFocusedWindow();
if (window != null) {
final Component component = FocusTrackback.getFocusFor(window);
if (component != null) {
return component instanceof EditorComponentImpl;
}
return window instanceof IdeFrameImpl;
}
return true;
}
@Override
public boolean isInsideChange() {
return getSplitters().isInsideChange();
}
private final class MyEditorPropertyChangeListener implements PropertyChangeListener {
@Override
public void propertyChange(final PropertyChangeEvent e) {
assertDispatchThread();
final String propertyName = e.getPropertyName();
if (FileEditor.PROP_MODIFIED.equals(propertyName)) {
final FileEditor editor = (FileEditor)e.getSource();
final EditorComposite composite = getEditorComposite(editor);
if (composite != null) {
updateFileIcon(composite.getFile());
}
}
else if (FileEditor.PROP_VALID.equals(propertyName)) {
final boolean valid = ((Boolean)e.getNewValue()).booleanValue();
if (!valid) {
final FileEditor editor = (FileEditor)e.getSource();
LOG.assertTrue(editor != null);
final EditorComposite composite = getEditorComposite(editor);
if (composite != null) {
closeFile(composite.getFile());
}
}
}
}
}
/**
   * Gets events from VCS and updates colors of editor tabs
*/
private final class MyFileStatusListener implements FileStatusListener {
@Override
public void fileStatusesChanged() { // update color of all open files
assertDispatchThread();
LOG.debug("FileEditorManagerImpl.MyFileStatusListener.fileStatusesChanged()");
final VirtualFile[] openFiles = getOpenFiles();
for (int i = openFiles.length - 1; i >= 0; i--) {
final VirtualFile file = openFiles[i];
LOG.assertTrue(file != null);
ApplicationManager.getApplication().invokeLater(new Runnable() {
@Override
public void run() {
if (LOG.isDebugEnabled()) {
LOG.debug("updating file status in tab for " + file.getPath());
}
updateFileStatus(file);
}
}, ModalityState.NON_MODAL, myProject.getDisposed());
}
}
@Override
public void fileStatusChanged(@NotNull final VirtualFile file) { // update color of the file (if necessary)
assertDispatchThread();
if (isFileOpen(file)) {
updateFileStatus(file);
}
}
private void updateFileStatus(final VirtualFile file) {
updateFileColor(file);
updateFileIcon(file);
}
}
/**
* Gets events from FileTypeManager and updates icons on tabs
*/
private final class MyFileTypeListener extends FileTypeListener.Adapter {
@Override
public void fileTypesChanged(final FileTypeEvent event) {
assertDispatchThread();
final VirtualFile[] openFiles = getOpenFiles();
for (int i = openFiles.length - 1; i >= 0; i--) {
final VirtualFile file = openFiles[i];
LOG.assertTrue(file != null);
updateFileIcon(file);
}
}
}
private class MyRootsListener extends ModuleRootAdapter {
@Override
public void rootsChanged(ModuleRootEvent event) {
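      // Ask EditorFileSwapper extensions whether any open file should be replaced by another file;
      // if so, open the replacement at the same tab index, restore the caret offset and close the original.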
EditorFileSwapper[] swappers = Extensions.getExtensions(EditorFileSwapper.EP_NAME);
for (EditorWindow eachWindow : getWindows()) {
EditorWithProviderComposite selected = eachWindow.getSelectedEditor();
EditorWithProviderComposite[] editors = eachWindow.getEditors();
for (int i = 0; i < editors.length; i++) {
EditorWithProviderComposite editor = editors[i];
VirtualFile file = editor.getFile();
if (!file.isValid()) continue;
Pair<VirtualFile, Integer> newFilePair = null;
for (EditorFileSwapper each : swappers) {
newFilePair = each.getFileToSwapTo(myProject, editor);
if (newFilePair != null) break;
}
if (newFilePair == null) continue;
VirtualFile newFile = newFilePair.first;
if (newFile == null) continue;
// already open
if (eachWindow.findFileIndex(newFile) != -1) continue;
try {
newFile.putUserData(EditorWindow.INITIAL_INDEX_KEY, i);
Pair<FileEditor[], FileEditorProvider[]> pair = openFileImpl2(eachWindow, newFile, editor == selected);
if (newFilePair.second != null) {
TextEditorImpl openedEditor = EditorFileSwapper.findSinglePsiAwareEditor(pair.first);
if (openedEditor != null) {
openedEditor.getEditor().getCaretModel().moveToOffset(newFilePair.second);
openedEditor.getEditor().getScrollingModel().scrollToCaret(ScrollType.CENTER);
}
}
}
finally {
newFile.putUserData(EditorWindow.INITIAL_INDEX_KEY, null);
}
closeFile(file, eachWindow);
}
}
}
}
/**
   * Gets notifications from the UISettings component to track changes of RECENT_FILES_LIMIT,
   * EDITOR_TAB_LIMIT, etc. values.
*/
private final class MyUISettingsListener implements UISettingsListener {
@Override
public void uiSettingsChanged(final UISettings source) {
assertDispatchThread();
setTabsMode(source.EDITOR_TAB_PLACEMENT != UISettings.TABS_NONE && !UISettings.getInstance().PRESENTATION_MODE);
for (EditorsSplitters each : getAllSplitters()) {
each.setTabsPlacement(source.EDITOR_TAB_PLACEMENT);
each.trimToSize(source.EDITOR_TAB_LIMIT);
// Tab layout policy
if (source.SCROLL_TAB_LAYOUT_IN_EDITOR) {
each.setTabLayoutPolicy(JTabbedPane.SCROLL_TAB_LAYOUT);
}
else {
each.setTabLayoutPolicy(JTabbedPane.WRAP_TAB_LAYOUT);
}
}
// "Mark modified files with asterisk"
final VirtualFile[] openFiles = getOpenFiles();
for (int i = openFiles.length - 1; i >= 0; i--) {
final VirtualFile file = openFiles[i];
updateFileIcon(file);
updateFileName(file);
updateFileBackgroundColor(file);
}
}
}
@Override
public void closeAllFiles() {
final VirtualFile[] openFiles = getSplitters().getOpenFiles();
for (VirtualFile openFile : openFiles) {
closeFile(openFile);
}
}
@Override
@NotNull
public VirtualFile[] getSiblings(@NotNull VirtualFile file) {
return getOpenFiles();
}
protected void queueUpdateFile(final VirtualFile file) {
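    // Requests for the same file are merged by the update queue, so repeated calls collapse into one update.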
myQueue.queue(new Update(file) {
@Override
public void run() {
if (isFileOpen(file)) {
updateFileIcon(file);
updateFileColor(file);
updateFileBackgroundColor(file);
}
}
});
}
@Override
public EditorsSplitters getSplittersFor(Component c) {
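    // Prefer the splitters of the dock container that owns the component; fall back to the main splitters.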
EditorsSplitters splitters = null;
DockContainer dockContainer = myDockManager.getContainerFor(c);
if (dockContainer instanceof DockableEditorTabbedContainer) {
splitters = ((DockableEditorTabbedContainer)dockContainer).getSplitters();
}
if (splitters == null) {
splitters = getMainSplitters();
}
return splitters;
}
public List<Pair<VirtualFile, EditorWindow>> getSelectionHistory() {
List<Pair<VirtualFile, EditorWindow>> copy = new ArrayList<Pair<VirtualFile, EditorWindow>>();
for (Pair<VirtualFile, EditorWindow> pair : mySelectionHistory) {
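      // If the recorded window no longer contains any files (e.g. it was closed),
      // substitute the owner's first window, provided it still shows some files.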
if (pair.second.getFiles().length == 0) {
final EditorWindow[] windows = pair.second.getOwner().getWindows();
if (windows.length > 0 && windows[0] != null && windows[0].getFiles().length > 0) {
final Pair<VirtualFile, EditorWindow> p = Pair.create(pair.first, windows[0]);
if (!copy.contains(p)) {
copy.add(p);
}
}
} else {
if (!copy.contains(pair)) {
copy.add(pair);
}
}
}
mySelectionHistory.clear();
mySelectionHistory.addAll(copy);
return mySelectionHistory;
}
public void addSelectionRecord(VirtualFile file, EditorWindow window) {
final Pair<VirtualFile, EditorWindow> record = Pair.create(file, window);
mySelectionHistory.remove(record);
mySelectionHistory.add(0, record);
}
public void removeSelectionRecord(VirtualFile file, EditorWindow window) {
mySelectionHistory.remove(Pair.create(file, window));
}
@Override
public ActionCallback getReady(@NotNull Object requestor) {
return myBusyObject.getReady(requestor);
}
}
|
platform/platform-impl/src/com/intellij/openapi/fileEditor/impl/FileEditorManagerImpl.java
|
/*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.fileEditor.impl;
import com.intellij.ProjectTopics;
import com.intellij.ide.IdeBundle;
import com.intellij.ide.plugins.PluginManager;
import com.intellij.ide.ui.UISettings;
import com.intellij.ide.ui.UISettingsListener;
import com.intellij.injected.editor.VirtualFileWindow;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.application.ex.ApplicationManagerEx;
import com.intellij.openapi.application.impl.LaterInvocator;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.components.ProjectComponent;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.ScrollType;
import com.intellij.openapi.editor.ex.EditorEx;
import com.intellij.openapi.editor.impl.EditorComponentImpl;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.fileEditor.*;
import com.intellij.openapi.fileEditor.ex.FileEditorManagerEx;
import com.intellij.openapi.fileEditor.ex.FileEditorProviderManager;
import com.intellij.openapi.fileEditor.ex.IdeDocumentHistory;
import com.intellij.openapi.fileEditor.impl.text.TextEditorImpl;
import com.intellij.openapi.fileEditor.impl.text.TextEditorProvider;
import com.intellij.openapi.fileTypes.FileTypeEvent;
import com.intellij.openapi.fileTypes.FileTypeListener;
import com.intellij.openapi.fileTypes.FileTypeManager;
import com.intellij.openapi.project.DumbAwareRunnable;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.PossiblyDumbAware;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.impl.ProjectImpl;
import com.intellij.openapi.roots.ModuleRootAdapter;
import com.intellij.openapi.roots.ModuleRootEvent;
import com.intellij.openapi.startup.StartupManager;
import com.intellij.openapi.util.*;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.vcs.FileStatus;
import com.intellij.openapi.vcs.FileStatusListener;
import com.intellij.openapi.vcs.FileStatusManager;
import com.intellij.openapi.vfs.*;
import com.intellij.openapi.wm.IdeFocusManager;
import com.intellij.openapi.wm.ToolWindowManager;
import com.intellij.openapi.wm.WindowManager;
import com.intellij.openapi.wm.ex.StatusBarEx;
import com.intellij.openapi.wm.impl.IdeFrameImpl;
import com.intellij.ui.FocusTrackback;
import com.intellij.ui.docking.DockContainer;
import com.intellij.ui.docking.DockManager;
import com.intellij.ui.docking.impl.DockManagerImpl;
import com.intellij.ui.tabs.impl.JBTabsImpl;
import com.intellij.util.Function;
import com.intellij.util.SmartList;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.messages.MessageBusConnection;
import com.intellij.util.messages.impl.MessageListenerList;
import com.intellij.util.ui.JBInsets;
import com.intellij.util.ui.UIUtil;
import com.intellij.util.ui.update.MergingUpdateQueue;
import com.intellij.util.ui.update.Update;
import org.jdom.Element;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.border.Border;
import java.awt.*;
import java.awt.event.KeyEvent;
import java.awt.event.MouseEvent;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.lang.ref.WeakReference;
import java.util.*;
import java.util.List;
/**
* @author Anton Katilin
* @author Eugene Belyaev
* @author Vladimir Kondratyev
*/
public class FileEditorManagerImpl extends FileEditorManagerEx implements ProjectComponent, JDOMExternalizable {
private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.fileEditor.impl.FileEditorManagerImpl");
private static final Key<LocalFileSystem.WatchRequest> WATCH_REQUEST_KEY = Key.create("WATCH_REQUEST_KEY");
private static final Key<Boolean> DUMB_AWARE = Key.create("DUMB_AWARE");
private static final FileEditor[] EMPTY_EDITOR_ARRAY = {};
private static final FileEditorProvider[] EMPTY_PROVIDER_ARRAY = {};
public static final Key<Boolean> CLOSING_TO_REOPEN = Key.create("CLOSING_TO_REOPEN");
public static final String FILE_EDITOR_MANAGER = "FileEditorManager";
private volatile JPanel myPanels;
private EditorsSplitters mySplitters;
private final Project myProject;
private final List<Pair<VirtualFile, EditorWindow>> mySelectionHistory = new ArrayList<Pair<VirtualFile, EditorWindow>>();
private WeakReference<EditorComposite> myLastSelectedComposite = new WeakReference<EditorComposite>(null);
private final MergingUpdateQueue myQueue = new MergingUpdateQueue("FileEditorManagerUpdateQueue", 50, true, null);
private final BusyObject.Impl.Simple myBusyObject = new BusyObject.Impl.Simple();
/**
   * Removes invalid editors and updates the "modified" status.
*/
private final MyEditorPropertyChangeListener myEditorPropertyChangeListener = new MyEditorPropertyChangeListener();
private final DockManager myDockManager;
private DockableEditorContainerFactory myContentFactory;
public FileEditorManagerImpl(final Project project, DockManager dockManager) {
/* ApplicationManager.getApplication().assertIsDispatchThread(); */
myProject = project;
myDockManager = dockManager;
myListenerList =
new MessageListenerList<FileEditorManagerListener>(myProject.getMessageBus(), FileEditorManagerListener.FILE_EDITOR_MANAGER);
if (Extensions.getExtensions(FileEditorAssociateFinder.EP_NAME).length > 0) {
myListenerList.add(new FileEditorManagerAdapter() {
@Override
public void selectionChanged(@NotNull FileEditorManagerEvent event) {
EditorsSplitters splitters = getSplitters();
openAssociatedFile(event.getNewFile(), splitters.getCurrentWindow(), splitters);
}
});
}
myQueue.setTrackUiActivity(true);
}
void initDockableContentFactory() {
if (myContentFactory != null) return;
myContentFactory = new DockableEditorContainerFactory(myProject, this, myDockManager);
myDockManager.register(DockableEditorContainerFactory.TYPE, myContentFactory);
Disposer.register(myProject, myContentFactory);
}
public static boolean isDumbAware(FileEditor editor) {
return Boolean.TRUE.equals(editor.getUserData(DUMB_AWARE)) &&
(!(editor instanceof PossiblyDumbAware) || ((PossiblyDumbAware)editor).isDumbAware());
}
//-------------------------------------------------------------------------------
@Override
public JComponent getComponent() {
initUI();
return myPanels;
}
@NotNull
public EditorsSplitters getMainSplitters() {
initUI();
return mySplitters;
}
public Set<EditorsSplitters> getAllSplitters() {
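    // Collects the main splitters plus the splitters of every docked editor container.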
HashSet<EditorsSplitters> all = new HashSet<EditorsSplitters>();
all.add(getMainSplitters());
Set<DockContainer> dockContainers = myDockManager.getContainers();
for (DockContainer each : dockContainers) {
if (each instanceof DockableEditorTabbedContainer) {
all.add(((DockableEditorTabbedContainer)each).getSplitters());
}
}
return Collections.unmodifiableSet(all);
}
private AsyncResult<EditorsSplitters> getActiveSplitters(boolean syncUsage) {
final boolean async = Registry.is("ide.windowSystem.asyncSplitters") && !syncUsage;
final AsyncResult<EditorsSplitters> result = new AsyncResult<EditorsSplitters>();
final IdeFocusManager fm = IdeFocusManager.getInstance(myProject);
Runnable run = new Runnable() {
@Override
public void run() {
if (myProject.isDisposed()) {
result.setRejected();
return;
}
Component focusOwner = fm.getFocusOwner();
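        // In the synchronous case, fall back through several focus sources until a dock container can be resolved.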
if (focusOwner == null && !async) {
focusOwner = KeyboardFocusManager.getCurrentKeyboardFocusManager().getFocusOwner();
}
if (focusOwner == null && !async) {
focusOwner = fm.getLastFocusedFor(fm.getLastFocusedFrame());
}
DockContainer container = myDockManager.getContainerFor(focusOwner);
if (container == null && !async) {
focusOwner = KeyboardFocusManager.getCurrentKeyboardFocusManager().getActiveWindow();
container = myDockManager.getContainerFor(focusOwner);
}
if (container instanceof DockableEditorTabbedContainer) {
result.setDone(((DockableEditorTabbedContainer)container).getSplitters());
}
else {
result.setDone(getMainSplitters());
}
}
};
if (async) {
fm.doWhenFocusSettlesDown(run);
}
else {
run.run();
}
return result;
}
private final Object myInitLock = new Object();
private void initUI() {
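    // Lazily creates the editor panel and main splitters, guarded by double-checked locking on myInitLock.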
if (myPanels == null) {
synchronized (myInitLock) {
if (myPanels == null) {
myPanels = new JPanel(new BorderLayout());
myPanels.setOpaque(false);
myPanels.setBorder(new MyBorder());
mySplitters = new EditorsSplitters(this, myDockManager, true);
myPanels.add(mySplitters, BorderLayout.CENTER);
}
}
}
}
private static class MyBorder implements Border {
@Override
public void paintBorder(Component c, Graphics g, int x, int y, int width, int height) {
if (UIUtil.isUnderAquaLookAndFeel()) {
g.setColor(JBTabsImpl.MAC_AQUA_BG_COLOR);
final Insets insets = getBorderInsets(c);
if (insets.top > 0) {
g.fillRect(x, y, width, height + insets.top);
}
}
}
@Override
public Insets getBorderInsets(Component c) {
return JBInsets.NONE;
}
@Override
public boolean isBorderOpaque() {
return false;
}
}
@Override
public JComponent getPreferredFocusedComponent() {
assertReadAccess();
final EditorWindow window = getSplitters().getCurrentWindow();
if (window != null) {
final EditorWithProviderComposite editor = window.getSelectedEditor();
if (editor != null) {
return editor.getPreferredFocusedComponent();
}
}
return null;
}
//-------------------------------------------------------
/**
* @return color of the <code>file</code> which corresponds to the
* file's status
*/
public Color getFileColor(@NotNull final VirtualFile file) {
final FileStatusManager fileStatusManager = FileStatusManager.getInstance(myProject);
Color statusColor = fileStatusManager != null ? fileStatusManager.getStatus(file).getColor() : UIUtil.getLabelForeground();
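    // Fall back to the default label foreground when the file status provides no color.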
if (statusColor == null) statusColor = UIUtil.getLabelForeground();
return statusColor;
}
public boolean isProblem(@NotNull final VirtualFile file) {
return false;
}
public String getFileTooltipText(VirtualFile file) {
return FileUtil.getLocationRelativeToUserHome(file.getPresentableUrl());
}
@Override
public void updateFilePresentation(@NotNull VirtualFile file) {
if (!isFileOpen(file)) return;
updateFileColor(file);
updateFileIcon(file);
updateFileName(file);
updateFileBackgroundColor(file);
}
/**
* Updates tab color for the specified <code>file</code>. The <code>file</code>
   * should be opened in the editor, otherwise the method throws an assertion.
*/
private void updateFileColor(final VirtualFile file) {
Set<EditorsSplitters> all = getAllSplitters();
for (EditorsSplitters each : all) {
each.updateFileColor(file);
}
}
private void updateFileBackgroundColor(final VirtualFile file) {
Set<EditorsSplitters> all = getAllSplitters();
for (EditorsSplitters each : all) {
each.updateFileBackgroundColor(file);
}
}
/**
* Updates tab icon for the specified <code>file</code>. The <code>file</code>
   * should be opened in the editor, otherwise the method throws an assertion.
*/
protected void updateFileIcon(final VirtualFile file) {
Set<EditorsSplitters> all = getAllSplitters();
for (EditorsSplitters each : all) {
each.updateFileIcon(file);
}
}
/**
* Updates tab title and tab tool tip for the specified <code>file</code>
*/
void updateFileName(@Nullable final VirtualFile file) {
    // The queue prevents title flickering when a tab is being closed and two events arrive: one with component==null and one with component==next focused tab;
    // only the last event makes sense to handle.
myQueue.queue(new Update("UpdateFileName " + (file == null ? "" : file.getPath())) {
@Override
public boolean isExpired() {
return myProject.isDisposed() || !myProject.isOpen() || (file == null ? super.isExpired() : !file.isValid());
}
@Override
public void run() {
Set<EditorsSplitters> all = getAllSplitters();
for (EditorsSplitters each : all) {
each.updateFileName(file);
}
}
});
}
//-------------------------------------------------------
@Override
public VirtualFile getFile(@NotNull final FileEditor editor) {
final EditorComposite editorComposite = getEditorComposite(editor);
if (editorComposite != null) {
return editorComposite.getFile();
}
return null;
}
@Override
public void unsplitWindow() {
final EditorWindow currentWindow = getActiveSplitters(true).getResult().getCurrentWindow();
if (currentWindow != null) {
currentWindow.unsplit(true);
}
}
@Override
public void unsplitAllWindow() {
final EditorWindow currentWindow = getActiveSplitters(true).getResult().getCurrentWindow();
if (currentWindow != null) {
currentWindow.unsplitAll();
}
}
@Override
public int getWindowSplitCount() {
return getActiveSplitters(true).getResult().getSplitCount();
}
@Override
public boolean hasSplitOrUndockedWindows() {
Set<EditorsSplitters> splitters = getAllSplitters();
if (splitters.size() > 1) return true;
return getWindowSplitCount() > 1;
}
@Override
@NotNull
public EditorWindow[] getWindows() {
ArrayList<EditorWindow> windows = new ArrayList<EditorWindow>();
Set<EditorsSplitters> all = getAllSplitters();
for (EditorsSplitters each : all) {
EditorWindow[] eachList = each.getWindows();
windows.addAll(Arrays.asList(eachList));
}
return windows.toArray(new EditorWindow[windows.size()]);
}
@Override
public EditorWindow getNextWindow(@NotNull final EditorWindow window) {
final EditorWindow[] windows = getSplitters().getOrderedWindows();
for (int i = 0; i != windows.length; ++i) {
if (windows[i].equals(window)) {
return windows[(i + 1) % windows.length];
}
}
LOG.error("Not window found");
return null;
}
@Override
public EditorWindow getPrevWindow(@NotNull final EditorWindow window) {
final EditorWindow[] windows = getSplitters().getOrderedWindows();
for (int i = 0; i != windows.length; ++i) {
if (windows[i].equals(window)) {
return windows[(i + windows.length - 1) % windows.length];
}
}
LOG.error("Not window found");
return null;
}
@Override
public void createSplitter(final int orientation, @Nullable final EditorWindow window) {
// window was available from action event, for example when invoked from the tab menu of an editor that is not the 'current'
if (window != null) {
window.split(orientation, true, null, false);
}
// otherwise we'll split the current window, if any
else {
final EditorWindow currentWindow = getSplitters().getCurrentWindow();
if (currentWindow != null) {
currentWindow.split(orientation, true, null, false);
}
}
}
@Override
public void changeSplitterOrientation() {
final EditorWindow currentWindow = getSplitters().getCurrentWindow();
if (currentWindow != null) {
currentWindow.changeOrientation();
}
}
@Override
public void flipTabs() {
/*
if (myTabs == null) {
myTabs = new EditorTabs (this, UISettings.getInstance().EDITOR_TAB_PLACEMENT);
remove (mySplitters);
add (myTabs, BorderLayout.CENTER);
initTabs ();
} else {
remove (myTabs);
add (mySplitters, BorderLayout.CENTER);
myTabs.dispose ();
myTabs = null;
}
*/
myPanels.revalidate();
}
@Override
public boolean tabsMode() {
return false;
}
private void setTabsMode(final boolean mode) {
if (tabsMode() != mode) {
flipTabs();
}
//LOG.assertTrue (tabsMode () == mode);
}
@Override
public boolean isInSplitter() {
final EditorWindow currentWindow = getSplitters().getCurrentWindow();
return currentWindow != null && currentWindow.inSplitter();
}
@Override
public boolean hasOpenedFile() {
final EditorWindow currentWindow = getSplitters().getCurrentWindow();
return currentWindow != null && currentWindow.getSelectedEditor() != null;
}
@Override
public VirtualFile getCurrentFile() {
return getActiveSplitters(true).getResult().getCurrentFile();
}
@Override
@NotNull
public AsyncResult<EditorWindow> getActiveWindow() {
return _getActiveWindow(false);
}
@NotNull
private AsyncResult<EditorWindow> _getActiveWindow(boolean now) {
return getActiveSplitters(now).subResult(new Function<EditorsSplitters, EditorWindow>() {
@Override
public EditorWindow fun(EditorsSplitters splitters) {
return splitters.getCurrentWindow();
}
});
}
@Override
public EditorWindow getCurrentWindow() {
return _getActiveWindow(true).getResult();
}
@Override
public void setCurrentWindow(final EditorWindow window) {
getActiveSplitters(true).getResult().setCurrentWindow(window, true);
}
public void closeFile(@NotNull final VirtualFile file, @NotNull final EditorWindow window, final boolean transferFocus) {
assertDispatchThread();
CommandProcessor.getInstance().executeCommand(myProject, new Runnable() {
@Override
public void run() {
if (window.isFileOpen(file)) {
window.closeFile(file, true, transferFocus);
final List<EditorWindow> windows = window.getOwner().findWindows(file);
if (windows.isEmpty()) { // no more windows containing this file left
final LocalFileSystem.WatchRequest request = file.getUserData(WATCH_REQUEST_KEY);
if (request != null) {
LocalFileSystem.getInstance().removeWatchedRoot(request);
}
}
}
}
}, IdeBundle.message("command.close.active.editor"), null);
removeSelectionRecord(file, window);
}
@Override
public void closeFile(@NotNull final VirtualFile file, @NotNull final EditorWindow window) {
closeFile(file, window, true);
}
//============================= EditorManager methods ================================
@Override
public void closeFile(@NotNull final VirtualFile file) {
closeFile(file, true, false);
}
public void closeFile(@NotNull final VirtualFile file, final boolean moveFocus, final boolean closeAllCopies) {
assertDispatchThread();
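    // Stop watching the file's parent directory; the watch request was registered when the file was opened.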
final LocalFileSystem.WatchRequest request = file.getUserData(WATCH_REQUEST_KEY);
if (request != null) {
LocalFileSystem.getInstance().removeWatchedRoot(request);
}
CommandProcessor.getInstance().executeCommand(myProject, new Runnable() {
@Override
public void run() {
closeFileImpl(file, moveFocus, closeAllCopies);
}
}, "", null);
}
private void closeFileImpl(@NotNull final VirtualFile file, final boolean moveFocus, boolean closeAllCopies) {
assertDispatchThread();
runChange(new FileEditorManagerChange() {
@Override
public void run(EditorsSplitters splitters) {
splitters.closeFile(file, moveFocus);
}
}, closeAllCopies ? null : getActiveSplitters(true).getResult());
}
//-------------------------------------- Open File ----------------------------------------
@Override
@NotNull
public Pair<FileEditor[], FileEditorProvider[]> openFileWithProviders(@NotNull final VirtualFile file,
final boolean focusEditor,
boolean searchForSplitter) {
if (!file.isValid()) {
throw new IllegalArgumentException("file is not valid: " + file);
}
assertDispatchThread();
if (isOpenInNewWindow(EventQueue.getCurrentEvent())) {
return openFileInNewWindow(file);
}
EditorWindow wndToOpenIn = null;
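    // When a splitter search is requested, prefer a window that already shows the file; otherwise reuse the current window.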
if (searchForSplitter) {
Set<EditorsSplitters> all = getAllSplitters();
EditorsSplitters active = getActiveSplitters(true).getResult();
if (active.getCurrentWindow() != null && active.getCurrentWindow().isFileOpen(file)) {
wndToOpenIn = active.getCurrentWindow();
} else {
for (EditorsSplitters splitters : all) {
final EditorWindow window = splitters.getCurrentWindow();
if (window == null) continue;
if (window.isFileOpen(file)) {
wndToOpenIn = window;
break;
}
}
}
}
else {
wndToOpenIn = getSplitters().getCurrentWindow();
}
EditorsSplitters splitters = getSplitters();
if (wndToOpenIn == null) {
wndToOpenIn = splitters.getOrCreateCurrentWindow(file);
}
openAssociatedFile(file, wndToOpenIn, splitters);
return openFileImpl2(wndToOpenIn, file, focusEditor);
}
public Pair<FileEditor[], FileEditorProvider[]> openFileInNewWindow(VirtualFile file) {
return ((DockManagerImpl)DockManager.getInstance(getProject())).createNewDockContainerFor(file, this);
}
private static boolean isOpenInNewWindow(AWTEvent event) {
// Shift was used while clicking
if (event instanceof MouseEvent && ((MouseEvent)event).isShiftDown()) {
return true;
}
// Shift + Enter
if (event instanceof KeyEvent
&& ((KeyEvent)event).getKeyCode() == KeyEvent.VK_ENTER
&& ((KeyEvent)event).isShiftDown()) {
return true;
}
return false;
}
private void openAssociatedFile(VirtualFile file, EditorWindow wndToOpenIn, EditorsSplitters splitters) {
EditorWindow[] windows = splitters.getWindows();
if (file != null && windows.length == 2) {
for (FileEditorAssociateFinder finder : Extensions.getExtensions(FileEditorAssociateFinder.EP_NAME)) {
VirtualFile associatedFile = finder.getAssociatedFileToOpen(myProject, file);
if (associatedFile != null) {
EditorWindow currentWindow = splitters.getCurrentWindow();
int idx = windows[0] == wndToOpenIn ? 1 : 0;
openFileImpl2(windows[idx], associatedFile, false);
if (currentWindow != null) {
splitters.setCurrentWindow(currentWindow, false);
}
break;
}
}
}
}
@NotNull
@Override
public Pair<FileEditor[], FileEditorProvider[]> openFileWithProviders(@NotNull VirtualFile file,
boolean focusEditor,
@NotNull EditorWindow window) {
if (!file.isValid()) {
throw new IllegalArgumentException("file is not valid: " + file);
}
assertDispatchThread();
return openFileImpl2(window, file, focusEditor);
}
@NotNull
public Pair<FileEditor[], FileEditorProvider[]> openFileImpl2(@NotNull final EditorWindow window,
@NotNull final VirtualFile file,
final boolean focusEditor) {
final Ref<Pair<FileEditor[], FileEditorProvider[]>> result = new Ref<Pair<FileEditor[], FileEditorProvider[]>>();
CommandProcessor.getInstance().executeCommand(myProject, new Runnable() {
@Override
public void run() {
result.set(openFileImpl3(window, file, focusEditor, null, true));
}
}, "", null);
return result.get();
}
  /**
   * @param file  file to be opened. Unlike the openFile method, the file may be
   *              invalid. For example, all files were invalidated and are being
   *              removed one by one. If we have removed one invalid file, another
   *              invalid file becomes selected. That's why we do not require that
   *              the passed file is valid.
   * @param entry map between FileEditorProvider and FileEditorState. If this parameter
   *              is not null, it is used to restore the state of the opened editors.
   */
@NotNull
Pair<FileEditor[], FileEditorProvider[]> openFileImpl3(@NotNull final EditorWindow window,
@NotNull final VirtualFile file,
final boolean focusEditor,
@Nullable final HistoryEntry entry,
boolean current) {
return openFileImpl4(window, file, focusEditor, entry, current, -1);
}
@NotNull
Pair<FileEditor[], FileEditorProvider[]> openFileImpl4(@NotNull final EditorWindow window,
@NotNull final VirtualFile file,
final boolean focusEditor,
@Nullable final HistoryEntry entry,
boolean current,
int index) {
// Open file
FileEditor[] editors;
FileEditorProvider[] providers;
final EditorWithProviderComposite newSelectedComposite;
boolean newEditorCreated = false;
final boolean open = window.isFileOpen(file);
if (open) {
// File is already opened. In this case we have to just select existing EditorComposite
newSelectedComposite = window.findFileComposite(file);
LOG.assertTrue(newSelectedComposite != null);
editors = newSelectedComposite.getEditors();
providers = newSelectedComposite.getProviders();
}
else {
if (UISettings.getInstance().EDITOR_TAB_PLACEMENT == UISettings.TABS_NONE || UISettings.getInstance().PRESENTATION_MODE) {
for (EditorWithProviderComposite composite : window.getEditors()) {
Disposer.dispose(composite);
}
}
// File is not opened yet. In this case we have to create editors
// and select the created EditorComposite.
final FileEditorProviderManager editorProviderManager = FileEditorProviderManager.getInstance();
providers = editorProviderManager.getProviders(myProject, file);
if (DumbService.getInstance(myProject).isDumb()) {
final List<FileEditorProvider> dumbAware = ContainerUtil.findAll(providers, new Condition<FileEditorProvider>() {
@Override
public boolean value(FileEditorProvider fileEditorProvider) {
return DumbService.isDumbAware(fileEditorProvider);
}
});
providers = dumbAware.toArray(new FileEditorProvider[dumbAware.size()]);
}
if (providers.length == 0) {
return Pair.create(EMPTY_EDITOR_ARRAY, EMPTY_PROVIDER_ARRAY);
}
newEditorCreated = true;
getProject().getMessageBus().syncPublisher(FileEditorManagerListener.Before.FILE_EDITOR_MANAGER).beforeFileOpened(this, file);
editors = new FileEditor[providers.length];
for (int i = 0; i < providers.length; i++) {
try {
final FileEditorProvider provider = providers[i];
LOG.assertTrue(provider != null, "Provider for file "+file+" is null. All providers: "+Arrays.asList(providers));
LOG.assertTrue(provider.accept(myProject, file), "Provider " + provider + " doesn't accept file " + file);
final FileEditor editor = provider.createEditor(myProject, file);
LOG.assertTrue(editor != null);
LOG.assertTrue(editor.isValid());
editors[i] = editor;
// Register PropertyChangeListener into editor
editor.addPropertyChangeListener(myEditorPropertyChangeListener);
editor.putUserData(DUMB_AWARE, DumbService.isDumbAware(provider));
if (current && editor instanceof TextEditorImpl) {
((TextEditorImpl)editor).initFolding();
}
}
catch (Exception e) {
LOG.error(e);
}
catch (AssertionError e) {
LOG.error(e);
}
}
// Now we have to create EditorComposite and insert it into the TabbedEditorComponent.
// After that we have to select opened editor.
newSelectedComposite = new EditorWithProviderComposite(file, editors, providers, this);
if (index >= 0) {
newSelectedComposite.getFile().putUserData(EditorWindow.INITIAL_INDEX_KEY, index);
}
}
window.setEditor(newSelectedComposite, focusEditor);
final EditorHistoryManager editorHistoryManager = EditorHistoryManager.getInstance(myProject);
for (int i = 0; i < editors.length; i++) {
final FileEditor editor = editors[i];
if (editor instanceof TextEditor) {
// hack!!!
// This code prevents "jumping" on next repaint.
((EditorEx)((TextEditor)editor).getEditor()).stopOptimizedScrolling();
}
final FileEditorProvider provider = providers[i];//getProvider(editor);
// Restore editor state
FileEditorState state = null;
if (entry != null) {
state = entry.getState(provider);
}
if (state == null && !open) {
// We have to try to get state from the history only in case
// if editor is not opened. Otherwise history entry might have a state
// out of sync with the current editor state.
state = editorHistoryManager.getState(file, provider);
}
if (state != null) {
if (!isDumbAware(editor)) {
final FileEditorState finalState = state;
DumbService.getInstance(getProject()).runWhenSmart(new Runnable() {
@Override
public void run() {
editor.setState(finalState);
}
});
}
else {
editor.setState(state);
}
}
}
// Restore selected editor
final FileEditorProvider[] _providers = newSelectedComposite.getProviders();
final FileEditorProvider selectedProvider;
if (entry == null) {
selectedProvider = ((FileEditorProviderManagerImpl)FileEditorProviderManager.getInstance())
.getSelectedFileEditorProvider(editorHistoryManager, file, _providers);
}
else {
selectedProvider = entry.mySelectedProvider;
}
if (selectedProvider != null) {
final FileEditor[] _editors = newSelectedComposite.getEditors();
for (int i = _editors.length - 1; i >= 0; i--) {
final FileEditorProvider provider = _providers[i];//getProvider(_editors[i]);
if (provider.equals(selectedProvider)) {
newSelectedComposite.setSelectedEditor(i);
break;
}
}
}
// Notify editors about selection changes
window.getOwner().setCurrentWindow(window, focusEditor);
window.getOwner().afterFileOpen(file);
UIUtil.invokeLaterIfNeeded(new Runnable() {
@Override
public void run() {
newSelectedComposite.getSelectedEditor().selectNotify();
}
});
final IdeFocusManager focusManager = IdeFocusManager.getInstance(myProject);
if (newEditorCreated) {
if (window.isShowing()) {
window.setPaintBlocked(true);
}
notifyPublisher(new Runnable() {
@Override
public void run() {
window.setPaintBlocked(false);
if (isFileOpen(file)) {
getProject().getMessageBus().syncPublisher(FileEditorManagerListener.FILE_EDITOR_MANAGER)
.fileOpened(FileEditorManagerImpl.this, file);
}
}
});
//Add request to watch this editor's virtual file
final VirtualFile parentDir = file.getParent();
if (parentDir != null) {
final LocalFileSystem.WatchRequest request = LocalFileSystem.getInstance().addRootToWatch(parentDir.getPath(), false);
file.putUserData(WATCH_REQUEST_KEY, request);
}
}
//[jeka] this is a hack to support back-forward navigation
// previously here was incorrect call to fireSelectionChanged() with a side-effect
((IdeDocumentHistoryImpl)IdeDocumentHistory.getInstance(myProject)).onSelectionChanged();
// Transfer focus into editor
if (!ApplicationManagerEx.getApplicationEx().isUnitTestMode()) {
if (focusEditor) {
//myFirstIsActive = myTabbedContainer1.equals(tabbedContainer);
window.setAsCurrentWindow(true);
ToolWindowManager.getInstance(myProject).activateEditorComponent();
focusManager.toFront(window.getOwner());
}
}
// Update frame and tab title
updateFileName(file);
// Make back/forward work
IdeDocumentHistory.getInstance(myProject).includeCurrentCommandAsNavigation();
return Pair.create(editors, providers);
}
@NotNull
@Override
public ActionCallback notifyPublisher(@NotNull final Runnable runnable) {
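    // Defers the runnable until focus settles down; the busy object lets getReady() callers wait for pending notifications.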
final IdeFocusManager focusManager = IdeFocusManager.getInstance(myProject);
final ActionCallback done = new ActionCallback();
return myBusyObject.execute(new ActiveRunnable() {
@NotNull
@Override
public ActionCallback run() {
focusManager.doWhenFocusSettlesDown(new ExpirableRunnable.ForProject(myProject) {
@Override
public void run() {
runnable.run();
done.setDone();
}
});
return done;
}
});
}
@Override
public void setSelectedEditor(@NotNull VirtualFile file, String fileEditorProviderId) {
EditorWithProviderComposite composite = getCurrentEditorWithProviderComposite(file);
if (composite == null) {
final List<EditorWithProviderComposite> composites = getEditorComposites(file);
if (composites.isEmpty()) return;
composite = composites.get(0);
}
final FileEditorProvider[] editorProviders = composite.getProviders();
final FileEditorProvider selectedProvider = composite.getSelectedEditorWithProvider().getSecond();
for (int i = 0; i < editorProviders.length; i++) {
if (editorProviders[i].getEditorTypeId().equals(fileEditorProviderId) && !selectedProvider.equals(editorProviders[i])) {
composite.setSelectedEditor(i);
composite.getSelectedEditor().selectNotify();
}
}
}
@Nullable
EditorWithProviderComposite newEditorComposite(final VirtualFile file) {
if (file == null) {
return null;
}
final FileEditorProviderManager editorProviderManager = FileEditorProviderManager.getInstance();
final FileEditorProvider[] providers = editorProviderManager.getProviders(myProject, file);
final FileEditor[] editors = new FileEditor[providers.length];
for (int i = 0; i < providers.length; i++) {
final FileEditorProvider provider = providers[i];
LOG.assertTrue(provider != null);
LOG.assertTrue(provider.accept(myProject, file));
final FileEditor editor = provider.createEditor(myProject, file);
editors[i] = editor;
LOG.assertTrue(editor.isValid());
editor.addPropertyChangeListener(myEditorPropertyChangeListener);
}
final EditorWithProviderComposite newComposite = new EditorWithProviderComposite(file, editors, providers, this);
final EditorHistoryManager editorHistoryManager = EditorHistoryManager.getInstance(myProject);
for (int i = 0; i < editors.length; i++) {
final FileEditor editor = editors[i];
if (editor instanceof TextEditor) {
// hack!!!
// This code prevents "jumping" on next repaint.
//((EditorEx)((TextEditor)editor).getEditor()).stopOptimizedScrolling();
}
final FileEditorProvider provider = providers[i];
      // Restore editor state
FileEditorState state = editorHistoryManager.getState(file, provider);
if (state != null) {
editor.setState(state);
}
}
return newComposite;
}
@Override
@NotNull
public List<FileEditor> openEditor(@NotNull final OpenFileDescriptor descriptor, final boolean focusEditor) {
assertDispatchThread();
if (descriptor.getFile() instanceof VirtualFileWindow) {
VirtualFileWindow delegate = (VirtualFileWindow)descriptor.getFile();
int hostOffset = delegate.getDocumentWindow().injectedToHost(descriptor.getOffset());
OpenFileDescriptor realDescriptor = new OpenFileDescriptor(descriptor.getProject(), delegate.getDelegate(), hostOffset);
realDescriptor.setUseCurrentWindow(descriptor.isUseCurrentWindow());
return openEditor(realDescriptor, focusEditor);
}
final List<FileEditor> result = new SmartList<FileEditor>();
CommandProcessor.getInstance().executeCommand(myProject, new Runnable() {
@Override
public void run() {
VirtualFile file = descriptor.getFile();
final FileEditor[] editors = openFile(file, focusEditor, !descriptor.isUseCurrentWindow());
ContainerUtil.addAll(result, editors);
boolean navigated = false;
for (final FileEditor editor : editors) {
if (editor instanceof NavigatableFileEditor &&
getSelectedEditor(descriptor.getFile()) == editor) { // try to navigate opened editor
navigated = navigateAndSelectEditor((NavigatableFileEditor)editor, descriptor);
if (navigated) break;
}
}
if (!navigated) {
for (final FileEditor editor : editors) {
if (editor instanceof NavigatableFileEditor && getSelectedEditor(descriptor.getFile()) != editor) { // try other editors
if (navigateAndSelectEditor((NavigatableFileEditor)editor, descriptor)) {
break;
}
}
}
}
}
}, "", null);
return result;
}
private boolean navigateAndSelectEditor(final NavigatableFileEditor editor, final OpenFileDescriptor descriptor) {
if (editor.canNavigateTo(descriptor)) {
setSelectedEditor(editor);
editor.navigateTo(descriptor);
return true;
}
return false;
}
private void setSelectedEditor(final FileEditor editor) {
final EditorWithProviderComposite composite = getEditorComposite(editor);
if (composite == null) return;
final FileEditor[] editors = composite.getEditors();
for (int i = 0; i < editors.length; i++) {
final FileEditor each = editors[i];
if (editor == each) {
composite.setSelectedEditor(i);
composite.getSelectedEditor().selectNotify();
break;
}
}
}
@Override
@NotNull
public Project getProject() {
return myProject;
}
@Override
@Nullable
public Editor openTextEditor(@NotNull final OpenFileDescriptor descriptor, final boolean focusEditor) {
final Collection<FileEditor> fileEditors = openEditor(descriptor, focusEditor);
for (FileEditor fileEditor : fileEditors) {
if (fileEditor instanceof TextEditor) {
setSelectedEditor(descriptor.getFile(), TextEditorProvider.getInstance().getEditorTypeId());
Editor editor = ((TextEditor)fileEditor).getEditor();
return getOpenedEditor(editor, focusEditor);
}
}
return null;
}
protected Editor getOpenedEditor(final Editor editor, final boolean focusEditor) {
return editor;
}
@Override
public Editor getSelectedTextEditor() {
assertReadAccess();
final EditorWindow currentWindow = getSplitters().getCurrentWindow();
if (currentWindow != null) {
final EditorWithProviderComposite selectedEditor = currentWindow.getSelectedEditor();
if (selectedEditor != null && selectedEditor.getSelectedEditor() instanceof TextEditor) {
return ((TextEditor)selectedEditor.getSelectedEditor()).getEditor();
}
}
return null;
}
@Override
public boolean isFileOpen(@NotNull final VirtualFile file) {
return !getEditorComposites(file).isEmpty();
}
@Override
@NotNull
public VirtualFile[] getOpenFiles() {
HashSet<VirtualFile> openFiles = new HashSet<VirtualFile>();
for (EditorsSplitters each : getAllSplitters()) {
openFiles.addAll(Arrays.asList(each.getOpenFiles()));
}
return VfsUtilCore.toVirtualFileArray(openFiles);
}
@Override
@NotNull
public VirtualFile[] getSelectedFiles() {
HashSet<VirtualFile> selectedFiles = new HashSet<VirtualFile>();
for (EditorsSplitters each : getAllSplitters()) {
selectedFiles.addAll(Arrays.asList(each.getSelectedFiles()));
}
return VfsUtilCore.toVirtualFileArray(selectedFiles);
}
@Override
@NotNull
public FileEditor[] getSelectedEditors() {
HashSet<FileEditor> selectedEditors = new HashSet<FileEditor>();
for (EditorsSplitters each : getAllSplitters()) {
selectedEditors.addAll(Arrays.asList(each.getSelectedEditors()));
}
return selectedEditors.toArray(new FileEditor[selectedEditors.size()]);
}
@Override
@NotNull
public EditorsSplitters getSplitters() {
EditorsSplitters active = getActiveSplitters(true).getResult();
return active == null ? getMainSplitters() : active;
}
@Override
@Nullable
public FileEditor getSelectedEditor(@NotNull final VirtualFile file) {
final Pair<FileEditor, FileEditorProvider> selectedEditorWithProvider = getSelectedEditorWithProvider(file);
return selectedEditorWithProvider == null ? null : selectedEditorWithProvider.getFirst();
}
@Override
@Nullable
public Pair<FileEditor, FileEditorProvider> getSelectedEditorWithProvider(@NotNull VirtualFile file) {
if (file instanceof VirtualFileWindow) file = ((VirtualFileWindow)file).getDelegate();
final EditorWithProviderComposite composite = getCurrentEditorWithProviderComposite(file);
if (composite != null) {
return composite.getSelectedEditorWithProvider();
}
final List<EditorWithProviderComposite> composites = getEditorComposites(file);
return composites.isEmpty() ? null : composites.get(0).getSelectedEditorWithProvider();
}
@Override
@NotNull
public Pair<FileEditor[], FileEditorProvider[]> getEditorsWithProviders(@NotNull final VirtualFile file) {
assertReadAccess();
final EditorWithProviderComposite composite = getCurrentEditorWithProviderComposite(file);
if (composite != null) {
return Pair.create(composite.getEditors(), composite.getProviders());
}
final List<EditorWithProviderComposite> composites = getEditorComposites(file);
if (!composites.isEmpty()) {
return Pair.create(composites.get(0).getEditors(), composites.get(0).getProviders());
}
else {
return Pair.create(EMPTY_EDITOR_ARRAY, EMPTY_PROVIDER_ARRAY);
}
}
@Override
@NotNull
public FileEditor[] getEditors(@NotNull VirtualFile file) {
assertReadAccess();
if (file instanceof VirtualFileWindow) file = ((VirtualFileWindow)file).getDelegate();
final EditorWithProviderComposite composite = getCurrentEditorWithProviderComposite(file);
if (composite != null) {
return composite.getEditors();
}
final List<EditorWithProviderComposite> composites = getEditorComposites(file);
if (!composites.isEmpty()) {
return composites.get(0).getEditors();
}
else {
return EMPTY_EDITOR_ARRAY;
}
}
@NotNull
@Override
public FileEditor[] getAllEditors(@NotNull VirtualFile file) {
List<EditorWithProviderComposite> editorComposites = getEditorComposites(file);
if (editorComposites.isEmpty()) return EMPTY_EDITOR_ARRAY;
List<FileEditor> editors = new ArrayList<FileEditor>();
for (EditorWithProviderComposite composite : editorComposites) {
ContainerUtil.addAll(editors, composite.getEditors());
}
return editors.toArray(new FileEditor[editors.size()]);
}
@Nullable
private EditorWithProviderComposite getCurrentEditorWithProviderComposite(@NotNull final VirtualFile virtualFile) {
final EditorWindow editorWindow = getSplitters().getCurrentWindow();
if (editorWindow != null) {
return editorWindow.findFileComposite(virtualFile);
}
return null;
}
@NotNull
public List<EditorWithProviderComposite> getEditorComposites(final VirtualFile file) {
ArrayList<EditorWithProviderComposite> result = new ArrayList<EditorWithProviderComposite>();
Set<EditorsSplitters> all = getAllSplitters();
for (EditorsSplitters each : all) {
result.addAll(each.findEditorComposites(file));
}
return result;
}
@Override
@NotNull
public FileEditor[] getAllEditors() {
assertReadAccess();
final ArrayList<FileEditor> result = new ArrayList<FileEditor>();
final Set<EditorsSplitters> allSplitters = getAllSplitters();
for (EditorsSplitters splitter : allSplitters) {
final EditorWithProviderComposite[] editorsComposites = splitter.getEditorsComposites();
for (EditorWithProviderComposite editorsComposite : editorsComposites) {
final FileEditor[] editors = editorsComposite.getEditors();
ContainerUtil.addAll(result, editors);
}
}
return result.toArray(new FileEditor[result.size()]);
}
@Override
public void showEditorAnnotation(@NotNull FileEditor editor, @NotNull JComponent annotationComponent) {
addTopComponent(editor, annotationComponent);
}
@Override
public void removeEditorAnnotation(@NotNull FileEditor editor, @NotNull JComponent annotationComponent) {
removeTopComponent(editor, annotationComponent);
}
@Override
public void addTopComponent(@NotNull final FileEditor editor, @NotNull final JComponent component) {
final EditorComposite composite = getEditorComposite(editor);
if (composite != null) {
composite.addTopComponent(editor, component);
}
}
@Override
public void removeTopComponent(@NotNull final FileEditor editor, @NotNull final JComponent component) {
final EditorComposite composite = getEditorComposite(editor);
if (composite != null) {
composite.removeTopComponent(editor, component);
}
}
@Override
public void addBottomComponent(@NotNull final FileEditor editor, @NotNull final JComponent component) {
final EditorComposite composite = getEditorComposite(editor);
if (composite != null) {
composite.addBottomComponent(editor, component);
}
}
@Override
public void removeBottomComponent(@NotNull final FileEditor editor, @NotNull final JComponent component) {
final EditorComposite composite = getEditorComposite(editor);
if (composite != null) {
composite.removeBottomComponent(editor, component);
}
}
private final MessageListenerList<FileEditorManagerListener> myListenerList;
@Override
public void addFileEditorManagerListener(@NotNull final FileEditorManagerListener listener) {
myListenerList.add(listener);
}
@Override
public void addFileEditorManagerListener(@NotNull final FileEditorManagerListener listener, @NotNull final Disposable parentDisposable) {
myListenerList.add(listener, parentDisposable);
}
@Override
public void removeFileEditorManagerListener(@NotNull final FileEditorManagerListener listener) {
myListenerList.remove(listener);
}
// ProjectComponent methods
@Override
public void projectOpened() {
//myFocusWatcher.install(myWindows.getComponent ());
getMainSplitters().startListeningFocus();
MessageBusConnection connection = myProject.getMessageBus().connect(myProject);
final FileStatusManager fileStatusManager = FileStatusManager.getInstance(myProject);
if (fileStatusManager != null) {
/**
       * Updates tab colors
*/
final MyFileStatusListener myFileStatusListener = new MyFileStatusListener();
fileStatusManager.addFileStatusListener(myFileStatusListener, myProject);
}
connection.subscribe(FileTypeManager.TOPIC, new MyFileTypeListener());
connection.subscribe(ProjectTopics.PROJECT_ROOTS, new MyRootsListener());
/**
     * Updates tab names
*/
final MyVirtualFileListener myVirtualFileListener = new MyVirtualFileListener();
VirtualFileManager.getInstance().addVirtualFileListener(myVirtualFileListener, myProject);
/**
     * Extends or trims the number of opened tabs. Also updates the location of tabs.
*/
final MyUISettingsListener myUISettingsListener = new MyUISettingsListener();
UISettings.getInstance().addUISettingsListener(myUISettingsListener, myProject);
StartupManager.getInstance(myProject).registerPostStartupActivity(new DumbAwareRunnable() {
@Override
public void run() {
setTabsMode(UISettings.getInstance().EDITOR_TAB_PLACEMENT != UISettings.TABS_NONE);
ToolWindowManager.getInstance(myProject).invokeLater(new Runnable() {
@Override
public void run() {
CommandProcessor.getInstance().executeCommand(myProject, new Runnable() {
@Override
public void run() {
LaterInvocator.invokeLater(new Runnable() {
@Override
public void run() {
long currentTime = System.nanoTime();
Long startTime = myProject.getUserData(ProjectImpl.CREATION_TIME);
if (startTime != null) {
LOG.info("Project opening took " + (currentTime - startTime.longValue()) / 1000000 + " ms");
PluginManager.dumpPluginClassStatistics();
}
}
});
// group 1
}
}, "", null);
}
});
}
});
}
@Override
public void projectClosed() {
//myFocusWatcher.deinstall(myWindows.getComponent ());
getMainSplitters().dispose();
    // Dispose created editors. We do not use the closeEditor method because
    // it fires events and changes history.
closeAllFiles();
}
  // BaseComponent methods
@Override
@NotNull
public String getComponentName() {
return FILE_EDITOR_MANAGER;
}
@Override
public void initComponent() {
}
@Override
public void disposeComponent() { /* really do nothing */ }
//JDOMExternalizable methods
@Override
public void writeExternal(final Element element) {
getMainSplitters().writeExternal(element);
}
@Override
public void readExternal(final Element element) {
getMainSplitters().readExternal(element);
}
@Nullable
private EditorWithProviderComposite getEditorComposite(@NotNull final FileEditor editor) {
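    // Walk every splitter's composites (newest first) and return the one that owns the given editor.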
for (EditorsSplitters splitters : getAllSplitters()) {
final EditorWithProviderComposite[] editorsComposites = splitters.getEditorsComposites();
for (int i = editorsComposites.length - 1; i >= 0; i--) {
final EditorWithProviderComposite composite = editorsComposites[i];
final FileEditor[] editors = composite.getEditors();
for (int j = editors.length - 1; j >= 0; j--) {
final FileEditor _editor = editors[j];
LOG.assertTrue(_editor != null);
if (editor.equals(_editor)) {
return composite;
}
}
}
}
return null;
}
//======================= Misc =====================
private static void assertDispatchThread() {
ApplicationManager.getApplication().assertIsDispatchThread();
}
private static void assertReadAccess() {
ApplicationManager.getApplication().assertReadAccessAllowed();
}
public void fireSelectionChanged(final EditorComposite newSelectedComposite) {
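    // Fire selectionChanged only when the selected file or editor actually changed, and skip the
    // notification when the new file is merely an "associated" counterpart of the previous one.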
final Trinity<VirtualFile, FileEditor, FileEditorProvider> oldData = extract(myLastSelectedComposite.get());
final Trinity<VirtualFile, FileEditor, FileEditorProvider> newData = extract(newSelectedComposite);
myLastSelectedComposite = new WeakReference<EditorComposite>(newSelectedComposite);
final boolean filesEqual = oldData.first == null ? newData.first == null : oldData.first.equals(newData.first);
final boolean editorsEqual = oldData.second == null ? newData.second == null : oldData.second.equals(newData.second);
if (!filesEqual || !editorsEqual) {
if (oldData.first != null && newData.first != null) {
for (FileEditorAssociateFinder finder : Extensions.getExtensions(FileEditorAssociateFinder.EP_NAME)) {
VirtualFile associatedFile = finder.getAssociatedFileToOpen(myProject, oldData.first);
if (Comparing.equal(associatedFile, newData.first)) {
return;
}
}
}
final FileEditorManagerEvent event =
new FileEditorManagerEvent(this, oldData.first, oldData.second, oldData.third, newData.first, newData.second, newData.third);
final FileEditorManagerListener publisher = getProject().getMessageBus().syncPublisher(FileEditorManagerListener.FILE_EDITOR_MANAGER);
if (newData.first != null) {
final JComponent component = newData.second.getComponent();
final EditorWindowHolder holder = UIUtil.getParentOfType(EditorWindowHolder.class, component);
if (holder != null) {
addSelectionRecord(newData.first, holder.getEditorWindow());
}
}
notifyPublisher(new Runnable() {
@Override
public void run() {
publisher.selectionChanged(event);
}
});
}
}
@NotNull
private static Trinity<VirtualFile, FileEditor, FileEditorProvider> extract(@Nullable EditorComposite composite) {
final VirtualFile file;
final FileEditor editor;
final FileEditorProvider provider;
if (composite == null || composite.isDisposed()) {
file = null;
editor = null;
provider = null;
}
else {
file = composite.getFile();
final Pair<FileEditor, FileEditorProvider> pair = composite.getSelectedEditorWithProvider();
editor = pair.first;
provider = pair.second;
}
return new Trinity<VirtualFile, FileEditor, FileEditorProvider>(file, editor, provider);
}
@Override
public boolean isChanged(@NotNull final EditorComposite editor) {
final FileStatusManager fileStatusManager = FileStatusManager.getInstance(myProject);
if (fileStatusManager != null) {
VirtualFile file = editor.getFile();
FileStatus status = fileStatusManager.getStatus(file);
if (status == FileStatus.UNKNOWN && !file.isWritable()) {
return false;
}
if (!status.equals(FileStatus.NOT_CHANGED)) {
return true;
}
}
return false;
}
public void disposeComposite(@NotNull EditorWithProviderComposite editor) {
if (getAllEditors().length == 0) {
setCurrentWindow(null);
}
if (editor.equals(getLastSelected())) {
editor.getSelectedEditor().deselectNotify();
getSplitters().setCurrentWindow(null, false);
}
final FileEditor[] editors = editor.getEditors();
final FileEditorProvider[] providers = editor.getProviders();
final FileEditor selectedEditor = editor.getSelectedEditor();
for (int i = editors.length - 1; i >= 0; i--) {
final FileEditor editor1 = editors[i];
final FileEditorProvider provider = providers[i];
      if (!editor.equals(selectedEditor)) { // we have already notified the selected editor (when the event was fired)
if (selectedEditor.equals(editor1)) {
editor1.deselectNotify();
}
}
editor1.removePropertyChangeListener(myEditorPropertyChangeListener);
provider.disposeEditor(editor1);
}
Disposer.dispose(editor);
}
@Nullable
EditorComposite getLastSelected() {
final EditorWindow currentWindow = getActiveSplitters(true).getResult().getCurrentWindow();
if (currentWindow != null) {
return currentWindow.getSelectedEditor();
}
return null;
}
public void runChange(FileEditorManagerChange change, EditorsSplitters splitters) {
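    // Run the change against the given splitters, or against all of them when none is specified,
    // bracketing each run with that splitters' change counter.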
Set<EditorsSplitters> target = new HashSet<EditorsSplitters>();
if (splitters == null) {
target.addAll(getAllSplitters());
} else {
target.add(splitters);
}
for (EditorsSplitters each : target) {
each.myInsideChange++;
try {
change.run(each);
}
finally {
each.myInsideChange--;
}
}
}
//================== Listeners =====================
/**
   * Closes deleted files. Also closes files located in deleted directories.
*/
private final class MyVirtualFileListener extends VirtualFileAdapter {
@Override
public void beforeFileDeletion(VirtualFileEvent e) {
assertDispatchThread();
boolean moveFocus = moveFocusOnDelete();
final VirtualFile file = e.getFile();
final VirtualFile[] openFiles = getOpenFiles();
for (int i = openFiles.length - 1; i >= 0; i--) {
if (VfsUtilCore.isAncestor(file, openFiles[i], false)) {
closeFile(openFiles[i], moveFocus, true);
}
}
}
@Override
public void propertyChanged(VirtualFilePropertyEvent e) {
if (VirtualFile.PROP_NAME.equals(e.getPropertyName())) {
assertDispatchThread();
final VirtualFile file = e.getFile();
if (isFileOpen(file)) {
updateFileName(file);
updateFileIcon(file); // file type can change after renaming
updateFileBackgroundColor(file);
}
}
else if (VirtualFile.PROP_WRITABLE.equals(e.getPropertyName()) || VirtualFile.PROP_ENCODING.equals(e.getPropertyName())) {
// TODO: message bus?
updateIconAndStatusBar(e);
}
}
private void updateIconAndStatusBar(final VirtualFilePropertyEvent e) {
assertDispatchThread();
final VirtualFile file = e.getFile();
if (isFileOpen(file)) {
updateFileIcon(file);
if (file.equals(getSelectedFiles()[0])) { // update "write" status
final StatusBarEx statusBar = (StatusBarEx)WindowManager.getInstance().getStatusBar(myProject);
assert statusBar != null;
statusBar.updateWidgets();
}
}
}
@Override
public void fileMoved(VirtualFileMoveEvent e) {
final VirtualFile file = e.getFile();
final VirtualFile[] openFiles = getOpenFiles();
for (final VirtualFile openFile : openFiles) {
if (VfsUtilCore.isAncestor(file, openFile, false)) {
updateFileName(openFile);
updateFileBackgroundColor(openFile);
}
}
}
}
private static boolean moveFocusOnDelete() {
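    // Move focus to another editor only when focus would otherwise return to an editor component
    // (or when the focused window is the main IDE frame).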
final Window window = KeyboardFocusManager.getCurrentKeyboardFocusManager().getFocusedWindow();
if (window != null) {
final Component component = FocusTrackback.getFocusFor(window);
if (component != null) {
return component instanceof EditorComponentImpl;
}
return window instanceof IdeFrameImpl;
}
return true;
}
@Override
public boolean isInsideChange() {
return getSplitters().isInsideChange();
}
private final class MyEditorPropertyChangeListener implements PropertyChangeListener {
@Override
public void propertyChange(final PropertyChangeEvent e) {
assertDispatchThread();
final String propertyName = e.getPropertyName();
if (FileEditor.PROP_MODIFIED.equals(propertyName)) {
final FileEditor editor = (FileEditor)e.getSource();
final EditorComposite composite = getEditorComposite(editor);
if (composite != null) {
updateFileIcon(composite.getFile());
}
}
else if (FileEditor.PROP_VALID.equals(propertyName)) {
final boolean valid = ((Boolean)e.getNewValue()).booleanValue();
if (!valid) {
final FileEditor editor = (FileEditor)e.getSource();
LOG.assertTrue(editor != null);
final EditorComposite composite = getEditorComposite(editor);
if (composite != null) {
closeFile(composite.getFile());
}
}
}
}
}
/**
   * Gets events from the VCS and updates the color of editor tabs
*/
private final class MyFileStatusListener implements FileStatusListener {
@Override
public void fileStatusesChanged() { // update color of all open files
assertDispatchThread();
LOG.debug("FileEditorManagerImpl.MyFileStatusListener.fileStatusesChanged()");
final VirtualFile[] openFiles = getOpenFiles();
for (int i = openFiles.length - 1; i >= 0; i--) {
final VirtualFile file = openFiles[i];
LOG.assertTrue(file != null);
ApplicationManager.getApplication().invokeLater(new Runnable() {
@Override
public void run() {
if (LOG.isDebugEnabled()) {
LOG.debug("updating file status in tab for " + file.getPath());
}
updateFileStatus(file);
}
}, ModalityState.NON_MODAL, myProject.getDisposed());
}
}
@Override
public void fileStatusChanged(@NotNull final VirtualFile file) { // update color of the file (if necessary)
assertDispatchThread();
if (isFileOpen(file)) {
updateFileStatus(file);
}
}
private void updateFileStatus(final VirtualFile file) {
updateFileColor(file);
updateFileIcon(file);
}
}
/**
* Gets events from FileTypeManager and updates icons on tabs
*/
private final class MyFileTypeListener extends FileTypeListener.Adapter {
@Override
public void fileTypesChanged(final FileTypeEvent event) {
assertDispatchThread();
final VirtualFile[] openFiles = getOpenFiles();
for (int i = openFiles.length - 1; i >= 0; i--) {
final VirtualFile file = openFiles[i];
LOG.assertTrue(file != null);
updateFileIcon(file);
}
}
}
private class MyRootsListener extends ModuleRootAdapter {
@Override
public void rootsChanged(ModuleRootEvent event) {
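      // Ask the registered EditorFileSwapper extensions whether any open editor should be replaced by an
      // alternative file after the roots change; if so, reopen the replacement at the same tab index
      // (and caret offset, when provided) and close the original file.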
EditorFileSwapper[] swappers = Extensions.getExtensions(EditorFileSwapper.EP_NAME);
for (EditorWindow eachWindow : getWindows()) {
EditorWithProviderComposite selected = eachWindow.getSelectedEditor();
EditorWithProviderComposite[] editors = eachWindow.getEditors();
for (int i = 0; i < editors.length; i++) {
EditorWithProviderComposite editor = editors[i];
VirtualFile file = editor.getFile();
if (!file.isValid()) continue;
Pair<VirtualFile, Integer> newFilePair = null;
for (EditorFileSwapper each : swappers) {
newFilePair = each.getFileToSwapTo(myProject, editor);
if (newFilePair != null) break;
}
if (newFilePair == null) continue;
VirtualFile newFile = newFilePair.first;
if (newFile == null) continue;
// already open
if (eachWindow.findFileIndex(newFile) != -1) continue;
try {
newFile.putUserData(EditorWindow.INITIAL_INDEX_KEY, i);
Pair<FileEditor[], FileEditorProvider[]> pair = openFileImpl2(eachWindow, newFile, editor == selected);
if (newFilePair.second != null) {
TextEditorImpl openedEditor = EditorFileSwapper.findSinglePsiAwareEditor(pair.first);
if (openedEditor != null) {
openedEditor.getEditor().getCaretModel().moveToOffset(newFilePair.second);
openedEditor.getEditor().getScrollingModel().scrollToCaret(ScrollType.CENTER);
}
}
}
finally {
newFile.putUserData(EditorWindow.INITIAL_INDEX_KEY, null);
}
closeFile(file, eachWindow);
}
}
}
}
/**
   * Gets notifications from the UISettings component to track changes of RECENT_FILES_LIMIT,
   * EDITOR_TAB_LIMIT, and other values.
*/
private final class MyUISettingsListener implements UISettingsListener {
@Override
public void uiSettingsChanged(final UISettings source) {
assertDispatchThread();
setTabsMode(source.EDITOR_TAB_PLACEMENT != UISettings.TABS_NONE && !UISettings.getInstance().PRESENTATION_MODE);
for (EditorsSplitters each : getAllSplitters()) {
each.setTabsPlacement(source.EDITOR_TAB_PLACEMENT);
each.trimToSize(source.EDITOR_TAB_LIMIT);
// Tab layout policy
if (source.SCROLL_TAB_LAYOUT_IN_EDITOR) {
each.setTabLayoutPolicy(JTabbedPane.SCROLL_TAB_LAYOUT);
}
else {
each.setTabLayoutPolicy(JTabbedPane.WRAP_TAB_LAYOUT);
}
}
// "Mark modified files with asterisk"
final VirtualFile[] openFiles = getOpenFiles();
for (int i = openFiles.length - 1; i >= 0; i--) {
final VirtualFile file = openFiles[i];
updateFileIcon(file);
updateFileName(file);
updateFileBackgroundColor(file);
}
}
}
@Override
public void closeAllFiles() {
final VirtualFile[] openFiles = getSplitters().getOpenFiles();
for (VirtualFile openFile : openFiles) {
closeFile(openFile);
}
}
@Override
@NotNull
public VirtualFile[] getSiblings(@NotNull VirtualFile file) {
return getOpenFiles();
}
protected void queueUpdateFile(final VirtualFile file) {
myQueue.queue(new Update(file) {
@Override
public void run() {
if (isFileOpen(file)) {
updateFileIcon(file);
updateFileColor(file);
updateFileBackgroundColor(file);
}
}
});
}
@Override
public EditorsSplitters getSplittersFor(Component c) {
EditorsSplitters splitters = null;
DockContainer dockContainer = myDockManager.getContainerFor(c);
if (dockContainer instanceof DockableEditorTabbedContainer) {
splitters = ((DockableEditorTabbedContainer)dockContainer).getSplitters();
}
if (splitters == null) {
splitters = getMainSplitters();
}
return splitters;
}
public List<Pair<VirtualFile, EditorWindow>> getSelectionHistory() {
List<Pair<VirtualFile, EditorWindow>> copy = new ArrayList<Pair<VirtualFile, EditorWindow>>();
for (Pair<VirtualFile, EditorWindow> pair : mySelectionHistory) {
if (pair.second.getFiles().length == 0) {
final EditorWindow[] windows = pair.second.getOwner().getWindows();
if (windows.length > 0 && windows[0] != null && windows[0].getFiles().length > 0) {
final Pair<VirtualFile, EditorWindow> p = Pair.create(pair.first, windows[0]);
if (!copy.contains(p)) {
copy.add(p);
}
}
} else {
if (!copy.contains(pair)) {
copy.add(pair);
}
}
}
mySelectionHistory.clear();
mySelectionHistory.addAll(copy);
return mySelectionHistory;
}
public void addSelectionRecord(VirtualFile file, EditorWindow window) {
final Pair<VirtualFile, EditorWindow> record = Pair.create(file, window);
mySelectionHistory.remove(record);
mySelectionHistory.add(0, record);
}
public void removeSelectionRecord(VirtualFile file, EditorWindow window) {
mySelectionHistory.remove(Pair.create(file, window));
}
@Override
public ActionCallback getReady(@NotNull Object requestor) {
return myBusyObject.getReady(requestor);
}
}
|
already disposed
|
platform/platform-impl/src/com/intellij/openapi/fileEditor/impl/FileEditorManagerImpl.java
|
already disposed
|
|
Java
|
apache-2.0
|
edca1393559befc075e962e1d3f0d5aa06acd719
| 0
|
debezium/debezium,debezium/debezium,debezium/debezium,debezium/debezium
|
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.server.pulsar;
import java.io.IOException;
import java.time.Duration;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import javax.enterprise.event.Observes;
import org.apache.pulsar.client.api.Consumer;
import org.apache.pulsar.client.api.Message;
import org.apache.pulsar.client.api.PulsarClient;
import org.apache.pulsar.client.api.Schema;
import org.awaitility.Awaitility;
import org.eclipse.microprofile.config.inject.ConfigProperty;
import org.junit.jupiter.api.Test;
import io.debezium.connector.postgresql.connection.PostgresConnection;
import io.debezium.jdbc.JdbcConfiguration;
import io.debezium.server.TestConfigSource;
import io.debezium.server.events.ConnectorCompletedEvent;
import io.debezium.server.events.ConnectorStartedEvent;
import io.debezium.testing.testcontainers.PostgresTestResourceLifecycleManager;
import io.debezium.util.Testing;
import io.quarkus.test.common.QuarkusTestResource;
import io.quarkus.test.junit.QuarkusTest;
/**
* Integration test that verifies basic reading from PostgreSQL database and writing to an Apache Pulsar topic.
*
* @author Jiri Pechanec
*/
@QuarkusTest
@QuarkusTestResource(PostgresTestResourceLifecycleManager.class)
@QuarkusTestResource(PulsarTestResourceLifecycleManager.class)
public class PulsarIT {
private static final int MESSAGE_COUNT = 4;
private static final String TOPIC_NAME = "testc.inventory.customers";
private static final String NOKEY_TOPIC_NAME = "testc.inventory.nokey";
@ConfigProperty(name = "debezium.source.database.hostname")
String dbHostname;
@ConfigProperty(name = "debezium.source.database.port")
String dbPort;
@ConfigProperty(name = "debezium.source.database.user")
String dbUser;
@ConfigProperty(name = "debezium.source.database.password")
String dbPassword;
@ConfigProperty(name = "debezium.source.database.dbname")
String dbName;
protected static PulsarClient pulsarClient;
{
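        // Reset the file-based offset store before the connector engine starts.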
Testing.Files.delete(TestConfigSource.OFFSET_STORE_PATH);
Testing.Files.createTestingFile(PulsarTestConfigSource.OFFSET_STORE_PATH);
}
void setupDependencies(@Observes ConnectorStartedEvent event) throws IOException {
Testing.Print.enable();
pulsarClient = PulsarClient.builder()
.serviceUrl(PulsarTestResourceLifecycleManager.getPulsarServiceUrl())
.build();
}
void connectorCompleted(@Observes ConnectorCompletedEvent event) throws Exception {
if (!event.isSuccess()) {
throw new RuntimeException(event.getError().get());
}
}
@Test
public void testPulsar() throws Exception {
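        // Wait until setupDependencies() has created the Pulsar client before subscribing.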
Awaitility.await().atMost(Duration.ofSeconds(PulsarTestConfigSource.waitForSeconds())).until(() -> {
return pulsarClient != null;
});
final Consumer<String> consumer = pulsarClient.newConsumer(Schema.STRING)
.topic(TOPIC_NAME)
.subscriptionName("test-" + UUID.randomUUID())
.subscribe();
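        // Drain the customers topic until MESSAGE_COUNT records have been received.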
final List<Message<String>> records = new ArrayList<>();
Awaitility.await().atMost(Duration.ofSeconds(PulsarTestConfigSource.waitForSeconds())).until(() -> {
records.add(consumer.receive());
return records.size() >= MESSAGE_COUNT;
});
final JdbcConfiguration config = JdbcConfiguration.create()
.with("hostname", dbHostname)
.with("port", dbPort)
.with("user", dbUser)
.with("password", dbPassword)
.with("dbname", dbName)
.build();
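        // Insert rows into a keyless table so the connector also emits records without a message key.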
try (final PostgresConnection connection = new PostgresConnection(config, "Debezium Pulsar Test")) {
connection.execute(
"CREATE TABLE inventory.nokey (val INT);",
"INSERT INTO inventory.nokey VALUES (1)",
"INSERT INTO inventory.nokey VALUES (2)",
"INSERT INTO inventory.nokey VALUES (3)",
"INSERT INTO inventory.nokey VALUES (4)");
}
final Consumer<String> nokeyConsumer = pulsarClient.newConsumer(Schema.STRING)
.topic(NOKEY_TOPIC_NAME)
.subscriptionName("test-" + UUID.randomUUID())
.subscribe();
final List<Message<String>> nokeyRecords = new ArrayList<>();
Awaitility.await().atMost(Duration.ofSeconds(PulsarTestConfigSource.waitForSeconds())).until(() -> {
nokeyRecords.add(nokeyConsumer.receive());
return nokeyRecords.size() >= MESSAGE_COUNT;
});
}
}
|
debezium-server/debezium-server-pulsar/src/test/java/io/debezium/server/pulsar/PulsarIT.java
|
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.server.pulsar;
import java.io.IOException;
import java.time.Duration;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import javax.enterprise.event.Observes;
import org.apache.pulsar.client.api.Consumer;
import org.apache.pulsar.client.api.Message;
import org.apache.pulsar.client.api.PulsarClient;
import org.apache.pulsar.client.api.Schema;
import org.awaitility.Awaitility;
import org.eclipse.microprofile.config.inject.ConfigProperty;
import org.junit.jupiter.api.Test;
import io.debezium.connector.postgresql.connection.PostgresConnection;
import io.debezium.jdbc.JdbcConfiguration;
import io.debezium.server.TestConfigSource;
import io.debezium.server.events.ConnectorCompletedEvent;
import io.debezium.server.events.ConnectorStartedEvent;
import io.debezium.testing.testcontainers.PostgresTestResourceLifecycleManager;
import io.debezium.util.Testing;
import io.quarkus.test.common.QuarkusTestResource;
import io.quarkus.test.junit.QuarkusTest;
/**
* Integration test that verifies basic reading from PostgreSQL database and writing to an Apache Pulsar topic.
*
* @author Jiri Pechanec
*/
@QuarkusTest
@QuarkusTestResource(PostgresTestResourceLifecycleManager.class)
@QuarkusTestResource(PulsarTestResourceLifecycleManager.class)
public class PulsarIT {
private static final int MESSAGE_COUNT = 4;
private static final String TOPIC_NAME = "testc.inventory.customers";
private static final String NOKEY_TOPIC_NAME = "testc.inventory.nokey";
@ConfigProperty(name = "debezium.source.database.hostname")
String dbHostname;
@ConfigProperty(name = "debezium.source.database.port")
String dbPort;
@ConfigProperty(name = "debezium.source.database.user")
String dbUser;
@ConfigProperty(name = "debezium.source.database.password")
String dbPassword;
@ConfigProperty(name = "debezium.source.database.dbname")
String dbName;
protected static PulsarClient pulsarClient;
{
Testing.Files.delete(TestConfigSource.OFFSET_STORE_PATH);
Testing.Files.createTestingFile(PulsarTestConfigSource.OFFSET_STORE_PATH);
}
void setupDependencies(@Observes ConnectorStartedEvent event) throws IOException {
Testing.Print.enable();
pulsarClient = PulsarClient.builder()
.serviceUrl(PulsarTestResourceLifecycleManager.getPulsarServiceUrl())
.build();
}
void connectorCompleted(@Observes ConnectorCompletedEvent event) throws Exception {
if (!event.isSuccess()) {
throw new RuntimeException(event.getError().get());
}
}
@Test
public void testPulsar() throws Exception {
final Consumer<String> consumer = pulsarClient.newConsumer(Schema.STRING)
.topic(TOPIC_NAME)
.subscriptionName("test-" + UUID.randomUUID())
.subscribe();
final List<Message<String>> records = new ArrayList<>();
Awaitility.await().atMost(Duration.ofSeconds(PulsarTestConfigSource.waitForSeconds())).until(() -> {
records.add(consumer.receive());
return records.size() >= MESSAGE_COUNT;
});
final JdbcConfiguration config = JdbcConfiguration.create()
.with("hostname", dbHostname)
.with("port", dbPort)
.with("user", dbUser)
.with("password", dbPassword)
.with("dbname", dbName)
.build();
try (final PostgresConnection connection = new PostgresConnection(config, "Debezium Pulsar Test")) {
connection.execute(
"CREATE TABLE inventory.nokey (val INT);",
"INSERT INTO inventory.nokey VALUES (1)",
"INSERT INTO inventory.nokey VALUES (2)",
"INSERT INTO inventory.nokey VALUES (3)",
"INSERT INTO inventory.nokey VALUES (4)");
}
final Consumer<String> nokeyConsumer = pulsarClient.newConsumer(Schema.STRING)
.topic(NOKEY_TOPIC_NAME)
.subscriptionName("test-" + UUID.randomUUID())
.subscribe();
final List<Message<String>> nokeyRecords = new ArrayList<>();
Awaitility.await().atMost(Duration.ofSeconds(PulsarTestConfigSource.waitForSeconds())).until(() -> {
nokeyRecords.add(nokeyConsumer.receive());
return nokeyRecords.size() >= MESSAGE_COUNT;
});
}
}
|
DBZ-4720 wait for pulsarClient
|
debezium-server/debezium-server-pulsar/src/test/java/io/debezium/server/pulsar/PulsarIT.java
|
DBZ-4720 wait for pulsarClient
|
|
Java
|
apache-2.0
|
752e533256d5f102c30bdbaeddbd3ffd634bd61e
| 0
|
Nanoware/Terasology,Malanius/Terasology,Nanoware/Terasology,MovingBlocks/Terasology,Nanoware/Terasology,Malanius/Terasology,MovingBlocks/Terasology,MovingBlocks/Terasology
|
/*
* Copyright 2013 MovingBlocks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terasology.world.block.family;
import com.google.common.collect.Sets;
import gnu.trove.map.TByteObjectMap;
import gnu.trove.map.hash.TByteObjectHashMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.terasology.math.Rotation;
import org.terasology.math.Side;
import org.terasology.math.SideBitFlag;
import org.terasology.math.geom.Vector3i;
import org.terasology.naming.Name;
import org.terasology.registry.In;
import org.terasology.world.BlockEntityRegistry;
import org.terasology.world.WorldProvider;
import org.terasology.world.block.Block;
import org.terasology.world.block.BlockBuilderHelper;
import org.terasology.world.block.BlockUri;
import org.terasology.world.block.loader.BlockFamilyDefinition;
import org.terasology.world.block.shapes.BlockShape;
import java.util.Locale;
import java.util.Set;
/**
* Multi-Connect family describes a block family that will connect to other neighboring blocks.
*
* examples:
* - Rail Segments
* - Cables
* - Fence
*/
public abstract class MultiConnectFamily extends AbstractBlockFamily implements UpdatesWithNeighboursFamily {
private static final Logger logger = LoggerFactory.getLogger(FreeformFamily.class);
@In
protected WorldProvider worldProvider;
@In
protected BlockEntityRegistry blockEntityRegistry;
protected TByteObjectMap<Block> blocks = new TByteObjectHashMap<>();
public MultiConnectFamily(BlockFamilyDefinition definition, BlockShape shape, BlockBuilderHelper blockBuilder) {
super(definition, shape, blockBuilder);
this.setBlockUri(new BlockUri(definition.getUrn()));
this.setCategory(definition.getCategories());
}
public MultiConnectFamily(BlockFamilyDefinition definition, BlockBuilderHelper blockBuilder) {
super(definition, blockBuilder);
this.setBlockUri(new BlockUri(definition.getUrn()));
this.setCategory(definition.getCategories());
}
/**
     * A condition that identifies whether this block may connect on a given side.
     * @param blockLocation the world location of the block being evaluated
     * @param connectSide the side to test for a connection
     * @return true if a connection can be made on the given side
*/
protected abstract boolean connectionCondition(Vector3i blockLocation, Side connectSide);
/**
     * Sides that are valid to connect to, encoded as a SideBitFlag byte.
     * @return a byte whose bits mark the sides this family may connect on
*/
public abstract byte getConnectionSides();
public abstract boolean horizontalOnly();
@Override
public abstract Block getArchetypeBlock();
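    /**
     * Constructs and registers one rotated variant of this block per given rotation, keyed by the
     * rotated connection sides encoded as a SideBitFlag byte.
     */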
    public Set<Block> registerBlock(BlockUri root, BlockFamilyDefinition definition, final BlockBuilderHelper blockBuilder, String name, byte sides, Iterable<Rotation> rotations) {
        Set<Block> result = Sets.newLinkedHashSet();
        for (Rotation rotation : rotations) {
            byte sideBits = 0;
            for (Side side : SideBitFlag.getSides(sides)) {
                sideBits += SideBitFlag.getSide(rotation.rotate(side));
            }
            Block block = blockBuilder.constructTransformedBlock(definition, name, rotation);
            block.setBlockFamily(this);
            block.setUri(new BlockUri(root, new Name(String.valueOf(sideBits))));
            blocks.put(sideBits, block);
            result.add(block);
        }
        return result;
    }
@Override
public Block getBlockForPlacement(Vector3i location, Side attachmentSide, Side direction) {
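        // Accumulate a bit for every side whose neighbour satisfies the connection condition, then look up the matching variant.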
byte connections = 0;
for (Side connectSide : SideBitFlag.getSides(getConnectionSides())) {
if (this.connectionCondition(location, connectSide)) {
connections += SideBitFlag.getSide(connectSide);
}
}
return blocks.get(connections);
}
public Block getBlockForNeighborUpdate(Vector3i location, Block oldBlock) {
byte connections = 0;
for (Side connectSide : SideBitFlag.getSides(getConnectionSides())) {
if (this.connectionCondition(location, connectSide)) {
connections += SideBitFlag.getSide(connectSide);
}
}
return blocks.get(connections);
}
@Override
public Block getBlockFor(BlockUri blockUri) {
if (getURI().equals(blockUri.getFamilyUri())) {
try {
byte connections = Byte.parseByte(blockUri.getIdentifier().toString().toLowerCase(Locale.ENGLISH));
return blocks.get(connections);
} catch (IllegalArgumentException e) {
logger.error("can't find block with URI: {}", blockUri, e);
return null;
}
}
return null;
}
@Override
public Iterable<Block> getBlocks() {
return blocks.valueCollection();
}
}
|
engine/src/main/java/org/terasology/world/block/family/MultiConnectFamily.java
|
/*
* Copyright 2013 MovingBlocks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terasology.world.block.family;
import com.google.common.collect.Sets;
import gnu.trove.map.TByteObjectMap;
import gnu.trove.map.hash.TByteObjectHashMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.terasology.math.Rotation;
import org.terasology.math.Side;
import org.terasology.math.SideBitFlag;
import org.terasology.math.geom.Vector3i;
import org.terasology.naming.Name;
import org.terasology.registry.In;
import org.terasology.world.BlockEntityRegistry;
import org.terasology.world.WorldProvider;
import org.terasology.world.block.Block;
import org.terasology.world.block.BlockBuilderHelper;
import org.terasology.world.block.BlockUri;
import org.terasology.world.block.loader.BlockFamilyDefinition;
import org.terasology.world.block.shapes.BlockShape;
import java.util.Locale;
import java.util.Set;
public abstract class MultiConnectFamily extends AbstractBlockFamily implements UpdatesWithNeighboursFamily {
private static final Logger logger = LoggerFactory.getLogger(FreeformFamily.class);
@In
protected WorldProvider worldProvider;
@In
protected BlockEntityRegistry blockEntityRegistry;
protected TByteObjectMap<Block> blocks = new TByteObjectHashMap<>();
public MultiConnectFamily(BlockFamilyDefinition definition, BlockShape shape, BlockBuilderHelper blockBuilder) {
super(definition, shape, blockBuilder);
this.setBlockUri(new BlockUri(definition.getUrn()));
this.setCategory(definition.getCategories());
}
public MultiConnectFamily(BlockFamilyDefinition definition, BlockBuilderHelper blockBuilder) {
super(definition, blockBuilder);
this.setBlockUri(new BlockUri(definition.getUrn()));
this.setCategory(definition.getCategories());
}
protected abstract boolean connectionCondition(Vector3i blockLocation, Side connectSide);
public abstract byte getConnectionSides();
public abstract boolean horizontalOnly();
@Override
public abstract Block getArchetypeBlock();
public Set<Block> registerBlock(BlockUri root,BlockFamilyDefinition definition,final BlockBuilderHelper blockBuilder,String name,byte sides,Iterable<Rotation> rotations){
Set<Block> result = Sets.newLinkedHashSet();
for(Rotation rotation: rotations)
{
byte sideBits = 0;
for(Side side : SideBitFlag.getSides(sides)){
sideBits += SideBitFlag.getSide(rotation.rotate(side));
}
Block block = blockBuilder.constructTransformedBlock(definition,name,rotation);
block.setBlockFamily(this);
block.setUri(new BlockUri(root,new Name(String.valueOf(sideBits))));
blocks.put(sideBits,block);
result.add(block);
}
return result;
}
@Override
public Block getBlockForPlacement(Vector3i location, Side attachmentSide, Side direction) {
byte connections = 0;
for (Side connectSide : SideBitFlag.getSides(getConnectionSides())) {
if (this.connectionCondition(location, connectSide)) {
connections += SideBitFlag.getSide(connectSide);
}
}
return blocks.get(connections);
}
public Block getBlockForNeighborUpdate(Vector3i location, Block oldBlock) {
byte connections = 0;
for (Side connectSide : SideBitFlag.getSides(getConnectionSides())) {
if (this.connectionCondition(location, connectSide)) {
connections += SideBitFlag.getSide(connectSide);
}
}
return blocks.get(connections);
}
@Override
public Block getBlockFor(BlockUri blockUri) {
if (getURI().equals(blockUri.getFamilyUri())) {
try {
byte connections = Byte.parseByte(blockUri.getIdentifier().toString().toLowerCase(Locale.ENGLISH));
return blocks.get(connections);
} catch (IllegalArgumentException e) {
logger.error("can't find block with URI: {}", blockUri, e);
return null;
}
}
return null;
}
@Override
public Iterable<Block> getBlocks() {
return blocks.valueCollection();
}
}
|
updated java docs
|
engine/src/main/java/org/terasology/world/block/family/MultiConnectFamily.java
|
updated java docs
|
|
Java
|
apache-2.0
|
c24a92de79a07da74946d5dfcf5cb0e55e7a9918
| 0
|
vpro/poel,vpro/poel,vpro/poel,vpro/poel
|
package nl.vpro.poel;
import lombok.experimental.UtilityClass;
import nl.vpro.poel.domain.CurrentUser;
import org.springframework.security.cas.authentication.CasAuthenticationToken;
import org.springframework.security.core.userdetails.UserDetails;
import java.security.Principal;
import java.util.Optional;
@UtilityClass
public class UserUtil {
public Optional<CurrentUser> getCurrentUser(Principal principal) {
if (principal instanceof CasAuthenticationToken) {
UserDetails userDetails = ((CasAuthenticationToken)principal).getUserDetails();
if (userDetails instanceof CurrentUser) {
return Optional.of((CurrentUser) userDetails);
}
}
return Optional.empty();
}
}
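// Minimal usage sketch (hypothetical Spring MVC controller method, not part of this class):
//   @GetMapping("/whoami")
//   public String whoAmI(Principal principal) {
//       return UserUtil.getCurrentUser(principal)
//               .map(Object::toString)
//               .orElse("anonymous");
//   }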
|
src/main/java/nl/vpro/poel/UserUtil.java
|
package nl.vpro.poel;
import nl.vpro.poel.domain.CurrentUser;
import org.springframework.security.cas.authentication.CasAuthenticationToken;
import org.springframework.security.core.userdetails.UserDetails;
import java.security.Principal;
import java.util.Optional;
public class UserUtil {
public static Optional<CurrentUser> getCurrentUser(Principal principal) {
if (principal instanceof CasAuthenticationToken) {
UserDetails userDetails = ((CasAuthenticationToken)principal).getUserDetails();
if (userDetails instanceof CurrentUser) {
return Optional.of((CurrentUser) userDetails);
}
}
return Optional.empty();
}
}
|
Use Lombok's UtilityClass.
|
src/main/java/nl/vpro/poel/UserUtil.java
|
Use Lombok's UtilityClass.
|
|
Java
|
apache-2.0
|
00b55b87315ff3930a28fdf606e788e73323a39f
| 0
|
ontop/ontop,ontop/ontop,ontop/ontop,ontop/ontop,ontop/ontop
|
package it.unibz.inf.ontop.iq.node.impl;
import com.google.common.collect.*;
import com.google.inject.assistedinject.Assisted;
import com.google.inject.assistedinject.AssistedInject;
import it.unibz.inf.ontop.evaluator.TermNullabilityEvaluator;
import it.unibz.inf.ontop.injection.IntermediateQueryFactory;
import it.unibz.inf.ontop.iq.exception.InvalidIntermediateQueryException;
import it.unibz.inf.ontop.iq.exception.QueryNodeTransformationException;
import it.unibz.inf.ontop.iq.node.*;
import it.unibz.inf.ontop.iq.node.normalization.ConditionSimplifier.ExpressionAndSubstitution;
import it.unibz.inf.ontop.iq.node.normalization.ConditionSimplifier;
import it.unibz.inf.ontop.iq.node.normalization.InnerJoinNormalizer;
import it.unibz.inf.ontop.iq.transform.IQTreeVisitingTransformer;
import it.unibz.inf.ontop.iq.visit.IQVisitor;
import it.unibz.inf.ontop.model.term.*;
import it.unibz.inf.ontop.model.type.TypeFactory;
import it.unibz.inf.ontop.substitution.ImmutableSubstitution;
import it.unibz.inf.ontop.iq.*;
import it.unibz.inf.ontop.iq.transform.node.HomogeneousQueryNodeTransformer;
import it.unibz.inf.ontop.substitution.SubstitutionFactory;
import it.unibz.inf.ontop.substitution.impl.ImmutableSubstitutionTools;
import it.unibz.inf.ontop.substitution.impl.ImmutableUnificationTools;
import it.unibz.inf.ontop.utils.ImmutableCollectors;
import it.unibz.inf.ontop.utils.VariableGenerator;
import java.util.AbstractCollection;
import java.util.Collection;
import java.util.Optional;
import java.util.stream.IntStream;
import java.util.stream.Stream;
public class InnerJoinNodeImpl extends JoinLikeNodeImpl implements InnerJoinNode {
private static final String JOIN_NODE_STR = "JOIN" ;
private final ConstructionNodeTools constructionNodeTools;
private final JoinOrFilterVariableNullabilityTools variableNullabilityTools;
private final ConditionSimplifier conditionSimplifier;
private final InnerJoinNormalizer normalizer;
@AssistedInject
protected InnerJoinNodeImpl(@Assisted Optional<ImmutableExpression> optionalFilterCondition,
TermNullabilityEvaluator nullabilityEvaluator,
TermFactory termFactory, TypeFactory typeFactory,
IntermediateQueryFactory iqFactory, SubstitutionFactory substitutionFactory,
ConstructionNodeTools constructionNodeTools,
ImmutableUnificationTools unificationTools, ImmutableSubstitutionTools substitutionTools,
JoinOrFilterVariableNullabilityTools variableNullabilityTools, ConditionSimplifier conditionSimplifier,
InnerJoinNormalizer normalizer) {
super(optionalFilterCondition, nullabilityEvaluator, termFactory, iqFactory, typeFactory,
substitutionFactory, unificationTools, substitutionTools);
this.constructionNodeTools = constructionNodeTools;
this.variableNullabilityTools = variableNullabilityTools;
this.conditionSimplifier = conditionSimplifier;
this.normalizer = normalizer;
}
@AssistedInject
private InnerJoinNodeImpl(@Assisted ImmutableExpression joiningCondition,
TermNullabilityEvaluator nullabilityEvaluator,
TermFactory termFactory, TypeFactory typeFactory,
IntermediateQueryFactory iqFactory, SubstitutionFactory substitutionFactory,
ConstructionNodeTools constructionNodeTools,
ImmutableUnificationTools unificationTools, ImmutableSubstitutionTools substitutionTools,
JoinOrFilterVariableNullabilityTools variableNullabilityTools, ConditionSimplifier conditionSimplifier, InnerJoinNormalizer normalizer) {
super(Optional.of(joiningCondition), nullabilityEvaluator, termFactory, iqFactory, typeFactory,
substitutionFactory, unificationTools, substitutionTools);
this.constructionNodeTools = constructionNodeTools;
this.variableNullabilityTools = variableNullabilityTools;
this.conditionSimplifier = conditionSimplifier;
this.normalizer = normalizer;
}
@AssistedInject
private InnerJoinNodeImpl(TermNullabilityEvaluator nullabilityEvaluator, TermFactory termFactory,
TypeFactory typeFactory, IntermediateQueryFactory iqFactory,
SubstitutionFactory substitutionFactory, ConstructionNodeTools constructionNodeTools,
ImmutableUnificationTools unificationTools, ImmutableSubstitutionTools substitutionTools,
JoinOrFilterVariableNullabilityTools variableNullabilityTools, ConditionSimplifier conditionSimplifier, InnerJoinNormalizer normalizer) {
super(Optional.empty(), nullabilityEvaluator, termFactory, iqFactory, typeFactory,
substitutionFactory, unificationTools, substitutionTools);
this.constructionNodeTools = constructionNodeTools;
this.variableNullabilityTools = variableNullabilityTools;
this.conditionSimplifier = conditionSimplifier;
this.normalizer = normalizer;
}
@Override
public void acceptVisitor(QueryNodeVisitor visitor) {
visitor.visit(this);
}
@Override
public InnerJoinNode clone() {
return iqFactory.createInnerJoinNode(getOptionalFilterCondition());
}
@Override
public InnerJoinNode acceptNodeTransformer(HomogeneousQueryNodeTransformer transformer)
throws QueryNodeTransformationException {
return transformer.transform(this);
}
@Override
public ImmutableSet<ImmutableSubstitution<NonVariableTerm>> getPossibleVariableDefinitions(ImmutableList<IQTree> children) {
return children.stream()
.map(IQTree::getPossibleVariableDefinitions)
.filter(s -> !s.isEmpty())
.reduce(ImmutableSet.of(), this::combineVarDefs);
}
private ImmutableSet<ImmutableSubstitution<NonVariableTerm>> combineVarDefs(
ImmutableSet<ImmutableSubstitution<NonVariableTerm>> s1,
ImmutableSet<ImmutableSubstitution<NonVariableTerm>> s2) {
return s1.isEmpty()
? s2
: s1.stream()
.flatMap(d1 -> s2.stream()
/*
* Takes the first definition of a common variable.
*
                         * Behaves like a union except that it is robust to "non-identical" definitions.
* If normalized, two definitions for the same variables are expected to be compatible.
*
* If not normalized, the definitions may be incompatible, but that's fine
* since they will not produce any result.
*
*/
.map(d2 -> d2.composeWith2(d1)))
.collect(ImmutableCollectors.toSet());
}
@Override
public InnerJoinNode changeOptionalFilterCondition(Optional<ImmutableExpression> newOptionalFilterCondition) {
return iqFactory.createInnerJoinNode(newOptionalFilterCondition);
}
@Override
public boolean isVariableNullable(IntermediateQuery query, Variable variable) {
if (isFilteringNullValue(variable))
return false;
// Non-already
boolean alsoProjectedByAnotherChild = false;
for(QueryNode child : query.getChildren(this)) {
if (query.getVariables(child).contains(variable)) {
// Joining conditions cannot be null
if (alsoProjectedByAnotherChild)
return false;
if (child.isVariableNullable(query, variable))
alsoProjectedByAnotherChild = true;
else
return false;
}
}
if (!alsoProjectedByAnotherChild)
throw new IllegalArgumentException("The variable " + variable + " is not projected by " + this);
return true;
}
@Override
public boolean isSyntacticallyEquivalentTo(QueryNode node) {
return (node instanceof InnerJoinNode) &&
this.getOptionalFilterCondition().equals(((InnerJoinNode) node).getOptionalFilterCondition());
}
@Override
public boolean isEquivalentTo(QueryNode queryNode) {
return (queryNode instanceof InnerJoinNode)
&& getOptionalFilterCondition().equals(((InnerJoinNode) queryNode).getOptionalFilterCondition());
}
@Override
public String toString() {
return JOIN_NODE_STR + getOptionalFilterString();
}
/**
* TODO: refactor
*/
@Override
public IQTree normalizeForOptimization(ImmutableList<IQTree> children, VariableGenerator variableGenerator,
IQProperties currentIQProperties) {
return normalizer.normalizeForOptimization(this, children, variableGenerator, currentIQProperties);
}
@Override
public IQTree applyDescendingSubstitution(ImmutableSubstitution<? extends VariableOrGroundTerm> descendingSubstitution,
Optional<ImmutableExpression> constraint, ImmutableList<IQTree> children) {
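        // Apply the substitution to the join condition, simplify it, push the combined substitution and
        // constraint down to the children, and re-project through a construction node when the
        // simplification produced its own substitution; an unsatisfiable condition collapses the join
        // into an empty node.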
Optional<ImmutableExpression> unoptimizedExpression = getOptionalFilterCondition()
.map(descendingSubstitution::applyToBooleanExpression);
VariableNullability dummyVariableNullability = variableNullabilityTools.getDummyVariableNullability(
constructionNodeTools.computeNewProjectedVariables(descendingSubstitution, getProjectedVariables(children)));
try {
ExpressionAndSubstitution expressionAndSubstitution = conditionSimplifier.simplifyCondition(
unoptimizedExpression, ImmutableSet.of(), dummyVariableNullability);
Optional<ImmutableExpression> downConstraint = conditionSimplifier.computeDownConstraint(constraint,
expressionAndSubstitution, dummyVariableNullability);
ImmutableSubstitution<? extends VariableOrGroundTerm> downSubstitution =
((ImmutableSubstitution<VariableOrGroundTerm>)descendingSubstitution)
.composeWith2(expressionAndSubstitution.getSubstitution());
ImmutableList<IQTree> newChildren = children.stream()
.map(c -> c.applyDescendingSubstitution(downSubstitution, downConstraint))
.collect(ImmutableCollectors.toList());
IQTree joinTree = iqFactory.createNaryIQTree(
iqFactory.createInnerJoinNode(expressionAndSubstitution.getOptionalExpression()),
newChildren);
return expressionAndSubstitution.getSubstitution().isEmpty()
? joinTree
: iqFactory.createUnaryIQTree(
iqFactory.createConstructionNode(
constructionNodeTools.computeNewProjectedVariables(descendingSubstitution,
getProjectedVariables(children)),
(ImmutableSubstitution<ImmutableTerm>)(ImmutableSubstitution<?>)
expressionAndSubstitution.getSubstitution()),
joinTree);
} catch (UnsatisfiableConditionException e) {
return iqFactory.createEmptyNode(computeNewlyProjectedVariables(descendingSubstitution, children));
}
}
@Override
public IQTree applyDescendingSubstitutionWithoutOptimizing(
ImmutableSubstitution<? extends VariableOrGroundTerm> descendingSubstitution, ImmutableList<IQTree> children) {
InnerJoinNode newJoinNode = getOptionalFilterCondition()
.map(descendingSubstitution::applyToBooleanExpression)
.map(iqFactory::createInnerJoinNode)
.orElseGet(iqFactory::createInnerJoinNode);
ImmutableList<IQTree> newChildren = children.stream()
.map(c -> c.applyDescendingSubstitutionWithoutOptimizing(descendingSubstitution))
.collect(ImmutableCollectors.toList());
return iqFactory.createNaryIQTree(newJoinNode, newChildren);
}
private ImmutableSet<Variable> getProjectedVariables(ImmutableList<IQTree> children) {
return children.stream()
.flatMap(c -> c.getVariables().stream())
.collect(ImmutableCollectors.toSet());
}
@Override
public VariableNullability getVariableNullability(ImmutableList<IQTree> children) {
return variableNullabilityTools.getVariableNullability(children, getOptionalFilterCondition());
}
@Override
public boolean isConstructed(Variable variable, ImmutableList<IQTree> children) {
return children.stream()
.anyMatch(c -> c.isConstructed(variable));
}
@Override
public boolean isDistinct(ImmutableList<IQTree> children) {
return children.stream().allMatch(IQTree::isDistinct);
}
@Override
public IQTree liftIncompatibleDefinitions(Variable variable, ImmutableList<IQTree> children, VariableGenerator variableGenerator) {
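        // Look for the first child constructing the variable whose lifted form is a union with a liftable
        // definition for it, and push the join below that union; otherwise keep the tree as it is.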
return IntStream.range(0, children.size()).boxed()
.map(i -> Maps.immutableEntry(i, children.get(i)))
.filter(e -> e.getValue().isConstructed(variable))
// index -> new child
.map(e -> Maps.immutableEntry(e.getKey(), e.getValue().liftIncompatibleDefinitions(variable, variableGenerator)))
.filter(e -> {
QueryNode newRootNode = e.getValue().getRootNode();
return (newRootNode instanceof UnionNode)
&& ((UnionNode) newRootNode).hasAChildWithLiftableDefinition(variable,
e.getValue().getChildren());
})
.findFirst()
.map(e -> liftUnionChild(e.getKey(), (NaryIQTree) e.getValue(), children, variableGenerator))
.orElseGet(() -> iqFactory.createNaryIQTree(this, children));
}
@Override
public IQTree propagateDownConstraint(ImmutableExpression constraint, ImmutableList<IQTree> children) {
return propagateDownCondition(Optional.of(constraint), children);
}
@Override
public IQTree acceptTransformer(IQTree tree, IQTreeVisitingTransformer transformer, ImmutableList<IQTree> children) {
return transformer.transformInnerJoin(tree,this, children);
}
@Override
public <T> T acceptVisitor(IQVisitor<T> visitor, ImmutableList<IQTree> children) {
return visitor.visitInnerJoin(this, children);
}
@Override
public void validateNode(ImmutableList<IQTree> children) throws InvalidIntermediateQueryException {
if (children.size() < 2) {
throw new InvalidIntermediateQueryException("JOIN node " + this
+" does not have at least 2 children.\n" + children);
}
getOptionalFilterCondition()
.ifPresent(e -> checkExpression(e, children));
checkNonProjectedVariables(children);
}
@Override
public IQTree removeDistincts(ImmutableList<IQTree> children, IQProperties properties) {
ImmutableList<IQTree> newChildren = children.stream()
.map(IQTree::removeDistincts)
.collect(ImmutableCollectors.toList());
IQProperties newProperties = newChildren.equals(children)
? properties.declareDistinctRemovalWithoutEffect()
: properties.declareDistinctRemovalWithEffect();
return iqFactory.createNaryIQTree(this, children, newProperties);
}
/**
* For unique constraints to emerge from an inner join, children must provide unique constraints
     * and be naturally joined over some of these constraints.
*/
@Override
public ImmutableSet<ImmutableSet<Variable>> inferUniqueConstraints(ImmutableList<IQTree> children) {
ImmutableMap<IQTree, ImmutableSet<ImmutableSet<Variable>>> constraintMap = children.stream()
.collect(ImmutableCollectors.toMap(
c -> c,
IQTree::inferUniqueConstraints));
/*
* Conditions:
* - All the children must have at least one unique constraint
         * - Each child is naturally joined with another child, and the joining variables match at least one
         *   unique constraint on one of these two children.
*/
if (constraintMap.values().stream().anyMatch(AbstractCollection::isEmpty))
return ImmutableSet.of();
ImmutableSet<IQTree> satisfyingChildren = IntStream.range(0, children.size() - 1)
.boxed()
.flatMap(i -> extractSatisfyingChildren(children.get(i), children.subList(i+1, children.size()), constraintMap))
.collect(ImmutableCollectors.toSet());
if (satisfyingChildren.containsAll(children))
return constraintMap.values().stream()
.flatMap(Collection::stream)
.collect(ImmutableCollectors.toSet());
else
return ImmutableSet.of();
}
/**
* TODO: find a better name
*
     * Returns all the following children that "naturally join" with the current child and for which a unique constraint is involved in the join.
     * Also returns the current child if at least one following child is selected.
*
*/
private Stream<IQTree> extractSatisfyingChildren(IQTree currentChild, ImmutableList<IQTree> followingChildren,
ImmutableMap<IQTree, ImmutableSet<ImmutableSet<Variable>>> constraintMap) {
ImmutableSet<IQTree> selectedFollowingChildren = followingChildren.stream()
.filter(o -> areJoiningOverUniqueConstraint(currentChild, o, constraintMap))
.collect(ImmutableCollectors.toSet());
return (selectedFollowingChildren.isEmpty())
? Stream.empty()
: Stream.concat(Stream.of(currentChild), selectedFollowingChildren.stream());
}
private boolean areJoiningOverUniqueConstraint(IQTree child1, IQTree child2,
ImmutableMap<IQTree, ImmutableSet<ImmutableSet<Variable>>> constraintMap) {
ImmutableSet<Variable> commonVariables = Sets.intersection(child1.getVariables(), child2.getVariables())
.immutableCopy();
if (commonVariables.isEmpty())
return false;
ImmutableSet<ImmutableSet<Variable>> constraints1 = constraintMap.get(child1);
ImmutableSet<ImmutableSet<Variable>> constraints2 = constraintMap.get(child2);
return Stream.of(constraints1, constraints2)
.flatMap(Collection::stream)
.anyMatch(commonVariables::containsAll);
}
private IQTree propagateDownCondition(Optional<ImmutableExpression> initialConstraint, ImmutableList<IQTree> children) {
VariableNullability childrenVariableNullability = variableNullabilityTools.getChildrenVariableNullability(children);
try {
ExpressionAndSubstitution conditionSimplificationResults = conditionSimplifier.simplifyCondition(
getOptionalFilterCondition(), ImmutableSet.of(), childrenVariableNullability);
// TODO: find a way to avoid creating dummy objects
// NB: if some variables are not nullable at the join level, they may be at the child level
VariableNullability dummyVariableNullability = variableNullabilityTools
.getDummyVariableNullability(getProjectedVariables(children));
Optional<ImmutableExpression> downConstraint = conditionSimplifier.computeDownConstraint(initialConstraint,
conditionSimplificationResults, dummyVariableNullability);
//TODO: propagate different constraints to different children
ImmutableList<IQTree> newChildren = Optional.of(conditionSimplificationResults.getSubstitution())
.filter(s -> !s.isEmpty())
.map(s -> children.stream()
.map(child -> child.applyDescendingSubstitution(s, downConstraint))
.collect(ImmutableCollectors.toList())
)
.orElseGet(() -> downConstraint
.map(s -> children.stream()
.map(child -> child.propagateDownConstraint(s))
.collect(ImmutableCollectors.toList()))
.orElse(children));
InnerJoinNode newJoin = conditionSimplificationResults.getOptionalExpression().equals(getOptionalFilterCondition())
? this
: conditionSimplificationResults.getOptionalExpression()
.map(iqFactory::createInnerJoinNode)
.orElseGet(iqFactory::createInnerJoinNode);
NaryIQTree joinTree = iqFactory.createNaryIQTree(newJoin, newChildren);
return Optional.of(conditionSimplificationResults.getSubstitution())
.filter(s -> !s.isEmpty())
.map(s -> iqFactory.createConstructionNode(children.stream()
.flatMap(c -> c.getVariables().stream())
.collect(ImmutableCollectors.toSet()),
(ImmutableSubstitution<ImmutableTerm>)(ImmutableSubstitution<?>)s))
.map(c -> (IQTree) iqFactory.createUnaryIQTree(c, joinTree))
.orElse(joinTree);
} catch (UnsatisfiableConditionException e) {
return iqFactory.createEmptyNode(getProjectedVariables(children));
}
}
private IQTree liftUnionChild(int childIndex, NaryIQTree newUnionChild, ImmutableList<IQTree> initialChildren,
VariableGenerator variableGenerator) {
UnionNode newUnionNode = iqFactory.createUnionNode(initialChildren.stream()
.flatMap(c -> c.getVariables().stream())
.collect(ImmutableCollectors.toSet()));
return iqFactory.createNaryIQTree(newUnionNode,
newUnionChild.getChildren().stream()
.map(unionGrandChild -> createJoinSubtree(childIndex, unionGrandChild, initialChildren))
.collect(ImmutableCollectors.toList()))
.normalizeForOptimization(variableGenerator);
}
private IQTree createJoinSubtree(int childIndex, IQTree unionGrandChild, ImmutableList<IQTree> initialChildren) {
return iqFactory.createNaryIQTree(this,
IntStream.range(0, initialChildren.size())
.boxed()
.map(i -> i == childIndex
? unionGrandChild
: initialChildren.get(i))
.collect(ImmutableCollectors.toList()));
}
private ImmutableSet<Variable> computeNewlyProjectedVariables(
ImmutableSubstitution<? extends VariableOrGroundTerm> descendingSubstitution,
ImmutableList<IQTree> children) {
ImmutableSet<Variable> formerProjectedVariables = getProjectedVariables(children);
return constructionNodeTools.computeNewProjectedVariables(descendingSubstitution, formerProjectedVariables);
}
}
|
core/model/src/main/java/it/unibz/inf/ontop/iq/node/impl/InnerJoinNodeImpl.java
|
package it.unibz.inf.ontop.iq.node.impl;
import com.google.common.collect.*;
import com.google.inject.assistedinject.Assisted;
import com.google.inject.assistedinject.AssistedInject;
import it.unibz.inf.ontop.evaluator.TermNullabilityEvaluator;
import it.unibz.inf.ontop.injection.IntermediateQueryFactory;
import it.unibz.inf.ontop.iq.exception.InvalidIntermediateQueryException;
import it.unibz.inf.ontop.iq.exception.QueryNodeTransformationException;
import it.unibz.inf.ontop.iq.node.*;
import it.unibz.inf.ontop.iq.node.normalization.ConditionSimplifier.ExpressionAndSubstitution;
import it.unibz.inf.ontop.iq.node.normalization.ConditionSimplifier;
import it.unibz.inf.ontop.iq.node.normalization.InnerJoinNormalizer;
import it.unibz.inf.ontop.iq.transform.IQTreeVisitingTransformer;
import it.unibz.inf.ontop.iq.visit.IQVisitor;
import it.unibz.inf.ontop.model.term.*;
import it.unibz.inf.ontop.model.type.TypeFactory;
import it.unibz.inf.ontop.substitution.ImmutableSubstitution;
import it.unibz.inf.ontop.iq.*;
import it.unibz.inf.ontop.iq.transform.node.HomogeneousQueryNodeTransformer;
import it.unibz.inf.ontop.substitution.SubstitutionFactory;
import it.unibz.inf.ontop.substitution.impl.ImmutableSubstitutionTools;
import it.unibz.inf.ontop.substitution.impl.ImmutableUnificationTools;
import it.unibz.inf.ontop.utils.ImmutableCollectors;
import it.unibz.inf.ontop.utils.VariableGenerator;
import java.util.Optional;
import java.util.stream.IntStream;
public class InnerJoinNodeImpl extends JoinLikeNodeImpl implements InnerJoinNode {
private static final String JOIN_NODE_STR = "JOIN" ;
private final ConstructionNodeTools constructionNodeTools;
private final JoinOrFilterVariableNullabilityTools variableNullabilityTools;
private final ConditionSimplifier conditionSimplifier;
private final InnerJoinNormalizer normalizer;
@AssistedInject
protected InnerJoinNodeImpl(@Assisted Optional<ImmutableExpression> optionalFilterCondition,
TermNullabilityEvaluator nullabilityEvaluator,
TermFactory termFactory, TypeFactory typeFactory,
IntermediateQueryFactory iqFactory, SubstitutionFactory substitutionFactory,
ConstructionNodeTools constructionNodeTools,
ImmutableUnificationTools unificationTools, ImmutableSubstitutionTools substitutionTools,
JoinOrFilterVariableNullabilityTools variableNullabilityTools, ConditionSimplifier conditionSimplifier,
InnerJoinNormalizer normalizer) {
super(optionalFilterCondition, nullabilityEvaluator, termFactory, iqFactory, typeFactory,
substitutionFactory, unificationTools, substitutionTools);
this.constructionNodeTools = constructionNodeTools;
this.variableNullabilityTools = variableNullabilityTools;
this.conditionSimplifier = conditionSimplifier;
this.normalizer = normalizer;
}
@AssistedInject
private InnerJoinNodeImpl(@Assisted ImmutableExpression joiningCondition,
TermNullabilityEvaluator nullabilityEvaluator,
TermFactory termFactory, TypeFactory typeFactory,
IntermediateQueryFactory iqFactory, SubstitutionFactory substitutionFactory,
ConstructionNodeTools constructionNodeTools,
ImmutableUnificationTools unificationTools, ImmutableSubstitutionTools substitutionTools,
JoinOrFilterVariableNullabilityTools variableNullabilityTools, ConditionSimplifier conditionSimplifier, InnerJoinNormalizer normalizer) {
super(Optional.of(joiningCondition), nullabilityEvaluator, termFactory, iqFactory, typeFactory,
substitutionFactory, unificationTools, substitutionTools);
this.constructionNodeTools = constructionNodeTools;
this.variableNullabilityTools = variableNullabilityTools;
this.conditionSimplifier = conditionSimplifier;
this.normalizer = normalizer;
}
@AssistedInject
private InnerJoinNodeImpl(TermNullabilityEvaluator nullabilityEvaluator, TermFactory termFactory,
TypeFactory typeFactory, IntermediateQueryFactory iqFactory,
SubstitutionFactory substitutionFactory, ConstructionNodeTools constructionNodeTools,
ImmutableUnificationTools unificationTools, ImmutableSubstitutionTools substitutionTools,
JoinOrFilterVariableNullabilityTools variableNullabilityTools, ConditionSimplifier conditionSimplifier, InnerJoinNormalizer normalizer) {
super(Optional.empty(), nullabilityEvaluator, termFactory, iqFactory, typeFactory,
substitutionFactory, unificationTools, substitutionTools);
this.constructionNodeTools = constructionNodeTools;
this.variableNullabilityTools = variableNullabilityTools;
this.conditionSimplifier = conditionSimplifier;
this.normalizer = normalizer;
}
@Override
public void acceptVisitor(QueryNodeVisitor visitor) {
visitor.visit(this);
}
@Override
public InnerJoinNode clone() {
return iqFactory.createInnerJoinNode(getOptionalFilterCondition());
}
@Override
public InnerJoinNode acceptNodeTransformer(HomogeneousQueryNodeTransformer transformer)
throws QueryNodeTransformationException {
return transformer.transform(this);
}
@Override
public ImmutableSet<ImmutableSubstitution<NonVariableTerm>> getPossibleVariableDefinitions(ImmutableList<IQTree> children) {
return children.stream()
.map(IQTree::getPossibleVariableDefinitions)
.filter(s -> !s.isEmpty())
.reduce(ImmutableSet.of(), this::combineVarDefs);
}
private ImmutableSet<ImmutableSubstitution<NonVariableTerm>> combineVarDefs(
ImmutableSet<ImmutableSubstitution<NonVariableTerm>> s1,
ImmutableSet<ImmutableSubstitution<NonVariableTerm>> s2) {
return s1.isEmpty()
? s2
: s1.stream()
.flatMap(d1 -> s2.stream()
/*
* Takes the first definition of a common variable.
*
* Behaves like a union, except that it is robust to "non-identical" definitions
* (an illustrative sketch follows this method).
* If normalized, two definitions for the same variable are expected to be compatible.
*
* If not normalized, the definitions may be incompatible, but that's fine
* since they will not produce any result.
*
*/
.map(d2 -> d2.composeWith2(d1)))
.collect(ImmutableCollectors.toSet());
}
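// Illustrative sketch of combineVarDefs (hypothetical substitutions, not from the original source):
// with s1 = { {x := f(a)} } and s2 = { {x := g(b)}, {y := h(c)} }, the flatMap above combines every
// pair of definitions with composeWith2, so the shared variable x ends up with a single ("first")
// definition instead of making the combination fail.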
@Override
public InnerJoinNode changeOptionalFilterCondition(Optional<ImmutableExpression> newOptionalFilterCondition) {
return iqFactory.createInnerJoinNode(newOptionalFilterCondition);
}
@Override
public boolean isVariableNullable(IntermediateQuery query, Variable variable) {
if (isFilteringNullValue(variable))
return false;
// Tracks whether the variable has already been found (as nullable) in a previous child
boolean alsoProjectedByAnotherChild = false;
for(QueryNode child : query.getChildren(this)) {
if (query.getVariables(child).contains(variable)) {
// Joining conditions cannot be null
if (alsoProjectedByAnotherChild)
return false;
if (child.isVariableNullable(query, variable))
alsoProjectedByAnotherChild = true;
else
return false;
}
}
if (!alsoProjectedByAnotherChild)
throw new IllegalArgumentException("The variable " + variable + " is not projected by " + this);
return true;
}
@Override
public boolean isSyntacticallyEquivalentTo(QueryNode node) {
return (node instanceof InnerJoinNode) &&
this.getOptionalFilterCondition().equals(((InnerJoinNode) node).getOptionalFilterCondition());
}
@Override
public boolean isEquivalentTo(QueryNode queryNode) {
return (queryNode instanceof InnerJoinNode)
&& getOptionalFilterCondition().equals(((InnerJoinNode) queryNode).getOptionalFilterCondition());
}
@Override
public String toString() {
return JOIN_NODE_STR + getOptionalFilterString();
}
/**
* TODO: refactor
*/
@Override
public IQTree normalizeForOptimization(ImmutableList<IQTree> children, VariableGenerator variableGenerator,
IQProperties currentIQProperties) {
return normalizer.normalizeForOptimization(this, children, variableGenerator, currentIQProperties);
}
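/*
 * Outline of applyDescendingSubstitution below: the descending substitution is applied to the
 * joining condition, which is then simplified (possibly extracting an additional substitution);
 * the incoming constraint is turned into a down constraint, the composed substitution and the
 * constraint are pushed to every child, and when the simplification produced new bindings the
 * updated projected variables are re-exposed through a ConstructionNode carrying those bindings.
 * An unsatisfiable condition yields an empty node.
 */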
@Override
public IQTree applyDescendingSubstitution(ImmutableSubstitution<? extends VariableOrGroundTerm> descendingSubstitution,
Optional<ImmutableExpression> constraint, ImmutableList<IQTree> children) {
Optional<ImmutableExpression> unoptimizedExpression = getOptionalFilterCondition()
.map(descendingSubstitution::applyToBooleanExpression);
VariableNullability dummyVariableNullability = variableNullabilityTools.getDummyVariableNullability(
constructionNodeTools.computeNewProjectedVariables(descendingSubstitution, getProjectedVariables(children)));
try {
ExpressionAndSubstitution expressionAndSubstitution = conditionSimplifier.simplifyCondition(
unoptimizedExpression, ImmutableSet.of(), dummyVariableNullability);
Optional<ImmutableExpression> downConstraint = conditionSimplifier.computeDownConstraint(constraint,
expressionAndSubstitution, dummyVariableNullability);
ImmutableSubstitution<? extends VariableOrGroundTerm> downSubstitution =
((ImmutableSubstitution<VariableOrGroundTerm>)descendingSubstitution)
.composeWith2(expressionAndSubstitution.getSubstitution());
ImmutableList<IQTree> newChildren = children.stream()
.map(c -> c.applyDescendingSubstitution(downSubstitution, downConstraint))
.collect(ImmutableCollectors.toList());
IQTree joinTree = iqFactory.createNaryIQTree(
iqFactory.createInnerJoinNode(expressionAndSubstitution.getOptionalExpression()),
newChildren);
return expressionAndSubstitution.getSubstitution().isEmpty()
? joinTree
: iqFactory.createUnaryIQTree(
iqFactory.createConstructionNode(
constructionNodeTools.computeNewProjectedVariables(descendingSubstitution,
getProjectedVariables(children)),
(ImmutableSubstitution<ImmutableTerm>)(ImmutableSubstitution<?>)
expressionAndSubstitution.getSubstitution()),
joinTree);
} catch (UnsatisfiableConditionException e) {
return iqFactory.createEmptyNode(computeNewlyProjectedVariables(descendingSubstitution, children));
}
}
@Override
public IQTree applyDescendingSubstitutionWithoutOptimizing(
ImmutableSubstitution<? extends VariableOrGroundTerm> descendingSubstitution, ImmutableList<IQTree> children) {
InnerJoinNode newJoinNode = getOptionalFilterCondition()
.map(descendingSubstitution::applyToBooleanExpression)
.map(iqFactory::createInnerJoinNode)
.orElseGet(iqFactory::createInnerJoinNode);
ImmutableList<IQTree> newChildren = children.stream()
.map(c -> c.applyDescendingSubstitutionWithoutOptimizing(descendingSubstitution))
.collect(ImmutableCollectors.toList());
return iqFactory.createNaryIQTree(newJoinNode, newChildren);
}
private ImmutableSet<Variable> getProjectedVariables(ImmutableList<IQTree> children) {
return children.stream()
.flatMap(c -> c.getVariables().stream())
.collect(ImmutableCollectors.toSet());
}
@Override
public VariableNullability getVariableNullability(ImmutableList<IQTree> children) {
return variableNullabilityTools.getVariableNullability(children, getOptionalFilterCondition());
}
@Override
public boolean isConstructed(Variable variable, ImmutableList<IQTree> children) {
return children.stream()
.anyMatch(c -> c.isConstructed(variable));
}
@Override
public boolean isDistinct(ImmutableList<IQTree> children) {
return children.stream().allMatch(IQTree::isDistinct);
}
@Override
public IQTree liftIncompatibleDefinitions(Variable variable, ImmutableList<IQTree> children, VariableGenerator variableGenerator) {
return IntStream.range(0, children.size()).boxed()
.map(i -> Maps.immutableEntry(i, children.get(i)))
.filter(e -> e.getValue().isConstructed(variable))
// index -> new child
.map(e -> Maps.immutableEntry(e.getKey(), e.getValue().liftIncompatibleDefinitions(variable, variableGenerator)))
.filter(e -> {
QueryNode newRootNode = e.getValue().getRootNode();
return (newRootNode instanceof UnionNode)
&& ((UnionNode) newRootNode).hasAChildWithLiftableDefinition(variable,
e.getValue().getChildren());
})
.findFirst()
.map(e -> liftUnionChild(e.getKey(), (NaryIQTree) e.getValue(), children, variableGenerator))
.orElseGet(() -> iqFactory.createNaryIQTree(this, children));
}
@Override
public IQTree propagateDownConstraint(ImmutableExpression constraint, ImmutableList<IQTree> children) {
return propagateDownCondition(Optional.of(constraint), children);
}
@Override
public IQTree acceptTransformer(IQTree tree, IQTreeVisitingTransformer transformer, ImmutableList<IQTree> children) {
return transformer.transformInnerJoin(tree,this, children);
}
@Override
public <T> T acceptVisitor(IQVisitor<T> visitor, ImmutableList<IQTree> children) {
return visitor.visitInnerJoin(this, children);
}
@Override
public void validateNode(ImmutableList<IQTree> children) throws InvalidIntermediateQueryException {
if (children.size() < 2) {
throw new InvalidIntermediateQueryException("JOIN node " + this
+" does not have at least 2 children.\n" + children);
}
getOptionalFilterCondition()
.ifPresent(e -> checkExpression(e, children));
checkNonProjectedVariables(children);
}
@Override
public IQTree removeDistincts(ImmutableList<IQTree> children, IQProperties properties) {
ImmutableList<IQTree> newChildren = children.stream()
.map(IQTree::removeDistincts)
.collect(ImmutableCollectors.toList());
IQProperties newProperties = newChildren.equals(children)
? properties.declareDistinctRemovalWithoutEffect()
: properties.declareDistinctRemovalWithEffect();
return iqFactory.createNaryIQTree(this, newChildren, newProperties);
}
/**
* TODO: implement it seriously
*/
@Override
public ImmutableSet<ImmutableSet<Variable>> inferUniqueConstraints(ImmutableList<IQTree> children) {
return ImmutableSet.of();
}
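/*
 * Outline of propagateDownCondition below: the joining condition is simplified against the
 * children's variable nullability, and the incoming constraint is combined with that result into
 * a down constraint. If the simplification produced a substitution, it is pushed to every child
 * together with the down constraint; otherwise only the down constraint (if any) is propagated.
 * The join is then rebuilt with the simplified condition and, when a substitution was produced,
 * wrapped in a ConstructionNode that re-exposes the children's variables. An unsatisfiable
 * condition yields an empty node.
 */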
private IQTree propagateDownCondition(Optional<ImmutableExpression> initialConstraint, ImmutableList<IQTree> children) {
VariableNullability childrenVariableNullability = variableNullabilityTools.getChildrenVariableNullability(children);
try {
ExpressionAndSubstitution conditionSimplificationResults = conditionSimplifier.simplifyCondition(
getOptionalFilterCondition(), ImmutableSet.of(), childrenVariableNullability);
// TODO: find a way to avoid creating dummy objects
// NB: even if some variables are not nullable at the join level, they may still be nullable at the child level
VariableNullability dummyVariableNullability = variableNullabilityTools
.getDummyVariableNullability(getProjectedVariables(children));
Optional<ImmutableExpression> downConstraint = conditionSimplifier.computeDownConstraint(initialConstraint,
conditionSimplificationResults, dummyVariableNullability);
//TODO: propagate different constraints to different children
ImmutableList<IQTree> newChildren = Optional.of(conditionSimplificationResults.getSubstitution())
.filter(s -> !s.isEmpty())
.map(s -> children.stream()
.map(child -> child.applyDescendingSubstitution(s, downConstraint))
.collect(ImmutableCollectors.toList())
)
.orElseGet(() -> downConstraint
.map(s -> children.stream()
.map(child -> child.propagateDownConstraint(s))
.collect(ImmutableCollectors.toList()))
.orElse(children));
InnerJoinNode newJoin = conditionSimplificationResults.getOptionalExpression().equals(getOptionalFilterCondition())
? this
: conditionSimplificationResults.getOptionalExpression()
.map(iqFactory::createInnerJoinNode)
.orElseGet(iqFactory::createInnerJoinNode);
NaryIQTree joinTree = iqFactory.createNaryIQTree(newJoin, newChildren);
return Optional.of(conditionSimplificationResults.getSubstitution())
.filter(s -> !s.isEmpty())
.map(s -> iqFactory.createConstructionNode(children.stream()
.flatMap(c -> c.getVariables().stream())
.collect(ImmutableCollectors.toSet()),
(ImmutableSubstitution<ImmutableTerm>)(ImmutableSubstitution<?>)s))
.map(c -> (IQTree) iqFactory.createUnaryIQTree(c, joinTree))
.orElse(joinTree);
} catch (UnsatisfiableConditionException e) {
return iqFactory.createEmptyNode(getProjectedVariables(children));
}
}
private IQTree liftUnionChild(int childIndex, NaryIQTree newUnionChild, ImmutableList<IQTree> initialChildren,
VariableGenerator variableGenerator) {
UnionNode newUnionNode = iqFactory.createUnionNode(initialChildren.stream()
.flatMap(c -> c.getVariables().stream())
.collect(ImmutableCollectors.toSet()));
return iqFactory.createNaryIQTree(newUnionNode,
newUnionChild.getChildren().stream()
.map(unionGrandChild -> createJoinSubtree(childIndex, unionGrandChild, initialChildren))
.collect(ImmutableCollectors.toList()))
.normalizeForOptimization(variableGenerator);
}
private IQTree createJoinSubtree(int childIndex, IQTree unionGrandChild, ImmutableList<IQTree> initialChildren) {
return iqFactory.createNaryIQTree(this,
IntStream.range(0, initialChildren.size())
.boxed()
.map(i -> i == childIndex
? unionGrandChild
: initialChildren.get(i))
.collect(ImmutableCollectors.toList()));
}
private ImmutableSet<Variable> computeNewlyProjectedVariables(
ImmutableSubstitution<? extends VariableOrGroundTerm> descendingSubstitution,
ImmutableList<IQTree> children) {
ImmutableSet<Variable> formerProjectedVariables = getProjectedVariables(children);
return constructionNodeTools.computeNewProjectedVariables(descendingSubstitution, formerProjectedVariables);
}
}
|
InnerJoin.inferUniqueConstraints() implemented.
|
core/model/src/main/java/it/unibz/inf/ontop/iq/node/impl/InnerJoinNodeImpl.java
|
InnerJoin.inferUniqueConstraints() implemented.
|
|
Java
|
apache-2.0
|
436d975c78d566edbff80b79488037799d173093
| 0
|
yurloc/guvnor,wmedvede/guvnor,baldimir/guvnor,yurloc/guvnor,baldimir/guvnor,kiereleaseuser/guvnor,cristianonicolai/guvnor,Rikkola/guvnor,kiereleaseuser/guvnor,mbiarnes/guvnor,Rikkola/guvnor,wmedvede/guvnor,adrielparedes/guvnor,mbiarnes/guvnor,porcelli-forks/guvnor,porcelli-forks/guvnor,hxf0801/guvnor,mswiderski/guvnor,psiroky/guvnor,porcelli-forks/guvnor,adrielparedes/guvnor,etirelli/guvnor,droolsjbpm/guvnor,psiroky/guvnor,etirelli/guvnor,wmedvede/guvnor,cristianonicolai/guvnor,Rikkola/guvnor,mbiarnes/guvnor,hxf0801/guvnor,kiereleaseuser/guvnor,baldimir/guvnor,droolsjbpm/guvnor,nmirasch/guvnor,adrielparedes/guvnor,psiroky/guvnor,etirelli/guvnor,hxf0801/guvnor,nmirasch/guvnor,nmirasch/guvnor,droolsjbpm/guvnor,cristianonicolai/guvnor
|
/*
* Copyright 2010 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.guvnor.client.explorer;
import java.util.Arrays;
import java.util.Map;
import org.drools.guvnor.client.common.GenericCallback;
import org.drools.guvnor.client.common.RulePackageSelector;
import org.drools.guvnor.client.messages.Constants;
import org.drools.guvnor.client.resources.Images;
import org.drools.guvnor.client.rpc.PackageConfigData;
import org.drools.guvnor.client.rpc.RepositoryServiceFactory;
import org.drools.guvnor.client.util.TabOpener;
import org.drools.guvnor.client.util.Util;
import com.google.gwt.core.client.GWT;
import com.google.gwt.event.logical.shared.OpenEvent;
import com.google.gwt.event.logical.shared.OpenHandler;
import com.google.gwt.event.logical.shared.SelectionEvent;
import com.google.gwt.user.client.Command;
import com.google.gwt.user.client.ui.Tree;
import com.google.gwt.user.client.ui.TreeItem;
public class PackagesTree extends AbstractTree
implements
OpenHandler<TreeItem> {
private static Constants constants = GWT.create( Constants.class );
private static Images images = GWT.create( Images.class );
private boolean packagesLoaded = false;
public PackagesTree() {
this.name = constants.KnowledgeBases();
this.image = images.packages();
mainTree.setAnimationEnabled( true );
mainTree.addSelectionHandler( this );
mainTree.addOpenHandler( (OpenHandler<TreeItem>) this );
}
@Override
protected Tree createTree() {
return new Tree();
}
public void loadPackageList() {
if ( !packagesLoaded ) {
setupPackagesTree();
packagesLoaded = true;
}
}
public void refreshTree() {
mainTree.clear();
itemWidgets.clear();
setupPackagesTree();
}
private void setupPackagesTree() {
TreeItem packageRootNode = new TreeItem( Util.getHeader( images.chartOrganisation(),
constants.Packages() ) );
setupPackageNode( packageRootNode );
mainTree.addItem( packageRootNode );
setupGlobalNode( mainTree,
itemWidgets );
}
private void setupPackageNode(final TreeItem packageRootNode) {
packageRootNode.setState( true );
packageRootNode.setUserObject( "rootNode" );
RepositoryServiceFactory.getService().listPackages( new GenericCallback<PackageConfigData[]>() {
public void onSuccess(PackageConfigData[] packageConfigDatas) {
PackageHierarchy packageHierarchy = new PackageHierarchy();
for ( PackageConfigData packageConfigData : packageConfigDatas ) {
packageHierarchy.addPackage( packageConfigData );
}
for ( PackageHierarchy.Folder folder : packageHierarchy.getRoot().getChildren() ) {
buildPkgTree( packageRootNode,
folder );
}
}
} );
}
private void setupGlobalNode(final Tree root,
final Map<TreeItem, String> itemWidgets) {
RepositoryServiceFactory.getService().loadGlobalPackage( new GenericCallback<PackageConfigData>() {
public void onSuccess(PackageConfigData value) {
TreeItem globalRootNode = ExplorerNodeConfig.getPackageItemStructure( constants.GlobalArea(),
value.uuid,
itemWidgets );
globalRootNode.setHTML( Util.getHeader( images.chartOrganisation(),
constants.GlobalArea() ) );
globalRootNode.setUserObject( value );
root.addItem( globalRootNode );
}
} );
}
private void buildPkgTree(TreeItem root,
PackageHierarchy.Folder folder) {
if ( folder.getConfig() != null ) {
root.addItem( loadPackage( folder.getName(),
folder.getConfig() ) );
} else {
TreeItem treeItem = new TreeItem( Util.getHeader( images.emptyPackage(),
folder.getName() ) );
root.addItem( treeItem );
for ( PackageHierarchy.Folder childFolder : folder.getChildren() ) {
buildPkgTree( treeItem,
childFolder );
}
}
}
private TreeItem loadPackage(String name,
PackageConfigData config) {
TreeItem treeItem = ExplorerNodeConfig.getPackageItemStructure( name,
config.uuid,
itemWidgets );
treeItem.setUserObject( config );
return treeItem;
}
public static String key(String[] formats,
PackageConfigData userObject) {
String key = userObject.uuid;
for ( String format : formats ) {
key = key + format;
}
if ( formats.length == 0 ) {
key = key + "[0]";
}
return key;
}
// Show the associated widget in the deck panel
public void onSelection(SelectionEvent<TreeItem> event) {
TreeItem node = event.getSelectedItem();
Object userObject = node.getUserObject();
TabOpener opener = TabOpener.getInstance();
if ( userObject != null ) {
if ( userObject instanceof PackageConfigData && !((PackageConfigData) userObject).isGlobal() ) {
PackageConfigData pc = (PackageConfigData) userObject;
RulePackageSelector.currentlySelectedPackage = pc.name;
String uuid = pc.uuid;
opener.openPackageEditor( uuid,
new Command() {
public void execute() {
// refresh the package tree.
refreshTree();
}
} );
} else if ( userObject instanceof String[] ) {
final String[] formats = (String[]) userObject;
final PackageConfigData packageConfigData = (PackageConfigData) node.getParentItem().getUserObject();
RulePackageSelector.currentlySelectedPackage = packageConfigData.name;
String key = key( formats,
packageConfigData );
opener.openPackageViewAssets( packageConfigData.uuid,
packageConfigData.name,
key,
formats.length == 0 ? null : Arrays.asList( formats ),
formats.length == 0 ? Boolean.TRUE : null,
node.getText() );
} else if ( userObject instanceof String ) {
// Ignore, there is no click event for this.
} else {
throw new IllegalArgumentException( "The userObject (" + userObject + ") is not supported." );
}
}
}
public void onOpen(OpenEvent<TreeItem> event) {
TreeItem node = event.getTarget();
Object userObject = node.getUserObject();
if ( userObject != null && userObject instanceof String && "rootNode".equals( (String) userObject ) ) {
node.removeItems();
setupPackageNode( node );
}
}
}
|
drools-guvnor/src/main/java/org/drools/guvnor/client/explorer/PackagesTree.java
|
/*
* Copyright 2010 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.guvnor.client.explorer;
import java.util.Arrays;
import java.util.Map;
import org.drools.guvnor.client.common.GenericCallback;
import org.drools.guvnor.client.common.RulePackageSelector;
import org.drools.guvnor.client.messages.Constants;
import org.drools.guvnor.client.resources.Images;
import org.drools.guvnor.client.rpc.PackageConfigData;
import org.drools.guvnor.client.rpc.RepositoryServiceFactory;
import org.drools.guvnor.client.util.TabOpener;
import org.drools.guvnor.client.util.Util;
import com.google.gwt.core.client.GWT;
import com.google.gwt.event.logical.shared.OpenEvent;
import com.google.gwt.event.logical.shared.OpenHandler;
import com.google.gwt.event.logical.shared.SelectionEvent;
import com.google.gwt.user.client.Command;
import com.google.gwt.user.client.ui.Tree;
import com.google.gwt.user.client.ui.TreeItem;
public class PackagesTree extends AbstractTree
implements
OpenHandler<TreeItem> {
private static Constants constants = GWT.create( Constants.class );
private static Images images = GWT.create( Images.class );
private boolean packagesLoaded = false;
public PackagesTree() {
this.name = constants.KnowledgeBases();
this.image = images.packages();
mainTree.setAnimationEnabled( true );
mainTree.addSelectionHandler( this );
mainTree.addOpenHandler( (OpenHandler<TreeItem>) this );
}
@Override
protected Tree createTree() {
return new Tree();
}
public void loadPackageList() {
if ( !packagesLoaded ) {
setupPackagesTree();
packagesLoaded = true;
}
}
public void refreshTree() {
mainTree.clear();
itemWidgets.clear();
setupPackagesTree();
}
private void setupPackagesTree() {
TreeItem packageRootNode = new TreeItem( Util.getHeader( images.chartOrganisation(),
constants.Packages() ) );
setupPackageNode( packageRootNode );
mainTree.addItem( packageRootNode );
setupGlobalNode( mainTree,
itemWidgets );
}
private void setupPackageNode(final TreeItem packageRootNode) {
packageRootNode.setState( true );
packageRootNode.setUserObject( new String( "rootNode" ) );
RepositoryServiceFactory.getService().listPackages( new GenericCallback<PackageConfigData[]>() {
public void onSuccess(PackageConfigData[] packageConfigDatas) {
PackageHierarchy packageHierarchy = new PackageHierarchy();
for ( PackageConfigData packageConfigData : packageConfigDatas ) {
packageHierarchy.addPackage( packageConfigData );
}
for ( PackageHierarchy.Folder folder : packageHierarchy.getRoot().getChildren() ) {
buildPkgTree( packageRootNode,
folder );
}
}
} );
}
private void setupGlobalNode(final Tree root,
final Map<TreeItem, String> itemWidgets) {
RepositoryServiceFactory.getService().loadGlobalPackage( new GenericCallback<PackageConfigData>() {
public void onSuccess(PackageConfigData value) {
TreeItem globalRootNode = ExplorerNodeConfig.getPackageItemStructure( constants.GlobalArea(),
value.uuid,
itemWidgets );
globalRootNode.setHTML( Util.getHeader( images.chartOrganisation(),
constants.GlobalArea() ) );
globalRootNode.setUserObject( value );
root.addItem( globalRootNode );
}
} );
}
private void buildPkgTree(TreeItem root,
PackageHierarchy.Folder folder) {
if ( folder.getConfig() != null ) {
root.addItem( loadPackage( folder.getName(),
folder.getConfig() ) );
} else {
TreeItem treeItem = new TreeItem( Util.getHeader( images.emptyPackage(),
folder.getName() ) );
root.addItem( treeItem );
for ( PackageHierarchy.Folder childFolder : folder.getChildren() ) {
buildPkgTree( treeItem,
childFolder );
}
}
}
private TreeItem loadPackage(String name,
PackageConfigData config) {
TreeItem treeItem = ExplorerNodeConfig.getPackageItemStructure( name,
config.uuid,
itemWidgets );
treeItem.setUserObject( config );
return treeItem;
}
public static String key(String[] formats,
PackageConfigData userObject) {
String key = userObject.uuid;
for ( String format : formats ) {
key = key + format;
}
if ( formats.length == 0 ) {
key = key + "[0]";
}
return key;
}
// Show the associated widget in the deck panel
public void onSelection(SelectionEvent<TreeItem> event) {
TreeItem node = event.getSelectedItem();
Object userObject = node.getUserObject();
TabOpener opener = TabOpener.getInstance();
if ( userObject != null ) {
if ( userObject instanceof PackageConfigData && !((PackageConfigData) userObject).isGlobal() ) {
PackageConfigData pc = (PackageConfigData) userObject;
RulePackageSelector.currentlySelectedPackage = pc.name;
String uuid = pc.uuid;
opener.openPackageEditor( uuid,
new Command() {
public void execute() {
// refresh the package tree.
refreshTree();
}
} );
} else if ( userObject instanceof String[] ) {
final String[] formats = (String[]) userObject;
final PackageConfigData packageConfigData = (PackageConfigData) node.getParentItem().getUserObject();
RulePackageSelector.currentlySelectedPackage = packageConfigData.name;
String key = key( formats,
packageConfigData );
opener.openPackageViewAssets( packageConfigData.uuid,
packageConfigData.name,
key,
formats.length == 0 ? null : Arrays.asList( formats ),
formats.length == 0 ? Boolean.TRUE : null,
node.getText() );
} else if ( userObject instanceof String ) {
// Ignore, there is no click event for this.
} else {
throw new IllegalArgumentException( "The userObject (" + userObject + ") is not supported." );
}
}
}
public void onOpen(OpenEvent<TreeItem> event) {
TreeItem node = event.getTarget();
Object userObject = node.getUserObject();
if ( userObject != null && userObject instanceof String && "rootNode".equals( (String) userObject ) ) {
node.removeItems();
setupPackageNode( node );
}
}
}
|
findbugs: invokes inefficient new String(String) constructor
|
drools-guvnor/src/main/java/org/drools/guvnor/client/explorer/PackagesTree.java
|
findbugs: invokes inefficient new String(String) constructor
|
|
Java
|
apache-2.0
|
bcc33d296a3504595285079a195e8fe01fbd98d0
| 0
|
NuwanSameera/syncope,nscendoni/syncope,securny/syncope,giacomolm/syncope,NuwanSameera/syncope,giacomolm/syncope,ilgrosso/syncope,apache/syncope,ilgrosso/syncope,ilgrosso/syncope,apache/syncope,nscendoni/syncope,giacomolm/syncope,NuwanSameera/syncope,securny/syncope,ilgrosso/syncope,nscendoni/syncope,tmess567/syncope,tmess567/syncope,tmess567/syncope,giacomolm/syncope,NuwanSameera/syncope,securny/syncope,apache/syncope,apache/syncope,nscendoni/syncope,tmess567/syncope,securny/syncope
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.syncope.fit.core;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.util.Locale;
import java.util.Map;
import javax.ws.rs.core.GenericType;
import javax.ws.rs.core.Response;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.collections4.Predicate;
import org.apache.commons.lang3.SerializationUtils;
import org.apache.syncope.common.lib.SyncopeClientException;
import org.apache.syncope.common.lib.patch.PasswordPatch;
import org.apache.syncope.common.lib.patch.StatusPatch;
import org.apache.syncope.common.lib.patch.StringPatchItem;
import org.apache.syncope.common.lib.patch.UserPatch;
import org.apache.syncope.common.lib.to.AnyTypeClassTO;
import org.apache.syncope.common.lib.to.AttrTO;
import org.apache.syncope.common.lib.to.ConnInstanceTO;
import org.apache.syncope.common.lib.to.ConnObjectTO;
import org.apache.syncope.common.lib.to.MappingItemTO;
import org.apache.syncope.common.lib.to.MappingTO;
import org.apache.syncope.common.lib.to.MembershipTO;
import org.apache.syncope.common.lib.to.ResourceTO;
import org.apache.syncope.common.lib.to.GroupTO;
import org.apache.syncope.common.lib.to.ProvisionTO;
import org.apache.syncope.common.lib.to.ProvisioningResult;
import org.apache.syncope.common.lib.to.UserTO;
import org.apache.syncope.common.lib.to.VirSchemaTO;
import org.apache.syncope.common.lib.types.AnyTypeKind;
import org.apache.syncope.common.lib.types.ConnConfProperty;
import org.apache.syncope.common.lib.types.MappingPurpose;
import org.apache.syncope.common.lib.types.PatchOperation;
import org.apache.syncope.common.lib.types.PropagationTaskExecStatus;
import org.apache.syncope.common.lib.types.SchemaType;
import org.apache.syncope.common.lib.types.StatusPatchType;
import org.apache.syncope.common.rest.api.service.AnyTypeClassService;
import org.apache.syncope.common.rest.api.service.ResourceService;
import org.apache.syncope.fit.AbstractITCase;
import org.identityconnectors.framework.common.objects.ObjectClass;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.junit.runners.MethodSorters;
import org.springframework.jdbc.core.JdbcTemplate;
@FixMethodOrder(MethodSorters.JVM)
public class VirAttrITCase extends AbstractITCase {
@Test
public void issueSYNCOPE16() {
UserTO userTO = UserITCase.getUniqueSampleTO("issue16@apache.org");
userTO.getVirAttrs().add(attrTO("virtualdata", "virtualvalue"));
userTO.getResources().add(RESOURCE_NAME_DBVIRATTR);
userTO.getMemberships().add(
new MembershipTO.Builder().group("f779c0d4-633b-4be5-8f57-32eb478a3ca5").build());
// 1. create user
userTO = createUser(userTO).getAny();
assertNotNull(userTO);
// 2. check for virtual attribute value
userTO = userService.read(userTO.getKey());
assertNotNull(userTO);
assertEquals("virtualvalue", userTO.getVirAttrMap().get("virtualdata").getValues().get(0));
UserPatch userPatch = new UserPatch();
userPatch.setKey(userTO.getKey());
userPatch.getVirAttrs().add(attrTO("virtualdata", "virtualupdated"));
// 3. update virtual attribute
userTO = updateUser(userPatch).getAny();
assertNotNull(userTO);
// 4. check for virtual attribute value
userTO = userService.read(userTO.getKey());
assertNotNull(userTO);
assertEquals("virtualupdated", userTO.getVirAttrMap().get("virtualdata").getValues().get(0));
}
@Test
public void issueSYNCOPE260() {
// create new virtual schema for the resource below
ResourceTO ws2 = resourceService.read(RESOURCE_NAME_WS2);
ProvisionTO provision = ws2.getProvision(AnyTypeKind.USER.name());
assertNotNull(provision);
VirSchemaTO virSchema = new VirSchemaTO();
virSchema.setKey("syncope260" + getUUIDString());
virSchema.setExtAttrName("companyName");
virSchema.setResource(RESOURCE_NAME_WS2);
virSchema.setAnyType(provision.getAnyType());
virSchema = createSchema(SchemaType.VIRTUAL, virSchema);
assertNotNull(virSchema);
AnyTypeClassTO newClass = new AnyTypeClassTO();
newClass.setKey("syncope260" + getUUIDString());
newClass.getVirSchemas().add(virSchema.getKey());
Response response = anyTypeClassService.create(newClass);
assertEquals(Response.Status.CREATED.getStatusCode(), response.getStatusInfo().getStatusCode());
newClass = getObject(response.getLocation(), AnyTypeClassService.class, AnyTypeClassTO.class);
// ----------------------------------
// create user and check virtual attribute value propagation
// ----------------------------------
UserTO userTO = UserITCase.getUniqueSampleTO("260@a.com");
userTO.getAuxClasses().add(newClass.getKey());
userTO.getVirAttrs().add(attrTO(virSchema.getKey(), "virtualvalue"));
userTO.getResources().add(RESOURCE_NAME_WS2);
ProvisioningResult<UserTO> result = createUser(userTO);
assertNotNull(result);
assertFalse(result.getPropagationStatuses().isEmpty());
assertEquals(RESOURCE_NAME_WS2, result.getPropagationStatuses().get(0).getResource());
assertEquals(PropagationTaskExecStatus.SUCCESS, result.getPropagationStatuses().get(0).getStatus());
userTO = result.getAny();
ConnObjectTO connObjectTO =
resourceService.readConnObject(RESOURCE_NAME_WS2, AnyTypeKind.USER.name(), userTO.getKey());
assertEquals("virtualvalue", connObjectTO.getPlainAttrMap().get("COMPANYNAME").getValues().get(0));
// ----------------------------------
// ----------------------------------
// update user virtual attribute and check virtual attribute value update propagation
// ----------------------------------
UserPatch userPatch = new UserPatch();
userPatch.setKey(userTO.getKey());
userPatch.getVirAttrs().add(attrTO(virSchema.getKey(), "virtualvalue2"));
result = updateUser(userPatch);
assertNotNull(result);
assertFalse(result.getPropagationStatuses().isEmpty());
assertEquals(RESOURCE_NAME_WS2, result.getPropagationStatuses().get(0).getResource());
assertEquals(PropagationTaskExecStatus.SUCCESS, result.getPropagationStatuses().get(0).getStatus());
userTO = result.getAny();
connObjectTO = resourceService.readConnObject(RESOURCE_NAME_WS2, AnyTypeKind.USER.name(), userTO.getKey());
assertEquals("virtualvalue2", connObjectTO.getPlainAttrMap().get("COMPANYNAME").getValues().get(0));
// ----------------------------------
// ----------------------------------
// suspend/reactivate user and check virtual attribute value (unchanged)
// ----------------------------------
StatusPatch statusPatch = new StatusPatch();
statusPatch.setKey(userTO.getKey());
statusPatch.setType(StatusPatchType.SUSPEND);
userTO = userService.status(statusPatch).readEntity(new GenericType<ProvisioningResult<UserTO>>() {
}).getAny();
assertEquals("suspended", userTO.getStatus());
connObjectTO = resourceService.readConnObject(RESOURCE_NAME_WS2, AnyTypeKind.USER.name(), userTO.getKey());
assertEquals("virtualvalue2", connObjectTO.getPlainAttrMap().get("COMPANYNAME").getValues().get(0));
statusPatch = new StatusPatch();
statusPatch.setKey(userTO.getKey());
statusPatch.setType(StatusPatchType.REACTIVATE);
userTO = userService.status(statusPatch).readEntity(new GenericType<ProvisioningResult<UserTO>>() {
}).getAny();
assertEquals("active", userTO.getStatus());
connObjectTO = resourceService.readConnObject(RESOURCE_NAME_WS2, AnyTypeKind.USER.name(), userTO.getKey());
assertEquals("virtualvalue2", connObjectTO.getPlainAttrMap().get("COMPANYNAME").getValues().get(0));
// ----------------------------------
// ----------------------------------
// update user attribute and check virtual attribute value (unchanged)
// ----------------------------------
userPatch = new UserPatch();
userPatch.setKey(userTO.getKey());
userPatch.getPlainAttrs().add(attrAddReplacePatch("surname", "Surname2"));
result = updateUser(userPatch);
assertNotNull(result);
assertFalse(result.getPropagationStatuses().isEmpty());
assertEquals(RESOURCE_NAME_WS2, result.getPropagationStatuses().get(0).getResource());
assertEquals(PropagationTaskExecStatus.SUCCESS, result.getPropagationStatuses().get(0).getStatus());
userTO = result.getAny();
connObjectTO = resourceService.readConnObject(RESOURCE_NAME_WS2, AnyTypeKind.USER.name(), userTO.getKey());
assertEquals("Surname2", connObjectTO.getPlainAttrMap().get("SURNAME").getValues().get(0));
// virtual attribute value did not change
assertFalse(connObjectTO.getPlainAttrMap().get("COMPANYNAME").getValues().isEmpty());
assertEquals("virtualvalue2", connObjectTO.getPlainAttrMap().get("COMPANYNAME").getValues().get(0));
// ----------------------------------
}
@Test
public void virAttrCache() {
UserTO userTO = UserITCase.getUniqueSampleTO("virattrcache@apache.org");
userTO.getVirAttrs().clear();
AttrTO virAttrTO = new AttrTO();
virAttrTO.setSchema("virtualdata");
virAttrTO.getValues().add("virattrcache");
userTO.getVirAttrs().add(virAttrTO);
userTO.getMemberships().clear();
userTO.getResources().clear();
userTO.getResources().add(RESOURCE_NAME_DBVIRATTR);
// 1. create user
UserTO actual = createUser(userTO).getAny();
assertNotNull(actual);
// 2. check for virtual attribute value
actual = userService.read(actual.getKey());
assertEquals("virattrcache", actual.getVirAttrMap().get("virtualdata").getValues().get(0));
// 3. update virtual attribute directly
JdbcTemplate jdbcTemplate = new JdbcTemplate(testDataSource);
String value = jdbcTemplate.queryForObject(
"SELECT USERNAME FROM testpull WHERE ID=?", String.class, actual.getKey());
assertEquals("virattrcache", value);
jdbcTemplate.update("UPDATE testpull set USERNAME='virattrcache2' WHERE ID=?", actual.getKey());
value = jdbcTemplate.queryForObject(
"SELECT USERNAME FROM testpull WHERE ID=?", String.class, actual.getKey());
assertEquals("virattrcache2", value);
// 4. check for cached attribute value
actual = userService.read(actual.getKey());
assertEquals("virattrcache", actual.getVirAttrMap().get("virtualdata").getValues().get(0));
UserPatch userPatch = new UserPatch();
userPatch.setKey(actual.getKey());
userPatch.getVirAttrs().add(attrTO("virtualdata", "virtualupdated"));
// 5. update virtual attribute
actual = updateUser(userPatch).getAny();
assertNotNull(actual);
// 6. check for virtual attribute value
actual = userService.read(actual.getKey());
assertNotNull(actual);
assertEquals("virtualupdated", actual.getVirAttrMap().get("virtualdata").getValues().get(0));
}
@Test
public void issueSYNCOPE397() {
ResourceTO csv = resourceService.read(RESOURCE_NAME_CSV);
// change mapping of resource-csv
MappingTO origMapping = SerializationUtils.clone(csv.getProvisions().get(0).getMapping());
try {
// remove this mapping
CollectionUtils.filterInverse(csv.getProvisions().get(0).getMapping().getItems(),
new Predicate<MappingItemTO>() {
@Override
public boolean evaluate(final MappingItemTO item) {
return "email".equals(item.getIntAttrName());
}
});
resourceService.update(csv);
csv = resourceService.read(RESOURCE_NAME_CSV);
assertNotNull(csv.getProvisions().get(0).getMapping());
// create new virtual schema for the resource below
ProvisionTO provision = csv.getProvision(AnyTypeKind.USER.name());
assertNotNull(provision);
VirSchemaTO virSchema = new VirSchemaTO();
virSchema.setKey("syncope397" + getUUIDString());
virSchema.setExtAttrName("email");
virSchema.setResource(RESOURCE_NAME_CSV);
virSchema.setAnyType(provision.getAnyType());
virSchema = createSchema(SchemaType.VIRTUAL, virSchema);
assertNotNull(virSchema);
AnyTypeClassTO newClass = new AnyTypeClassTO();
newClass.setKey("syncope397" + getUUIDString());
newClass.getVirSchemas().add(virSchema.getKey());
Response response = anyTypeClassService.create(newClass);
assertEquals(Response.Status.CREATED.getStatusCode(), response.getStatusInfo().getStatusCode());
newClass = getObject(response.getLocation(), AnyTypeClassService.class, AnyTypeClassTO.class);
// create a new user
UserTO userTO = UserITCase.getUniqueSampleTO("397@syncope.apache.org");
userTO.getAuxClasses().add("csv");
userTO.getAuxClasses().add(newClass.getKey());
userTO.getResources().clear();
userTO.getMemberships().clear();
userTO.getDerAttrs().clear();
userTO.getVirAttrs().clear();
userTO.getDerAttrs().add(attrTO("csvuserid", null));
userTO.getDerAttrs().add(attrTO("cn", null));
userTO.getVirAttrs().add(attrTO(virSchema.getKey(), "test@testone.org"));
// assign resource-csv to user
userTO.getResources().add(RESOURCE_NAME_CSV);
// save user
userTO = createUser(userTO).getAny();
// make std controls about user
assertNotNull(userTO);
assertTrue(RESOURCE_NAME_CSV.equals(userTO.getResources().iterator().next()));
assertEquals("test@testone.org", userTO.getVirAttrs().iterator().next().getValues().get(0));
// update user
UserTO toBeUpdated = userService.read(userTO.getKey());
UserPatch userPatch = new UserPatch();
userPatch.setKey(toBeUpdated.getKey());
userPatch.setPassword(new PasswordPatch.Builder().value("password234").build());
// assign new resource to user
userPatch.getResources().add(new StringPatchItem.Builder().
operation(PatchOperation.ADD_REPLACE).value(RESOURCE_NAME_WS2).build());
// modify virtual attribute
userPatch.getVirAttrs().add(attrTO(virSchema.getKey(), "test@testoneone.com"));
// check Syncope change password
userPatch.setPassword(new PasswordPatch.Builder().
value("password234").
onSyncope(true).
resource(RESOURCE_NAME_WS2).
build());
ProvisioningResult<UserTO> result = updateUser(userPatch);
assertNotNull(result);
toBeUpdated = result.getAny();
assertTrue(toBeUpdated.getVirAttrs().iterator().next().getValues().contains("test@testoneone.com"));
// check if propagates correctly with assertEquals on size of tasks list
assertEquals(2, result.getPropagationStatuses().size());
} finally {
// restore mapping of resource-csv
csv.getProvisions().get(0).setMapping(origMapping);
resourceService.update(csv);
}
}
@Test
public void issueSYNCOPE442() {
UserTO userTO = UserITCase.getUniqueSampleTO("syncope442@apache.org");
userTO.getVirAttrs().clear();
AttrTO virAttrTO = new AttrTO();
virAttrTO.setSchema("virtualdata");
virAttrTO.getValues().add("virattrcache");
userTO.getVirAttrs().add(virAttrTO);
userTO.getMemberships().clear();
userTO.getResources().clear();
userTO.getResources().add(RESOURCE_NAME_DBVIRATTR);
// 1. create user
userTO = createUser(userTO).getAny();
assertNotNull(userTO);
// 2. check for virtual attribute value
userTO = userService.read(userTO.getKey());
assertEquals("virattrcache", userTO.getVirAttrMap().get("virtualdata").getValues().get(0));
// ----------------------------------------
// 3. change connector URL so that we are sure that any provided value will come from virtual cache
// ----------------------------------------
String jdbcURL = null;
ConnInstanceTO connInstanceTO = connectorService.readByResource(
RESOURCE_NAME_DBVIRATTR, Locale.ENGLISH.getLanguage());
for (ConnConfProperty prop : connInstanceTO.getConf()) {
if ("jdbcUrlTemplate".equals(prop.getSchema().getName())) {
jdbcURL = prop.getValues().iterator().next().toString();
prop.getValues().clear();
prop.getValues().add("jdbc:h2:tcp://localhost:9092/xxx");
}
}
connectorService.update(connInstanceTO);
// ----------------------------------------
// ----------------------------------------
// 4. update value on external resource
// ----------------------------------------
JdbcTemplate jdbcTemplate = new JdbcTemplate(testDataSource);
String value = jdbcTemplate.queryForObject(
"SELECT USERNAME FROM testpull WHERE ID=?", String.class, userTO.getKey());
assertEquals("virattrcache", value);
jdbcTemplate.update("UPDATE testpull set USERNAME='virattrcache2' WHERE ID=?", userTO.getKey());
value = jdbcTemplate.queryForObject(
"SELECT USERNAME FROM testpull WHERE ID=?", String.class, userTO.getKey());
assertEquals("virattrcache2", value);
// ----------------------------------------
userTO = userService.read(userTO.getKey());
assertEquals("virattrcache", userTO.getVirAttrMap().get("virtualdata").getValues().get(0));
// ----------------------------------------
// 5. restore connector URL, values can be read again from external resource
// ----------------------------------------
for (ConnConfProperty prop : connInstanceTO.getConf()) {
if ("jdbcUrlTemplate".equals(prop.getSchema().getName())) {
prop.getValues().clear();
prop.getValues().add(jdbcURL);
}
}
connectorService.update(connInstanceTO);
// ----------------------------------------
// cached value still in place...
userTO = userService.read(userTO.getKey());
assertEquals("virattrcache", userTO.getVirAttrMap().get("virtualdata").getValues().get(0));
// force cache update by adding a resource which has virtualdata mapped for propagation
UserPatch userPatch = new UserPatch();
userPatch.setKey(userTO.getKey());
userPatch.getResources().add(new StringPatchItem.Builder().
operation(PatchOperation.ADD_REPLACE).value(RESOURCE_NAME_WS2).build());
userTO = updateUser(userPatch).getAny();
assertNotNull(userTO);
userTO = userService.read(userTO.getKey());
assertEquals("virattrcache2", userTO.getVirAttrMap().get("virtualdata").getValues().get(0));
}
@Test
public void issueSYNCOPE436() {
UserTO userTO = UserITCase.getUniqueSampleTO("syncope436@syncope.apache.org");
userTO.getMemberships().clear();
userTO.getResources().clear();
userTO.getResources().add(RESOURCE_NAME_LDAP);
userTO.getVirAttrs().add(attrTO("virtualReadOnly", "readOnly"));
userTO = createUser(userTO).getAny();
// finding no values because the virtual attribute is readonly
assertTrue(userTO.getVirAttrMap().get("virtualReadOnly").getValues().isEmpty());
}
@Test
public void issueSYNCOPE453() {
String resourceName = "issueSYNCOPE453Res" + getUUIDString();
String groupKey = null;
String groupName = "issueSYNCOPE453Group" + getUUIDString();
try {
// -------------------------------------------
// Read or re-create the rvirtualdata virtual schema ad-hoc
// -------------------------------------------
VirSchemaTO rvirtualdata;
try {
rvirtualdata = schemaService.read(SchemaType.VIRTUAL, "rvirtualdata");
} catch (SyncopeClientException e) {
LOG.warn("rvirtualdata not found, re-creating", e);
rvirtualdata = new VirSchemaTO();
rvirtualdata.setKey("rvirtualdata");
rvirtualdata.setExtAttrName("businessCategory");
rvirtualdata.setResource(RESOURCE_NAME_LDAP);
rvirtualdata.setAnyType(AnyTypeKind.GROUP.name());
rvirtualdata = createSchema(SchemaType.VIRTUAL, rvirtualdata);
}
assertNotNull(rvirtualdata);
if (!"minimal group".equals(rvirtualdata.getAnyTypeClass())) {
LOG.warn("rvirtualdata not in minimal group, restoring");
AnyTypeClassTO minimalGroup = anyTypeClassService.read("minimal group");
minimalGroup.getVirSchemas().add(rvirtualdata.getKey());
anyTypeClassService.update(minimalGroup);
rvirtualdata = schemaService.read(SchemaType.VIRTUAL, rvirtualdata.getKey());
assertEquals("minimal group", rvirtualdata.getAnyTypeClass());
}
// -------------------------------------------
// Create a resource ad-hoc
// -------------------------------------------
ResourceTO resourceTO = new ResourceTO();
resourceTO.setKey(resourceName);
resourceTO.setConnector("be24b061-019d-4e3e-baf0-0a6d0a45cb9c");
ProvisionTO provisionTO = new ProvisionTO();
provisionTO.setAnyType(AnyTypeKind.USER.name());
provisionTO.setObjectClass(ObjectClass.ACCOUNT_NAME);
provisionTO.getAuxClasses().add("minimal group");
resourceTO.getProvisions().add(provisionTO);
MappingTO mapping = new MappingTO();
provisionTO.setMapping(mapping);
MappingItemTO item = new MappingItemTO();
item.setIntAttrName("fullname");
item.setExtAttrName("ID");
item.setPurpose(MappingPurpose.PROPAGATION);
item.setConnObjectKey(true);
mapping.setConnObjectKeyItem(item);
item = new MappingItemTO();
item.setExtAttrName("USERNAME");
item.setIntAttrName("username");
item.setPurpose(MappingPurpose.PROPAGATION);
mapping.getItems().add(item);
item = new MappingItemTO();
item.setExtAttrName("EMAIL");
item.setIntAttrName("groups[" + groupName + "].rvirtualdata");
item.setPurpose(MappingPurpose.PROPAGATION);
mapping.getItems().add(item);
assertNotNull(getObject(
resourceService.create(resourceTO).getLocation(), ResourceService.class, ResourceTO.class));
// -------------------------------------------
GroupTO groupTO = new GroupTO();
groupTO.setName(groupName);
groupTO.setRealm("/");
groupTO.getVirAttrs().add(attrTO(rvirtualdata.getKey(), "ml@group.it"));
groupTO.getResources().add(RESOURCE_NAME_LDAP);
groupTO = createGroup(groupTO).getAny();
groupKey = groupTO.getKey();
assertEquals(1, groupTO.getVirAttrs().size());
assertEquals("ml@group.it", groupTO.getVirAttrs().iterator().next().getValues().get(0));
// -------------------------------------------
// -------------------------------------------
// Create new user
// -------------------------------------------
UserTO userTO = UserITCase.getUniqueSampleTO("syncope453@syncope.apache.org");
userTO.getPlainAttrs().add(attrTO("fullname", "123"));
userTO.getResources().clear();
userTO.getResources().add(resourceName);
userTO.getVirAttrs().clear();
userTO.getDerAttrs().clear();
userTO.getMemberships().clear();
userTO.getMemberships().add(new MembershipTO.Builder().group(groupTO.getKey()).build());
ProvisioningResult<UserTO> result = createUser(userTO);
assertEquals(2, result.getPropagationStatuses().size());
assertEquals(PropagationTaskExecStatus.SUCCESS, result.getPropagationStatuses().get(0).getStatus());
assertEquals(PropagationTaskExecStatus.SUCCESS, result.getPropagationStatuses().get(1).getStatus());
userTO = result.getAny();
JdbcTemplate jdbcTemplate = new JdbcTemplate(testDataSource);
Map<String, Object> actuals = jdbcTemplate.queryForMap(
"SELECT id, surname, email FROM testpull WHERE id=?",
new Object[] { userTO.getPlainAttrMap().get("fullname").getValues().get(0) });
assertEquals(userTO.getPlainAttrMap().get("fullname").getValues().get(0), actuals.get("id").toString());
assertEquals("ml@group.it", actuals.get("email"));
// -------------------------------------------
} catch(Exception e) {
LOG.error("Unexpected error", e);
} finally {
// -------------------------------------------
// Delete resource and group ad-hoc
// -------------------------------------------
resourceService.delete(resourceName);
if (groupKey != null) {
groupService.delete(groupKey);
}
// -------------------------------------------
}
}
@Test
public void issueSYNCOPE459() {
UserTO userTO = UserITCase.getUniqueSampleTO("syncope459@apache.org");
userTO.getResources().clear();
userTO.getResources().add(RESOURCE_NAME_LDAP);
userTO.getMemberships().clear();
userTO.getVirAttrs().clear();
userTO = createUser(userTO).getAny();
assertNotNull(userTO.getVirAttrMap().get("virtualReadOnly"));
}
@Test
public void issueSYNCOPE501() {
// 1. create user and propagate him on resource-db-virattr
UserTO userTO = UserITCase.getUniqueSampleTO("syncope501@apache.org");
userTO.getResources().clear();
userTO.getMemberships().clear();
userTO.getVirAttrs().clear();
userTO.getResources().add(RESOURCE_NAME_DBVIRATTR);
// virtualdata is mapped with username
userTO.getVirAttrs().add(attrTO("virtualdata", "syncope501@apache.org"));
userTO = createUser(userTO).getAny();
assertNotNull(userTO.getVirAttrMap().get("virtualdata"));
assertEquals("syncope501@apache.org", userTO.getVirAttrMap().get("virtualdata").getValues().get(0));
// 2. update virtual attribute
UserPatch userPatch = new UserPatch();
userPatch.setKey(userTO.getKey());
// change virtual attribute value
userPatch.getVirAttrs().add(attrTO("virtualdata", "syncope501_updated@apache.org"));
userTO = updateUser(userPatch).getAny();
assertNotNull(userTO);
// 3. check that user virtual attribute has really been updated
assertFalse(userTO.getVirAttrMap().get("virtualdata").getValues().isEmpty());
assertEquals("syncope501_updated@apache.org", userTO.getVirAttrMap().get("virtualdata").getValues().get(0));
}
@Test
public void issueSYNCOPE691() {
ResourceTO ldap = resourceService.read(RESOURCE_NAME_LDAP);
try {
ProvisionTO provision = ldap.getProvision(AnyTypeKind.USER.name());
assertNotNull(provision);
CollectionUtils.filterInverse(provision.getMapping().getItems(), new Predicate<MappingItemTO>() {
@Override
public boolean evaluate(final MappingItemTO item) {
return "mail".equals(item.getExtAttrName());
}
});
provision.getVirSchemas().clear();
ldap.getProvisions().clear();
ldap.getProvisions().add(provision);
ldap.setKey(RESOURCE_NAME_LDAP + "691" + getUUIDString());
resourceService.create(ldap);
ldap = resourceService.read(ldap.getKey());
provision = ldap.getProvision(AnyTypeKind.USER.name());
assertNotNull(provision);
// create new virtual schema for the resource below
VirSchemaTO virSchema = new VirSchemaTO();
virSchema.setKey("syncope691" + getUUIDString());
virSchema.setExtAttrName("mail");
virSchema.setResource(ldap.getKey());
virSchema.setAnyType(provision.getAnyType());
virSchema = createSchema(SchemaType.VIRTUAL, virSchema);
assertNotNull(virSchema);
AnyTypeClassTO newClass = new AnyTypeClassTO();
newClass.setKey("syncope691" + getUUIDString());
newClass.getVirSchemas().add(virSchema.getKey());
Response response = anyTypeClassService.create(newClass);
assertEquals(Response.Status.CREATED.getStatusCode(), response.getStatusInfo().getStatusCode());
newClass = getObject(response.getLocation(), AnyTypeClassService.class, AnyTypeClassTO.class);
// create a new user
UserTO userTO = UserITCase.getUniqueSampleTO("syncope691@syncope.apache.org");
userTO.getAuxClasses().add(newClass.getKey());
userTO.getResources().clear();
userTO.getMemberships().clear();
userTO.getDerAttrs().clear();
userTO.getVirAttrs().clear();
AttrTO emailTO = new AttrTO();
emailTO.setSchema(virSchema.getKey());
emailTO.getValues().add("test@issue691.dom1.org");
emailTO.getValues().add("test@issue691.dom2.org");
userTO.getVirAttrs().add(emailTO);
// assign resource-ldap691 to user
userTO.getResources().add(ldap.getKey());
// save user
userTO = createUser(userTO).getAny();
// make std controls about user
assertNotNull(userTO);
assertTrue(ldap.getKey().equals(userTO.getResources().iterator().next()));
assertEquals(2, userTO.getVirAttrs().iterator().next().getValues().size(), 0);
assertTrue(userTO.getVirAttrs().iterator().next().getValues().contains("test@issue691.dom1.org"));
assertTrue(userTO.getVirAttrs().iterator().next().getValues().contains("test@issue691.dom2.org"));
// update user
UserPatch userPatch = new UserPatch();
userPatch.setKey(userTO.getKey());
// modify virtual attribute
userPatch.getVirAttrs().add(
new AttrTO.Builder().schema(virSchema.getKey()).
value("test@issue691.dom3.org").
value("test@issue691.dom4.org").
build());
UserTO updated = updateUser(userPatch).getAny();
assertNotNull(updated);
assertEquals(2, updated.getVirAttrs().iterator().next().getValues().size(), 0);
assertTrue(updated.getVirAttrs().iterator().next().getValues().contains("test@issue691.dom3.org"));
assertTrue(updated.getVirAttrs().iterator().next().getValues().contains("test@issue691.dom4.org"));
} finally {
try {
resourceService.delete(ldap.getKey());
} catch (Exception ignore) {
// ignore
}
}
}
}
|
fit/core-reference/src/test/java/org/apache/syncope/fit/core/VirAttrITCase.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.syncope.fit.core;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.util.Locale;
import java.util.Map;
import javax.ws.rs.core.GenericType;
import javax.ws.rs.core.Response;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.collections4.Predicate;
import org.apache.commons.lang3.SerializationUtils;
import org.apache.syncope.common.lib.SyncopeClientException;
import org.apache.syncope.common.lib.patch.PasswordPatch;
import org.apache.syncope.common.lib.patch.StatusPatch;
import org.apache.syncope.common.lib.patch.StringPatchItem;
import org.apache.syncope.common.lib.patch.UserPatch;
import org.apache.syncope.common.lib.to.AnyTypeClassTO;
import org.apache.syncope.common.lib.to.AttrTO;
import org.apache.syncope.common.lib.to.ConnInstanceTO;
import org.apache.syncope.common.lib.to.ConnObjectTO;
import org.apache.syncope.common.lib.to.MappingItemTO;
import org.apache.syncope.common.lib.to.MappingTO;
import org.apache.syncope.common.lib.to.MembershipTO;
import org.apache.syncope.common.lib.to.ResourceTO;
import org.apache.syncope.common.lib.to.GroupTO;
import org.apache.syncope.common.lib.to.ProvisionTO;
import org.apache.syncope.common.lib.to.ProvisioningResult;
import org.apache.syncope.common.lib.to.UserTO;
import org.apache.syncope.common.lib.to.VirSchemaTO;
import org.apache.syncope.common.lib.types.AnyTypeKind;
import org.apache.syncope.common.lib.types.ConnConfProperty;
import org.apache.syncope.common.lib.types.MappingPurpose;
import org.apache.syncope.common.lib.types.PatchOperation;
import org.apache.syncope.common.lib.types.PropagationTaskExecStatus;
import org.apache.syncope.common.lib.types.SchemaType;
import org.apache.syncope.common.lib.types.StatusPatchType;
import org.apache.syncope.common.rest.api.service.AnyTypeClassService;
import org.apache.syncope.common.rest.api.service.ResourceService;
import org.apache.syncope.fit.AbstractITCase;
import org.identityconnectors.framework.common.objects.ObjectClass;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.junit.runners.MethodSorters;
import org.springframework.jdbc.core.JdbcTemplate;
@FixMethodOrder(MethodSorters.JVM)
public class VirAttrITCase extends AbstractITCase {
@Test
public void issueSYNCOPE16() {
UserTO userTO = UserITCase.getUniqueSampleTO("issue16@apache.org");
userTO.getVirAttrs().add(attrTO("virtualdata", "virtualvalue"));
userTO.getResources().add(RESOURCE_NAME_DBVIRATTR);
userTO.getMemberships().add(
new MembershipTO.Builder().group("f779c0d4-633b-4be5-8f57-32eb478a3ca5").build());
// 1. create user
userTO = createUser(userTO).getAny();
assertNotNull(userTO);
// 2. check for virtual attribute value
userTO = userService.read(userTO.getKey());
assertNotNull(userTO);
assertEquals("virtualvalue", userTO.getVirAttrMap().get("virtualdata").getValues().get(0));
UserPatch userPatch = new UserPatch();
userPatch.setKey(userTO.getKey());
userPatch.getVirAttrs().add(attrTO("virtualdata", "virtualupdated"));
// 3. update virtual attribute
userTO = updateUser(userPatch).getAny();
assertNotNull(userTO);
// 4. check for virtual attribute value
userTO = userService.read(userTO.getKey());
assertNotNull(userTO);
assertEquals("virtualupdated", userTO.getVirAttrMap().get("virtualdata").getValues().get(0));
}
@Test
public void issueSYNCOPE260() {
// create new virtual schema for the resource below
ResourceTO ws2 = resourceService.read(RESOURCE_NAME_WS2);
ProvisionTO provision = ws2.getProvision(AnyTypeKind.USER.name());
assertNotNull(provision);
VirSchemaTO virSchema = new VirSchemaTO();
virSchema.setKey("syncope260" + getUUIDString());
virSchema.setExtAttrName("companyName");
virSchema.setResource(RESOURCE_NAME_WS2);
virSchema.setAnyType(provision.getAnyType());
virSchema = createSchema(SchemaType.VIRTUAL, virSchema);
assertNotNull(virSchema);
AnyTypeClassTO newClass = new AnyTypeClassTO();
newClass.setKey("syncope260" + getUUIDString());
newClass.getVirSchemas().add(virSchema.getKey());
Response response = anyTypeClassService.create(newClass);
assertEquals(Response.Status.CREATED.getStatusCode(), response.getStatusInfo().getStatusCode());
newClass = getObject(response.getLocation(), AnyTypeClassService.class, AnyTypeClassTO.class);
// ----------------------------------
// create user and check virtual attribute value propagation
// ----------------------------------
UserTO userTO = UserITCase.getUniqueSampleTO("260@a.com");
userTO.getAuxClasses().add(newClass.getKey());
userTO.getVirAttrs().add(attrTO(virSchema.getKey(), "virtualvalue"));
userTO.getResources().add(RESOURCE_NAME_WS2);
ProvisioningResult<UserTO> result = createUser(userTO);
assertNotNull(result);
assertFalse(result.getPropagationStatuses().isEmpty());
assertEquals(RESOURCE_NAME_WS2, result.getPropagationStatuses().get(0).getResource());
assertEquals(PropagationTaskExecStatus.SUCCESS, result.getPropagationStatuses().get(0).getStatus());
userTO = result.getAny();
ConnObjectTO connObjectTO =
resourceService.readConnObject(RESOURCE_NAME_WS2, AnyTypeKind.USER.name(), userTO.getKey());
assertEquals("virtualvalue", connObjectTO.getPlainAttrMap().get("COMPANYNAME").getValues().get(0));
// ----------------------------------
// ----------------------------------
// update user virtual attribute and check virtual attribute value update propagation
// ----------------------------------
UserPatch userPatch = new UserPatch();
userPatch.setKey(userTO.getKey());
userPatch.getVirAttrs().add(attrTO(virSchema.getKey(), "virtualvalue2"));
result = updateUser(userPatch);
assertNotNull(result);
assertFalse(result.getPropagationStatuses().isEmpty());
assertEquals(RESOURCE_NAME_WS2, result.getPropagationStatuses().get(0).getResource());
assertEquals(PropagationTaskExecStatus.SUCCESS, result.getPropagationStatuses().get(0).getStatus());
userTO = result.getAny();
connObjectTO = resourceService.readConnObject(RESOURCE_NAME_WS2, AnyTypeKind.USER.name(), userTO.getKey());
assertEquals("virtualvalue2", connObjectTO.getPlainAttrMap().get("COMPANYNAME").getValues().get(0));
// ----------------------------------
// ----------------------------------
// suspend/reactivate user and check virtual attribute value (unchanged)
// ----------------------------------
StatusPatch statusPatch = new StatusPatch();
statusPatch.setKey(userTO.getKey());
statusPatch.setType(StatusPatchType.SUSPEND);
userTO = userService.status(statusPatch).readEntity(new GenericType<ProvisioningResult<UserTO>>() {
}).getAny();
assertEquals("suspended", userTO.getStatus());
connObjectTO = resourceService.readConnObject(RESOURCE_NAME_WS2, AnyTypeKind.USER.name(), userTO.getKey());
assertEquals("virtualvalue2", connObjectTO.getPlainAttrMap().get("COMPANYNAME").getValues().get(0));
statusPatch = new StatusPatch();
statusPatch.setKey(userTO.getKey());
statusPatch.setType(StatusPatchType.REACTIVATE);
userTO = userService.status(statusPatch).readEntity(new GenericType<ProvisioningResult<UserTO>>() {
}).getAny();
assertEquals("active", userTO.getStatus());
connObjectTO = resourceService.readConnObject(RESOURCE_NAME_WS2, AnyTypeKind.USER.name(), userTO.getKey());
assertEquals("virtualvalue2", connObjectTO.getPlainAttrMap().get("COMPANYNAME").getValues().get(0));
// ----------------------------------
// ----------------------------------
// update user attribute and check virtual attribute value (unchanged)
// ----------------------------------
userPatch = new UserPatch();
userPatch.setKey(userTO.getKey());
userPatch.getPlainAttrs().add(attrAddReplacePatch("surname", "Surname2"));
result = updateUser(userPatch);
assertNotNull(result);
assertFalse(result.getPropagationStatuses().isEmpty());
assertEquals(RESOURCE_NAME_WS2, result.getPropagationStatuses().get(0).getResource());
assertEquals(PropagationTaskExecStatus.SUCCESS, result.getPropagationStatuses().get(0).getStatus());
userTO = result.getAny();
connObjectTO = resourceService.readConnObject(RESOURCE_NAME_WS2, AnyTypeKind.USER.name(), userTO.getKey());
assertEquals("Surname2", connObjectTO.getPlainAttrMap().get("SURNAME").getValues().get(0));
// virtual attribute value did not change
assertFalse(connObjectTO.getPlainAttrMap().get("COMPANYNAME").getValues().isEmpty());
assertEquals("virtualvalue2", connObjectTO.getPlainAttrMap().get("COMPANYNAME").getValues().get(0));
// ----------------------------------
}
@Test
public void virAttrCache() {
UserTO userTO = UserITCase.getUniqueSampleTO("virattrcache@apache.org");
userTO.getVirAttrs().clear();
AttrTO virAttrTO = new AttrTO();
virAttrTO.setSchema("virtualdata");
virAttrTO.getValues().add("virattrcache");
userTO.getVirAttrs().add(virAttrTO);
userTO.getMemberships().clear();
userTO.getResources().clear();
userTO.getResources().add(RESOURCE_NAME_DBVIRATTR);
// 1. create user
UserTO actual = createUser(userTO).getAny();
assertNotNull(actual);
// 2. check for virtual attribute value
actual = userService.read(actual.getKey());
assertEquals("virattrcache", actual.getVirAttrMap().get("virtualdata").getValues().get(0));
// 3. update virtual attribute directly
JdbcTemplate jdbcTemplate = new JdbcTemplate(testDataSource);
String value = jdbcTemplate.queryForObject(
"SELECT USERNAME FROM testpull WHERE ID=?", String.class, actual.getKey());
assertEquals("virattrcache", value);
jdbcTemplate.update("UPDATE testpull set USERNAME='virattrcache2' WHERE ID=?", actual.getKey());
value = jdbcTemplate.queryForObject(
"SELECT USERNAME FROM testpull WHERE ID=?", String.class, actual.getKey());
assertEquals("virattrcache2", value);
// 4. check for cached attribute value
actual = userService.read(actual.getKey());
assertEquals("virattrcache", actual.getVirAttrMap().get("virtualdata").getValues().get(0));
UserPatch userPatch = new UserPatch();
userPatch.setKey(actual.getKey());
userPatch.getVirAttrs().add(attrTO("virtualdata", "virtualupdated"));
// 5. update virtual attribute
actual = updateUser(userPatch).getAny();
assertNotNull(actual);
// 6. check for virtual attribute value
actual = userService.read(actual.getKey());
assertNotNull(actual);
assertEquals("virtualupdated", actual.getVirAttrMap().get("virtualdata").getValues().get(0));
}
@Test
public void issueSYNCOPE397() {
ResourceTO csv = resourceService.read(RESOURCE_NAME_CSV);
// change mapping of resource-csv
MappingTO origMapping = SerializationUtils.clone(csv.getProvisions().get(0).getMapping());
try {
// remove this mapping
CollectionUtils.filterInverse(csv.getProvisions().get(0).getMapping().getItems(),
new Predicate<MappingItemTO>() {
@Override
public boolean evaluate(final MappingItemTO item) {
return "email".equals(item.getIntAttrName());
}
});
resourceService.update(csv);
csv = resourceService.read(RESOURCE_NAME_CSV);
assertNotNull(csv.getProvisions().get(0).getMapping());
// create new virtual schema for the resource below
ProvisionTO provision = csv.getProvision(AnyTypeKind.USER.name());
assertNotNull(provision);
VirSchemaTO virSchema = new VirSchemaTO();
virSchema.setKey("syncope397" + getUUIDString());
virSchema.setExtAttrName("email");
virSchema.setResource(RESOURCE_NAME_CSV);
virSchema.setAnyType(provision.getAnyType());
virSchema = createSchema(SchemaType.VIRTUAL, virSchema);
assertNotNull(virSchema);
AnyTypeClassTO newClass = new AnyTypeClassTO();
newClass.setKey("syncope397" + getUUIDString());
newClass.getVirSchemas().add(virSchema.getKey());
Response response = anyTypeClassService.create(newClass);
assertEquals(Response.Status.CREATED.getStatusCode(), response.getStatusInfo().getStatusCode());
newClass = getObject(response.getLocation(), AnyTypeClassService.class, AnyTypeClassTO.class);
// create a new user
UserTO userTO = UserITCase.getUniqueSampleTO("397@syncope.apache.org");
userTO.getAuxClasses().add("csv");
userTO.getAuxClasses().add(newClass.getKey());
userTO.getResources().clear();
userTO.getMemberships().clear();
userTO.getDerAttrs().clear();
userTO.getVirAttrs().clear();
userTO.getDerAttrs().add(attrTO("csvuserid", null));
userTO.getDerAttrs().add(attrTO("cn", null));
userTO.getVirAttrs().add(attrTO(virSchema.getKey(), "test@testone.org"));
// assign resource-csv to user
userTO.getResources().add(RESOURCE_NAME_CSV);
// save user
userTO = createUser(userTO).getAny();
// perform standard checks on the created user
assertNotNull(userTO);
assertTrue(RESOURCE_NAME_CSV.equals(userTO.getResources().iterator().next()));
assertEquals("test@testone.org", userTO.getVirAttrs().iterator().next().getValues().get(0));
// update user
UserTO toBeUpdated = userService.read(userTO.getKey());
UserPatch userPatch = new UserPatch();
userPatch.setKey(toBeUpdated.getKey());
userPatch.setPassword(new PasswordPatch.Builder().value("password234").build());
// assign new resource to user
userPatch.getResources().add(new StringPatchItem.Builder().
operation(PatchOperation.ADD_REPLACE).value(RESOURCE_NAME_WS2).build());
// modify virtual attribute
userPatch.getVirAttrs().add(attrTO(virSchema.getKey(), "test@testoneone.com"));
// check Syncope change password
userPatch.setPassword(new PasswordPatch.Builder().
value("password234").
onSyncope(true).
resource(RESOURCE_NAME_WS2).
build());
ProvisioningResult<UserTO> result = updateUser(userPatch);
assertNotNull(result);
toBeUpdated = result.getAny();
assertTrue(toBeUpdated.getVirAttrs().iterator().next().getValues().contains("test@testoneone.com"));
// check that propagation to both resources occurred by asserting on the number of propagation statuses
assertEquals(2, result.getPropagationStatuses().size());
} finally {
// restore mapping of resource-csv
csv.getProvisions().get(0).setMapping(origMapping);
resourceService.update(csv);
}
}
@Test
public void issueSYNCOPE442() {
UserTO userTO = UserITCase.getUniqueSampleTO("syncope442@apache.org");
userTO.getVirAttrs().clear();
AttrTO virAttrTO = new AttrTO();
virAttrTO.setSchema("virtualdata");
virAttrTO.getValues().add("virattrcache");
userTO.getVirAttrs().add(virAttrTO);
userTO.getMemberships().clear();
userTO.getResources().clear();
userTO.getResources().add(RESOURCE_NAME_DBVIRATTR);
// 1. create user
userTO = createUser(userTO).getAny();
assertNotNull(userTO);
// 2. check for virtual attribute value
userTO = userService.read(userTO.getKey());
assertEquals("virattrcache", userTO.getVirAttrMap().get("virtualdata").getValues().get(0));
// ----------------------------------------
// 3. change connector URL so that we are sure that any provided value will come from virtual cache
// ----------------------------------------
String jdbcURL = null;
ConnInstanceTO connInstanceTO = connectorService.readByResource(
RESOURCE_NAME_DBVIRATTR, Locale.ENGLISH.getLanguage());
for (ConnConfProperty prop : connInstanceTO.getConf()) {
if ("jdbcUrlTemplate".equals(prop.getSchema().getName())) {
jdbcURL = prop.getValues().iterator().next().toString();
prop.getValues().clear();
prop.getValues().add("jdbc:h2:tcp://localhost:9092/xxx");
}
}
connectorService.update(connInstanceTO);
// ----------------------------------------
// ----------------------------------------
// 4. update value on external resource
// ----------------------------------------
JdbcTemplate jdbcTemplate = new JdbcTemplate(testDataSource);
String value = jdbcTemplate.queryForObject(
"SELECT USERNAME FROM testpull WHERE ID=?", String.class, userTO.getKey());
assertEquals("virattrcache", value);
jdbcTemplate.update("UPDATE testpull set USERNAME='virattrcache2' WHERE ID=?", userTO.getKey());
value = jdbcTemplate.queryForObject(
"SELECT USERNAME FROM testpull WHERE ID=?", String.class, userTO.getKey());
assertEquals("virattrcache2", value);
// ----------------------------------------
userTO = userService.read(userTO.getKey());
assertEquals("virattrcache", userTO.getVirAttrMap().get("virtualdata").getValues().get(0));
// ----------------------------------------
// 5. restore connector URL, values can be read again from external resource
// ----------------------------------------
for (ConnConfProperty prop : connInstanceTO.getConf()) {
if ("jdbcUrlTemplate".equals(prop.getSchema().getName())) {
prop.getValues().clear();
prop.getValues().add(jdbcURL);
}
}
connectorService.update(connInstanceTO);
// ----------------------------------------
// cached value still in place...
userTO = userService.read(userTO.getKey());
assertEquals("virattrcache", userTO.getVirAttrMap().get("virtualdata").getValues().get(0));
// force cache update by adding a resource which has virtualdata mapped for propagation
UserPatch userPatch = new UserPatch();
userPatch.setKey(userTO.getKey());
userPatch.getResources().add(new StringPatchItem.Builder().
operation(PatchOperation.ADD_REPLACE).value(RESOURCE_NAME_WS2).build());
userTO = updateUser(userPatch).getAny();
assertNotNull(userTO);
userTO = userService.read(userTO.getKey());
assertEquals("virattrcache2", userTO.getVirAttrMap().get("virtualdata").getValues().get(0));
}
@Test
public void issueSYNCOPE436() {
UserTO userTO = UserITCase.getUniqueSampleTO("syncope436@syncope.apache.org");
userTO.getMemberships().clear();
userTO.getResources().clear();
userTO.getResources().add(RESOURCE_NAME_LDAP);
userTO.getVirAttrs().add(attrTO("virtualReadOnly", "readOnly"));
userTO = createUser(userTO).getAny();
// finding no values because the virtual attribute is readonly
assertTrue(userTO.getVirAttrMap().get("virtualReadOnly").getValues().isEmpty());
}
@Test
public void issueSYNCOPE453() {
String resourceName = "issueSYNCOPE453-Res-" + getUUIDString();
String groupKey = null;
String groupName = "issueSYNCOPE453-Group-" + getUUIDString();
try {
// -------------------------------------------
// Ensure the rvirtualdata virtual schema exists, re-creating it ad-hoc if missing
// -------------------------------------------
VirSchemaTO rvirtualdata;
try {
rvirtualdata = schemaService.read(SchemaType.VIRTUAL, "rvirtualdata");
} catch (SyncopeClientException e) {
LOG.warn("rvirtualdata not found, re-creating", e);
rvirtualdata = new VirSchemaTO();
rvirtualdata.setKey("rvirtualdata");
rvirtualdata.setExtAttrName("businessCategory");
rvirtualdata.setResource(RESOURCE_NAME_LDAP);
rvirtualdata.setAnyType(AnyTypeKind.GROUP.name());
rvirtualdata = createSchema(SchemaType.VIRTUAL, rvirtualdata);
}
assertNotNull(rvirtualdata);
if (!"minimal group".equals(rvirtualdata.getAnyTypeClass())) {
LOG.warn("rvirtualdata not in minimal group, restoring");
AnyTypeClassTO minimalGroup = anyTypeClassService.read("minimal group");
minimalGroup.getVirSchemas().add(rvirtualdata.getKey());
anyTypeClassService.update(minimalGroup);
rvirtualdata = schemaService.read(SchemaType.VIRTUAL, rvirtualdata.getKey());
assertEquals("minimal group", rvirtualdata.getAnyTypeClass());
}
// -------------------------------------------
// Create a resource ad-hoc
// -------------------------------------------
ResourceTO resourceTO = new ResourceTO();
resourceTO.setKey(resourceName);
resourceTO.setConnector("be24b061-019d-4e3e-baf0-0a6d0a45cb9c");
ProvisionTO provisionTO = new ProvisionTO();
provisionTO.setAnyType(AnyTypeKind.USER.name());
provisionTO.setObjectClass(ObjectClass.ACCOUNT_NAME);
provisionTO.getAuxClasses().add("minimal group");
resourceTO.getProvisions().add(provisionTO);
MappingTO mapping = new MappingTO();
provisionTO.setMapping(mapping);
MappingItemTO item = new MappingItemTO();
item.setIntAttrName("fullname");
item.setExtAttrName("ID");
item.setPurpose(MappingPurpose.PROPAGATION);
item.setConnObjectKey(true);
mapping.setConnObjectKeyItem(item);
item = new MappingItemTO();
item.setExtAttrName("USERNAME");
item.setIntAttrName("username");
item.setPurpose(MappingPurpose.PROPAGATION);
mapping.getItems().add(item);
item = new MappingItemTO();
item.setExtAttrName("EMAIL");
item.setIntAttrName("rvirtualdata");
item.setPurpose(MappingPurpose.PROPAGATION);
mapping.getItems().add(item);
assertNotNull(getObject(
resourceService.create(resourceTO).getLocation(), ResourceService.class, ResourceTO.class));
// -------------------------------------------
GroupTO groupTO = new GroupTO();
groupTO.setName(groupName);
groupTO.setRealm("/");
groupTO.getVirAttrs().add(attrTO(rvirtualdata.getKey(), "ml@group.it"));
groupTO.getResources().add(RESOURCE_NAME_LDAP);
groupTO = createGroup(groupTO).getAny();
groupKey = groupTO.getKey();
assertEquals(1, groupTO.getVirAttrs().size());
assertEquals("ml@group.it", groupTO.getVirAttrs().iterator().next().getValues().get(0));
// -------------------------------------------
// -------------------------------------------
// Create new user
// -------------------------------------------
UserTO userTO = UserITCase.getUniqueSampleTO("syncope453@syncope.apache.org");
userTO.getPlainAttrs().add(attrTO("fullname", "123"));
userTO.getResources().clear();
userTO.getResources().add(resourceName);
userTO.getVirAttrs().clear();
userTO.getDerAttrs().clear();
userTO.getMemberships().clear();
userTO.getMemberships().add(new MembershipTO.Builder().group(groupTO.getKey()).build());
ProvisioningResult<UserTO> result = createUser(userTO);
assertEquals(2, result.getPropagationStatuses().size());
assertEquals(PropagationTaskExecStatus.SUCCESS, result.getPropagationStatuses().get(0).getStatus());
assertEquals(PropagationTaskExecStatus.SUCCESS, result.getPropagationStatuses().get(1).getStatus());
userTO = result.getAny();
JdbcTemplate jdbcTemplate = new JdbcTemplate(testDataSource);
Map<String, Object> actuals = jdbcTemplate.queryForMap(
"SELECT id, surname, email FROM testpull WHERE id=?",
new Object[] { userTO.getPlainAttrMap().get("fullname").getValues().get(0) });
assertEquals(userTO.getPlainAttrMap().get("fullname").getValues().get(0), actuals.get("id").toString());
assertEquals("ml@group.it", actuals.get("email"));
// -------------------------------------------
} finally {
// -------------------------------------------
// Delete resource and group ad-hoc
// -------------------------------------------
resourceService.delete(resourceName);
if (groupKey != null) {
groupService.delete(groupKey);
}
// -------------------------------------------
}
}
@Test
public void issueSYNCOPE459() {
UserTO userTO = UserITCase.getUniqueSampleTO("syncope459@apache.org");
userTO.getResources().clear();
userTO.getResources().add(RESOURCE_NAME_LDAP);
userTO.getMemberships().clear();
userTO.getVirAttrs().clear();
userTO = createUser(userTO).getAny();
assertNotNull(userTO.getVirAttrMap().get("virtualReadOnly"));
}
@Test
public void issueSYNCOPE501() {
// 1. create user and propagate him on resource-db-virattr
UserTO userTO = UserITCase.getUniqueSampleTO("syncope501@apache.org");
userTO.getResources().clear();
userTO.getMemberships().clear();
userTO.getVirAttrs().clear();
userTO.getResources().add(RESOURCE_NAME_DBVIRATTR);
// virtualdata is mapped with username
userTO.getVirAttrs().add(attrTO("virtualdata", "syncope501@apache.org"));
userTO = createUser(userTO).getAny();
assertNotNull(userTO.getVirAttrMap().get("virtualdata"));
assertEquals("syncope501@apache.org", userTO.getVirAttrMap().get("virtualdata").getValues().get(0));
// 2. update virtual attribute
UserPatch userPatch = new UserPatch();
userPatch.setKey(userTO.getKey());
// change virtual attribute value
userPatch.getVirAttrs().add(attrTO("virtualdata", "syncope501_updated@apache.org"));
userTO = updateUser(userPatch).getAny();
assertNotNull(userTO);
// 3. check that user virtual attribute has really been updated
assertFalse(userTO.getVirAttrMap().get("virtualdata").getValues().isEmpty());
assertEquals("syncope501_updated@apache.org", userTO.getVirAttrMap().get("virtualdata").getValues().get(0));
}
@Test
public void issueSYNCOPE691() {
ResourceTO ldap = resourceService.read(RESOURCE_NAME_LDAP);
try {
ProvisionTO provision = ldap.getProvision(AnyTypeKind.USER.name());
assertNotNull(provision);
CollectionUtils.filterInverse(provision.getMapping().getItems(), new Predicate<MappingItemTO>() {
@Override
public boolean evaluate(final MappingItemTO item) {
return "mail".equals(item.getExtAttrName());
}
});
provision.getVirSchemas().clear();
ldap.getProvisions().clear();
ldap.getProvisions().add(provision);
ldap.setKey(RESOURCE_NAME_LDAP + "691" + getUUIDString());
resourceService.create(ldap);
ldap = resourceService.read(ldap.getKey());
provision = ldap.getProvision(AnyTypeKind.USER.name());
assertNotNull(provision);
// create new virtual schema for the resource below
VirSchemaTO virSchema = new VirSchemaTO();
virSchema.setKey("syncope691" + getUUIDString());
virSchema.setExtAttrName("mail");
virSchema.setResource(ldap.getKey());
virSchema.setAnyType(provision.getAnyType());
virSchema = createSchema(SchemaType.VIRTUAL, virSchema);
assertNotNull(virSchema);
AnyTypeClassTO newClass = new AnyTypeClassTO();
newClass.setKey("syncope691" + getUUIDString());
newClass.getVirSchemas().add(virSchema.getKey());
Response response = anyTypeClassService.create(newClass);
assertEquals(Response.Status.CREATED.getStatusCode(), response.getStatusInfo().getStatusCode());
newClass = getObject(response.getLocation(), AnyTypeClassService.class, AnyTypeClassTO.class);
// create a new user
UserTO userTO = UserITCase.getUniqueSampleTO("syncope691@syncope.apache.org");
userTO.getAuxClasses().add(newClass.getKey());
userTO.getResources().clear();
userTO.getMemberships().clear();
userTO.getDerAttrs().clear();
userTO.getVirAttrs().clear();
AttrTO emailTO = new AttrTO();
emailTO.setSchema(virSchema.getKey());
emailTO.getValues().add("test@issue691.dom1.org");
emailTO.getValues().add("test@issue691.dom2.org");
userTO.getVirAttrs().add(emailTO);
// assign resource-ldap691 to user
userTO.getResources().add(ldap.getKey());
// save user
userTO = createUser(userTO).getAny();
// perform standard checks on the created user
assertNotNull(userTO);
assertTrue(ldap.getKey().equals(userTO.getResources().iterator().next()));
assertEquals(2, userTO.getVirAttrs().iterator().next().getValues().size());
assertTrue(userTO.getVirAttrs().iterator().next().getValues().contains("test@issue691.dom1.org"));
assertTrue(userTO.getVirAttrs().iterator().next().getValues().contains("test@issue691.dom2.org"));
// update user
UserPatch userPatch = new UserPatch();
userPatch.setKey(userTO.getKey());
// modify virtual attribute
userPatch.getVirAttrs().add(
new AttrTO.Builder().schema(virSchema.getKey()).
value("test@issue691.dom3.org").
value("test@issue691.dom4.org").
build());
UserTO updated = updateUser(userPatch).getAny();
assertNotNull(updated);
assertEquals(2, updated.getVirAttrs().iterator().next().getValues().size());
assertTrue(updated.getVirAttrs().iterator().next().getValues().contains("test@issue691.dom3.org"));
assertTrue(updated.getVirAttrs().iterator().next().getValues().contains("test@issue691.dom4.org"));
} finally {
try {
resourceService.delete(ldap.getKey());
} catch (Exception ignore) {
// ignore
}
}
}
}
|
[SYNCOPE-862] Adjusting one failing test
|
fit/core-reference/src/test/java/org/apache/syncope/fit/core/VirAttrITCase.java
|
[SYNCOPE-862] Adjusting one failing test
|
|
Java
|
apache-2.0
|
ecf29a95b786efb81f078bb6855ad7ae898f8053
| 0
|
OpenHFT/Chronicle-Map
|
/*
* Copyright 2012-2018 Chronicle Map Contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.openhft.chronicle.map;
import net.openhft.chronicle.bytes.Byteable;
import net.openhft.chronicle.core.util.SerializableFunction;
import net.openhft.chronicle.hash.ChronicleHash;
import net.openhft.chronicle.hash.serialization.SizedReader;
import net.openhft.chronicle.hash.serialization.SizedWriter;
import org.jetbrains.annotations.NotNull;
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.io.Serializable;
import java.lang.reflect.Type;
import java.util.Map;
import java.util.concurrent.ConcurrentMap;
/**
* {@code ChronicleMap} provides concurrent access to a <i>Chronicle Map key-value store</i> from a
* JVM process.
* <p>
* <p>For information on <ul> <li>how to construct a {@code ChronicleMap}</li> <li>{@code
* ChronicleMap} flavors and properties</li> <li>available configurations</li> </ul> see {@link
* ChronicleMapBuilder} documentation.
* <p>
* <p>Functionally this interface defines some methods supporting garbage-free off-heap programming:
* {@link #getUsing(Object, Object)}, {@link #acquireUsing(Object, Object)}.
* <p>
* <p>Roughly speaking, {@code ChronicleMap} compares keys and values by their binary serialized
* form, which need not be the same equality relation as defined by the built-in {@link
* Object#equals(Object)} method, as prescribed by the general {@link Map} contract.
* <p>
* <p>Note that {@code ChronicleMap} extends {@link Closeable}; don't forget to {@linkplain #close()
* close} the map when it is no longer needed.
*
* @param <K> the map key type
* @param <V> the map value type
* @see ChronicleMapBuilder#create()
* @see ChronicleMapBuilder#createPersistedTo(File)
* @see ChronicleMapBuilder#createOrRecoverPersistedTo(File, boolean)
*/
public interface ChronicleMap<K, V> extends ConcurrentMap<K, V>,
ChronicleHash<K, MapEntry<K, V>, MapSegmentContext<K, V, ?>,
ExternalMapQueryContext<K, V, ?>> {
/**
* Delegates to {@link ChronicleMapBuilder#of(Class, Class)} for convenience.
*
* @param keyClass class of the key type of the Chronicle Map to create
* @param valueClass class of the value type of the Chronicle Map to create
* @param <K> the key type of the Chronicle Map to create
* @param <V> the value type of the Chronicle Map to create
* @return a new {@code ChronicleMapBuilder} for the given key and value classes
*/
static <K, V> ChronicleMapBuilder<K, V> of(Class<K> keyClass, Class<V> valueClass) {
return ChronicleMapBuilder.of(keyClass, valueClass);
}
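/*
* Example (illustrative sketch, not from the original source): creating a small map through this
* shortcut, using the builder's entries(), averageKey() and create() methods; the sizing values
* are arbitrary placeholders:
*
*   ChronicleMap<String, Integer> counts = ChronicleMap
*           .of(String.class, Integer.class)
*           .entries(1_000)
*           .averageKey("a-typical-key")
*           .create();
*   counts.put("hits", 1);
*/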
/**
* Returns the value to which the specified key is mapped, or {@code null} if this map contains
* no mapping for the key.
* <p>
* <p>If the value class allows reusing, consider {@link #getUsing(Object, Object)} method
* instead of this to reduce garbage creation. Read <a
* href="https://github.com/OpenHFT/Chronicle-Map#single-key-queries">the section about usage
* patterns in the Chronicle Map 3 Tutorial</a> for more.
*
* @param key the key whose associated value is to be returned
* @return the value to which the specified key is mapped after this method call, or {@code
* null} if no value is mapped
* @see #getUsing(Object, Object)
*/
@Override
V get(Object key);
/**
* Returns the value to which the specified key is mapped, read to the provided {@code value}
* object, if possible, or returns {@code null}, if this map contains no mapping for the key.
* <p>
* <p>If the specified key is present in the map, the value data is read to the provided {@code
* value} object via value reader's {@link SizedReader#read(net.openhft.chronicle.bytes.Bytes, long, Object)
* read(StreamingDataInput, size, value)} method. If the value deserializer is able to reuse the
* given {@code value} object, calling this method instead of {@link #get(Object)} could help to
* reduce garbage creation.
* <p>
* <p>The provided {@code value} object is allowed to be {@code null}, in this case {@code
* map.getUsing(key, null)} call is semantically equivalent to simple {@code map.get(key)}
* call.
*
* @param key the key whose associated value is to be returned
* @param usingValue the object to read value data in, if possible
* @return the value to which the specified key is mapped, or {@code null} if this map contains
* no mapping for the key
* @see #get(Object)
* @see #acquireUsing(Object, Object)
* @see ChronicleMapBuilder#valueMarshallers(SizedReader, SizedWriter)
*/
V getUsing(K key, V usingValue);
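/*
* Example (illustrative sketch, not from the original source): the value-reuse pattern this method
* enables, assuming a map whose value type is CharSequence so that a StringBuilder can be reused:
*
*   StringBuilder reused = new StringBuilder();
*   CharSequence value = map.getUsing(key, reused);  // fills 'reused' when the key is present
*   if (value != null) {
*       // 'value' refers to 'reused'; no new value instance was allocated for this query
*   }
*/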
/**
* Acquire a value for a key, creating if absent.
* <p>
* <p>If the specified key is absent in the map, {@linkplain
* ChronicleMapBuilder#defaultValueProvider(DefaultValueProvider) default value provider} is
* called. Then this object is put to this map for the specified key.
* <p>
* <p>Then, either if the key was initially absent in the map or already present, the value is
* deserialized just as during {@link #getUsing(Object, Object) getUsing(key, usingValue)} call,
* passed the same {@code key} and {@code usingValue} as into this method call. This means, as
* in {@link #getUsing}, {@code usingValue} could safely be {@code null}, in this case a new
* value instance is created to deserialize the data.
* <p>
* <p>In code, {@code acquireUsing} is specified as :
* <pre>{@code
* V acquireUsing(K key, V usingValue) {
* if (!containsKey(key))
* put(key, defaultValue(key));
* return getUsing(key, usingValue);
* }}</pre>
* <p>
* <p>
* <p>Where {@code defaultValue(key)} returns {@link
* ChronicleMapBuilder#defaultValueProvider(DefaultValueProvider) defaultValueProvider}.
* <p>
* <p>If the {@code ChronicleMap} is off-heap updatable, i. e. created via {@link
* ChronicleMapBuilder} builder (values are {@link Byteable}), there is one more option of what
* to do if the key is absent in the map. By default, value bytes are just zeroed out, no
* default value, either provided for key or constant, is put for the absent key.
* <p>
* <p>Unless value type is a Byteable or a value-type (e.g. {@link net.openhft.chronicle.core.values.LongValue}),
* it's strictly advised to set {@link ChronicleMapBuilder#defaultValueProvider(DefaultValueProvider)
* defaultValueProvider} explicitly. The value may be deserialized from a non-initialized memory region,
* potentially causing marshalling errors.
*
* @param key the key whose associated value is to be returned
* @param usingValue the object to read value data in, if present. Can be null
* @return value to which the given key is mapping after this call, either found or created
* @see #getUsing(Object, Object)
*/
V acquireUsing(@NotNull K key, V usingValue);
/**
* Acquires an update lock and a value for a key.
* <p>
* Lock is released when returned {@link net.openhft.chronicle.core.io.Closeable} object is closed.
* This method is effectively equivalent to {@link #acquireUsing(Object, Object)} except for the
* update lock management policy: {@link #acquireUsing(Object, Object)} releases the lock right away.
* <p>
* <p>If the specified key is absent in the map, {@linkplain
* ChronicleMapBuilder#defaultValueProvider(DefaultValueProvider) default value provider} is
* called. Then this object is put to this map for the specified key.
* <p>
* <p>Unless value is a Byteable or a value-type (e.g. {@link net.openhft.chronicle.core.values.LongValue}),
* it's strictly advised to set {@link ChronicleMapBuilder#defaultValueProvider(DefaultValueProvider)
* defaultValueProvider} explicitly. The value may be deserialized from a non-initialized memory region,
* potentially causing marshalling errors.
* <p>
* <p>Also, if value is not a Byteable or a value-type, changes on {@code usingValue} are
* not propagated to the map memory right away. Updated {@code usingValue} is written to the map
* when the control object is closed, before releasing the update lock.
*
* @param key the key whose associated value is to be returned
* @param usingValue the object to read value data in, if present. Can be null
* @see #acquireUsing(Object, Object)
* @return Lock control object that releases the update lock on close.
*/
@NotNull
net.openhft.chronicle.core.io.Closeable acquireContext(@NotNull K key, @NotNull V usingValue);
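/*
* Example (illustrative sketch, not from the original source): the try-with-resources usage this
* method is designed for, assuming a LongValue-valued map and Values.newNativeReference() from the
* chronicle-values module:
*
*   LongValue value = Values.newNativeReference(LongValue.class);
*   try (net.openhft.chronicle.core.io.Closeable c = map.acquireContext("visits", value)) {
*       value.addValue(1);  // performed under the update lock; written back when 'c' is closed
*   }
*/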
/**
* Returns the result of application of the given function to the value to which the given key
* is mapped. If there is no mapping for the key, {@code null} is returned from {@code
* getMapped()} call without application of the given function. This method is primarily useful
* when accessing {@code ChronicleMap} implementation which delegates it's requests to some
* remote node (server) and pulls the result through serialization/deserialization path, and
* probably network. In this case, when you actually need only a part of the map value's state
* (e. g. a single field) it's cheaper to extract it on the server side and transmit lesser
* bytes.
*
* @param key the key whose associated value is to be queried
* @param function a function to transform the value to the actually needed result,
* which should be smaller than the map value
* @param <R> the result type
* @return the result of applying the function to the mapped value, or {@code null} if there
* is no mapping for the key
*/
<R> R getMapped(K key, @NotNull SerializableFunction<? super V, R> function);
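/*
* Example (illustrative sketch, not from the original source): extracting a single field instead of
* transferring the whole value; 'Profile' and its getEmail() accessor are hypothetical names:
*
*   String email = map.getMapped(userId, Profile::getEmail);  // null if 'userId' is not mapped
*/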
/**
* Exports all the entries to a {@link File}, storing them in JSON format. Where possible, an
* attempt is made to use standard Java serialisation and keep the data human readable; data
* serialized using custom serializers is converted to a binary format, which is not human
* readable, but this is only done if the keys or values are not {@link Serializable}.
* This method can be used in conjunction with {@link ChronicleMap#putAll(File)} and is
* especially useful if you wish to import/export entries from one Chronicle Map into another.
* This import and export of the entries can be performed even when the versions of ChronicleMap
* differ. This method is not performant and as such we recommend it is not used in performance
* sensitive code.
*
* @param toFile the file to store all the entries to; the entries will be stored in JSON
*               format
* @throws IOException if it is not possible to store the data to {@code toFile}
* @see ChronicleMap#putAll(File)
*/
void getAll(File toFile) throws IOException;
/**
* Imports all the entries from a {@link File}; the {@code fromFile} must have been created by,
* or be in the same format as, {@link ChronicleMap#getAll(File)}. This method behaves
* similarly to {@link Map#put(Object, Object)}, in that existing
* entries are overwritten. A write lock is only held while each individual entry is inserted
* into the map, not over all the entries in the {@link File}.
*
* @param fromFile the file containing entries (in JSON format) which will be deserialized and
*                 {@link Map#put(Object, Object)} into the map
* @throws IOException if it is not possible to read the {@code fromFile}
* @see ChronicleMap#getAll(File)
*/
void putAll(File fromFile) throws IOException;
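/*
* Example (illustrative sketch, not from the original source): the JSON export/import round trip
* described above, assuming 'source' and 'target' are maps with compatible key and value types:
*
*   File dump = new File("entries.json");
*   source.getAll(dump);   // export every entry as JSON
*   target.putAll(dump);   // re-import, overwriting any existing entries
*/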
/**
* @return the class of {@code <V>}
*/
Class<V> valueClass();
/**
* @return the value Class or UnresolvedType if unknown.
*/
Type valueType();
/**
* @return the amount of free space in the map as a percentage. When the free space gets low (around 5%) the map will automatically expand. The
* number of times it can automatically expand is based on {@code net.openhft.chronicle.map.ChronicleMapBuilder#maxBloatFactor}. If the map
* expands, you will see an increase in the available free space. NOTE: it is not possible to expand the Chronicle Map manually.
* <p>
* See also {@link net.openhft.chronicle.map.ChronicleMap#remainingAutoResizes()}, as these operations are related.
*/
default short percentageFreeSpace() {
throw new UnsupportedOperationException("todo");
}
/**
* @return the number of times in the future the map can expand the capacity of each segment (by expanding its capacity we mean expanding the
* maximum number of entries that can be stored in the map). The map will expand automatically; however, there is an upper limit to the number of
* times the map can expand. This limit is set via {@code net.openhft.chronicle.map.ChronicleMapBuilder#maxBloatFactor}. If {@code remainingAutoResizes} drops to zero,
* the map is no longer able to expand; if, subsequently, the free space (see {@link net.openhft.chronicle.map.ChronicleMap#percentageFreeSpace()})
* in the map becomes low (around 5%), the map will not be able to take more entries and will fail with a
* {@code java.lang.IllegalStateException}. For production systems it is recommended that you periodically monitor {@code remainingAutoResizes} and
* {@link net.openhft.chronicle.map.ChronicleMap#percentageFreeSpace()}.
*/
default int remainingAutoResizes() {
throw new UnsupportedOperationException("todo");
}
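/*
* Example (illustrative sketch, not from the original source): the periodic monitoring recommended
* above; the 10% threshold is an arbitrary example value:
*
*   if (map.remainingAutoResizes() == 0 && map.percentageFreeSpace() < 10) {
*       // alert: the map can no longer expand and is close to rejecting new entries
*   }
*/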
/**
* The maximum number of times the Chronicle Map is allowed to grow in size beyond
* the configured number of entries.
* <p>
* <p>The default maximum bloat factor is {@code 1.0} - i.e. "no bloat is expected".
* <p>
* <p>It is strongly advised not to configure {@code maxBloatFactor} to more than {@code 10.0}:
* almost certainly, you either should configure {@code ChronicleHash}es completely differently,
* or this data store doesn't fit your case.
*
* @return maxBloatFactor the maximum number of times the Chronicle Map is allowed to be resized
*/
default double maxBloatFactor() {
throw new UnsupportedOperationException("todo");
}
}
|
src/main/java/net/openhft/chronicle/map/ChronicleMap.java
|
/*
* Copyright 2012-2018 Chronicle Map Contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.openhft.chronicle.map;
import net.openhft.chronicle.bytes.Byteable;
import net.openhft.chronicle.core.util.SerializableFunction;
import net.openhft.chronicle.hash.ChronicleHash;
import net.openhft.chronicle.hash.serialization.SizedReader;
import net.openhft.chronicle.hash.serialization.SizedWriter;
import org.jetbrains.annotations.NotNull;
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.io.Serializable;
import java.lang.reflect.Type;
import java.util.Map;
import java.util.concurrent.ConcurrentMap;
/**
* {@code ChronicleMap} provides concurrent access to a <i>Chronicle Map key-value store</i> from a
* JVM process.
* <p>
* <p>For information on <ul> <li>how to construct a {@code ChronicleMap}</li> <li>{@code
* ChronicleMap} flavors and properties</li> <li>available configurations</li> </ul> see {@link
* ChronicleMapBuilder} documentation.
* <p>
* <p>Functionally this interface defines some methods supporting garbage-free off-heap programming:
* {@link #getUsing(Object, Object)}, {@link #acquireUsing(Object, Object)}.
* <p>
* <p>Roughly speaking, {@code ChronicleMap} compares keys and values by their binary serialized
* form, which need not be the same equality relation as defined by the built-in {@link
* Object#equals(Object)} method, as prescribed by the general {@link Map} contract.
* <p>
* <p>Note that {@code ChronicleMap} extends {@link Closeable}; don't forget to {@linkplain #close()
* close} the map when it is no longer needed.
*
* @param <K> the map key type
* @param <V> the map value type
* @see ChronicleMapBuilder#create()
* @see ChronicleMapBuilder#createPersistedTo(File)
* @see ChronicleMapBuilder#createOrRecoverPersistedTo(File, boolean)
*/
public interface ChronicleMap<K, V> extends ConcurrentMap<K, V>,
ChronicleHash<K, MapEntry<K, V>, MapSegmentContext<K, V, ?>,
ExternalMapQueryContext<K, V, ?>> {
/**
* Delegates to {@link ChronicleMapBuilder#of(Class, Class)} for convenience.
*
* @param keyClass class of the key type of the Chronicle Map to create
* @param valueClass class of the value type of the Chronicle Map to create
* @param <K> the key type of the Chronicle Map to create
* @param <V> the value type of the Chronicle Map to create
* @return a new {@code ChronicleMapBuilder} for the given key and value classes
*/
static <K, V> ChronicleMapBuilder<K, V> of(Class<K> keyClass, Class<V> valueClass) {
return ChronicleMapBuilder.of(keyClass, valueClass);
}
/**
* Returns the value to which the specified key is mapped, or {@code null} if this map contains
* no mapping for the key.
* <p>
* <p>If the value class allows reusing, consider {@link #getUsing(Object, Object)} method
* instead of this to reduce garbage creation. Read <a
* href="https://github.com/OpenHFT/Chronicle-Map#single-key-queries">the section about usage
* patterns in the Chronicle Map 3 Tutorial</a> for more.
*
* @param key the key whose associated value is to be returned
* @return the value to which the specified key is mapped after this method call, or {@code
* null} if no value is mapped
* @see #getUsing(Object, Object)
*/
@Override
V get(Object key);
/**
* Returns the value to which the specified key is mapped, read to the provided {@code value}
* object, if possible, or returns {@code null}, if this map contains no mapping for the key.
* <p>
* <p>If the specified key is present in the map, the value data is read to the provided {@code
* value} object via value reader's {@link SizedReader#read(net.openhft.chronicle.bytes.Bytes, long, Object)
* read(StreamingDataInput, size, value)} method. If the value deserializer is able to reuse the
* given {@code value} object, calling this method instead of {@link #get(Object)} could help to
* reduce garbage creation.
* <p>
* <p>The provided {@code value} object is allowed to be {@code null}, in this case {@code
* map.getUsing(key, null)} call is semantically equivalent to simple {@code map.get(key)}
* call.
*
* @param key the key whose associated value is to be returned
* @param usingValue the object to read value data in, if possible
* @return the value to which the specified key is mapped, or {@code null} if this map contains
* no mapping for the key
* @see #get(Object)
* @see #acquireUsing(Object, Object)
* @see ChronicleMapBuilder#valueMarshallers(SizedReader, SizedWriter)
*/
V getUsing(K key, V usingValue);
/**
* Acquire a value for a key, creating if absent.
* <p>
* <p>If the specified key is absent in the map, {@linkplain
* ChronicleMapBuilder#defaultValueProvider(DefaultValueProvider) default value provider} is
* called. Then this object is put to this map for the specified key.
* <p>
* <p>Then, either if the key was initially absent in the map or already present, the value is
* deserialized just as during {@link #getUsing(Object, Object) getUsing(key, usingValue)} call,
* passed the same {@code key} and {@code usingValue} as into this method call. This means, as
* in {@link #getUsing}, {@code usingValue} could safely be {@code null}, in this case a new
* value instance is created to deserialize the data.
* <p>
* <p>In code, {@code acquireUsing} is specified as :
* <pre>{@code
* V acquireUsing(K key, V usingValue) {
* if (!containsKey(key))
* put(key, defaultValue(key));
* return getUsing(key, usingValue);
* }}</pre>
* <p>
* <p>
* <p>Where {@code defaultValue(key)} returns {@link
* ChronicleMapBuilder#defaultValueProvider(DefaultValueProvider) defaultValueProvider}.
* <p>
* <p>If the {@code ChronicleMap} is off-heap updatable, i. e. created via {@link
* ChronicleMapBuilder} builder (values are {@link Byteable}), there is one more option of what
* to do if the key is absent in the map. By default, value bytes are just zeroed out, no
* default value, either provided for key or constant, is put for the absent key.
*
* @param key the key whose associated value is to be returned
* @param usingValue the object to read value data in, if present. Can not be null
* @return value to which the given key is mapping after this call, either found or created
* @see #getUsing(Object, Object)
*/
V acquireUsing(@NotNull K key, V usingValue);
@NotNull
net.openhft.chronicle.core.io.Closeable acquireContext(@NotNull K key, @NotNull V usingValue);
/**
* Returns the result of application of the given function to the value to which the given key
* is mapped. If there is no mapping for the key, {@code null} is returned from {@code
* getMapped()} call without application of the given function. This method is primarily useful
* when accessing {@code ChronicleMap} implementation which delegates it's requests to some
* remote node (server) and pulls the result through serialization/deserialization path, and
* probably network. In this case, when you actually need only a part of the map value's state
* (e. g. a single field) it's cheaper to extract it on the server side and transmit lesser
* bytes.
*
* @param key the key whose associated value is to be queried
* @param function a function to transform the value to the actually needed result,
* which should be smaller than the map value
* @param <R> the result type
* @return the result of applying the function to the mapped value, or {@code null} if there
* is no mapping for the key
*/
<R> R getMapped(K key, @NotNull SerializableFunction<? super V, R> function);
/**
* Exports all the entries to a {@link File}, storing them in JSON format. Where possible, an
* attempt is made to use standard Java serialisation and keep the data human readable; data
* serialized using custom serializers is converted to a binary format, which is not human
* readable, but this is only done if the keys or values are not {@link Serializable}.
* This method can be used in conjunction with {@link ChronicleMap#putAll(File)} and is
* especially useful if you wish to import/export entries from one Chronicle Map into another.
* This import and export of the entries can be performed even when the versions of ChronicleMap
* differ. This method is not performant and as such we recommend it is not used in performance
* sensitive code.
*
* @param toFile the file to store all the entries to; the entries will be stored in JSON
*               format
* @throws IOException if it is not possible to store the data to {@code toFile}
* @see ChronicleMap#putAll(File)
*/
void getAll(File toFile) throws IOException;
/**
* Imports all the entries from a {@link File}; the {@code fromFile} must have been created by,
* or be in the same format as, {@link ChronicleMap#getAll(File)}. This method behaves
* similarly to {@link Map#put(Object, Object)}, in that existing
* entries are overwritten. A write lock is only held while each individual entry is inserted
* into the map, not over all the entries in the {@link File}.
*
* @param fromFile the file containing entries (in JSON format) which will be deserialized and
*                 {@link Map#put(Object, Object)} into the map
* @throws IOException if it is not possible to read the {@code fromFile}
* @see ChronicleMap#getAll(File)
*/
void putAll(File fromFile) throws IOException;
/**
* @return the class of {@code <V>}
*/
Class<V> valueClass();
/**
* @return the value Class or UnresolvedType if unknown.
*/
Type valueType();
/**
* @return the amount of free space in the map as a percentage. When the free space gets low (around 5%) the map will automatically expand. The
* number of times it can automatically expand is based on {@code net.openhft.chronicle.map.ChronicleMapBuilder#maxBloatFactor}. If the map
* expands, you will see an increase in the available free space. NOTE: it is not possible to expand the Chronicle Map manually.
* <p>
* See also {@link net.openhft.chronicle.map.ChronicleMap#remainingAutoResizes()}, as these operations are related.
*/
default short percentageFreeSpace() {
throw new UnsupportedOperationException("todo");
}
/**
* @return the number of times in the future the map can expand the capacity of each segment (by expanding its capacity we mean expanding the
* maximum number of entries that can be stored in the map). The map will expand automatically; however, there is an upper limit to the number of
* times the map can expand. This limit is set via {@code net.openhft.chronicle.map.ChronicleMapBuilder#maxBloatFactor}. If {@code remainingAutoResizes} drops to zero,
* the map is no longer able to expand; if, subsequently, the free space (see {@link net.openhft.chronicle.map.ChronicleMap#percentageFreeSpace()})
* in the map becomes low (around 5%), the map will not be able to take more entries and will fail with a
* {@code java.lang.IllegalStateException}. For production systems it is recommended that you periodically monitor {@code remainingAutoResizes} and
* {@link net.openhft.chronicle.map.ChronicleMap#percentageFreeSpace()}.
*/
default int remainingAutoResizes() {
throw new UnsupportedOperationException("todo");
}
/**
* The maximum number of times the Chronicle Map is allowed to grow in size beyond
* the configured number of entries.
* <p>
* <p>The default maximum bloat factor is {@code 1.0} - i.e. "no bloat is expected".
* <p>
* <p>It is strongly advised not to configure {@code maxBloatFactor} to more than {@code 10.0}:
* almost certainly, you either should configure {@code ChronicleHash}es completely differently,
* or this data store doesn't fit your case.
*
* @return maxBloatFactor the maximum number of times the Chronicle Map is allowed to be resized
*/
default double maxBloatFactor() {
throw new UnsupportedOperationException("todo");
}
}
|
Document ChronicleMap#acquireContext
|
src/main/java/net/openhft/chronicle/map/ChronicleMap.java
|
Document ChronicleMap#acquireContext
|
|
Java
|
apache-2.0
|
df767369152a4ffe28fe96ab59c8a5cc10978fc7
| 0
|
NitorCreations/willow,NitorCreations/willow,NitorCreations/willow,NitorCreations/willow,NitorCreations/willow
|
package com.btr.proxy.selector.pac;
import static com.btr.proxy.selector.pac.TestUtil.toUrl;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.MalformedURLException;
import java.net.Proxy;
import java.net.Proxy.Type;
import java.net.ProxySelector;
import java.net.SocketAddress;
import java.net.URI;
import java.net.UnknownHostException;
import java.util.List;
import org.junit.Test;
/*****************************************************************************
* Tests for the Pac script parser and proxy selector.
*
* @author Bernd Rosstauscher (proxyvole@rosstauscher.de) Copyright 2009
****************************************************************************/
public class PacProxySelectorTest {
/*************************************************************************
* Test method
*
* @throws ProxyException
* on proxy detection error.
* @throws MalformedURLException
* on URL errors
************************************************************************/
@Test
public void testScriptExecution() throws Exception {
PacProxySelector test = PacProxySelector
.buildPacSelectorForUrl(toUrl("test1.pac"));
List<Proxy> result = test.select(TestUtil.HTTP_TEST_URI);
assertTrue(PacProxySelector.isEnabled());
assertEquals(TestUtil.HTTP_TEST_PROXY, result.get(0));
}
@Test
public void testNullUrl() throws Exception {
try {
PacProxySelector test = PacProxySelector
.buildPacSelectorForUrl(toUrl("test1.pac"));
test.select(null);
fail("Null url should throw exception");
} catch (IllegalArgumentException e) {
// expected: select(null) must reject a null URI
}
}
@Test
public void testInvalid() throws Exception {
PacProxySelector test = PacProxySelector
.buildPacSelectorForUrl(toUrl("testInvalid.pac"));
assertEquals("Invalid return value should return no_proxy", Proxy.NO_PROXY,
test.select(new URI("http://test1")).get(0));
}
@Test
public void testInvalid2() throws Exception {
PacProxySelector test = PacProxySelector
.buildPacSelectorForUrl(toUrl("testInvalid.pac"));
assertEquals("Invalid return value should return no_proxy", Proxy.NO_PROXY,
test.select(new URI("http://test2")).get(0));
}
@Test
public void testInvalid3() throws Exception {
PacProxySelector test = PacProxySelector
.buildPacSelectorForUrl(toUrl("testInvalid.pac"));
assertTrue("Invalid return value should return no_proxy",
test.select(new URI("http://test3")).isEmpty());
}
/*************************************************************************
* Test method
*
* @throws ProxyException
* on proxy detection error.
* @throws MalformedURLException
* on URL errors
************************************************************************/
@Test
public void testScriptExecution2() throws Exception {
PacProxySelector pacProxySelector = new PacProxySelector(
new UrlPacScriptSource(toUrl("test2.pac")));
List<Proxy> result = pacProxySelector.select(TestUtil.HTTP_TEST_URI);
assertEquals(Proxy.NO_PROXY, result.get(0));
result = pacProxySelector.select(TestUtil.HTTPS_TEST_URI);
assertEquals(Proxy.NO_PROXY, result.get(0));
}
public class UsedProxyException extends RuntimeException {
private static final long serialVersionUID = 1L;
public UsedProxyException(String msg) {
super(msg);
}
}
/*************************************************************************
* Test download fix to prevent infinite loop.
*
* @throws ProxyException
* on proxy detection error.
* @throws MalformedURLException
* on URL errors
************************************************************************/
@Test
public void pacDownloadFromURLShouldNotUseProxy() throws Exception {
ProxySelector oldOne = ProxySelector.getDefault();
try {
ProxySelector.setDefault(new ProxySelector() {
@Override
public List<Proxy> select(URI uri) {
throw new UsedProxyException("Should not download via proxy");
}
@Override
public void connectFailed(URI uri, SocketAddress sa, IOException ioe) {
// Not used
}
});
PacProxySelector pacProxySelector = new PacProxySelector(
new UrlPacScriptSource("http://www.test.invalid/wpad.pac"));
List<Proxy> ret = pacProxySelector.select(TestUtil.HTTPS_TEST_URI);
assertEquals("Should return no proxy for invalid pac url", Proxy.NO_PROXY, ret.get(0));
} finally {
ProxySelector.setDefault(oldOne);
}
}
/*************************************************************************
* Test method
*
* @throws ProxyException
* on proxy detection error.
* @throws MalformedURLException
* on URL errors
************************************************************************/
@Test
public void testScriptMuliProxy() throws Exception {
PacProxySelector pacProxySelector = new PacProxySelector(
new UrlPacScriptSource(toUrl("testMultiProxy.pac")));
List<Proxy> result = pacProxySelector.select(TestUtil.HTTP_TEST_URI);
assertEquals(2, result.size());
assertEquals(
new Proxy(Type.HTTP, InetSocketAddress.createUnresolved("my-proxy.com",
80)), result.get(0));
assertEquals(
new Proxy(Type.HTTP, InetSocketAddress.createUnresolved(
"my-proxy2.com", 8080)), result.get(1));
}
/*************************************************************************
* Test method for the override local IP feature.
*
* @throws ProxyException
* on proxy detection error.
* @throws MalformedURLException
* on URL errors
************************************************************************/
@Test
public void testLocalIPOverride() throws Exception {
System.setProperty(PacScriptMethods.OVERRIDE_LOCAL_IP, "123.123.123.123");
try {
PacProxySelector pacProxySelector = new PacProxySelector(
new UrlPacScriptSource(toUrl("testLocalIP.pac")));
List<Proxy> result = pacProxySelector.select(TestUtil.HTTP_TEST_URI);
assertEquals(
result.get(0),
new Proxy(Type.HTTP, InetSocketAddress.createUnresolved(
"123.123.123.123", 8080)));
} finally {
System.setProperty(PacScriptMethods.OVERRIDE_LOCAL_IP, "");
}
}
@Test
public void testSocksType() throws Exception {
PacProxySelector pacProxySelector = new PacProxySelector(
new UrlPacScriptSource(toUrl("testSocks.pac")));
Proxy ret = pacProxySelector.select(TestUtil.SOCKS_TEST_URI).get(0);
assertEquals("socks url should return socks type", TestUtil.SOCKS_TEST_PROXY.type(),
ret.type());
assertEquals("socks url should return socks type", TestUtil.SOCKS_TEST_PROXY.address(),
ret.address());
}
}
|
willow-utils/src/test/java/com/btr/proxy/selector/pac/PacProxySelectorTest.java
|
package com.btr.proxy.selector.pac;
import static com.btr.proxy.selector.pac.TestUtil.toUrl;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.MalformedURLException;
import java.net.Proxy;
import java.net.Proxy.Type;
import java.net.ProxySelector;
import java.net.SocketAddress;
import java.net.URI;
import java.net.UnknownHostException;
import java.util.List;
import org.junit.Test;
/*****************************************************************************
* Tests for the Pac script parser and proxy selector.
*
* @author Bernd Rosstauscher (proxyvole@rosstauscher.de) Copyright 2009
****************************************************************************/
public class PacProxySelectorTest {
/*************************************************************************
* Test method
*
* @throws ProxyException
* on proxy detection error.
* @throws MalformedURLException
* on URL errors
************************************************************************/
@Test
public void testScriptExecution() throws Exception {
PacProxySelector test = PacProxySelector
.buildPacSelectorForUrl(toUrl("test1.pac"));
List<Proxy> result = test.select(TestUtil.HTTP_TEST_URI);
assertTrue(PacProxySelector.isEnabled());
assertEquals(TestUtil.HTTP_TEST_PROXY, result.get(0));
}
@Test
public void testNullUrl() throws Exception {
try {
PacProxySelector test = PacProxySelector
.buildPacSelectorForUrl(toUrl("test1.pac"));
test.select(null);
assertTrue("Null url should throw exception", false);
} catch (IllegalArgumentException e) {
}
}
@Test
public void testInvalid() throws Exception {
PacProxySelector test = PacProxySelector
.buildPacSelectorForUrl(toUrl("testInvalid.pac"));
assertEquals("Invalid return value should return no_proxy", Proxy.NO_PROXY,
test.select(new URI("http://test1")).get(0));
}
@Test
public void testInvalid2() throws Exception {
PacProxySelector test = PacProxySelector
.buildPacSelectorForUrl(toUrl("testInvalid.pac"));
assertEquals("Invalid return value should return no_proxy", Proxy.NO_PROXY,
test.select(new URI("http://test2")).get(0));
}
@Test
public void testInvalid3() throws Exception {
PacProxySelector test = PacProxySelector
.buildPacSelectorForUrl(toUrl("testInvalid.pac"));
assertTrue("Invalid return value should return no_proxy",
test.select(new URI("http://test3")).isEmpty());
}
/*************************************************************************
* Test method
*
* @throws ProxyException
* on proxy detection error.
* @throws MalformedURLException
* on URL errors
************************************************************************/
@Test
public void testScriptExecution2() throws Exception {
PacProxySelector pacProxySelector = new PacProxySelector(
new UrlPacScriptSource(toUrl("test2.pac")));
List<Proxy> result = pacProxySelector.select(TestUtil.HTTP_TEST_URI);
assertEquals(Proxy.NO_PROXY, result.get(0));
result = pacProxySelector.select(TestUtil.HTTPS_TEST_URI);
assertEquals(Proxy.NO_PROXY, result.get(0));
}
public class UsedProxyException extends RuntimeException {
private static final long serialVersionUID = 1L;
public UsedProxyException(String msg) {
super(msg);
}
}
/*************************************************************************
* Test download fix to prevent infinite loop.
*
* @throws ProxyException
* on proxy detection error.
* @throws MalformedURLException
* on URL errors
************************************************************************/
@Test
public void pacDownloadFromURLShouldNotUseProxy() throws Exception {
ProxySelector oldOne = ProxySelector.getDefault();
try {
ProxySelector.setDefault(new ProxySelector() {
@Override
public List<Proxy> select(URI uri) {
throw new UsedProxyException("Should not download via proxy");
}
@Override
public void connectFailed(URI uri, SocketAddress sa, IOException ioe) {
// Not used
}
});
PacProxySelector pacProxySelector = new PacProxySelector(
new UrlPacScriptSource("http://www.test.invalid/wpad.pac"));
List<Proxy> ret = pacProxySelector.select(TestUtil.HTTPS_TEST_URI);
assertEquals("Should return no proxy for invalid pac url", Proxy.NO_PROXY, ret.get(0));
} catch (UsedProxyException e) {
assertTrue("Tried to use a proxy", false);
} finally {
ProxySelector.setDefault(oldOne);
}
}
/*************************************************************************
* Test method
*
* @throws ProxyException
* on proxy detection error.
* @throws MalformedURLException
* on URL errors
************************************************************************/
@Test
public void testScriptMuliProxy() throws Exception {
PacProxySelector pacProxySelector = new PacProxySelector(
new UrlPacScriptSource(toUrl("testMultiProxy.pac")));
List<Proxy> result = pacProxySelector.select(TestUtil.HTTP_TEST_URI);
assertEquals(2, result.size());
assertEquals(
new Proxy(Type.HTTP, InetSocketAddress.createUnresolved("my-proxy.com",
80)), result.get(0));
assertEquals(
new Proxy(Type.HTTP, InetSocketAddress.createUnresolved(
"my-proxy2.com", 8080)), result.get(1));
}
/*************************************************************************
* Test method for the override local IP feature.
*
* @throws ProxyException
* on proxy detection error.
* @throws MalformedURLException
* on URL errors
************************************************************************/
@Test
public void testLocalIPOverride() throws Exception {
System.setProperty(PacScriptMethods.OVERRIDE_LOCAL_IP, "123.123.123.123");
try {
PacProxySelector pacProxySelector = new PacProxySelector(
new UrlPacScriptSource(toUrl("testLocalIP.pac")));
List<Proxy> result = pacProxySelector.select(TestUtil.HTTP_TEST_URI);
assertEquals(
result.get(0),
new Proxy(Type.HTTP, InetSocketAddress.createUnresolved(
"123.123.123.123", 8080)));
} finally {
System.setProperty(PacScriptMethods.OVERRIDE_LOCAL_IP, "");
}
}
@Test
public void testSocksType() throws Exception {
PacProxySelector pacProxySelector = new PacProxySelector(
new UrlPacScriptSource(toUrl("testSocks.pac")));
Proxy ret = pacProxySelector.select(TestUtil.SOCKS_TEST_URI).get(0);
assertEquals("socks url should return socks type", TestUtil.SOCKS_TEST_PROXY.type(),
ret.type());
assertEquals("socks url should return socks type", TestUtil.SOCKS_TEST_PROXY.address(),
ret.address());
}
}
|
Remove assert
|
willow-utils/src/test/java/com/btr/proxy/selector/pac/PacProxySelectorTest.java
|
Remove assert
|
|
Java
|
apache-2.0
|
83b12058d435b9fbc5e99beb7f5ab27ec6c8520d
| 0
|
weld/core,weld/core,antoinesd/weld-core,manovotn/core,antoinesd/weld-core,manovotn/core,manovotn/core,antoinesd/weld-core
|
package org.jboss.webbeans.test.unit.implementation;
import org.jboss.webbeans.tck.AbstractTest;
import org.testng.annotations.Test;
public class RemoveMethodTests extends AbstractTest
{
/**
* If the application directly calls an EJB remove method of an instance of a
* session bean that is a stateful session bean and declares any scope other
* than @Dependent, an UnsupportedOperationException is thrown.
*/
@Test(groups = { "enterprisebean", "removemethod", "lifecycle", "stub" })
public void testApplicationCalledRemoveMethodOfStatefulSessionBeanWithNonDependentScopeFails()
{
assert false;
}
/**
* If the application directly calls an EJB remove method of an instance of a
* session bean that is a stateful session bean and has scope @Dependent then
* no parameters are passed to the method by the container.
*/
@Test(groups = { "enterprisebean", "removemethod", "lifecycle", "stub" })
public void testApplicationCalledRemoveMethodOfStatefulSessionBeanWithDependentScopeHasNoInjectedParameters()
{
deployBeans(Bad.class);
BadLocal x = manager.getInstanceByType(BadLocal.class);
x.bye();
}
/**
* Furthermore, the container ignores the instance instead of destroying it
* when Bean.destroy() is called, as defined in Section 6.5, Lifecycle of
* stateful session beans.
*/
@Test(groups = { "enterprisebean", "removemethod", "lifecycle", "stub" })
public void testApplicationCalledRemoveMethodOfStatefulSessionBeanWithDependentScopeIsIgnoredWhenDestroyed()
{
assert false;
}
}
|
webbeans-ri/src/test/java/org/jboss/webbeans/test/unit/implementation/RemoveMethodTests.java
|
package org.jboss.webbeans.test.unit.implementation;
import org.jboss.webbeans.tck.tests.AbstractTest;
import org.testng.annotations.Test;
public class RemoveMethodTests extends AbstractTest
{
/**
* If the application directly calls an EJB remove method of an instance of a
* session bean that is a stateful session bean and declares any scope other
* than @Dependent, an UnsupportedOperationException is thrown.
*/
@Test(groups = { "enterprisebean", "removemethod", "lifecycle", "stub" })
public void testApplicationCalledRemoveMethodOfStatefulSessionBeanWithNonDependentScopeFails()
{
assert false;
}
/**
* If the application directly calls an EJB remove method of an instance of a
* session bean that is a stateful session bean and has scope @Dependent then
* no parameters are passed to the method by the container.
*/
@Test(groups = { "enterprisebean", "removemethod", "lifecycle", "stub" })
public void testApplicationCalledRemoveMethodOfStatefulSessionBeanWithDependentScopeHasNoInjectedParameters()
{
deployBeans(Bad.class);
BadLocal x = manager.getInstanceByType(BadLocal.class);
x.bye();
}
/**
* Furthermore, the container ignores the instance instead of destroying it
* when Bean.destroy() is called, as defined in Section 6.5, Lifecycle of
* stateful session beans.
*/
@Test(groups = { "enterprisebean", "removemethod", "lifecycle", "stub" })
public void testApplicationCalledRemoveMethodOfStatefulSessionBeanWithDependentScopeIsIgnoredWhenDestroyed()
{
assert false;
}
}
|
updated test
git-svn-id: 811cd8a17a8c3c0c263af499002feedd54a892d0@1114 1c488680-804c-0410-94cd-c6b725194a0e
|
webbeans-ri/src/test/java/org/jboss/webbeans/test/unit/implementation/RemoveMethodTests.java
|
updated test
|
|
Java
|
bsd-2-clause
|
b6c90fec102e5c8cdfd820eaf5773d189185a410
| 0
|
mohanaraosv/jodd,wjw465150/jodd,javachengwc/jodd,tempbottle/jodd,wjw465150/jodd,southwolf/jodd,southwolf/jodd,vilmospapp/jodd,oblac/jodd,tempbottle/jodd,mosoft521/jodd,javachengwc/jodd,mohanaraosv/jodd,mosoft521/jodd,southwolf/jodd,southwolf/jodd,wjw465150/jodd,mtakaki/jodd,wsldl123292/jodd,oetting/jodd,mohanaraosv/jodd,vilmospapp/jodd,mtakaki/jodd,vilmospapp/jodd,oblac/jodd,tempbottle/jodd,mohanaraosv/jodd,mtakaki/jodd,javachengwc/jodd,oblac/jodd,oblac/jodd,mosoft521/jodd,mosoft521/jodd,vilmospapp/jodd,mtakaki/jodd,vilmospapp/jodd,javachengwc/jodd,wsldl123292/jodd,oetting/jodd,oetting/jodd,wsldl123292/jodd,wjw465150/jodd,tempbottle/jodd,wsldl123292/jodd,oetting/jodd
|
// Copyright (c) 2003-2014, Jodd Team (jodd.org). All Rights Reserved.
package jodd.json;
import jodd.introspector.ClassDescriptor;
import jodd.introspector.ClassIntrospector;
import jodd.introspector.FieldDescriptor;
import jodd.introspector.Getter;
import jodd.introspector.PropertyDescriptor;
import jodd.json.meta.JsonAnnotationManager;
import java.lang.reflect.Modifier;
/**
* Bean visitor that serializes properties of a bean.
* It analyzes the rules for inclusion/exclusion of a property.
*/
public class BeanSerializer {
private final JsonContext jsonContext;
private final Object source;
private boolean declared;
private final String classMetadataName;
private final Class type;
private int count;
private final JsonAnnotationManager.TypeData typeData;
public BeanSerializer(JsonContext jsonContext, Object bean) {
this.jsonContext = jsonContext;
this.source = bean;
this.count = 0;
this.declared = false;
this.classMetadataName = jsonContext.jsonSerializer.classMetadataName;
type = bean.getClass();
typeData = JoddJson.annotationManager.lookupTypeData(type);
}
/**
* Serializes a bean.
*/
public void serialize() {
Class type = source.getClass();
ClassDescriptor classDescriptor = ClassIntrospector.lookup(type);
if (classMetadataName != null) {
// process first 'meta' fields 'class'
onProperty(classMetadataName, null, null, false);
}
PropertyDescriptor[] propertyDescriptors = classDescriptor.getAllPropertyDescriptors();
for (PropertyDescriptor propertyDescriptor : propertyDescriptors) {
Getter getter = propertyDescriptor.getGetter(declared);
if (getter != null) {
String propertyName = propertyDescriptor.getName();
Class propertyType = propertyDescriptor.getType();
boolean isTransient = false;
// check for transient flag
FieldDescriptor fieldDescriptor = propertyDescriptor.getFieldDescriptor();
if (fieldDescriptor != null) {
isTransient = Modifier.isTransient(fieldDescriptor.getField().getModifiers());
}
onProperty(propertyName, propertyType, propertyDescriptor, isTransient);
}
}
}
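// Hedged usage sketch: this visitor is normally driven by jodd's JsonContext rather than being
// called directly; assuming jodd-json's public JsonSerializer API, a typical entry point is:
//
//   String json = new JsonSerializer().serialize(someBean);
//
// which builds a JsonContext internally and lets BeanSerializer walk the bean's properties.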
/**
* Invoked on each property. Properties are matched against the rules.
* If a property passes all the rules, it will be processed in
* {@link #onSerializableProperty(String, Class, Object)}.
*/
protected void onProperty(String propertyName, Class propertyType, PropertyDescriptor pd, boolean isTransient) {
Path currentPath = jsonContext.path;
currentPath.push(propertyName);
// determine if name should be included/excluded
boolean include = !typeData.strict;
// + don't include transient fields
if (isTransient) {
include = false;
}
// + all collections are not serialized by default
include = jsonContext.matchIgnoredPropertyTypes(propertyType, include);
// + annotations
include = typeData.rules.apply(propertyName, true, include);
// + path queries: excludes/includes
include = jsonContext.matchPathToQueries(include);
// done
if (!include) {
currentPath.pop();
return;
}
Object value;
if (propertyType == null) {
// metadata - classname
value = source.getClass().getName();
} else {
value = readProperty(source, pd);
// change name for properties
propertyName = typeData.resolveJsonName(propertyName);
}
onSerializableProperty(propertyName, propertyType, value);
currentPath.pop();
}
/**
* Invoked on serializable properties that have passed all the rules.
*/
protected void onSerializableProperty(String propertyName, Class propertyType, Object value) {
jsonContext.pushName(propertyName, count > 0);
jsonContext.serialize(value);
if (jsonContext.isNamePoped()) {
count++;
}
}
/**
* Reads property using property descriptor.
*/
private Object readProperty(Object source, PropertyDescriptor propertyDescriptor) {
Getter getter = propertyDescriptor.getGetter(declared);
if (getter != null) {
try {
return getter.invokeGetter(source);
}
catch (Exception ex) {
throw new JsonException(ex);
}
}
return null;
}
}
|
jodd-json/src/main/java/jodd/json/BeanSerializer.java
|
// Copyright (c) 2003-2014, Jodd Team (jodd.org). All Rights Reserved.
package jodd.json;
import jodd.introspector.ClassDescriptor;
import jodd.introspector.ClassIntrospector;
import jodd.introspector.FieldDescriptor;
import jodd.introspector.Getter;
import jodd.introspector.PropertyDescriptor;
import jodd.json.meta.JsonAnnotationManager;
import java.lang.reflect.Modifier;
/**
* Bean visitor that serializes properties of a bean.
* It analyzes the rules for inclusion/exclusion of a property.
*/
public class BeanSerializer {
private final JsonContext jsonContext;
private final Object source;
private boolean declared;
private final String classMetadataName;
private final Class type;
private int count;
private final JsonAnnotationManager.TypeData typeData;
public BeanSerializer(JsonContext jsonContext, Object bean) {
this.jsonContext = jsonContext;
this.source = bean;
this.count = 0;
this.declared = false;
this.classMetadataName = jsonContext.jsonSerializer.classMetadataName;
type = bean.getClass();
typeData = JoddJson.annotationManager.lookupTypeData(type);
}
/**
* Serializes a bean.
*/
public void serialize() {
Class type = source.getClass();
ClassDescriptor classDescriptor = ClassIntrospector.lookup(type);
if (classMetadataName != null) {
// process first 'meta' fields 'class'
onProperty(classMetadataName, null, null, false);
}
PropertyDescriptor[] propertyDescriptors = classDescriptor.getAllPropertyDescriptors();
for (PropertyDescriptor propertyDescriptor : propertyDescriptors) {
Getter getter = propertyDescriptor.getGetter(declared);
if (getter != null) {
String propertyName = propertyDescriptor.getName();
Class propertyType = propertyDescriptor.getType();
boolean isTransient = false;
// check for transient flag
FieldDescriptor fieldDescriptor = propertyDescriptor.getFieldDescriptor();
if (fieldDescriptor != null) {
isTransient = Modifier.isTransient(fieldDescriptor.getField().getModifiers());
}
onProperty(propertyName, propertyType, propertyDescriptor, isTransient);
}
}
}
/**
* Invoked on each property. Properties are matched against the rules.
* If a property passes all the rules, it will be processed in {@link #onSerializableProperty(String, Object)}.
*/
protected void onProperty(String propertyName, Class propertyType, PropertyDescriptor pd, boolean isTransient) {
Path currentPath = jsonContext.path;
currentPath.push(propertyName);
// determine if name should be included/excluded
boolean include = !typeData.strict;
// + don't include transient fields
if (isTransient) {
include = false;
}
// + all collections are not serialized by default
include = jsonContext.matchIgnoredPropertyTypes(propertyType, include);
// + annotations
include = typeData.rules.apply(propertyName, true, include);
// + path queries: excludes/includes
include = jsonContext.matchPathToQueries(include);
// done
if (!include) {
currentPath.pop();
return;
}
Object value;
if (propertyType == null) {
// metadata - classname
value = source.getClass().getName();
} else {
value = readProperty(source, pd);
// change name for properties
propertyName = typeData.resolveJsonName(propertyName);
}
onSerializableProperty(propertyName, value);
currentPath.pop();
}
/**
* Invoked on serializable properties that have passed all the rules.
*/
protected void onSerializableProperty(String propertyName, Object value) {
jsonContext.pushName(propertyName, count > 0);
jsonContext.serialize(value);
if (jsonContext.isNamePoped()) {
count++;
}
}
/**
* Reads property using property descriptor.
*/
private Object readProperty(Object source, PropertyDescriptor propertyDescriptor) {
Getter getter = propertyDescriptor.getGetter(declared);
if (getter != null) {
try {
return getter.invokeGetter(source);
}
catch (Exception ex) {
throw new JsonException(ex);
}
}
return null;
}
}
|
Added propertyType for onSerializableProperty()
|
jodd-json/src/main/java/jodd/json/BeanSerializer.java
|
Added propertyType for onSerializableProperty()
|
|
Java
|
bsd-2-clause
|
70f35e472a39441a29f899229b1d9769df374d87
| 0
|
biovoxxel/imagej,biovoxxel/imagej,biovoxxel/imagej,TehSAUCE/imagej,TehSAUCE/imagej,TehSAUCE/imagej
|
/*
* #%L
* ImageJ software for multidimensional image processing and analysis.
* %%
* Copyright (C) 2009 - 2012 Board of Regents of the University of
* Wisconsin-Madison, Broad Institute of MIT and Harvard, and Max Planck
* Institute of Molecular Cell Biology and Genetics.
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation are
* those of the authors and should not be interpreted as representing official
* policies, either expressed or implied, of any organization.
* #L%
*/
package imagej.updater.gui;
import imagej.event.EventService;
import imagej.ext.plugin.Menu;
import imagej.ext.plugin.Parameter;
import imagej.ext.plugin.Plugin;
import imagej.updater.core.Checksummer;
import imagej.updater.core.FileObject;
import imagej.updater.core.FilesCollection;
import imagej.updater.core.UpdaterUIPlugin;
import imagej.updater.core.XMLFileDownloader;
import imagej.updater.gui.ViewOptions.Option;
import imagej.updater.util.Canceled;
import imagej.updater.util.Progress;
import imagej.updater.util.UpdaterUserInterface;
import imagej.updater.util.Util;
import imagej.util.FileUtils;
import imagej.util.Log;
import java.io.File;
import java.io.FileNotFoundException;
import java.net.Authenticator;
import java.net.UnknownHostException;
/**
* The Updater. As plugin.
*
* Incidentally, this class can be used as an out-of-ImageJ entry point to the updater,
* as it does not *require* an eventService to run. Use this Beanshell script as a template:
*
* import java.io.File;
* import java.net.URL;
* import java.net.URLClassLoader;
*
* prefix = "file:/path/to/imagej/";
* loader = new URLClassLoader(new URL[] {
* new URL(prefix + "ui/awt-swing/swing/updater/target/ij-ui-swing-updater-2.0.0-SNAPSHOT.jar"),
* new URL(prefix + "core/updater/core/target/ij-updater-core-2.0.0-SNAPSHOT.jar"),
* new URL(prefix + "core/core/target/ij-core-2.0.0-SNAPSHOT.jar"),
* new URL(prefix + "core/ext/target/ij-ext-2.0.0-SNAPSHOT.jar")
* });
* loader.loadClass("imagej.updater.gui.ImageJUpdater").newInstance().run();
*
* @author Johannes Schindelin
*/
@Plugin(type = UpdaterUIPlugin.class, menu = { @Menu(label = "Help"),
@Menu(label = "Update...") })
public class ImageJUpdater implements UpdaterUIPlugin {
@Parameter(persist = false)
private EventService eventService;
@Override
public void run() {
UpdaterUserInterface.set(new SwingUserInterface(eventService));
if (errorIfDebian()) return;
final File imagejRoot = FileUtils.getImageJDirectory();
if (new File(imagejRoot, "update").exists()) {
UpdaterUserInterface.get().error(
"ImageJ restart required to finalize previous update");
return;
}
Util.useSystemProxies();
final FilesCollection files = new FilesCollection(imagejRoot);
try {
files.read();
}
catch (final FileNotFoundException e) { /* ignore */}
catch (final Exception e) {
Log.error(e);
UpdaterUserInterface.get().error(
"There was an error reading the cached metadata: " + e);
return;
}
Authenticator.setDefault(new SwingAuthenticator());
final UpdaterFrame main = new UpdaterFrame(files);
main.setEasyMode(true);
Progress progress = main.getProgress("Starting up...");
final XMLFileDownloader downloader = new XMLFileDownloader(files);
downloader.addProgress(progress);
try {
downloader.start();
}
catch (final Canceled e) {
downloader.done();
main.error("Canceled");
return;
}
catch (final Exception e) {
Log.error(e);
downloader.done();
String message;
if (e instanceof UnknownHostException) message =
"Failed to lookup host " + e.getMessage();
else message = "Download/checksum failed: " + e;
main.error(message);
return;
}
final String warnings = downloader.getWarnings();
if (!warnings.equals("")) main.warn(warnings);
progress = main.getProgress("Matching with local files...");
final Checksummer checksummer = new Checksummer(files, progress);
try {
checksummer.updateFromLocal();
}
catch (final Canceled e) {
checksummer.done();
main.error("Canceled");
return;
}
// TODO: find .jar name from this class' resource
// TODO: mark all dependencies for update
// TODO: we may get away with a custom class loader... but probably not!
final FileObject updater = files.get("jars/ij-updater-core.jar");
if ((updater != null && updater.getStatus() == FileObject.Status.UPDATEABLE))
{
if (SwingTools.showQuestion(main, "Update the updater",
"There is an update available for the Updater. Install now?"))
{
// download just the updater
main.updateTheUpdater();
main
.info("Please restart ImageJ and call Help>Update to continue with the update");
}
// we do not save the files to prevent the mtime from changing
return;
}
main.setLocationRelativeTo(null);
main.setVisible(true);
main.requestFocus();
files.markForUpdate(false);
main.setViewOption(Option.UPDATEABLE);
if (files.hasForcableUpdates()) {
main.warn("There are locally modified files!");
if (files.hasUploadableSites() && !files.hasChanges()) {
main.setViewOption(Option.LOCALLY_MODIFIED);
main.setEasyMode(false);
}
}
else if (!files.hasChanges()) main.info("Your ImageJ is up to date!");
main.updateFilesTable();
}
protected boolean overwriteWithUpdated(final FilesCollection files,
final FileObject file)
{
File downloaded = files.prefix("update/" + file.filename);
if (!downloaded.exists()) return true; // assume all is well if there is no
// updated file
final File jar = files.prefix(file.filename);
if (!jar.delete() && !moveOutOfTheWay(jar)) return false;
if (!downloaded.renameTo(jar)) return false;
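// After the rename, walk up from the downloaded file's former location and delete any parent
// directories that are now empty, stopping at the first directory that still has content.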
for (;;) {
downloaded = downloaded.getParentFile();
if (downloaded == null) return true;
final String[] list = downloaded.list();
if (list != null && list.length > 0) return true;
// dir is empty, remove
if (!downloaded.delete()) return false;
}
}
/**
* This returns true if this seems to be the Debian packaged version of
* ImageJ, or false otherwise.
*/
public static boolean isDebian() {
final String debianProperty = System.getProperty("fiji.debian");
return debianProperty != null && debianProperty.equals("true");
}
/**
* If this seems to be the Debian packaged version of ImageJ, then produce an
* error and return true. Otherwise return false.
*/
public static boolean errorIfDebian() {
// If this is the Debian / Ubuntu packaged version, then
// insist that the user uses apt-get / synaptic instead:
if (isDebian()) {
String message = "You are using the Debian packaged version of ImageJ.\n";
message +=
"You should update ImageJ with your system's usual package manager instead.";
UpdaterUserInterface.get().error(message);
return true;
}
else return false;
}
protected static boolean moveOutOfTheWay(final File file) {
if (!file.exists()) return true;
File backup = new File(file.getParentFile(), file.getName() + ".old");
if (backup.exists() && !backup.delete()) {
int i = 2;
for (;;) {
backup = new File(file.getParentFile(), file.getName() + ".old" + i);
if (!backup.exists()) break;
i++;
}
}
return file.renameTo(backup);
}
}
|
ui/awt-swing/swing/updater/src/main/java/imagej/updater/gui/ImageJUpdater.java
|
/*
* #%L
* ImageJ software for multidimensional image processing and analysis.
* %%
* Copyright (C) 2009 - 2012 Board of Regents of the University of
* Wisconsin-Madison, Broad Institute of MIT and Harvard, and Max Planck
* Institute of Molecular Cell Biology and Genetics.
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation are
* those of the authors and should not be interpreted as representing official
* policies, either expressed or implied, of any organization.
* #L%
*/
package imagej.updater.gui;
import imagej.event.EventService;
import imagej.ext.plugin.Menu;
import imagej.ext.plugin.Parameter;
import imagej.ext.plugin.Plugin;
import imagej.updater.core.Checksummer;
import imagej.updater.core.FileObject;
import imagej.updater.core.FilesCollection;
import imagej.updater.core.UpdaterUIPlugin;
import imagej.updater.core.XMLFileDownloader;
import imagej.updater.gui.ViewOptions.Option;
import imagej.updater.util.Canceled;
import imagej.updater.util.Progress;
import imagej.updater.util.UpdaterUserInterface;
import imagej.updater.util.Util;
import imagej.util.FileUtils;
import imagej.util.Log;
import java.io.File;
import java.io.FileNotFoundException;
import java.net.Authenticator;
import java.net.UnknownHostException;
/**
* The Updater. As plugin.
*
* @author Johannes Schindelin
*/
@Plugin(type = UpdaterUIPlugin.class, menu = { @Menu(label = "Help"),
@Menu(label = "Update...") })
public class ImageJUpdater implements UpdaterUIPlugin {
@Parameter(persist = false)
private EventService eventService;
@Override
public void run() {
UpdaterUserInterface.set(new SwingUserInterface(eventService));
if (errorIfDebian()) return;
final File imagejRoot = FileUtils.getImageJDirectory();
if (new File(imagejRoot, "update").exists()) {
UpdaterUserInterface.get().error(
"ImageJ restart required to finalize previous update");
return;
}
Util.useSystemProxies();
final FilesCollection files = new FilesCollection(imagejRoot);
try {
files.read();
}
catch (final FileNotFoundException e) { /* ignore */}
catch (final Exception e) {
Log.error(e);
UpdaterUserInterface.get().error(
"There was an error reading the cached metadata: " + e);
return;
}
Authenticator.setDefault(new SwingAuthenticator());
final UpdaterFrame main = new UpdaterFrame(files);
main.setEasyMode(true);
Progress progress = main.getProgress("Starting up...");
final XMLFileDownloader downloader = new XMLFileDownloader(files);
downloader.addProgress(progress);
try {
downloader.start();
}
catch (final Canceled e) {
downloader.done();
main.error("Canceled");
return;
}
catch (final Exception e) {
Log.error(e);
downloader.done();
String message;
if (e instanceof UnknownHostException) message =
"Failed to lookup host " + e.getMessage();
else message = "Download/checksum failed: " + e;
main.error(message);
return;
}
final String warnings = downloader.getWarnings();
if (!warnings.equals("")) main.warn(warnings);
progress = main.getProgress("Matching with local files...");
final Checksummer checksummer = new Checksummer(files, progress);
try {
checksummer.updateFromLocal();
}
catch (final Canceled e) {
checksummer.done();
main.error("Canceled");
return;
}
// TODO: find .jar name from this class' resource
// TODO: mark all dependencies for update
// TODO: we may get away with a custom class loader... but probably not!
final FileObject updater = files.get("jars/ij-updater-core.jar");
if ((updater != null && updater.getStatus() == FileObject.Status.UPDATEABLE))
{
if (SwingTools.showQuestion(main, "Update the updater",
"There is an update available for the Updater. Install now?"))
{
// download just the updater
main.updateTheUpdater();
main
.info("Please restart ImageJ and call Help>Update to continue with the update");
}
// we do not save the files to prevent the mtime from changing
return;
}
main.setLocationRelativeTo(null);
main.setVisible(true);
main.requestFocus();
files.markForUpdate(false);
main.setViewOption(Option.UPDATEABLE);
if (files.hasForcableUpdates()) {
main.warn("There are locally modified files!");
if (files.hasUploadableSites() && !files.hasChanges()) {
main.setViewOption(Option.LOCALLY_MODIFIED);
main.setEasyMode(false);
}
}
else if (!files.hasChanges()) main.info("Your ImageJ is up to date!");
main.updateFilesTable();
}
protected boolean overwriteWithUpdated(final FilesCollection files,
final FileObject file)
{
File downloaded = files.prefix("update/" + file.filename);
if (!downloaded.exists()) return true; // assume all is well if there is no
// updated file
final File jar = files.prefix(file.filename);
if (!jar.delete() && !moveOutOfTheWay(jar)) return false;
if (!downloaded.renameTo(jar)) return false;
for (;;) {
downloaded = downloaded.getParentFile();
if (downloaded == null) return true;
final String[] list = downloaded.list();
if (list != null && list.length > 0) return true;
// dir is empty, remove
if (!downloaded.delete()) return false;
}
}
/**
* This returns true if this seems to be the Debian packaged version of
* ImageJ, or false otherwise.
*/
public static boolean isDebian() {
final String debianProperty = System.getProperty("fiji.debian");
return debianProperty != null && debianProperty.equals("true");
}
/**
* If this seems to be the Debian packaged version of ImageJ, then produce an
* error and return true. Otherwise return false.
*/
public static boolean errorIfDebian() {
// If this is the Debian / Ubuntu packaged version, then
// insist that the user uses apt-get / synaptic instead:
if (isDebian()) {
String message = "You are using the Debian packaged version of ImageJ.\n";
message +=
"You should update ImageJ with your system's usual package manager instead.";
UpdaterUserInterface.get().error(message);
return true;
}
else return false;
}
protected static boolean moveOutOfTheWay(final File file) {
if (!file.exists()) return true;
File backup = new File(file.getParentFile(), file.getName() + ".old");
if (backup.exists() && !backup.delete()) {
int i = 2;
for (;;) {
backup = new File(file.getParentFile(), file.getName() + ".old" + i);
if (!backup.exists()) break;
i++;
}
}
return file.renameTo(backup);
}
}
|
Add a proof-of-concept for the Fiji Updater
This sketches the migration path of the Fiji Updater...
Signed-off-by: Johannes Schindelin <johannes.schindelin@gmx.de>
This used to be revision r5417.
|
ui/awt-swing/swing/updater/src/main/java/imagej/updater/gui/ImageJUpdater.java
|
Add a proof-of-concept for the Fiji Updater
|
|
Java
|
bsd-2-clause
|
0efcf81c62ff8397c363c3d07d4d72e725e63772
| 0
|
davecramer/pgjdbc,sehrope/pgjdbc,sehrope/pgjdbc,davecramer/pgjdbc,marschall/pgjdbc,davecramer/pgjdbc,AlexElin/pgjdbc,AlexElin/pgjdbc,pgjdbc/pgjdbc,marschall/pgjdbc,jorsol/pgjdbc,pgjdbc/pgjdbc,marschall/pgjdbc,sehrope/pgjdbc,davecramer/pgjdbc,jorsol/pgjdbc,AlexElin/pgjdbc,pgjdbc/pgjdbc,sehrope/pgjdbc,jorsol/pgjdbc,pgjdbc/pgjdbc,marschall/pgjdbc,jorsol/pgjdbc
|
/*
* Copyright (c) 2004, PostgreSQL Global Development Group
* See the LICENSE file in the project root for more information.
*/
package org.postgresql.ds.common;
import org.postgresql.PGProperty;
import org.postgresql.jdbc.AutoSave;
import org.postgresql.jdbc.PreferQueryMode;
import org.postgresql.util.ExpressionProperties;
import org.postgresql.util.GT;
import org.postgresql.util.PSQLException;
import org.postgresql.util.PSQLState;
import org.postgresql.util.URLCoder;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.PrintWriter;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.Properties;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.naming.NamingException;
import javax.naming.RefAddr;
import javax.naming.Reference;
import javax.naming.Referenceable;
import javax.naming.StringRefAddr;
import javax.sql.CommonDataSource;
/**
* Base class for data sources and related classes.
*
* @author Aaron Mulder (ammulder@chariotsolutions.com)
*/
public abstract class BaseDataSource implements CommonDataSource, Referenceable {
private static final Logger LOGGER = Logger.getLogger(BaseDataSource.class.getName());
// Standard properties, defined in the JDBC 2.0 Optional Package spec
private String serverName = "localhost";
private String databaseName = "";
private String user;
private String password;
private int portNumber = 0;
// Map for all other properties
private Properties properties = new Properties();
/*
* Ensure the driver is loaded as JDBC Driver might be invisible to Java's ServiceLoader.
* Usually, {@code Class.forName(...)} is not required as {@link DriverManager} detects JDBC drivers
* via {@code META-INF/services/java.sql.Driver} entries. However there might be cases when the driver
* is located at the application level classloader, thus it might be required to perform manual
* registration of the driver.
*/
static {
try {
Class.forName("org.postgresql.Driver");
} catch (ClassNotFoundException e) {
throw new IllegalStateException(
"BaseDataSource is unable to load org.postgresql.Driver. Please check if you have proper PostgreSQL JDBC Driver jar on the classpath",
e);
}
}
/**
* Gets a connection to the PostgreSQL database. The database is identified by the DataSource
* properties serverName, databaseName, and portNumber. The user to connect as is identified by
* the DataSource properties user and password.
*
* @return A valid database connection.
* @throws SQLException Occurs when the database connection cannot be established.
*/
public Connection getConnection() throws SQLException {
return getConnection(user, password);
}
/**
* Gets a connection to the PostgreSQL database. The database is identified by the DataSource
* properties serverName, databaseName, and portNumber. The user to connect as is identified by
* the arguments user and password, which override the DataSource properties by the same name.
*
* @param user user
* @param password password
* @return A valid database connection.
* @throws SQLException Occurs when the database connection cannot be established.
*/
public Connection getConnection(String user, String password) throws SQLException {
try {
Connection con = DriverManager.getConnection(getUrl(), user, password);
if (LOGGER.isLoggable(Level.FINE)) {
LOGGER.log(Level.FINE, "Created a {0} for {1} at {2}",
new Object[] {getDescription(), user, getUrl()});
}
return con;
} catch (SQLException e) {
LOGGER.log(Level.FINE, "Failed to create a {0} for {1} at {2}: {3}",
new Object[] {getDescription(), user, getUrl(), e});
throw e;
}
}
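// Rough usage sketch (not part of this class): using a concrete subclass such as
// org.postgresql.ds.PGSimpleDataSource; the server, database, and user values below are
// illustrative, not defaults taken from this file.
//
//   PGSimpleDataSource ds = new PGSimpleDataSource();
//   ds.setServerName("localhost");
//   ds.setDatabaseName("test");
//   ds.setUser("postgres");
//   try (Connection con = ds.getConnection()) {
//       // use the connection
//   }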
/**
* This implementation doesn't use a LogWriter.
*/
@Override
public PrintWriter getLogWriter() {
return null;
}
/**
* This implementation doesn't use a LogWriter.
*
* @param printWriter Not used
*/
@Override
public void setLogWriter(PrintWriter printWriter) {
// NOOP
}
/**
* Gets the name of the host the PostgreSQL database is running on.
*
* @return name of the host the PostgreSQL database is running on
*/
public String getServerName() {
return serverName;
}
/**
* Sets the name of the host the PostgreSQL database is running on. If this is changed, it will
* only affect future calls to getConnection. The default value is <tt>localhost</tt>.
*
* @param serverName name of the host the PostgreSQL database is running on
*/
public void setServerName(String serverName) {
if (serverName == null || serverName.equals("")) {
this.serverName = "localhost";
} else {
this.serverName = serverName;
}
}
/**
* Gets the name of the PostgreSQL database, running on the server identified by the serverName
* property.
*
* @return name of the PostgreSQL database
*/
public String getDatabaseName() {
return databaseName;
}
/**
* Sets the name of the PostgreSQL database, running on the server identified by the serverName
* property. If this is changed, it will only affect future calls to getConnection.
*
* @param databaseName name of the PostgreSQL database
*/
public void setDatabaseName(String databaseName) {
this.databaseName = databaseName;
}
/**
* Gets a description of this DataSource-ish thing. Must be customized by subclasses.
*
* @return description of this DataSource-ish thing
*/
public abstract String getDescription();
/**
* Gets the user to connect as by default. If this is not specified, you must use the
* getConnection method which takes a user and password as parameters.
*
* @return user to connect as by default
*/
public String getUser() {
return user;
}
/**
* Sets the user to connect as by default. If this is not specified, you must use the
* getConnection method which takes a user and password as parameters. If this is changed, it will
* only affect future calls to getConnection.
*
* @param user user to connect as by default
*/
public void setUser(String user) {
this.user = user;
}
/**
* Gets the password to connect with by default. If this is not specified but a password is needed
* to log in, you must use the getConnection method which takes a user and password as parameters.
*
* @return password to connect with by default
*/
public String getPassword() {
return password;
}
/**
* Sets the password to connect with by default. If this is not specified but a password is needed
* to log in, you must use the getConnection method which takes a user and password as parameters.
* If this is changed, it will only affect future calls to getConnection.
*
* @param password password to connect with by default
*/
public void setPassword(String password) {
this.password = password;
}
/**
* Gets the port which the PostgreSQL server is listening on for TCP/IP connections.
*
* @return The port, or 0 if the default port will be used.
*/
public int getPortNumber() {
return portNumber;
}
/**
* Sets the port which the PostgreSQL server is listening on for TCP/IP connections. Be sure the
* -i flag is passed to postmaster when PostgreSQL is started. If this is not set, or set to 0,
* the default port will be used.
*
* @param portNumber port which the PostgreSQL server is listening on for TCP/IP
*/
public void setPortNumber(int portNumber) {
this.portNumber = portNumber;
}
/**
* @return command line options for this connection
*/
public String getOptions() {
return PGProperty.OPTIONS.get(properties);
}
/**
* Set command line options for this connection
*
* @param options string to set options to
*/
public void setOptions(String options) {
PGProperty.OPTIONS.set(properties, options);
}
/**
* @return login timeout
* @see PGProperty#LOGIN_TIMEOUT
*/
@Override
public int getLoginTimeout() {
return PGProperty.LOGIN_TIMEOUT.getIntNoCheck(properties);
}
/**
* @param loginTimeout login timeout
* @see PGProperty#LOGIN_TIMEOUT
*/
@Override
public void setLoginTimeout(int loginTimeout) {
PGProperty.LOGIN_TIMEOUT.set(properties, loginTimeout);
}
/**
* @return connect timeout
* @see PGProperty#CONNECT_TIMEOUT
*/
public int getConnectTimeout() {
return PGProperty.CONNECT_TIMEOUT.getIntNoCheck(properties);
}
/**
* @param connectTimeout connect timeout
* @see PGProperty#CONNECT_TIMEOUT
*/
public void setConnectTimeout(int connectTimeout) {
PGProperty.CONNECT_TIMEOUT.set(properties, connectTimeout);
}
/**
* @return protocol version
* @see PGProperty#PROTOCOL_VERSION
*/
public int getProtocolVersion() {
if (!PGProperty.PROTOCOL_VERSION.isPresent(properties)) {
return 0;
} else {
return PGProperty.PROTOCOL_VERSION.getIntNoCheck(properties);
}
}
/**
* @param protocolVersion protocol version
* @see PGProperty#PROTOCOL_VERSION
*/
public void setProtocolVersion(int protocolVersion) {
if (protocolVersion == 0) {
PGProperty.PROTOCOL_VERSION.set(properties, null);
} else {
PGProperty.PROTOCOL_VERSION.set(properties, protocolVersion);
}
}
/**
* @return receive buffer size
* @see PGProperty#RECEIVE_BUFFER_SIZE
*/
public int getReceiveBufferSize() {
return PGProperty.RECEIVE_BUFFER_SIZE.getIntNoCheck(properties);
}
/**
* @param nbytes receive buffer size
* @see PGProperty#RECEIVE_BUFFER_SIZE
*/
public void setReceiveBufferSize(int nbytes) {
PGProperty.RECEIVE_BUFFER_SIZE.set(properties, nbytes);
}
/**
* @return send buffer size
* @see PGProperty#SEND_BUFFER_SIZE
*/
public int getSendBufferSize() {
return PGProperty.SEND_BUFFER_SIZE.getIntNoCheck(properties);
}
/**
* @param nbytes send buffer size
* @see PGProperty#SEND_BUFFER_SIZE
*/
public void setSendBufferSize(int nbytes) {
PGProperty.SEND_BUFFER_SIZE.set(properties, nbytes);
}
/**
* @param count prepare threshold
* @see PGProperty#PREPARE_THRESHOLD
*/
public void setPrepareThreshold(int count) {
PGProperty.PREPARE_THRESHOLD.set(properties, count);
}
/**
* @return prepare threshold
* @see PGProperty#PREPARE_THRESHOLD
*/
public int getPrepareThreshold() {
return PGProperty.PREPARE_THRESHOLD.getIntNoCheck(properties);
}
/**
* @return prepared statement cache size (number of statements per connection)
* @see PGProperty#PREPARED_STATEMENT_CACHE_QUERIES
*/
public int getPreparedStatementCacheQueries() {
return PGProperty.PREPARED_STATEMENT_CACHE_QUERIES.getIntNoCheck(properties);
}
/**
* @param cacheSize prepared statement cache size (number of statements per connection)
* @see PGProperty#PREPARED_STATEMENT_CACHE_QUERIES
*/
public void setPreparedStatementCacheQueries(int cacheSize) {
PGProperty.PREPARED_STATEMENT_CACHE_QUERIES.set(properties, cacheSize);
}
/**
* @return prepared statement cache size (number of megabytes per connection)
* @see PGProperty#PREPARED_STATEMENT_CACHE_SIZE_MIB
*/
public int getPreparedStatementCacheSizeMiB() {
return PGProperty.PREPARED_STATEMENT_CACHE_SIZE_MIB.getIntNoCheck(properties);
}
/**
* @param cacheSize statement cache size (number of megabytes per connection)
* @see PGProperty#PREPARED_STATEMENT_CACHE_SIZE_MIB
*/
public void setPreparedStatementCacheSizeMiB(int cacheSize) {
PGProperty.PREPARED_STATEMENT_CACHE_SIZE_MIB.set(properties, cacheSize);
}
/**
* @return database metadata cache fields size (number of fields cached per connection)
* @see PGProperty#DATABASE_METADATA_CACHE_FIELDS
*/
public int getDatabaseMetadataCacheFields() {
return PGProperty.DATABASE_METADATA_CACHE_FIELDS.getIntNoCheck(properties);
}
/**
* @param cacheSize database metadata cache fields size (number of fields cached per connection)
* @see PGProperty#DATABASE_METADATA_CACHE_FIELDS
*/
public void setDatabaseMetadataCacheFields(int cacheSize) {
PGProperty.DATABASE_METADATA_CACHE_FIELDS.set(properties, cacheSize);
}
/**
* @return database metadata cache fields size (number of megabytes per connection)
* @see PGProperty#DATABASE_METADATA_CACHE_FIELDS_MIB
*/
public int getDatabaseMetadataCacheFieldsMiB() {
return PGProperty.DATABASE_METADATA_CACHE_FIELDS_MIB.getIntNoCheck(properties);
}
/**
* @param cacheSize database metadata cache fields size (number of megabytes per connection)
* @see PGProperty#DATABASE_METADATA_CACHE_FIELDS_MIB
*/
public void setDatabaseMetadataCacheFieldsMiB(int cacheSize) {
PGProperty.DATABASE_METADATA_CACHE_FIELDS_MIB.set(properties, cacheSize);
}
/**
* @param fetchSize default fetch size
* @see PGProperty#DEFAULT_ROW_FETCH_SIZE
*/
public void setDefaultRowFetchSize(int fetchSize) {
PGProperty.DEFAULT_ROW_FETCH_SIZE.set(properties, fetchSize);
}
/**
* @return default fetch size
* @see PGProperty#DEFAULT_ROW_FETCH_SIZE
*/
public int getDefaultRowFetchSize() {
return PGProperty.DEFAULT_ROW_FETCH_SIZE.getIntNoCheck(properties);
}
/**
* @param unknownLength unknown length
* @see PGProperty#UNKNOWN_LENGTH
*/
public void setUnknownLength(int unknownLength) {
PGProperty.UNKNOWN_LENGTH.set(properties, unknownLength);
}
/**
* @return unknown length
* @see PGProperty#UNKNOWN_LENGTH
*/
public int getUnknownLength() {
return PGProperty.UNKNOWN_LENGTH.getIntNoCheck(properties);
}
/**
* @param seconds socket timeout
* @see PGProperty#SOCKET_TIMEOUT
*/
public void setSocketTimeout(int seconds) {
PGProperty.SOCKET_TIMEOUT.set(properties, seconds);
}
/**
* @return socket timeout
* @see PGProperty#SOCKET_TIMEOUT
*/
public int getSocketTimeout() {
return PGProperty.SOCKET_TIMEOUT.getIntNoCheck(properties);
}
/**
* @param seconds timeout that is used for sending cancel command
* @see PGProperty#CANCEL_SIGNAL_TIMEOUT
*/
public void setCancelSignalTimeout(int seconds) {
PGProperty.CANCEL_SIGNAL_TIMEOUT.set(properties, seconds);
}
/**
* @return timeout that is used for sending cancel command in seconds
* @see PGProperty#CANCEL_SIGNAL_TIMEOUT
*/
public int getCancelSignalTimeout() {
return PGProperty.CANCEL_SIGNAL_TIMEOUT.getIntNoCheck(properties);
}
/**
* @param enabled if SSL is enabled
* @see PGProperty#SSL
*/
public void setSsl(boolean enabled) {
if (enabled) {
PGProperty.SSL.set(properties, true);
} else {
PGProperty.SSL.set(properties, false);
}
}
/**
* @return true if SSL is enabled
* @see PGProperty#SSL
*/
public boolean getSsl() {
// "true" if "ssl" is set but empty
return PGProperty.SSL.getBoolean(properties) || "".equals(PGProperty.SSL.get(properties));
}
/**
* @param classname SSL factory class name
* @see PGProperty#SSL_FACTORY
*/
public void setSslfactory(String classname) {
PGProperty.SSL_FACTORY.set(properties, classname);
}
/**
* @return SSL factory class name
* @see PGProperty#SSL_FACTORY
*/
public String getSslfactory() {
return PGProperty.SSL_FACTORY.get(properties);
}
/**
* @return SSL mode
* @see PGProperty#SSL_MODE
*/
public String getSslMode() {
return PGProperty.SSL_MODE.get(properties);
}
/**
* @param mode SSL mode
* @see PGProperty#SSL_MODE
*/
public void setSslMode(String mode) {
PGProperty.SSL_MODE.set(properties, mode);
}
/**
* @return argument forwarded to SSL factory
* @see PGProperty#SSL_FACTORY_ARG
*/
public String getSslFactoryArg() {
return PGProperty.SSL_FACTORY_ARG.get(properties);
}
/**
* @param arg argument forwarded to SSL factory
* @see PGProperty#SSL_FACTORY_ARG
*/
public void setSslFactoryArg(String arg) {
PGProperty.SSL_FACTORY_ARG.set(properties, arg);
}
/**
* @return SSL hostname verifier class name
* @see PGProperty#SSL_HOSTNAME_VERIFIER
*/
public String getSslHostnameVerifier() {
return PGProperty.SSL_HOSTNAME_VERIFIER.get(properties);
}
/**
* @param className SSL hostname verifier
* @see PGProperty#SSL_HOSTNAME_VERIFIER
*/
public void setSslHostnameVerifier(String className) {
PGProperty.SSL_HOSTNAME_VERIFIER.set(properties, className);
}
/**
* @return SSL certificate file
* @see PGProperty#SSL_CERT
*/
public String getSslCert() {
return PGProperty.SSL_CERT.get(properties);
}
/**
* @param file SSL certificate
* @see PGProperty#SSL_CERT
*/
public void setSslCert(String file) {
PGProperty.SSL_CERT.set(properties, file);
}
/**
* @return SSL key file
* @see PGProperty#SSL_KEY
*/
public String getSslKey() {
return PGProperty.SSL_KEY.get(properties);
}
/**
* @param file SSL key
* @see PGProperty#SSL_KEY
*/
public void setSslKey(String file) {
PGProperty.SSL_KEY.set(properties, file);
}
/**
* @return SSL root certificate
* @see PGProperty#SSL_ROOT_CERT
*/
public String getSslRootCert() {
return PGProperty.SSL_ROOT_CERT.get(properties);
}
/**
* @param file SSL root certificate
* @see PGProperty#SSL_ROOT_CERT
*/
public void setSslRootCert(String file) {
PGProperty.SSL_ROOT_CERT.set(properties, file);
}
/**
* @return SSL password
* @see PGProperty#SSL_PASSWORD
*/
public String getSslPassword() {
return PGProperty.SSL_PASSWORD.get(properties);
}
/**
* @param password SSL password
* @see PGProperty#SSL_PASSWORD
*/
public void setSslPassword(String password) {
PGProperty.SSL_PASSWORD.set(properties, password);
}
/**
* @return SSL password callback
* @see PGProperty#SSL_PASSWORD_CALLBACK
*/
public String getSslPasswordCallback() {
return PGProperty.SSL_PASSWORD_CALLBACK.get(properties);
}
/**
* @param className SSL password callback class name
* @see PGProperty#SSL_PASSWORD_CALLBACK
*/
public void setSslPasswordCallback(String className) {
PGProperty.SSL_PASSWORD_CALLBACK.set(properties, className);
}
/**
* @param applicationName application name
* @see PGProperty#APPLICATION_NAME
*/
public void setApplicationName(String applicationName) {
PGProperty.APPLICATION_NAME.set(properties, applicationName);
}
/**
* @return application name
* @see PGProperty#APPLICATION_NAME
*/
public String getApplicationName() {
return PGProperty.APPLICATION_NAME.get(properties);
}
/**
* @param targetServerType target server type
* @see PGProperty#TARGET_SERVER_TYPE
*/
public void setTargetServerType(String targetServerType) {
PGProperty.TARGET_SERVER_TYPE.set(properties, targetServerType);
}
/**
* @return target server type
* @see PGProperty#TARGET_SERVER_TYPE
*/
public String getTargetServerType() {
return PGProperty.TARGET_SERVER_TYPE.get(properties);
}
/**
* @param loadBalanceHosts load balance hosts
* @see PGProperty#LOAD_BALANCE_HOSTS
*/
public void setLoadBalanceHosts(boolean loadBalanceHosts) {
PGProperty.LOAD_BALANCE_HOSTS.set(properties, loadBalanceHosts);
}
/**
* @return load balance hosts
* @see PGProperty#LOAD_BALANCE_HOSTS
*/
public boolean getLoadBalanceHosts() {
return PGProperty.LOAD_BALANCE_HOSTS.isPresent(properties);
}
/**
* @param hostRecheckSeconds host recheck seconds
* @see PGProperty#HOST_RECHECK_SECONDS
*/
public void setHostRecheckSeconds(int hostRecheckSeconds) {
PGProperty.HOST_RECHECK_SECONDS.set(properties, hostRecheckSeconds);
}
/**
* @return host recheck seconds
* @see PGProperty#HOST_RECHECK_SECONDS
*/
public int getHostRecheckSeconds() {
return PGProperty.HOST_RECHECK_SECONDS.getIntNoCheck(properties);
}
/**
* @param enabled if TCP keep alive should be enabled
* @see PGProperty#TCP_KEEP_ALIVE
*/
public void setTcpKeepAlive(boolean enabled) {
PGProperty.TCP_KEEP_ALIVE.set(properties, enabled);
}
/**
* @return true if TCP keep alive is enabled
* @see PGProperty#TCP_KEEP_ALIVE
*/
public boolean getTcpKeepAlive() {
return PGProperty.TCP_KEEP_ALIVE.getBoolean(properties);
}
/**
* @param enabled if binary transfer should be enabled
* @see PGProperty#BINARY_TRANSFER
*/
public void setBinaryTransfer(boolean enabled) {
PGProperty.BINARY_TRANSFER.set(properties, enabled);
}
/**
* @return true if binary transfer is enabled
* @see PGProperty#BINARY_TRANSFER
*/
public boolean getBinaryTransfer() {
return PGProperty.BINARY_TRANSFER.getBoolean(properties);
}
/**
* @param oidList list of OIDs that are allowed to use binary transfer
* @see PGProperty#BINARY_TRANSFER_ENABLE
*/
public void setBinaryTransferEnable(String oidList) {
PGProperty.BINARY_TRANSFER_ENABLE.set(properties, oidList);
}
/**
* @return list of OIDs that are allowed to use binary transfer
* @see PGProperty#BINARY_TRANSFER_ENABLE
*/
public String getBinaryTransferEnable() {
return PGProperty.BINARY_TRANSFER_ENABLE.get(properties);
}
/**
* @param oidList list of OIDs that are not allowed to use binary transfer
* @see PGProperty#BINARY_TRANSFER_DISABLE
*/
public void setBinaryTransferDisable(String oidList) {
PGProperty.BINARY_TRANSFER_DISABLE.set(properties, oidList);
}
/**
* @return list of OIDs that are not allowed to use binary transfer
* @see PGProperty#BINARY_TRANSFER_DISABLE
*/
public String getBinaryTransferDisable() {
return PGProperty.BINARY_TRANSFER_DISABLE.get(properties);
}
/**
* @return string type
* @see PGProperty#STRING_TYPE
*/
public String getStringType() {
return PGProperty.STRING_TYPE.get(properties);
}
/**
* @param stringType string type
* @see PGProperty#STRING_TYPE
*/
public void setStringType(String stringType) {
PGProperty.STRING_TYPE.set(properties, stringType);
}
/**
* @return true if column sanitizer is disabled
* @see PGProperty#DISABLE_COLUMN_SANITISER
*/
public boolean isColumnSanitiserDisabled() {
return PGProperty.DISABLE_COLUMN_SANITISER.getBoolean(properties);
}
/**
* @return true if column sanitizer is disabled
* @see PGProperty#DISABLE_COLUMN_SANITISER
*/
public boolean getDisableColumnSanitiser() {
return PGProperty.DISABLE_COLUMN_SANITISER.getBoolean(properties);
}
/**
* @param disableColumnSanitiser if column sanitizer should be disabled
* @see PGProperty#DISABLE_COLUMN_SANITISER
*/
public void setDisableColumnSanitiser(boolean disableColumnSanitiser) {
PGProperty.DISABLE_COLUMN_SANITISER.set(properties, disableColumnSanitiser);
}
/**
* @return current schema
* @see PGProperty#CURRENT_SCHEMA
*/
public String getCurrentSchema() {
return PGProperty.CURRENT_SCHEMA.get(properties);
}
/**
* @param currentSchema current schema
* @see PGProperty#CURRENT_SCHEMA
*/
public void setCurrentSchema(String currentSchema) {
PGProperty.CURRENT_SCHEMA.set(properties, currentSchema);
}
/**
* @return true if connection is readonly
* @see PGProperty#READ_ONLY
*/
public boolean getReadOnly() {
return PGProperty.READ_ONLY.getBoolean(properties);
}
/**
* @param readOnly if connection should be readonly
* @see PGProperty#READ_ONLY
*/
public void setReadOnly(boolean readOnly) {
PGProperty.READ_ONLY.set(properties, readOnly);
}
/**
* @return true if driver should log unclosed connections
* @see PGProperty#LOG_UNCLOSED_CONNECTIONS
*/
public boolean getLogUnclosedConnections() {
return PGProperty.LOG_UNCLOSED_CONNECTIONS.getBoolean(properties);
}
/**
* @param enabled true if driver should log unclosed connections
* @see PGProperty#LOG_UNCLOSED_CONNECTIONS
*/
public void setLogUnclosedConnections(boolean enabled) {
PGProperty.LOG_UNCLOSED_CONNECTIONS.set(properties, enabled);
}
/**
* @return assumed minimal server version
* @see PGProperty#ASSUME_MIN_SERVER_VERSION
*/
public String getAssumeMinServerVersion() {
return PGProperty.ASSUME_MIN_SERVER_VERSION.get(properties);
}
/**
* @param minVersion assumed minimal server version
* @see PGProperty#ASSUME_MIN_SERVER_VERSION
*/
public void setAssumeMinServerVersion(String minVersion) {
PGProperty.ASSUME_MIN_SERVER_VERSION.set(properties, minVersion);
}
/**
* @return JAAS application name
* @see PGProperty#JAAS_APPLICATION_NAME
*/
public String getJaasApplicationName() {
return PGProperty.JAAS_APPLICATION_NAME.get(properties);
}
/**
* @param name JAAS application name
* @see PGProperty#JAAS_APPLICATION_NAME
*/
public void setJaasApplicationName(String name) {
PGProperty.JAAS_APPLICATION_NAME.set(properties, name);
}
/**
* @return true if perform JAAS login before GSS authentication
* @see PGProperty#JAAS_LOGIN
*/
public boolean getJaasLogin() {
return PGProperty.JAAS_LOGIN.getBoolean(properties);
}
/**
* @param doLogin true if perform JAAS login before GSS authentication
* @see PGProperty#JAAS_LOGIN
*/
public void setJaasLogin(boolean doLogin) {
PGProperty.JAAS_LOGIN.set(properties, doLogin);
}
/**
* @return Kerberos server name
* @see PGProperty#KERBEROS_SERVER_NAME
*/
public String getKerberosServerName() {
return PGProperty.KERBEROS_SERVER_NAME.get(properties);
}
/**
* @param serverName Kerberos server name
* @see PGProperty#KERBEROS_SERVER_NAME
*/
public void setKerberosServerName(String serverName) {
PGProperty.KERBEROS_SERVER_NAME.set(properties, serverName);
}
/**
* @return true if use SPNEGO
* @see PGProperty#USE_SPNEGO
*/
public boolean getUseSpNego() {
return PGProperty.USE_SPNEGO.getBoolean(properties);
}
/**
* @param use true if use SPNEGO
* @see PGProperty#USE_SPNEGO
*/
public void setUseSpNego(boolean use) {
PGProperty.USE_SPNEGO.set(properties, use);
}
/**
* @return GSS mode: auto, sspi, or gssapi
* @see PGProperty#GSS_LIB
*/
public String getGssLib() {
return PGProperty.GSS_LIB.get(properties);
}
/**
* @param lib GSS mode: auto, sspi, or gssapi
* @see PGProperty#GSS_LIB
*/
public void setGssLib(String lib) {
PGProperty.GSS_LIB.set(properties, lib);
}
/**
* @return SSPI service class
* @see PGProperty#SSPI_SERVICE_CLASS
*/
public String getSspiServiceClass() {
return PGProperty.SSPI_SERVICE_CLASS.get(properties);
}
/**
* @param serviceClass SSPI service class
* @see PGProperty#SSPI_SERVICE_CLASS
*/
public void setSspiServiceClass(String serviceClass) {
PGProperty.SSPI_SERVICE_CLASS.set(properties, serviceClass);
}
/**
* @return if connection allows encoding changes
* @see PGProperty#ALLOW_ENCODING_CHANGES
*/
public boolean getAllowEncodingChanges() {
return PGProperty.ALLOW_ENCODING_CHANGES.getBoolean(properties);
}
/**
* @param allow if connection allows encoding changes
* @see PGProperty#ALLOW_ENCODING_CHANGES
*/
public void setAllowEncodingChanges(boolean allow) {
PGProperty.ALLOW_ENCODING_CHANGES.set(properties, allow);
}
/**
* @return socket factory class name
* @see PGProperty#SOCKET_FACTORY
*/
public String getSocketFactory() {
return PGProperty.SOCKET_FACTORY.get(properties);
}
/**
* @param socketFactoryClassName socket factory class name
* @see PGProperty#SOCKET_FACTORY
*/
public void setSocketFactory(String socketFactoryClassName) {
PGProperty.SOCKET_FACTORY.set(properties, socketFactoryClassName);
}
/**
* @return socket factory argument
* @see PGProperty#SOCKET_FACTORY_ARG
*/
public String getSocketFactoryArg() {
return PGProperty.SOCKET_FACTORY_ARG.get(properties);
}
/**
* @param socketFactoryArg socket factory argument
* @see PGProperty#SOCKET_FACTORY_ARG
*/
public void setSocketFactoryArg(String socketFactoryArg) {
PGProperty.SOCKET_FACTORY_ARG.set(properties, socketFactoryArg);
}
/**
* @param replication set to 'database' for logical replication or 'true' for physical replication
* @see PGProperty#REPLICATION
*/
public void setReplication(String replication) {
PGProperty.REPLICATION.set(properties, replication);
}
/**
* @return null, 'database', or 'true'
* @see PGProperty#REPLICATION
*/
public String getReplication() {
return PGProperty.REPLICATION.get(properties);
}
/**
* @return Logger Level of the JDBC Driver
* @see PGProperty#LOGGER_LEVEL
*/
public String getLoggerLevel() {
return PGProperty.LOGGER_LEVEL.get(properties);
}
/**
* @param loggerLevel logger level of the JDBC Driver
* @see PGProperty#LOGGER_LEVEL
*/
public void setLoggerLevel(String loggerLevel) {
PGProperty.LOGGER_LEVEL.set(properties, loggerLevel);
}
/**
* @return File output of the Logger.
* @see PGProperty#LOGGER_FILE
*/
public String getLoggerFile() {
ExpressionProperties exprProps = new ExpressionProperties(properties, System.getProperties());
return PGProperty.LOGGER_FILE.get(exprProps);
}
/**
* @param loggerFile File output of the Logger.
* @see PGProperty#LOGGER_FILE
*/
public void setLoggerFile(String loggerFile) {
PGProperty.LOGGER_FILE.set(properties, loggerFile);
}
/**
* Generates a {@link DriverManager} URL from the other properties supplied.
*
* @return {@link DriverManager} URL from the other properties supplied
*/
public String getUrl() {
StringBuilder url = new StringBuilder(100);
url.append("jdbc:postgresql://");
url.append(serverName);
if (portNumber != 0) {
url.append(":").append(portNumber);
}
url.append("/").append(URLCoder.encode(databaseName));
StringBuilder query = new StringBuilder(100);
for (PGProperty property : PGProperty.values()) {
if (property.isPresent(properties)) {
if (query.length() != 0) {
query.append("&");
}
query.append(property.getName());
query.append("=");
query.append(URLCoder.encode(property.get(properties)));
}
}
if (query.length() > 0) {
url.append("?");
url.append(query);
}
return url.toString();
}
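// Illustrative example, not part of the original source: with hypothetical settings
// serverName = "db.example.com", portNumber = 5432, databaseName = "appdb" and the
// "ssl" property set to "true", getUrl() would produce a URL along the lines of
// "jdbc:postgresql://db.example.com:5432/appdb?ssl=true", with every present
// PGProperty URL-encoded into the query string.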
/**
* Generates a {@link DriverManager} URL from the other properties supplied.
*
* @return {@link DriverManager} URL from the other properties supplied
*/
public String getURL() {
return getUrl();
}
/**
* Sets properties from a {@link DriverManager} URL.
*
* @param url properties to set
*/
public void setUrl(String url) {
Properties p = org.postgresql.Driver.parseURL(url, null);
if (p == null) {
throw new IllegalArgumentException("URL invalid " + url);
}
for (PGProperty property : PGProperty.values()) {
if (!this.properties.containsKey(property.getName())) {
setProperty(property, property.get(p));
}
}
}
/**
* Sets properties from a {@link DriverManager} URL.
* Added to follow convention used in other DBMS.
*
* @param url properties to set
*/
public void setURL(String url) {
setUrl(url);
}
public String getProperty(String name) throws SQLException {
PGProperty pgProperty = PGProperty.forName(name);
if (pgProperty != null) {
return getProperty(pgProperty);
} else {
throw new PSQLException(GT.tr("Unsupported property name: {0}", name),
PSQLState.INVALID_PARAMETER_VALUE);
}
}
public void setProperty(String name, String value) throws SQLException {
PGProperty pgProperty = PGProperty.forName(name);
if (pgProperty != null) {
setProperty(pgProperty, value);
} else {
throw new PSQLException(GT.tr("Unsupported property name: {0}", name),
PSQLState.INVALID_PARAMETER_VALUE);
}
}
public String getProperty(PGProperty property) {
return property.get(properties);
}
public void setProperty(PGProperty property, String value) {
if (value == null) {
return;
}
switch (property) {
case PG_HOST:
serverName = value;
break;
case PG_PORT:
try {
portNumber = Integer.parseInt(value);
} catch (NumberFormatException e) {
portNumber = 0;
}
break;
case PG_DBNAME:
databaseName = value;
break;
case USER:
user = value;
break;
case PASSWORD:
password = value;
break;
default:
properties.setProperty(property.getName(), value);
}
}
/**
* Generates a reference using the appropriate object factory.
*
* @return reference using the appropriate object factory
*/
protected Reference createReference() {
return new Reference(getClass().getName(), PGObjectFactory.class.getName(), null);
}
public Reference getReference() throws NamingException {
Reference ref = createReference();
ref.add(new StringRefAddr("serverName", serverName));
if (portNumber != 0) {
ref.add(new StringRefAddr("portNumber", Integer.toString(portNumber)));
}
ref.add(new StringRefAddr("databaseName", databaseName));
if (user != null) {
ref.add(new StringRefAddr("user", user));
}
if (password != null) {
ref.add(new StringRefAddr("password", password));
}
for (PGProperty property : PGProperty.values()) {
if (property.isPresent(properties)) {
ref.add(new StringRefAddr(property.getName(), property.get(properties)));
}
}
return ref;
}
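// Hypothetical usage sketch, not part of the original source: because this class is
// Referenceable, a JNDI provider that stores References can persist and later recreate
// an equivalent data source through PGObjectFactory, e.g. (names are illustrative):
// new InitialContext().bind("java:comp/env/jdbc/app", dataSource);
// DataSource ds = (DataSource) new InitialContext().lookup("java:comp/env/jdbc/app");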
public void setFromReference(Reference ref) {
databaseName = getReferenceProperty(ref, "databaseName");
String port = getReferenceProperty(ref, "portNumber");
if (port != null) {
portNumber = Integer.parseInt(port);
}
serverName = getReferenceProperty(ref, "serverName");
for (PGProperty property : PGProperty.values()) {
setProperty(property, getReferenceProperty(ref, property.getName()));
}
}
private static String getReferenceProperty(Reference ref, String propertyName) {
RefAddr addr = ref.get(propertyName);
if (addr == null) {
return null;
}
return (String) addr.getContent();
}
protected void writeBaseObject(ObjectOutputStream out) throws IOException {
out.writeObject(serverName);
out.writeObject(databaseName);
out.writeObject(user);
out.writeObject(password);
out.writeInt(portNumber);
out.writeObject(properties);
}
protected void readBaseObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
serverName = (String) in.readObject();
databaseName = (String) in.readObject();
user = (String) in.readObject();
password = (String) in.readObject();
portNumber = in.readInt();
properties = (Properties) in.readObject();
}
public void initializeFrom(BaseDataSource source) throws IOException, ClassNotFoundException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
ObjectOutputStream oos = new ObjectOutputStream(baos);
source.writeBaseObject(oos);
oos.close();
ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
ObjectInputStream ois = new ObjectInputStream(bais);
readBaseObject(ois);
}
/**
* @return preferred query execution mode
* @see PGProperty#PREFER_QUERY_MODE
*/
public PreferQueryMode getPreferQueryMode() {
return PreferQueryMode.of(PGProperty.PREFER_QUERY_MODE.get(properties));
}
/**
* @param preferQueryMode extended, simple, extendedForPrepared, or extendedCacheEverything
* @see PGProperty#PREFER_QUERY_MODE
*/
public void setPreferQueryMode(PreferQueryMode preferQueryMode) {
PGProperty.PREFER_QUERY_MODE.set(properties, preferQueryMode.value());
}
/**
* @return connection configuration regarding automatic per-query savepoints
* @see PGProperty#AUTOSAVE
*/
public AutoSave getAutosave() {
return AutoSave.of(PGProperty.AUTOSAVE.get(properties));
}
/**
* @param autoSave connection configuration regarding automatic per-query savepoints
* @see PGProperty#AUTOSAVE
*/
public void setAutosave(AutoSave autoSave) {
PGProperty.AUTOSAVE.set(properties, autoSave.value());
}
/**
* @see PGProperty#CLEANUP_SAVEPOINTS
*
* @return boolean indicating property set
*/
public boolean getCleanupSavepoints() {
return PGProperty.CLEANUP_SAVEPOINTS.getBoolean(properties);
}
/**
* @see PGProperty#CLEANUP_SAVEPOINTS
*
* @param cleanupSavepoints will cleanup savepoints after a successful transaction
*/
public void setCleanupSavepoints(boolean cleanupSavepoints) {
PGProperty.CLEANUP_SAVEPOINTS.set(properties, cleanupSavepoints);
}
/**
* @return boolean indicating property is enabled or not.
* @see PGProperty#REWRITE_BATCHED_INSERTS
*/
public boolean getReWriteBatchedInserts() {
return PGProperty.REWRITE_BATCHED_INSERTS.getBoolean(properties);
}
/**
* @param reWrite boolean value to set the property in the properties collection
* @see PGProperty#REWRITE_BATCHED_INSERTS
*/
public void setReWriteBatchedInserts(boolean reWrite) {
PGProperty.REWRITE_BATCHED_INSERTS.set(properties, reWrite);
}
//#if mvn.project.property.postgresql.jdbc.spec >= "JDBC4.1"
public java.util.logging.Logger getParentLogger() {
return Logger.getLogger("org.postgresql");
}
//#endif
/*
* Alias methods below; these help other database tools and frameworks that expect
* conventional Java bean getters and setters to exist for the property names.
*/
public boolean isSsl() {
return getSsl();
}
public String getSslfactoryarg() {
return getSslFactoryArg();
}
public void setSslfactoryarg(final String arg) {
setSslFactoryArg(arg);
}
public String getSslcert() {
return getSslCert();
}
public void setSslcert(final String file) {
setSslCert(file);
}
public String getSslmode() {
return getSslMode();
}
public void setSslmode(final String mode) {
setSslMode(mode);
}
public String getSslhostnameverifier() {
return getSslHostnameVerifier();
}
public void setSslhostnameverifier(final String className) {
setSslHostnameVerifier(className);
}
public String getSslkey() {
return getSslKey();
}
public void setSslkey(final String file) {
setSslKey(file);
}
public String getSslrootcert() {
return getSslRootCert();
}
public void setSslrootcert(final String file) {
setSslRootCert(file);
}
public String getSslpasswordcallback() {
return getSslPasswordCallback();
}
public void setSslpasswordcallback(final String className) {
setSslPasswordCallback(className);
}
public String getSslpassword() {
return getSslPassword();
}
public void setSslpassword(final String sslpassword) {
setSslPassword(sslpassword);
}
public int getRecvBufferSize() {
return getReceiveBufferSize();
}
public void setRecvBufferSize(final int nbytes) {
setReceiveBufferSize(nbytes);
}
public boolean isAllowEncodingChanges() {
return getAllowEncodingChanges();
}
public boolean isLogUnclosedConnections() {
return getLogUnclosedConnections();
}
public boolean isTcpKeepAlive() {
return getTcpKeepAlive();
}
public boolean isReadOnly() {
return getReadOnly();
}
public boolean isDisableColumnSanitiser() {
return getDisableColumnSanitiser();
}
public boolean isLoadBalanceHosts() {
return getLoadBalanceHosts();
}
public boolean isCleanupSavePoints() {
return getCleanupSavepoints();
}
public void setCleanupSavePoints(final boolean cleanupSavepoints) {
setCleanupSavepoints(cleanupSavepoints);
}
public boolean isReWriteBatchedInserts() {
return getReWriteBatchedInserts();
}
}
|
pgjdbc/src/main/java/org/postgresql/ds/common/BaseDataSource.java
|
/*
* Copyright (c) 2004, PostgreSQL Global Development Group
* See the LICENSE file in the project root for more information.
*/
package org.postgresql.ds.common;
import org.postgresql.PGProperty;
import org.postgresql.jdbc.AutoSave;
import org.postgresql.jdbc.PreferQueryMode;
import org.postgresql.util.ExpressionProperties;
import org.postgresql.util.GT;
import org.postgresql.util.PSQLException;
import org.postgresql.util.PSQLState;
import org.postgresql.util.URLCoder;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.PrintWriter;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.Properties;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.naming.NamingException;
import javax.naming.RefAddr;
import javax.naming.Reference;
import javax.naming.Referenceable;
import javax.naming.StringRefAddr;
import javax.sql.CommonDataSource;
/**
* Base class for data sources and related classes.
*
* @author Aaron Mulder (ammulder@chariotsolutions.com)
*/
public abstract class BaseDataSource implements CommonDataSource, Referenceable {
private static final Logger LOGGER = Logger.getLogger(BaseDataSource.class.getName());
// Standard properties, defined in the JDBC 2.0 Optional Package spec
private String serverName = "localhost";
private String databaseName = "";
private String user;
private String password;
private int portNumber = 0;
// Map for all other properties
private Properties properties = new Properties();
/*
* Ensure the driver is loaded, as the JDBC driver might be invisible to Java's ServiceLoader.
* Usually, {@code Class.forName(...)} is not required because {@link DriverManager} detects JDBC drivers
* via {@code META-INF/services/java.sql.Driver} entries. However, there might be cases when the driver
* is only visible to an application-level classloader, so manual registration of the
* driver may be required.
*/
static {
try {
Class.forName("org.postgresql.Driver");
} catch (ClassNotFoundException e) {
throw new IllegalStateException("BaseDataSource is unable to load org.postgresql.Driver. Please check if you have proper PostgreSQL JDBC Driver jar on the classpath", e);
}
}
/**
* Gets a connection to the PostgreSQL database. The database is identified by the DataSource
* properties serverName, databaseName, and portNumber. The user to connect as is identified by
* the DataSource properties user and password.
*
* @return A valid database connection.
* @throws SQLException Occurs when the database connection cannot be established.
*/
public Connection getConnection() throws SQLException {
return getConnection(user, password);
}
/**
* Gets a connection to the PostgreSQL database. The database is identified by the DataSource
* properties serverName, databaseName, and portNumber. The user to connect as is identified by
* the arguments user and password, which override the DataSource properties by the same name.
*
* @param user user
* @param password password
* @return A valid database connection.
* @throws SQLException Occurs when the database connection cannot be established.
*/
public Connection getConnection(String user, String password) throws SQLException {
try {
Connection con = DriverManager.getConnection(getUrl(), user, password);
if (LOGGER.isLoggable(Level.FINE)) {
LOGGER.log(Level.FINE, "Created a {0} for {1} at {2}", new Object[]{getDescription(), user, getUrl()});
}
return con;
} catch (SQLException e) {
LOGGER.log(Level.FINE, "Failed to create a {0} for {1} at {2}: {3}",
new Object[]{getDescription(), user, getUrl(), e});
throw e;
}
}
/**
* This implementation does not use a LogWriter.
*/
@Override
public PrintWriter getLogWriter() {
return null;
}
/**
* This implementation does not use a LogWriter.
* @param printWriter Not used
*/
@Override
public void setLogWriter(PrintWriter printWriter) {
// NOOP
}
/**
* Gets the name of the host the PostgreSQL database is running on.
*
* @return name of the host the PostgreSQL database is running on
*/
public String getServerName() {
return serverName;
}
/**
* Sets the name of the host the PostgreSQL database is running on. If this is changed, it will
* only affect future calls to getConnection. The default value is <tt>localhost</tt>.
*
* @param serverName name of the host the PostgreSQL database is running on
*/
public void setServerName(String serverName) {
if (serverName == null || serverName.equals("")) {
this.serverName = "localhost";
} else {
this.serverName = serverName;
}
}
/**
* Gets the name of the PostgreSQL database, running on the server identified by the serverName
* property.
*
* @return name of the PostgreSQL database
*/
public String getDatabaseName() {
return databaseName;
}
/**
* Sets the name of the PostgreSQL database, running on the server identified by the serverName
* property. If this is changed, it will only affect future calls to getConnection.
*
* @param databaseName name of the PostgreSQL database
*/
public void setDatabaseName(String databaseName) {
this.databaseName = databaseName;
}
/**
* Gets a description of this DataSource-ish thing. Must be customized by subclasses.
*
* @return description of this DataSource-ish thing
*/
public abstract String getDescription();
/**
* Gets the user to connect as by default. If this is not specified, you must use the
* getConnection method which takes a user and password as parameters.
*
* @return user to connect as by default
*/
public String getUser() {
return user;
}
/**
* Sets the user to connect as by default. If this is not specified, you must use the
* getConnection method which takes a user and password as parameters. If this is changed, it will
* only affect future calls to getConnection.
*
* @param user user to connect as by default
*/
public void setUser(String user) {
this.user = user;
}
/**
* Gets the password to connect with by default. If this is not specified but a password is needed
* to log in, you must use the getConnection method which takes a user and password as parameters.
*
* @return password to connect with by default
*/
public String getPassword() {
return password;
}
/**
* Sets the password to connect with by default. If this is not specified but a password is needed
* to log in, you must use the getConnection method which takes a user and password as parameters.
* If this is changed, it will only affect future calls to getConnection.
*
* @param password password to connect with by default
*/
public void setPassword(String password) {
this.password = password;
}
/**
* Gets the port which the PostgreSQL server is listening on for TCP/IP connections.
*
* @return The port, or 0 if the default port will be used.
*/
public int getPortNumber() {
return portNumber;
}
/**
* Sets the port which the PostgreSQL server is listening on for TCP/IP connections. Be sure the
* -i flag is passed to postmaster when PostgreSQL is started. If this is not set, or set to 0,
* the default port will be used.
*
* @param portNumber port which the PostgreSQL server is listening on for TCP/IP
*/
public void setPortNumber(int portNumber) {
this.portNumber = portNumber;
}
/**
* @return command line options for this connection
*/
public String getOptions() {
return PGProperty.OPTIONS.get(properties);
}
/**
* Set command line options for this connection
* @param options string to set options to
*/
public void setOptions(String options) {
PGProperty.OPTIONS.set(properties, options);
}
/**
* @return login timeout
* @see PGProperty#LOGIN_TIMEOUT
*/
@Override
public int getLoginTimeout() {
return PGProperty.LOGIN_TIMEOUT.getIntNoCheck(properties);
}
/**
* @param loginTimeout login timeout
* @see PGProperty#LOGIN_TIMEOUT
*/
@Override
public void setLoginTimeout(int loginTimeout) {
PGProperty.LOGIN_TIMEOUT.set(properties, loginTimeout);
}
/**
* @return connect timeout
* @see PGProperty#CONNECT_TIMEOUT
*/
public int getConnectTimeout() {
return PGProperty.CONNECT_TIMEOUT.getIntNoCheck(properties);
}
/**
* @param connectTimeout connect timeout
* @see PGProperty#CONNECT_TIMEOUT
*/
public void setConnectTimeout(int connectTimeout) {
PGProperty.CONNECT_TIMEOUT.set(properties, connectTimeout);
}
/**
* @return protocol version
* @see PGProperty#PROTOCOL_VERSION
*/
public int getProtocolVersion() {
if (!PGProperty.PROTOCOL_VERSION.isPresent(properties)) {
return 0;
} else {
return PGProperty.PROTOCOL_VERSION.getIntNoCheck(properties);
}
}
/**
* @param protocolVersion protocol version
* @see PGProperty#PROTOCOL_VERSION
*/
public void setProtocolVersion(int protocolVersion) {
if (protocolVersion == 0) {
PGProperty.PROTOCOL_VERSION.set(properties, null);
} else {
PGProperty.PROTOCOL_VERSION.set(properties, protocolVersion);
}
}
/**
* @return receive buffer size
* @see PGProperty#RECEIVE_BUFFER_SIZE
*/
public int getReceiveBufferSize() {
return PGProperty.RECEIVE_BUFFER_SIZE.getIntNoCheck(properties);
}
/**
* @param nbytes receive buffer size
* @see PGProperty#RECEIVE_BUFFER_SIZE
*/
public void setReceiveBufferSize(int nbytes) {
PGProperty.RECEIVE_BUFFER_SIZE.set(properties, nbytes);
}
/**
* @return send buffer size
* @see PGProperty#SEND_BUFFER_SIZE
*/
public int getSendBufferSize() {
return PGProperty.SEND_BUFFER_SIZE.getIntNoCheck(properties);
}
/**
* @param nbytes send buffer size
* @see PGProperty#SEND_BUFFER_SIZE
*/
public void setSendBufferSize(int nbytes) {
PGProperty.SEND_BUFFER_SIZE.set(properties, nbytes);
}
/**
* @param count prepare threshold
* @see PGProperty#PREPARE_THRESHOLD
*/
public void setPrepareThreshold(int count) {
PGProperty.PREPARE_THRESHOLD.set(properties, count);
}
/**
* @return prepare threshold
* @see PGProperty#PREPARE_THRESHOLD
*/
public int getPrepareThreshold() {
return PGProperty.PREPARE_THRESHOLD.getIntNoCheck(properties);
}
/**
* @return prepared statement cache size (number of statements per connection)
* @see PGProperty#PREPARED_STATEMENT_CACHE_QUERIES
*/
public int getPreparedStatementCacheQueries() {
return PGProperty.PREPARED_STATEMENT_CACHE_QUERIES.getIntNoCheck(properties);
}
/**
* @param cacheSize prepared statement cache size (number of statements per connection)
* @see PGProperty#PREPARED_STATEMENT_CACHE_QUERIES
*/
public void setPreparedStatementCacheQueries(int cacheSize) {
PGProperty.PREPARED_STATEMENT_CACHE_QUERIES.set(properties, cacheSize);
}
/**
* @return prepared statement cache size (number of megabytes per connection)
* @see PGProperty#PREPARED_STATEMENT_CACHE_SIZE_MIB
*/
public int getPreparedStatementCacheSizeMiB() {
return PGProperty.PREPARED_STATEMENT_CACHE_SIZE_MIB.getIntNoCheck(properties);
}
/**
* @param cacheSize statement cache size (number of megabytes per connection)
* @see PGProperty#PREPARED_STATEMENT_CACHE_SIZE_MIB
*/
public void setPreparedStatementCacheSizeMiB(int cacheSize) {
PGProperty.PREPARED_STATEMENT_CACHE_SIZE_MIB.set(properties, cacheSize);
}
/**
* @return database metadata cache fields size (number of fields cached per connection)
* @see PGProperty#DATABASE_METADATA_CACHE_FIELDS
*/
public int getDatabaseMetadataCacheFields() {
return PGProperty.DATABASE_METADATA_CACHE_FIELDS.getIntNoCheck(properties);
}
/**
* @param cacheSize database metadata cache fields size (number of fields cached per connection)
* @see PGProperty#DATABASE_METADATA_CACHE_FIELDS
*/
public void setDatabaseMetadataCacheFields(int cacheSize) {
PGProperty.DATABASE_METADATA_CACHE_FIELDS.set(properties, cacheSize);
}
/**
* @return database metadata cache fields size (number of megabytes per connection)
* @see PGProperty#DATABASE_METADATA_CACHE_FIELDS_MIB
*/
public int getDatabaseMetadataCacheFieldsMiB() {
return PGProperty.DATABASE_METADATA_CACHE_FIELDS_MIB.getIntNoCheck(properties);
}
/**
* @param cacheSize database metadata cache fields size (number of megabytes per connection)
* @see PGProperty#DATABASE_METADATA_CACHE_FIELDS_MIB
*/
public void setDatabaseMetadataCacheFieldsMiB(int cacheSize) {
PGProperty.DATABASE_METADATA_CACHE_FIELDS_MIB.set(properties, cacheSize);
}
/**
* @param fetchSize default fetch size
* @see PGProperty#DEFAULT_ROW_FETCH_SIZE
*/
public void setDefaultRowFetchSize(int fetchSize) {
PGProperty.DEFAULT_ROW_FETCH_SIZE.set(properties, fetchSize);
}
/**
* @return default fetch size
* @see PGProperty#DEFAULT_ROW_FETCH_SIZE
*/
public int getDefaultRowFetchSize() {
return PGProperty.DEFAULT_ROW_FETCH_SIZE.getIntNoCheck(properties);
}
/**
* @param unknownLength unknown length
* @see PGProperty#UNKNOWN_LENGTH
*/
public void setUnknownLength(int unknownLength) {
PGProperty.UNKNOWN_LENGTH.set(properties, unknownLength);
}
/**
* @return unknown length
* @see PGProperty#UNKNOWN_LENGTH
*/
public int getUnknownLength() {
return PGProperty.UNKNOWN_LENGTH.getIntNoCheck(properties);
}
/**
* @param seconds socket timeout
* @see PGProperty#SOCKET_TIMEOUT
*/
public void setSocketTimeout(int seconds) {
PGProperty.SOCKET_TIMEOUT.set(properties, seconds);
}
/**
* @return socket timeout
* @see PGProperty#SOCKET_TIMEOUT
*/
public int getSocketTimeout() {
return PGProperty.SOCKET_TIMEOUT.getIntNoCheck(properties);
}
/**
* @param seconds timeout that is used for sending cancel command
* @see PGProperty#CANCEL_SIGNAL_TIMEOUT
*/
public void setCancelSignalTimeout(int seconds) {
PGProperty.CANCEL_SIGNAL_TIMEOUT.set(properties, seconds);
}
/**
* @return timeout that is used for sending cancel command in seconds
* @see PGProperty#CANCEL_SIGNAL_TIMEOUT
*/
public int getCancelSignalTimeout() {
return PGProperty.CANCEL_SIGNAL_TIMEOUT.getIntNoCheck(properties);
}
/**
* @param enabled if SSL is enabled
* @see PGProperty#SSL
*/
public void setSsl(boolean enabled) {
if (enabled) {
PGProperty.SSL.set(properties, true);
} else {
PGProperty.SSL.set(properties, false);
}
}
/**
* @return true if SSL is enabled
* @see PGProperty#SSL
*/
public boolean getSsl() {
// "true" if "ssl" is set but empty
return PGProperty.SSL.getBoolean(properties) || "".equals(PGProperty.SSL.get(properties));
}
/**
* @param classname SSL factory class name
* @see PGProperty#SSL_FACTORY
*/
public void setSslfactory(String classname) {
PGProperty.SSL_FACTORY.set(properties, classname);
}
/**
* @return SSL factory class name
* @see PGProperty#SSL_FACTORY
*/
public String getSslfactory() {
return PGProperty.SSL_FACTORY.get(properties);
}
/**
* @return SSL mode
* @see PGProperty#SSL_MODE
*/
public String getSslMode() {
return PGProperty.SSL_MODE.get(properties);
}
/**
* @param mode SSL mode
* @see PGProperty#SSL_MODE
*/
public void setSslMode(String mode) {
PGProperty.SSL_MODE.set(properties, mode);
}
/**
* @return argument forwarded to SSL factory
* @see PGProperty#SSL_FACTORY_ARG
*/
public String getSslFactoryArg() {
return PGProperty.SSL_FACTORY_ARG.get(properties);
}
/**
* @param arg argument forwarded to SSL factory
* @see PGProperty#SSL_FACTORY_ARG
*/
public void setSslFactoryArg(String arg) {
PGProperty.SSL_FACTORY_ARG.set(properties, arg);
}
/**
* @return SSL hostname verifier
* @see PGProperty#SSL_HOSTNAME_VERIFIER
*/
public String getSslHostnameVerifier() {
return PGProperty.SSL_HOSTNAME_VERIFIER.get(properties);
}
/**
* @param className SSL hostname verifier
* @see PGProperty#SSL_HOSTNAME_VERIFIER
*/
public void setSslHostnameVerifier(String className) {
PGProperty.SSL_HOSTNAME_VERIFIER.set(properties, className);
}
/**
* @return SSL certificate
* @see PGProperty#SSL_CERT
*/
public String getSslCert() {
return PGProperty.SSL_CERT.get(properties);
}
/**
* @param file SSL certificate
* @see PGProperty#SSL_CERT
*/
public void setSslCert(String file) {
PGProperty.SSL_CERT.set(properties, file);
}
/**
* @return SSL key
* @see PGProperty#SSL_KEY
*/
public String getSslKey() {
return PGProperty.SSL_KEY.get(properties);
}
/**
* @param file SSL key
* @see PGProperty#SSL_KEY
*/
public void setSslKey(String file) {
PGProperty.SSL_KEY.set(properties, file);
}
/**
* @return SSL root certificate
* @see PGProperty#SSL_ROOT_CERT
*/
public String getSslRootCert() {
return PGProperty.SSL_ROOT_CERT.get(properties);
}
/**
* @param file SSL root certificate
* @see PGProperty#SSL_ROOT_CERT
*/
public void setSslRootCert(String file) {
PGProperty.SSL_ROOT_CERT.set(properties, file);
}
/**
* @return SSL password
* @see PGProperty#SSL_PASSWORD
*/
public String getSslPassword() {
return PGProperty.SSL_PASSWORD.get(properties);
}
/**
* @param password SSL password
* @see PGProperty#SSL_PASSWORD
*/
public void setSslPassword(String password) {
PGProperty.SSL_PASSWORD.set(properties, password);
}
/**
* @return SSL password callback
* @see PGProperty#SSL_PASSWORD_CALLBACK
*/
public String getSslPasswordCallback() {
return PGProperty.SSL_PASSWORD_CALLBACK.get(properties);
}
/**
* @param className SSL password callback class name
* @see PGProperty#SSL_PASSWORD_CALLBACK
*/
public void setSslPasswordCallback(String className) {
PGProperty.SSL_PASSWORD_CALLBACK.set(properties, className);
}
/**
* @param applicationName application name
* @see PGProperty#APPLICATION_NAME
*/
public void setApplicationName(String applicationName) {
PGProperty.APPLICATION_NAME.set(properties, applicationName);
}
/**
* @return application name
* @see PGProperty#APPLICATION_NAME
*/
public String getApplicationName() {
return PGProperty.APPLICATION_NAME.get(properties);
}
/**
* @param targetServerType target server type
* @see PGProperty#TARGET_SERVER_TYPE
*/
public void setTargetServerType(String targetServerType) {
PGProperty.TARGET_SERVER_TYPE.set(properties, targetServerType);
}
/**
* @return target server type
* @see PGProperty#TARGET_SERVER_TYPE
*/
public String getTargetServerType() {
return PGProperty.TARGET_SERVER_TYPE.get(properties);
}
/**
* @param loadBalanceHosts load balance hosts
* @see PGProperty#LOAD_BALANCE_HOSTS
*/
public void setLoadBalanceHosts(boolean loadBalanceHosts) {
PGProperty.LOAD_BALANCE_HOSTS.set(properties, loadBalanceHosts);
}
/**
* @return load balance hosts
* @see PGProperty#LOAD_BALANCE_HOSTS
*/
public boolean getLoadBalanceHosts() {
return PGProperty.LOAD_BALANCE_HOSTS.isPresent(properties);
}
/**
* @param hostRecheckSeconds host recheck seconds
* @see PGProperty#HOST_RECHECK_SECONDS
*/
public void setHostRecheckSeconds(int hostRecheckSeconds) {
PGProperty.HOST_RECHECK_SECONDS.set(properties, hostRecheckSeconds);
}
/**
* @return host recheck seconds
* @see PGProperty#HOST_RECHECK_SECONDS
*/
public int getHostRecheckSeconds() {
return PGProperty.HOST_RECHECK_SECONDS.getIntNoCheck(properties);
}
/**
* @param enabled if TCP keep alive should be enabled
* @see PGProperty#TCP_KEEP_ALIVE
*/
public void setTcpKeepAlive(boolean enabled) {
PGProperty.TCP_KEEP_ALIVE.set(properties, enabled);
}
/**
* @return true if TCP keep alive is enabled
* @see PGProperty#TCP_KEEP_ALIVE
*/
public boolean getTcpKeepAlive() {
return PGProperty.TCP_KEEP_ALIVE.getBoolean(properties);
}
/**
* @param enabled if binary transfer should be enabled
* @see PGProperty#BINARY_TRANSFER
*/
public void setBinaryTransfer(boolean enabled) {
PGProperty.BINARY_TRANSFER.set(properties, enabled);
}
/**
* @return true if binary transfer is enabled
* @see PGProperty#BINARY_TRANSFER
*/
public boolean getBinaryTransfer() {
return PGProperty.BINARY_TRANSFER.getBoolean(properties);
}
/**
* @param oidList list of OIDs that are allowed to use binary transfer
* @see PGProperty#BINARY_TRANSFER_ENABLE
*/
public void setBinaryTransferEnable(String oidList) {
PGProperty.BINARY_TRANSFER_ENABLE.set(properties, oidList);
}
/**
* @return list of OIDs that are allowed to use binary transfer
* @see PGProperty#BINARY_TRANSFER_ENABLE
*/
public String getBinaryTransferEnable() {
return PGProperty.BINARY_TRANSFER_ENABLE.get(properties);
}
/**
* @param oidList list of OIDs that are not allowed to use binary transfer
* @see PGProperty#BINARY_TRANSFER_DISABLE
*/
public void setBinaryTransferDisable(String oidList) {
PGProperty.BINARY_TRANSFER_DISABLE.set(properties, oidList);
}
/**
* @return list of OIDs that are not allowed to use binary transfer
* @see PGProperty#BINARY_TRANSFER_DISABLE
*/
public String getBinaryTransferDisable() {
return PGProperty.BINARY_TRANSFER_DISABLE.get(properties);
}
/**
* @return string type
* @see PGProperty#STRING_TYPE
*/
public String getStringType() {
return PGProperty.STRING_TYPE.get(properties);
}
/**
* @param stringType string type
* @see PGProperty#STRING_TYPE
*/
public void setStringType(String stringType) {
PGProperty.STRING_TYPE.set(properties, stringType);
}
/**
* @return true if column sanitizer is disabled
* @see PGProperty#DISABLE_COLUMN_SANITISER
*/
public boolean isColumnSanitiserDisabled() {
return PGProperty.DISABLE_COLUMN_SANITISER.getBoolean(properties);
}
/**
* @return true if column sanitizer is disabled
* @see PGProperty#DISABLE_COLUMN_SANITISER
*/
public boolean getDisableColumnSanitiser() {
return PGProperty.DISABLE_COLUMN_SANITISER.getBoolean(properties);
}
/**
* @param disableColumnSanitiser if column sanitizer should be disabled
* @see PGProperty#DISABLE_COLUMN_SANITISER
*/
public void setDisableColumnSanitiser(boolean disableColumnSanitiser) {
PGProperty.DISABLE_COLUMN_SANITISER.set(properties, disableColumnSanitiser);
}
/**
* @return current schema
* @see PGProperty#CURRENT_SCHEMA
*/
public String getCurrentSchema() {
return PGProperty.CURRENT_SCHEMA.get(properties);
}
/**
* @param currentSchema current schema
* @see PGProperty#CURRENT_SCHEMA
*/
public void setCurrentSchema(String currentSchema) {
PGProperty.CURRENT_SCHEMA.set(properties, currentSchema);
}
/**
* @return true if connection is readonly
* @see PGProperty#READ_ONLY
*/
public boolean getReadOnly() {
return PGProperty.READ_ONLY.getBoolean(properties);
}
/**
* @param readOnly if connection should be readonly
* @see PGProperty#READ_ONLY
*/
public void setReadOnly(boolean readOnly) {
PGProperty.READ_ONLY.set(properties, readOnly);
}
/**
* @return true if driver should log unclosed connections
* @see PGProperty#LOG_UNCLOSED_CONNECTIONS
*/
public boolean getLogUnclosedConnections() {
return PGProperty.LOG_UNCLOSED_CONNECTIONS.getBoolean(properties);
}
/**
* @param enabled true if driver should log unclosed connections
* @see PGProperty#LOG_UNCLOSED_CONNECTIONS
*/
public void setLogUnclosedConnections(boolean enabled) {
PGProperty.LOG_UNCLOSED_CONNECTIONS.set(properties, enabled);
}
/**
* @return assumed minimal server version
* @see PGProperty#ASSUME_MIN_SERVER_VERSION
*/
public String getAssumeMinServerVersion() {
return PGProperty.ASSUME_MIN_SERVER_VERSION.get(properties);
}
/**
* @param minVersion assumed minimal server version
* @see PGProperty#ASSUME_MIN_SERVER_VERSION
*/
public void setAssumeMinServerVersion(String minVersion) {
PGProperty.ASSUME_MIN_SERVER_VERSION.set(properties, minVersion);
}
/**
* @return JAAS application name
* @see PGProperty#JAAS_APPLICATION_NAME
*/
public String getJaasApplicationName() {
return PGProperty.JAAS_APPLICATION_NAME.get(properties);
}
/**
* @param name JAAS application name
* @see PGProperty#JAAS_APPLICATION_NAME
*/
public void setJaasApplicationName(String name) {
PGProperty.JAAS_APPLICATION_NAME.set(properties, name);
}
/**
* @return true if perform JAAS login before GSS authentication
* @see PGProperty#JAAS_LOGIN
*/
public boolean getJaasLogin() {
return PGProperty.JAAS_LOGIN.getBoolean(properties);
}
/**
* @param doLogin true if perform JAAS login before GSS authentication
* @see PGProperty#JAAS_LOGIN
*/
public void setJaasLogin(boolean doLogin) {
PGProperty.JAAS_LOGIN.set(properties, doLogin);
}
/**
* @return Kerberos server name
* @see PGProperty#KERBEROS_SERVER_NAME
*/
public String getKerberosServerName() {
return PGProperty.KERBEROS_SERVER_NAME.get(properties);
}
/**
* @param serverName Kerberos server name
* @see PGProperty#KERBEROS_SERVER_NAME
*/
public void setKerberosServerName(String serverName) {
PGProperty.KERBEROS_SERVER_NAME.set(properties, serverName);
}
/**
* @return true if use SPNEGO
* @see PGProperty#USE_SPNEGO
*/
public boolean getUseSpNego() {
return PGProperty.USE_SPNEGO.getBoolean(properties);
}
/**
* @param use true if use SPNEGO
* @see PGProperty#USE_SPNEGO
*/
public void setUseSpNego(boolean use) {
PGProperty.USE_SPNEGO.set(properties, use);
}
/**
* @return GSS mode: auto, sspi, or gssapi
* @see PGProperty#GSS_LIB
*/
public String getGssLib() {
return PGProperty.GSS_LIB.get(properties);
}
/**
* @param lib GSS mode: auto, sspi, or gssapi
* @see PGProperty#GSS_LIB
*/
public void setGssLib(String lib) {
PGProperty.GSS_LIB.set(properties, lib);
}
/**
* @return SSPI service class
* @see PGProperty#SSPI_SERVICE_CLASS
*/
public String getSspiServiceClass() {
return PGProperty.SSPI_SERVICE_CLASS.get(properties);
}
/**
* @param serviceClass SSPI service class
* @see PGProperty#SSPI_SERVICE_CLASS
*/
public void setSspiServiceClass(String serviceClass) {
PGProperty.SSPI_SERVICE_CLASS.set(properties, serviceClass);
}
/**
* @return if connection allows encoding changes
* @see PGProperty#ALLOW_ENCODING_CHANGES
*/
public boolean getAllowEncodingChanges() {
return PGProperty.ALLOW_ENCODING_CHANGES.getBoolean(properties);
}
/**
* @param allow if connection allows encoding changes
* @see PGProperty#ALLOW_ENCODING_CHANGES
*/
public void setAllowEncodingChanges(boolean allow) {
PGProperty.ALLOW_ENCODING_CHANGES.set(properties, allow);
}
/**
* @return socket factory class name
* @see PGProperty#SOCKET_FACTORY
*/
public String getSocketFactory() {
return PGProperty.SOCKET_FACTORY.get(properties);
}
/**
* @param socketFactoryClassName socket factory class name
* @see PGProperty#SOCKET_FACTORY
*/
public void setSocketFactory(String socketFactoryClassName) {
PGProperty.SOCKET_FACTORY.set(properties, socketFactoryClassName);
}
/**
* @return socket factory argument
* @see PGProperty#SOCKET_FACTORY_ARG
*/
public String getSocketFactoryArg() {
return PGProperty.SOCKET_FACTORY_ARG.get(properties);
}
/**
* @param socketFactoryArg socket factory argument
* @see PGProperty#SOCKET_FACTORY_ARG
*/
public void setSocketFactoryArg(String socketFactoryArg) {
PGProperty.SOCKET_FACTORY_ARG.set(properties, socketFactoryArg);
}
/**
* @param replication set to 'database' for logical replication or 'true' for physical replication
* @see PGProperty#REPLICATION
*/
public void setReplication(String replication) {
PGProperty.REPLICATION.set(properties, replication);
}
/**
* @return null, 'database', or 'true'
* @see PGProperty#REPLICATION
*/
public String getReplication() {
return PGProperty.REPLICATION.get(properties);
}
/**
* @return Logger Level of the JDBC Driver
* @see PGProperty#LOGGER_LEVEL
*/
public String getLoggerLevel() {
return PGProperty.LOGGER_LEVEL.get(properties);
}
/**
* @param loggerLevel logger level of the JDBC Driver
* @see PGProperty#LOGGER_LEVEL
*/
public void setLoggerLevel(String loggerLevel) {
PGProperty.LOGGER_LEVEL.set(properties, loggerLevel);
}
/**
* @return File output of the Logger.
* @see PGProperty#LOGGER_FILE
*/
public String getLoggerFile() {
ExpressionProperties exprProps = new ExpressionProperties(properties, System.getProperties());
return PGProperty.LOGGER_FILE.get(exprProps);
}
/**
* @param loggerFile File output of the Logger.
* @see PGProperty#LOGGER_FILE
*/
public void setLoggerFile(String loggerFile) {
PGProperty.LOGGER_FILE.set(properties, loggerFile);
}
/**
* Generates a {@link DriverManager} URL from the other properties supplied.
*
* @return {@link DriverManager} URL from the other properties supplied
*/
public String getUrl() {
StringBuilder url = new StringBuilder(100);
url.append("jdbc:postgresql://");
url.append(serverName);
if (portNumber != 0) {
url.append(":").append(portNumber);
}
url.append("/").append(URLCoder.encode(databaseName));
StringBuilder query = new StringBuilder(100);
for (PGProperty property: PGProperty.values()) {
if (property.isPresent(properties)) {
if (query.length() != 0) {
query.append("&");
}
query.append(property.getName());
query.append("=");
query.append(URLCoder.encode(property.get(properties)));
}
}
if (query.length() > 0) {
url.append("?");
url.append(query);
}
return url.toString();
}
/**
* Generates a {@link DriverManager} URL from the other properties supplied.
*
* @return {@link DriverManager} URL from the other properties supplied
*/
public String getURL() {
return getUrl();
}
/**
* Sets properties from a {@link DriverManager} URL.
*
* @param url properties to set
*/
public void setUrl(String url) {
Properties p = org.postgresql.Driver.parseURL(url, null);
if (p == null ) {
throw new IllegalArgumentException("URL invalid " + url);
}
for (PGProperty property : PGProperty.values()) {
if (!this.properties.containsKey(property.getName())) {
setProperty(property, property.get(p));
}
}
}
/**
* Sets properties from a {@link DriverManager} URL.
* Added to follow convention used in other DBMS.
*
* @param url properties to set
*/
public void setURL(String url) {
setUrl(url);
}
public String getProperty(String name) throws SQLException {
PGProperty pgProperty = PGProperty.forName(name);
if (pgProperty != null) {
return getProperty(pgProperty);
} else {
throw new PSQLException(GT.tr("Unsupported property name: {0}", name),
PSQLState.INVALID_PARAMETER_VALUE);
}
}
public void setProperty(String name, String value) throws SQLException {
PGProperty pgProperty = PGProperty.forName(name);
if (pgProperty != null) {
setProperty(pgProperty, value);
} else {
throw new PSQLException(GT.tr("Unsupported property name: {0}", name),
PSQLState.INVALID_PARAMETER_VALUE);
}
}
public String getProperty(PGProperty property) {
return property.get(properties);
}
public void setProperty(PGProperty property, String value) {
if (value == null) {
return;
}
switch (property) {
case PG_HOST:
serverName = value;
break;
case PG_PORT:
try {
portNumber = Integer.parseInt(value);
} catch (NumberFormatException e) {
portNumber = 0;
}
break;
case PG_DBNAME:
databaseName = value;
break;
case USER:
user = value;
break;
case PASSWORD:
password = value;
break;
default:
properties.setProperty(property.getName(), value);
}
}
/**
* Generates a reference using the appropriate object factory.
*
* @return reference using the appropriate object factory
*/
protected Reference createReference() {
return new Reference(getClass().getName(), PGObjectFactory.class.getName(), null);
}
public Reference getReference() throws NamingException {
Reference ref = createReference();
ref.add(new StringRefAddr("serverName", serverName));
if (portNumber != 0) {
ref.add(new StringRefAddr("portNumber", Integer.toString(portNumber)));
}
ref.add(new StringRefAddr("databaseName", databaseName));
if (user != null) {
ref.add(new StringRefAddr("user", user));
}
if (password != null) {
ref.add(new StringRefAddr("password", password));
}
for (PGProperty property : PGProperty.values()) {
if (property.isPresent(properties)) {
ref.add(new StringRefAddr(property.getName(), property.get(properties)));
}
}
return ref;
}
public void setFromReference(Reference ref) {
databaseName = getReferenceProperty(ref, "databaseName");
String port = getReferenceProperty(ref, "portNumber");
if (port != null) {
portNumber = Integer.parseInt(port);
}
serverName = getReferenceProperty(ref, "serverName");
for (PGProperty property : PGProperty.values()) {
setProperty(property, getReferenceProperty(ref, property.getName()));
}
}
private static String getReferenceProperty(Reference ref, String propertyName) {
RefAddr addr = ref.get(propertyName);
if (addr == null) {
return null;
}
return (String) addr.getContent();
}
protected void writeBaseObject(ObjectOutputStream out) throws IOException {
out.writeObject(serverName);
out.writeObject(databaseName);
out.writeObject(user);
out.writeObject(password);
out.writeInt(portNumber);
out.writeObject(properties);
}
protected void readBaseObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
serverName = (String) in.readObject();
databaseName = (String) in.readObject();
user = (String) in.readObject();
password = (String) in.readObject();
portNumber = in.readInt();
properties = (Properties) in.readObject();
}
public void initializeFrom(BaseDataSource source) throws IOException, ClassNotFoundException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
ObjectOutputStream oos = new ObjectOutputStream(baos);
source.writeBaseObject(oos);
oos.close();
ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
ObjectInputStream ois = new ObjectInputStream(bais);
readBaseObject(ois);
}
/**
* @return preferred query execution mode
* @see PGProperty#PREFER_QUERY_MODE
*/
public PreferQueryMode getPreferQueryMode() {
return PreferQueryMode.of(PGProperty.PREFER_QUERY_MODE.get(properties));
}
/**
* @param preferQueryMode extended, simple, extendedForPrepared, or extendedCacheEverything
* @see PGProperty#PREFER_QUERY_MODE
*/
public void setPreferQueryMode(PreferQueryMode preferQueryMode) {
PGProperty.PREFER_QUERY_MODE.set(properties, preferQueryMode.value());
}
/**
* @return connection configuration regarding automatic per-query savepoints
* @see PGProperty#AUTOSAVE
*/
public AutoSave getAutosave() {
return AutoSave.of(PGProperty.AUTOSAVE.get(properties));
}
/**
* @param autoSave connection configuration regarding automatic per-query savepoints
* @see PGProperty#AUTOSAVE
*/
public void setAutosave(AutoSave autoSave) {
PGProperty.AUTOSAVE.set(properties, autoSave.value());
}
/**
* @see PGProperty#CLEANUP_SAVEPOINTS
* @return boolean indicating property set
*/
public boolean getCleanupSavepoints() {
return PGProperty.CLEANUP_SAVEPOINTS.getBoolean(properties);
}
/**
* @see PGProperty#CLEANUP_SAVEPOINTS
* @param cleanupSavepoints will cleanup savepoints after a successful transaction
*/
public void setCleanupSavepoints(boolean cleanupSavepoints) {
PGProperty.CLEANUP_SAVEPOINTS.set(properties, cleanupSavepoints);
}
/**
* @return boolean indicating property is enabled or not.
* @see PGProperty#REWRITE_BATCHED_INSERTS
*/
public boolean getReWriteBatchedInserts() {
return PGProperty.REWRITE_BATCHED_INSERTS.getBoolean(properties);
}
/**
* @param reWrite boolean value to set the property in the properties collection
* @see PGProperty#REWRITE_BATCHED_INSERTS
*/
public void setReWriteBatchedInserts(boolean reWrite) {
PGProperty.REWRITE_BATCHED_INSERTS.set(properties, reWrite);
}
//#if mvn.project.property.postgresql.jdbc.spec >= "JDBC4.1"
public java.util.logging.Logger getParentLogger() {
return Logger.getLogger("org.postgresql");
}
//#endif
}
|
Add method aliases for each property which didn't use the conventiona… (#1436)
* Add method aliases for each property which didn't use the conventional Java bean method naming for the specific property name.
This allows other libraries, such as connection pools, to set any of these properties by name through their own interfaces when you may not have direct access to the JDBC data source.
* fix checkstyle errors
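A minimal usage sketch of what the new lowercase alias setters enable (the data-source subclass and all connection details below are illustrative assumptions, not taken from this commit): a pool or framework that resolves bean properties by their raw names, such as "sslmode" or "sslrootcert", can now call a setter whose name matches the property exactly.

import java.sql.Connection;
import org.postgresql.ds.PGSimpleDataSource;

public class AliasSetterSketch {
  public static void main(String[] args) throws Exception {
    PGSimpleDataSource ds = new PGSimpleDataSource();
    ds.setServerName("db.example.com");   // hypothetical host
    ds.setDatabaseName("appdb");          // hypothetical database
    ds.setUser("app");
    ds.setPassword("secret");
    // Alias setters named after the raw property names; they simply delegate
    // to setSslMode(...) and setSslRootCert(...).
    ds.setSslmode("require");
    ds.setSslrootcert("/etc/ssl/root.crt");
    try (Connection con = ds.getConnection()) {
      System.out.println(con.getMetaData().getURL());
    }
  }
}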
|
pgjdbc/src/main/java/org/postgresql/ds/common/BaseDataSource.java
|
Add method aliases for each property which didn't use the conventiona… (#1436)
|
|
Java
|
bsd-3-clause
|
b7a65c6eccd5ca2876ebd563f4d939c266f99a0a
| 0
|
MarinnaCole/LightZone,MarinnaCole/LightZone,MarinnaCole/LightZone,MarinnaCole/LightZone,ktgw0316/LightZone,ktgw0316/LightZone,ktgw0316/LightZone,MarinnaCole/LightZone,MarinnaCole/LightZone,ktgw0316/LightZone,ktgw0316/LightZone,MarinnaCole/LightZone,ktgw0316/LightZone,ktgw0316/LightZone
|
/* Copyright (C) 2005-2011 Fabio Riccardi */
package com.lightcrafts.jai.utils;
/*
* $RCSfile: SunTileScheduler.java,v $
*
* Copyright (c) 2005 Sun Microsystems, Inc. All rights reserved.
*
* Use is subject to license terms.
*
* $Revision: 1.1 $
* $Date: 2005/02/11 04:57:02 $
* $State: Exp $
*/
import java.awt.Point;
import java.awt.RenderingHints;
import java.awt.image.Raster;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.lightcrafts.mediax.jai.OpImage;
import com.lightcrafts.mediax.jai.PlanarImage;
import com.lightcrafts.mediax.jai.TileCache;
import com.lightcrafts.mediax.jai.TileComputationListener;
import com.lightcrafts.mediax.jai.TileRequest;
import com.lightcrafts.mediax.jai.TileScheduler;
import com.lightcrafts.mediax.jai.util.ImagingException;
import com.lightcrafts.mediax.jai.util.ImagingListener;
import com.lightcrafts.media.jai.util.ImageUtil;
/**
* A class representing a request for non-prefetch background computation
* of tiles. The object stores the image, the indices of all tiles being
* requested, and references to all listeners associated with the request.
*
* <code>TileRequest</code> methods are not commented.
*/
class Request implements TileRequest {
private final TileScheduler scheduler;
final PlanarImage image;
final List<Point> indices;
final Set<TileComputationListener> listeners;
final Hashtable<Point, Integer> tileStatus;
/**
* Constructs a <code>Request</code>.
*
* @param scheduler The scheduler processing this request.
* @param image The image for which tiles are being computed.
* @param tileIndices The indices of the tiles to be computed.
* @param tileListeners The listeners to be notified of tile
* computation, cancellation, or failure.
*
* @exception IllegalArgumentException if <code>scheduler</code>,
* <code>image</code>, or <code>tileIndices</code> is
* <code>null</code> or if <code>tileIndices</code> is
* zero-length.
*/
Request(TileScheduler scheduler,
PlanarImage image,
Point[] tileIndices,
TileComputationListener[] tileListeners) {
// Save a reference to the scheduler.
if(scheduler == null) {
throw new IllegalArgumentException(); // Internal error - no message.
}
this.scheduler = scheduler;
// Save a reference to the image.
if(image == null) {
throw new IllegalArgumentException(); // Internal error - no message.
}
this.image = image;
// Ensure there is at least one tile in the request.
if(tileIndices == null || tileIndices.length == 0) {
// If this happens it is an internal programming error.
throw new IllegalArgumentException(); // Internal error - no message.
}
// Save the tile indices.
indices = Arrays.asList(tileIndices);
// Save references to the listeners, if any.
if(tileListeners != null) {
int numListeners = tileListeners.length;
if(numListeners > 0) {
listeners = new HashSet<TileComputationListener>(numListeners);
Collections.addAll(listeners, tileListeners);
} else {
listeners = null;
}
} else {
listeners = null;
}
// Initialize status table.
tileStatus = new Hashtable<Point, Integer>(tileIndices.length);
}
// --- TileRequest implementation ---
public PlanarImage getImage() {
return image;
}
public Point[] getTileIndices() {
return indices.toArray(new Point[indices.size()]);
}
public TileComputationListener[] getTileListeners() {
return listeners.toArray(new TileComputationListener[listeners.size()]);
}
public boolean isStatusAvailable() {
return true;
}
public int getTileStatus(int tileX, int tileY) {
Point p = new Point(tileX, tileY);
int status;
if(tileStatus.containsKey(p)) {
status = tileStatus.get(p);
} else {
status = TileRequest.TILE_STATUS_PENDING;
}
return status;
}
public void cancelTiles(Point[] tileIndices) {
// Forward the call to the scheduler.
scheduler.cancelTiles(this, tileIndices);
}
}
/** A job to put in a job queue. */
interface Job {
/** Computes the job required. */
void compute();
/** Returns <code>true</code> if the job is not done. */
boolean notDone();
/** Returns the image for which tiles are being computed. */
PlanarImage getOwner();
/**
* Returns <code>true</code> if and only if the job should block the
* thread which processes it. In this case the scheduler and the
* processing thread must communicate using <code>wait()</code> and
* <code>notify()</code>.
*/
boolean isBlocking();
/** Returns the first exception encountered or <code>null</code>. */
Exception getException();
}
/**
* A <code>Job</code> which computes a single tile at a time for a
* non-prefetch background job queued by the version of scheduleTiles()
* which returns a <code>TileRequest</code>. This <code>Job</code>
* notifies all <code>TileComputationListener</code>s of all
* <code>TileRequest</code>s with which this tile is associated of
* whether the tile was computed or the computation failed.
*/
final class RequestJob implements Job {
final LCTileScheduler scheduler; // the TileScheduler
final PlanarImage owner; // the image this tile belongs to
final int tileX; // tile's X index
final int tileY; // tile's Y index
final Raster[] tiles; // the computed tiles
final int offset; // offset into arrays
boolean done = false; // flag indicating completion status
Exception exception = null; // Any exception that might have occured
// during computeTile
/** Constructor. */
RequestJob(LCTileScheduler scheduler,
PlanarImage owner, int tileX, int tileY,
Raster[] tiles, int offset) {
this.scheduler = scheduler;
this.owner = owner;
this.tileX = tileX;
this.tileY = tileY;
this.tiles = tiles;
this.offset = offset;
}
/**
* Tile computation. Does the actual call to getTile().
*/
public void compute() {
// Get the Request List.
List<Request> reqList;
synchronized(scheduler.tileRequests) {
// Initialize the tile ID.
Object tileID = LCTileScheduler.tileKey(owner, tileX, tileY);
// Remove the List of Requests from the request Map.
reqList = scheduler.tileRequests.remove(tileID);
// Remove the tile Job from the job Map.
scheduler.tileJobs.remove(tileID);
}
// Check whether reqList is valid in case job was cancelled while
// blocking on the tileRequests Map above.
// XXX Do not need empty check in next line?
if(reqList != null && !reqList.isEmpty()) {
// Update tile status to "processing".
Point p = new Point(tileX, tileY);
Integer tileStatus = TileRequest.TILE_STATUS_PROCESSING;
for (Request r : reqList) {
r.tileStatus.put(p, tileStatus);
}
try {
tiles[offset] = owner.getTile(tileX, tileY);
} catch (Exception e) {
exception = e;
} catch (Error e) {
exception = new Exception(e);
} finally {
// Extract the Set of all TileComputationListeners.
Set<TileComputationListener> listeners = LCTileScheduler.getListeners(reqList);
// XXX Do not need empty check in next line.
if(listeners != null && !listeners.isEmpty()) {
// Get TileRequests as an array for later use.
TileRequest[] requests = reqList.toArray(new TileRequest[reqList.size()]);
// Update tile status as needed.
tileStatus = exception == null ?
TileRequest.TILE_STATUS_COMPUTED :
TileRequest.TILE_STATUS_FAILED;
for (TileRequest r : requests) {
((Request)r).tileStatus.put(p, tileStatus);
}
// Notify listeners.
if(exception == null) {
// Tile computation successful.
for (TileComputationListener listener : listeners) {
listener.tileComputed(scheduler, requests,
owner, tileX, tileY,
tiles[offset]);
}
} else {
// Tile computation unsuccessful.
for (TileComputationListener listener : listeners) {
listener.tileComputationFailure(scheduler, requests,
owner, tileX, tileY,
exception);
}
}
}
}
}
// Set the flag indicating job completion.
done = true;
}
/**
* Returns <code>true</code> if the job is not done; that is,
* the tile is not computed and no exceptions have occurred.
*/
public boolean notDone() {
return !done;
}
/** Returns the image for which the tile is being computed. */
public PlanarImage getOwner() {
return owner;
}
/** Always returns <code>true</code>. */
public boolean isBlocking() {
// Big Change: this should prevent enqueueing of new tiles while an image is being processed
return true;
}
/** Returns any encountered exception or <code>null</code>. */
public Exception getException() {
return exception;
}
/** Returns a string representation of the class object. */
public String toString() {
String tString = "null";
if (tiles[offset] != null) {
tString = tiles[offset].toString();
}
return getClass().getName() + "@" + Integer.toHexString(hashCode()) +
": owner = " + owner.toString() +
" tileX = " + Integer.toString(tileX) +
" tileY = " + Integer.toString(tileY) +
" tile = " + tString;
}
}
/**
* A <code>Job</code> which computes one or more tiles at a time for either
* a prefetch job or a blocking job.
*/
final class TileJob implements Job {
final LCTileScheduler scheduler; // the TileScheduler
final boolean isBlocking; // whether the job is blocking
final PlanarImage owner; // the image this tile belongs to
final Point[] tileIndices; // the tile indices
final Raster[] tiles; // the computed tiles
final int offset; // offset into arrays
final int numTiles; // number of elements to use in indices array
boolean done = false; // flag indicating completion status
Exception exception = null; // The first exception that might have
// occured during computeTile
/** Constructor. */
TileJob(LCTileScheduler scheduler, boolean isBlocking,
PlanarImage owner, Point[] tileIndices,
Raster[] tiles, int offset, int numTiles) {
this.scheduler = scheduler;
this.isBlocking = isBlocking;
this.owner = owner;
this.tileIndices = tileIndices;
this.tiles = tiles;
this.offset = offset;
this.numTiles = numTiles;
}
/**
 * Tile computation. Makes the actual calls to getTile().
*/
public void compute() {
exception = scheduler.compute(owner, tileIndices, tiles,
offset, numTiles, null);
done = true;
}
/**
* Returns <code>true</code> if the job is not done; that is,
* the tile is not computed and no exceptions have occurred.
*/
public boolean notDone() {
return !done;
}
/** Returns the image for which tiles are being computed. */
public PlanarImage getOwner() {
return owner;
}
/** Returns <code>true</code> if and only if there is a listener. */
public boolean isBlocking() {
return isBlocking;
}
/** Returns any encountered exception or <code>null</code>. */
public Exception getException() {
return exception;
}
}
/**
* Worker thread that takes jobs from the tile computation queue and does
* the actual computation.
*/
class WorkerThread extends Thread {
    /** <code>Object</code> indicating that the thread should exit. */
public static final Object TERMINATE = new Object();
/** The scheduler that spawned this thread. */
final LCTileScheduler scheduler;
/** Whether this is a prefetch thread. */
boolean isPrefetch;
/** Constructor. */
public WorkerThread(ThreadGroup group,
LCTileScheduler scheduler,
boolean isPrefetch) {
super(group, group.getName() + group.activeCount());
this.scheduler = scheduler;
this.isPrefetch = isPrefetch;
setDaemon(true);
start();
}
/** Does the tile computation. */
public void run() {
LinkedList<Object> jobQueue = scheduler.getQueue(isPrefetch);
while(true) {
Object dequeuedObject = null;
            // Check the job queue. The queue's monitor must be held both to
            // inspect or modify the list and to call wait() on it.
            synchronized(jobQueue) {
                if(jobQueue.size() > 0) {
                    // Remove the first job.
                    dequeuedObject = jobQueue.removeFirst();
                } else {
                    try {
                        // Wait for a notify() on the queue.
                        jobQueue.wait();
                        continue;
                    } catch(InterruptedException ie) {
                        // Ignore: should never happen.
                    }
                }
            }
if(dequeuedObject == TERMINATE ||
getThreadGroup() == null || getThreadGroup().isDestroyed()) {
// Remove WorkerThread from appropriate ArrayList.
LinkedList<Thread> threads;
synchronized(threads = scheduler.getWorkers(isPrefetch)) {
threads.remove(this);
}
// Exit the thread.
return;
}
Job job = (Job)dequeuedObject;
// Execute tile job.
if (job != null) {
job.compute();
// Notify the scheduler only if the Job is blocking.
if(job.isBlocking()) {
synchronized(scheduler) {
scheduler.notify();
}
}
}
} // infinite loop
}
}
/**
 * A tile scheduler derived from Sun Microsystems' reference implementation
 * of the <code>com.lightcrafts.mediax.jai.TileScheduler</code> interface.
 * It provides a mechanism for scheduling tile calculation. Multi-threading
 * is used whenever possible.
*
* @see com.lightcrafts.mediax.jai.TileScheduler
*/
public final class LCTileScheduler implements TileScheduler {
/** The default number of worker threads. */
private static final int NUM_THREADS_DEFAULT = 2;
/** The default number of prefetch threads. */
private static final int NUM_PREFETCH_THREADS_DEFAULT = 1;
/** The instance counter. It is used to compose the name of the
* ThreadGroup.
*/
private static int numInstances = 0;
/** The root ThreadGroup, which holds two sub-groups:
* the ThreadGroup for the standard jobs, and the ThreadGroup for
* the prefetch jobs.
*/
private ThreadGroup rootGroup;
/** The ThreadGroup contains all the standard jobs. */
private ThreadGroup standardGroup;
/** The ThreadGroup contains all the prefetch jobs. */
private ThreadGroup prefetchGroup;
/** The worker thread parallelism. */
private int parallelism = NUM_THREADS_DEFAULT;
/** The processing thread parallelism. */
private int prefetchParallelism = NUM_PREFETCH_THREADS_DEFAULT;
/** The worker thread priority. */
private int priority = Thread.NORM_PRIORITY;
/** The prefetch thread priority. */
private int prefetchPriority = Thread.MIN_PRIORITY;
/** A job queue for tiles waiting to be computed by the worker threads. */
private final LinkedList<Object> queue;
/** A job queue for tiles waiting to be computed by prefetch workers. */
private final LinkedList<Object> prefetchQueue;
/**
* A <code>LinkedList</code> of <code>WorkerThread</code>s that persist
* to do the actual tile computation for normal processing. This
* variable should never be set to <code>null</code>.
*/
private LinkedList<Thread> workers = new LinkedList<Thread>();
/**
* A <code>LinkedList</code> of <code>WorkerThread</code>s that persist
* to do the actual tile computation for prefetch processing. This
* variable should never be set to <code>null</code>.
*/
private LinkedList<Thread> prefetchWorkers = new LinkedList<Thread>();
/**
* The effective number of worker threads; may differ from
* <code>workers.size()</code> due to latency. This value should
* equal the size of <code>workers</code> less the number of
* <code>WorkerThread.TERMINATE</code>s in <code>queue</code>.
*/
private int numWorkerThreads = 0;
/**
* The effective number of prefetch worker threads; may differ from
* <code>prefetchWorkers.size()</code> due to latency. This value should
* equal the size of <code>prefetchWorkers</code> less the number of
* <code>WorkerThread.TERMINATE</code>s in <code>prefetchQueue</code>.
*/
private int numPrefetchThreads = 0;
/**
* <code>Map</code> of tiles currently being computed. The key is
* created from the image and tile indices by the <code>tileKey()</code>
* method. Each key is mapped to an <code>Object[1]</code> which may
* contain <code>null</code>, a <code>Raster</code>, or an indefinite
* <code>Object</code> which represent, respectively, that the tile is
* being computed, the tile itself, and that the tile computation failed.
*/
private final Map<Object, Object[]> tilesInProgress = new HashMap<Object, Object[]>();
/**
* <code>Map</code> of tiles to <code>Request</code>s. The key is
* created from the image and tile indices by the <code>tileKey()</code>
* method. Each key is mapped to a <code>List</code> of
* <code>Request</code> for the tile. If there is no mapping for the
* tile, then there are no current requests. If a mapping exists, it
* should always be non-null and the <code>List</code> value should
* have size of at least unity.
*/
final Map<Object, List<Request>> tileRequests = new HashMap<Object, List<Request>>();
/**
 * <code>Map</code> of tiles to <code>Job</code>s. The key is
* created from the image and tile indices by the <code>tileKey()</code>
* method. Each key is mapped to a <code>Job</code> for the tile. If
* there is no mapping for the tile, then there is no enqueued
* <code>RequestJob</code>.
*/
Map<Object, Job> tileJobs = new HashMap<Object, Job>();
/** The name of this instance. */
private String nameOfThisInstance;
    /**
     * Returns the hash table "key" as an <code>Object</code> for this
     * tile. The key is a <code>BigInteger</code> formed from the owner's
     * image ID (see <code>ImageUtil.generateID(Object)</code>) with the
     * tile's 64-bit linear index (tileY * numXTiles + tileX) appended,
     * so each (image, tile) pair maps to a distinct key.
     */
static Object tileKey(PlanarImage owner, int tileX, int tileY) {
long idx = tileY * (long)owner.getNumXTiles() + tileX;
BigInteger imageID = (BigInteger)owner.getImageID();
byte[] buf = imageID.toByteArray();
int length = buf.length;
byte[] buf1 = new byte[length + 8];
System.arraycopy(buf, 0, buf1, 0, length);
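        // Append the 64-bit linear tile index (least-significant byte first)
        // after the image ID bytes so that each (image, tile) pair yields a
        // distinct key.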
for (int i = 7, j = 0; i >= 0; i--, j += 8)
buf1[length++] = (byte)(idx >> j);
return new BigInteger(buf1);
}
/**
* Returns all <code>TileComputationListener</code>s for the supplied
* <code>List</code> of <code>Request</code>s.
*/
static Set<TileComputationListener> getListeners(List<Request> reqList) {
// Extract the Set of all TileComputationListeners.
HashSet<TileComputationListener> listeners = null;
for (Request req : reqList) {
// XXX Do not need empty check in next line.
if (req.listeners != null && !req.listeners.isEmpty()) {
if (listeners == null) {
listeners = new HashSet<TileComputationListener>();
}
listeners.addAll(req.listeners);
}
}
return listeners;
}
/**
* Constructor.
*
* @param parallelism The number of worker threads to do tile computation.
* If this number is less than 1, no multi-threading is used.
* @param priority The priority of worker threads.
* @param prefetchParallelism The number of threads to do prefetching.
* If this number is less than 1, no multi-threading is used.
* @param prefetchPriority The priority of prefetch threads.
*/
public LCTileScheduler(int parallelism, int priority,
int prefetchParallelism, int prefetchPriority) {
// Create queues and set parallelism and priority to default values.
this();
setParallelism(parallelism);
setPriority(priority);
setPrefetchParallelism(prefetchParallelism);
setPrefetchPriority(prefetchPriority);
}
/**
* Constructor. Processing and prefetch queues are created and all
* parallelism and priority values are set to default values.
*/
public LCTileScheduler() {
queue = new LinkedList<Object>();
prefetchQueue = new LinkedList<Object>();
// The tile scheduler name. It is used to compose the name of the
// ThreadGroup.
String name = "LCTileSchedulerName";
nameOfThisInstance = name + numInstances;
rootGroup = new ThreadGroup(nameOfThisInstance);
rootGroup.setDaemon(true);
standardGroup = new ThreadGroup(rootGroup,
nameOfThisInstance + "Standard");
standardGroup.setDaemon(true);
prefetchGroup = new ThreadGroup(rootGroup,
nameOfThisInstance + "Prefetch");
prefetchGroup.setDaemon(true);
numInstances++;
}
    /**
     * Tile computation. Makes the actual calls to getTile() and returns the
     * first exception encountered, or <code>null</code> if every requested
     * tile was computed; with a listener-bearing request, tile status is
     * updated and listeners are notified as tiles complete or fail.
     */
Exception compute(PlanarImage owner, Point[] tileIndices,
Raster[] tiles, int offset, int numTiles,
Request request) {
Exception exception = null;
int j = offset;
if(request == null || request.listeners == null) {
for(int i = 0; i < numTiles; i++, j++) {
final Point p = tileIndices[j];
try {
tiles[j] = owner.getTile(p.x, p.y);
} catch (Exception e) {
exception = e;
// Abort the remaining tiles in the job.
break;
}
}
} else { // listeners present
final Request[] reqs = new Request[] {request};
for(int i = 0; i < numTiles; i++, j++) {
final Point p = tileIndices[j];
// Update tile status to "processing".
Integer tileStatus = TileRequest.TILE_STATUS_PROCESSING;
request.tileStatus.put(p, tileStatus);
try {
tiles[j] = owner.getTile(p.x, p.y);
for (TileComputationListener listener : request.listeners) {
// Update tile status to "computed".
tileStatus = TileRequest.TILE_STATUS_COMPUTED;
request.tileStatus.put(p, tileStatus);
listener.tileComputed(this,
reqs,
owner,
p.x, p.y,
tiles[j]);
}
} catch (Exception e) {
exception = e;
// Abort the remaining tiles in the job.
break;
}
}
}
// If an exception occurred, notify listeners that all remaining
// tiles in the job have failed.
if(exception != null && request != null && request.listeners != null) {
final int lastOffset = j;
final int numFailed = numTiles - (lastOffset - offset);
// Mark all tiles starting with the one which generated the
// Exception as "failed".
for(int i = 0, k = lastOffset; i < numFailed; i++) {
Integer tileStatus = TileRequest.TILE_STATUS_FAILED;
request.tileStatus.put(tileIndices[k++], tileStatus);
}
// Notify listeners.
Request[] reqs = new Request[] {request};
for(int i = 0, k = lastOffset; i < numFailed; i++) {
Point p = tileIndices[k++];
for (TileComputationListener listener : request.listeners) {
listener.tileComputationFailure(this, reqs,
owner, p.x, p.y,
exception);
}
}
}
return exception;
}
/**
* Schedules a single tile for computation.
*
* @param owner The image the tiles belong to.
* @param tileX The tile's X index.
* @param tileY The tile's Y index.
*
* @exception IllegalArgumentException if <code>owner</code> is
* <code>null</code>.
*
* @return The computed tile
*/
//
// This method blocks on the 'tilesInProgress' Map to avoid simultaneous
// computation of the same tile in two or more different threads. The idea
// is to release the resources of all but one thread so that the computation
// occurs more quickly. The synchronization variable is an Object[] of length
// unity. The computed tile is passed from the computing thread to the
// waiting threads via the contents of this Object[]. Thus this method does
// not depend on the TileCache to transfer the data.
//
public Raster scheduleTile(OpImage owner,
int tileX,
int tileY) {
if (owner == null) {
throw new IllegalArgumentException("Null owner");
}
// Eventual tile to be returned.
Raster tile = null;
// Get the tile's unique ID.
final Object tileID = tileKey(owner, tileX, tileY);
// Set the computation flag and initialize or retrieve the tile cache.
boolean computeTile;
final Object[] cache;
synchronized(tilesInProgress) {
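            // The first thread to claim this tile ID installs an Object[1]
            // placeholder (note the assignment inside the condition below);
            // any later thread finds the entry and waits on that shared
            // array for the result instead of recomputing the tile.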
if(computeTile = !tilesInProgress.containsKey(tileID)) {
// Computing: add tile ID to the map.
tilesInProgress.put(tileID, cache = new Object[1]);
} else {
// Waiting: get tile cache from the Map.
cache = tilesInProgress.get(tileID);
}
}
if(computeTile) {
try {
try {
// Attempt to compute the tile.
tile = owner.computeTile(tileX, tileY);
} catch (OutOfMemoryError e) {
// Free some space in cache
TileCache tileCache = owner.getTileCache();
if(tileCache != null) {
tileCache.removeTiles(owner);
}
try {
// Re-attempt to compute the tile.
tile = owner.computeTile(tileX, tileY);
} catch (OutOfMemoryError e1) {
// Empty the cache
if(tileCache != null) {
tileCache.flush();
}
}
// Re-attempt to compute the tile.
tile = owner.computeTile(tileX, tileY);
}
} catch(Throwable e) {
// Re-throw the Error or Exception.
if(e instanceof Error) {
throw (Error)e;
} else {
sendExceptionToListener("RuntimeException", e);
}
            } finally {
                synchronized(cache) {
                    // Always set the cached tile to a non-null value.
                    cache[0] = tile != null ? tile : new Object();
                    // Notify the waiting thread(s); the cache monitor must be
                    // held to call notifyAll().
                    cache.notifyAll();
                }
                synchronized(tilesInProgress) {
                    // Remove the tile ID from the Map.
                    tilesInProgress.remove(tileID);
                }
            }
        } else {
            synchronized(cache) {
                // Check the cache: a null value indicates computation is
                // still in progress. Loop to guard against spurious wakeups;
                // the cache monitor must be held to call wait().
                while(cache[0] == null) {
                    // Wait for the computation to complete.
                    try {
                        cache.wait(); // XXX Should there be a timeout?
                    } catch(InterruptedException e) {
                        // XXX What response here?
                    }
                }
            }
// Set the result only if cache contains a Raster.
if(cache[0] instanceof Raster) {
tile = (Raster)cache[0];
} else {
throw new RuntimeException("Not a Raster instance?");
}
}
return tile;
}
/**
* General purpose method for job creation and queueing. Note that
 * the returned value should be ignored if the <code>listeners</code>
 * parameter is non-<code>null</code>.
*
* @param owner The image for which tile computation jobs will be queued.
* @param tileIndices The indices of the tiles to be computed.
* @param isPrefetch Whether the operation is a prefetch.
 * @param listeners The <code>TileComputationListener</code>s to be
 * notified of tile computation. May be <code>null</code>.
 *
 * @return The computed tiles. This value is meaningless if
 * <code>listeners</code> is non-<code>null</code>.
*/
    // The allowable arguments are constrained as follows:
    // A) owner and tileIndices non-null.
    // B) (isBlocking,isPrefetch) in {(true,false),(false,false),(false,true)}
    // C) listeners != null <=> (isBlocking,isPrefetch) == (false,false)
    // The returned value is one of:
    // Raster[]    <=> (isBlocking,isPrefetch) == (true,false)
    // TileRequest <=> (isBlocking,isPrefetch) == (false,false)
    // Raster[] (contents not guaranteed) <=> (isBlocking,isPrefetch) == (false,true)
private Object scheduleJob(PlanarImage owner,
Point[] tileIndices,
boolean isBlocking,
boolean isPrefetch,
TileComputationListener[] listeners) {
if(owner == null || tileIndices == null) {
// null parameters
throw new IllegalArgumentException(); // coding error - no message
} else if((isBlocking || isPrefetch) && listeners != null) {
// listeners for blocking or prefetch job
throw new IllegalArgumentException(); // coding error - no message
} else if(isBlocking && isPrefetch) {
throw new IllegalArgumentException(); // coding error - no message
}
int numTiles = tileIndices.length;
Raster[] tiles = new Raster[numTiles];
Object returnValue = tiles;
final int numThreads;
Job[] jobs = null;
int numJobs = 0;
synchronized(getWorkers(isPrefetch)) {
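            // The worker list itself serves as the lock here, so the thread
            // creation/termination done in getNumThreads() and the job
            // enqueueing below are serialized against other scheduling calls
            // of the same type.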
numThreads = getNumThreads(isPrefetch);
if(numThreads > 0) { // worker threads exist
if(numTiles <= numThreads || // no more tiles than threads
(!isBlocking && !isPrefetch)) { // non-blocking, non-prefetch
jobs = new Job[numTiles];
if(!isBlocking && !isPrefetch) {
Request request =
new Request(this, owner, tileIndices, listeners);
// Override return value.
returnValue = request;
                        // Queue all tiles as single-tile jobs. A separate
                        // cursor walks the caller's tile list so that a tile
                        // which is already pending is visited exactly once;
                        // 'numJobs' counts only the tiles that get a new job.
                        int nextIndex = 0;
                        while(nextIndex < numTiles) {
                            Point p = tileIndices[nextIndex++];
                            Object tileID = tileKey(owner, p.x, p.y);
                            synchronized(tileRequests) {
                                List<Request> reqList = tileRequests.get(tileID);
                                if (reqList != null) {
                                    // This tile is already queued in a
                                    // non-blocking, non-prefetch job; just
                                    // attach this request to it.
                                    reqList.add(request);
                                } else {
                                    // This tile has not yet been queued.
                                    reqList = new ArrayList<Request>();
                                    reqList.add(request);
                                    tileRequests.put(tileID, reqList);
                                    jobs[numJobs] = new RequestJob(this, owner,
                                                                   p.x, p.y,
                                                                   tiles, numJobs);
                                    tileJobs.put(tileID, jobs[numJobs]);
                                    addJob(jobs[numJobs++], false);
                                }
                            }
                        }
} else { // numTiles <= numThreads
while(numJobs < numTiles) {
jobs[numJobs] = new TileJob(this,
isBlocking,
owner,
tileIndices,
tiles,
numJobs,
1);
addJob(jobs[numJobs++], isPrefetch);
}
}
} else { // more tiles than worker threads
// Set the fraction of unqueued tiles to be processed by
// each worker thread.
float frac = 1.0F/(2.0F*numThreads);
// Set the minimum number of tiles each thread may process.
// If there is only one thread this will equal the total
// number of tiles.
int minTilesPerThread = numThreads == 1 ? numTiles :
Math.min(Math.max(1, (int)(frac*numTiles/2.0F + 0.5F)),
numTiles);
// Allocate the maximum possible number of multi-tile jobs.
// This will be larger than the actual number of jobs but
                    // a more precise calculation is not possible, and a dynamic
                    // storage object such as a Collection would not be useful
                    // since, as calculated, maxNumJobs = 4*numThreads if the
                    // preceding values of "frac" and "minTilesPerThread" are
                    // 1/(2*numThreads) and frac*numTiles/2, respectively.
int maxNumJobs = numThreads == 1 ? 1 :
(int)((float)numTiles/(float)minTilesPerThread+0.5F);
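                    // Worked example (from the formulas above): with
                    // numThreads = 2 and numTiles = 100, frac = 0.25,
                    // minTilesPerThread = 13 and maxNumJobs = 8; the loop
                    // below then creates 6 jobs of 25, 19, 14, 13, 13 and
                    // 16 tiles.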
jobs = new TileJob[maxNumJobs];
// Set the number of enqueued tiles and the number left.
int numTilesQueued = 0;
int numTilesLeft = numTiles - numTilesQueued;
// Assign a number of tiles to each thread determined by
// the number of remaining tiles, the fraction of remaining
// tiles to be processed and the minimum chunk size.
while(numTilesLeft > 0) {
// Set the number of tiles to the pre-calculated
// fraction of tiles yet to be computed.
int numTilesInThread = (int)(frac*numTilesLeft + 0.5F);
// Ensure that the number to be processed is at
// least the minimum chunk size.
if(numTilesInThread < minTilesPerThread) {
numTilesInThread = minTilesPerThread;
}
// Clamp number of tiles in thread to number unqueued.
if(numTilesInThread > numTilesLeft) {
numTilesInThread = numTilesLeft;
}
// Decrement the count of remaining tiles. Note that
// this value will be non-negative due to the clamping
// above.
numTilesLeft -= numTilesInThread;
// If the number left is smaller than the minimum chunk
// size then process these tiles in the current job.
if(numTilesLeft < minTilesPerThread) {
numTilesInThread += numTilesLeft;
numTilesLeft = 0;
}
// Create a job to process the number of tiles needed.
jobs[numJobs] = new TileJob(this,
isBlocking,
owner,
tileIndices,
tiles,
numTilesQueued,
numTilesInThread);
// Queue the job and increment the job count.
addJob(jobs[numJobs++], isPrefetch);
// Increment the count of tiles queued.
numTilesQueued += numTilesInThread;
}
} // SingleTile vs. MultiTile Jobs
} // numThreads > 0
} // end synchronized block
if(numThreads != 0) {
// If blocking, wait until all tiles have been computed.
// There is no 'else' block for non-blocking as in that
// case we just want to continue.
if(isBlocking) {
for (int i = 0; i < numJobs; i++) {
synchronized(this) {
while (jobs[i].notDone()) {
try {
wait();
} catch(InterruptedException ie) {
// Ignore: should never happen.
}
}
}
// XXX: should we re-throw the exception or
// should we reschedule this job ?? krishnag
Exception e = jobs[i].getException();
if (e != null) {
// Throw a RuntimeException with the Exception's
// message concatenated with the stack trace.
String message = "Exception while scheduling tiles: ";
sendExceptionToListener(message,
new ImagingException(message, e));
}
}
}
} else { // numThreads == 0
Request request = null;
if(!isBlocking && !isPrefetch) {
request = new Request(this, owner, tileIndices, listeners);
returnValue = request;
}
// no workers; sequentially compute tiles in main thread
Exception e = compute(owner, tileIndices, tiles, 0, numTiles,
request);
// Throw a RuntimeException with the Exception's
// message concatenated with the stack trace.
if(e != null) {
String message = "Exception while scheduling tiles: ";
sendExceptionToListener(message,
new ImagingException(message, e));
}
}
return returnValue;
}
/**
* Schedules multiple tiles of an image for computation.
*
* @param owner The image the tiles belong to.
* @param tileIndices An array of tile X and Y indices.
*
* @return An array of computed tiles.
*/
public Raster[] scheduleTiles(OpImage owner,
Point tileIndices[]) {
if (owner == null || tileIndices == null) {
throw new IllegalArgumentException("Null owner or TileIndices");
}
return (Raster[])scheduleJob(owner, tileIndices, true, false, null);
}
/**
* Schedule a list of tiles for computation. The supplied listeners
* will be notified after each tile has been computed. This
* method ideally should be non-blocking. If the <code>TileScheduler</code>
* implementation uses multithreading, it is at the discretion of the
* implementation which thread invokes the
* <code>TileComputationListener</code> methods.
*/
public TileRequest scheduleTiles(PlanarImage target, Point[] tileIndices,
TileComputationListener[] tileListeners) {
if (target == null || tileIndices == null) {
throw new IllegalArgumentException("Null owner or TileIndices");
}
return (TileRequest)scheduleJob(target, tileIndices, false, false,
tileListeners);
}
/**
* Issues an advisory cancellation request to the
* <code>TileScheduler</code> stating that the indicated tiles of the
* specified image should not be processed. The handling of this request
* is at the discretion of the scheduler which may cancel tile processing
* in progress and remove tiles from its internal queue, remove tiles from
* the queue but not terminate current processing, or simply do nothing.
*
* <p> In the Sun Microsystems reference implementation of
* <code>TileScheduler</code> the second tile cancellation option is
* implemented, i.e., tiles are removed from the internal queue but
* computation already in progress is not terminated. If there is at
* least one worker thread this method should be non-blocking. Any tiles
* allowed to complete computation subsequent to this call are complete
* and will be treated as if they had not been cancelled, e.g., with
* respect to caching, notification of registered listeners, etc.
* Furthermore, cancelling a tile request in no way invalidates the tile
* as a candidate for future recomputation.
*/
public void cancelTiles(TileRequest request, Point[] tileIndices) {
if(request == null) {
throw new IllegalArgumentException("Null TileRequest");
}
Request req = (Request)request;
synchronized(tileRequests) {
// Save the list of all tile indices in this request.
List<Point> reqIndexList = req.indices;
// Initialize the set of tile indices to cancel.
Point[] indices;
if(tileIndices != null && tileIndices.length > 0) {
                // Create a modifiable List from the supplied indices;
                // Arrays.asList() alone is a fixed-size view and would
                // reject the retainAll() below.
                List<Point> tileIndexList =
                    new ArrayList<Point>(Arrays.asList(tileIndices));
                // Retain only indices which were actually in the request.
                tileIndexList.retainAll(reqIndexList);
indices = tileIndexList.toArray(new Point[tileIndexList.size()]);
} else {
indices = reqIndexList.toArray(new Point[reqIndexList.size()]);
}
// Cache status value.
Integer tileStatus = TileRequest.TILE_STATUS_CANCELLED;
// Loop over tile indices to be cancelled.
for (Point p : indices) {
// Get the tile's ID.
Object tileID = tileKey(req.image, p.x, p.y);
// Get the list of requests for this tile.
List<Request> reqList = tileRequests.get(tileID);
// If there are none, proceed to next index.
if(reqList == null) {
continue;
}
// Remove this Request from the Request List for this tile.
reqList.remove(req);
// If the request list is now empty, dequeue the job and
// remove the tile from the hashes.
if(reqList.isEmpty()) {
synchronized(queue) {
Object job = tileJobs.remove(tileID);
if(job != null) {
queue.remove(job);
}
}
tileRequests.remove(tileID);
}
// Update tile status to "cancelled".
req.tileStatus.put(p, tileStatus);
// Notify any listeners.
if(req.listeners != null) {
TileRequest[] reqArray = new TileRequest[]{req};
for (TileComputationListener listener : req.listeners) {
listener.tileCancelled(this, reqArray,
req.image, p.x, p.y);
}
}
}
}
}
/**
 * Prefetches a list of tiles of an image.
*
* @param owner The image the tiles belong to.
* @param tileIndices An array of tile X and Y indices.
*/
public void prefetchTiles(PlanarImage owner,
Point[] tileIndices) {
if(owner == null || tileIndices == null) {
throw new IllegalArgumentException("Null owner or TileIndices");
}
scheduleJob(owner, tileIndices, false, true, null);
}
/**
* Suggests to the scheduler the degree of parallelism to use in
* processing invocations of <code>scheduleTiles()</code>. For
* example, this might set the number of threads to spawn. It is
* legal to implement this method as a no-op.
*
* <p> In the Sun Microsystems reference implementation of TileScheduler
* this method sets the number of worker threads actually used for tile
* computation. Ideally this number should equal the number of processors
* actually available on the system. It is the responsibility of the
* application to set this value as the number of processors is not
* available via the virtual machine. A parallelism value of zero
* indicates that all tile computation will be effected in the primary
* thread. A parallelism value of <i>N</i> indicates that there will be
* <i>N</i> worker threads in addition to the primary scheduler thread.
 * In JAI the parallelism defaults to a value of 2 unless explicitly set
* by the application.
*
* @param parallelism The suggested degree of parallelism.
* @throws IllegalArgumentException if <code>parallelism</code>
* is negative.
*/
public void setParallelism(int parallelism) {
if (parallelism < 0) {
throw new IllegalArgumentException("Negative Parallelism?");
}
this.parallelism = parallelism;
}
/**
* Returns the degree of parallelism of the scheduler.
*/
public int getParallelism() {
return parallelism;
}
/**
* Identical to <code>setParallelism()</code> but applies only to
* <code>prefetchTiles()</code>.
*/
public void setPrefetchParallelism(int parallelism) {
if (parallelism < 0) {
throw new IllegalArgumentException("Negative Parallelism?");
}
prefetchParallelism = parallelism;
}
/**
* Identical to <code>getParallelism()</code> but applies only to
* <code>prefetchTiles()</code>.
*/
public int getPrefetchParallelism() {
return prefetchParallelism;
}
/**
* Suggests to the scheduler the priority to assign to processing
* effected by <code>scheduleTiles()</code>. For example, this might
* set thread priority. Values outside of the accepted priority range
* will be clamped to the nearest extremum. An implementation may clamp
* the prefetch priority to less than the scheduling priority. It is
* legal to implement this method as a no-op.
*
* <p> In the Sun Microsystems reference implementation of TileScheduler
* this method sets the priority of the worker threads used for tile
* computation. Its initial value is <code>Thread.NORM_PRIORITY</code>.
*
* @param priority The suggested priority.
*/
public void setPriority(int priority) {
this.priority = Math.max(Math.min(priority, Thread.MAX_PRIORITY),
Thread.MIN_PRIORITY);
}
/**
* Returns the priority of <code>scheduleTiles()</code> processing.
*/
public int getPriority() {
return priority;
}
/**
* Identical to <code>setPriority()</code> but applies only to
* <code>prefetchTiles()</code>.
*
* <p> In the Sun Microsystems reference implementation of
* <code>TileScheduler</code>, this method sets the priority of any threads
* spawned to prefetch tiles. Its initial value is
* <code>Thread.MIN_PRIORITY</code>.
*/
public void setPrefetchPriority(int priority) {
prefetchPriority = Math.max(Math.min(priority, Thread.MAX_PRIORITY),
Thread.MIN_PRIORITY);
}
/**
* Identical to <code>getPriority()</code> but applies only to
* <code>prefetchTiles()</code>.
*/
public int getPrefetchPriority() {
return prefetchPriority;
}
    /** Recreates the <code>ThreadGroup</code>s and <code>WorkerThread</code>s.
     * This matters when running as an applet: the Java plugin outlives the
     * applet, so JAI and LCTileScheduler survive as well, but their
     * <code>ThreadGroup</code>s are destroyed. Thus the old workers must be
     * discarded and new <code>ThreadGroup</code>s and workers created.
     */
// private synchronized void createThreadGroup(boolean isPrefetch) {
private void createThreadGroup(boolean isPrefetch) {
if (rootGroup == null || rootGroup.isDestroyed()) {
rootGroup = new ThreadGroup(nameOfThisInstance);
rootGroup.setDaemon(true);
}
if (isPrefetch &&
(prefetchGroup == null || prefetchGroup.isDestroyed())) {
prefetchGroup = new ThreadGroup(rootGroup,
nameOfThisInstance + "Prefetch");
prefetchGroup.setDaemon(true);
}
if (!isPrefetch &&
(standardGroup == null || standardGroup.isDestroyed())) {
standardGroup = new ThreadGroup(rootGroup,
nameOfThisInstance + "Standard");
standardGroup.setDaemon(true);
}
        LinkedList<Thread> thr = getWorkers(isPrefetch);
        // Drop workers that are no longer alive; the iterator's remove() is
        // used so the list is not structurally modified during a for-each
        // traversal (which would throw ConcurrentModificationException).
        for (Iterator<Thread> it = thr.iterator(); it.hasNext(); ) {
            if (!it.next().isAlive())
                it.remove();
        }
if (isPrefetch)
numPrefetchThreads = thr.size();
else
numWorkerThreads = thr.size();
}
/**
* Returns the effective number of threads of the specified type.
* This method also updates the number and priority of threads of
* the specified type according to the global settings. This method
* may add <code>WorkerThread.TERMINATE</code>s to the appropriate
* queue if there are too many effective threads.
*/
private int getNumThreads(boolean isPrefetch) {
createThreadGroup(isPrefetch);
// Local variables.
LinkedList<Thread> thr = getWorkers(isPrefetch);
int nthr;
final int prll;
final int prty;
// Set local variables depending on the thread type.
if(isPrefetch) {
nthr = numPrefetchThreads;
prll = prefetchParallelism;
prty = prefetchPriority;
} else {
nthr = numWorkerThreads;
prll = parallelism;
prty = priority;
}
// Update priority if it has changed.
if(nthr > 0 &&
(thr.get(0)).getPriority() != prty) {
for (Thread t : thr) {
if (t != null && t.getThreadGroup() != null) {
t.setPriority(prty);
}
}
}
if(nthr < prll) {
// Not enough processing threads.
// Add more threads at current priority.
while(nthr < prll) {
Thread t =
new WorkerThread(isPrefetch ? prefetchGroup : standardGroup,
this, isPrefetch);
t.setPriority(prty);
thr.add(t);
nthr++;
}
} else {
// Too many processing threads: queue WorkerThread.TERMINATEs.
// WorkerThread will remove itself later from the appropriate
// ArrayList.
while(nthr > prll) {
addJob(WorkerThread.TERMINATE, isPrefetch);
nthr--;
}
}
// Update the number of effective threads.
if(isPrefetch) {
numPrefetchThreads = nthr;
} else {
numWorkerThreads = nthr;
}
return nthr;
}
    /** Returns the worker list appropriate to the thread type. */
    LinkedList<Thread> getWorkers(boolean isPrefetch) {
        return isPrefetch ? prefetchWorkers : workers;
    }
/** Returns the appropriate queue. */
LinkedList<Object> getQueue(boolean isPrefetch) {
return isPrefetch ? prefetchQueue : queue;
}
/** Append a job to the appropriate queue. */
private void addJob(Object job, boolean isPrefetch) {
if(job == null ||
(job != WorkerThread.TERMINATE && !(job instanceof Job))) {
// Programming error: deliberately no message.
throw new IllegalArgumentException();
}
        final LinkedList<Object> jobQueue = getQueue(isPrefetch);
        // The queue's monitor must be held both to modify the list and to
        // notify() a WorkerThread waiting on it.
        synchronized(jobQueue) {
            if(isPrefetch ||
               jobQueue.isEmpty() ||
               job instanceof RequestJob) {
                // Append job to queue.
                jobQueue.addLast(job);
            } else {
                // The job is a TileJob (or TERMINATE) on a non-empty standard
                // queue: insert it after the last TileJob already queued so it
                // is processed ahead of any pending non-blocking RequestJobs.
                boolean inserted = false;
                for(int idx = jobQueue.size() - 1; idx >= 0; idx--) {
                    if(jobQueue.get(idx) instanceof TileJob) {
                        jobQueue.add(idx+1, job);
                        inserted = true;
                        break;
                    }
                }
                if(!inserted) {
                    jobQueue.addFirst(job);
                }
            }
            jobQueue.notify();
        }
    }
/** Queue WorkerThread.TERMINATEs to all workers. */
protected void finalize() throws Throwable {
terminateAll(false);
terminateAll(true);
super.finalize();
}
/** Queue WorkerThread.TERMINATEs to all appropriate workers. */
private void terminateAll(boolean isPrefetch) {
synchronized(getWorkers(isPrefetch)) {
int numThreads = isPrefetch ?
numPrefetchThreads : numWorkerThreads;
for(int i = 0; i < numThreads; i++) {
addJob(WorkerThread.TERMINATE, isPrefetch);
if(isPrefetch) {
numPrefetchThreads--;
} else {
numWorkerThreads--;
}
}
}
}
void sendExceptionToListener(String message, Throwable e) {
ImagingListener listener =
ImageUtil.getImagingListener((RenderingHints)null);
listener.errorOccurred(message, e, this, false);
}
}
|
lightcrafts/src/com/lightcrafts/jai/utils/LCTileScheduler.java
|
/* Copyright (C) 2005-2011 Fabio Riccardi */
package com.lightcrafts.jai.utils;
/*
* $RCSfile: SunTileScheduler.java,v $
*
* Copyright (c) 2005 Sun Microsystems, Inc. All rights reserved.
*
* Use is subject to license terms.
*
* $Revision: 1.1 $
* $Date: 2005/02/11 04:57:02 $
* $State: Exp $
*/
import java.awt.Point;
import java.awt.RenderingHints;
import java.awt.image.Raster;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Vector;
import com.lightcrafts.mediax.jai.OpImage;
import com.lightcrafts.mediax.jai.PlanarImage;
import com.lightcrafts.mediax.jai.TileCache;
import com.lightcrafts.mediax.jai.TileComputationListener;
import com.lightcrafts.mediax.jai.TileRequest;
import com.lightcrafts.mediax.jai.TileScheduler;
import com.lightcrafts.mediax.jai.util.ImagingException;
import com.lightcrafts.mediax.jai.util.ImagingListener;
import com.lightcrafts.media.jai.util.ImageUtil;
/**
* A class representing a request for non-prefetch background computation
* of tiles. The object stores the image, the indices of all tiles being
* requested, and references to all listeners associated with the request.
*
* <code>TileRequest</code> methods are not commented.
*/
class Request implements TileRequest {
private final TileScheduler scheduler;
final PlanarImage image;
final List indices;
final Set listeners;
final Hashtable tileStatus;
/**
* Constructs a <code>Request</code>.
*
* @param scheduler The scheduler processing this request.
* @param image The image for which tiles are being computed.
* @param tileIndices The indices of the tiles to be computed.
* @param tileListeners The listeners to be notified of tile
* computation, cancellation, or failure.
*
* @exception IllegalArgumentException if <code>scheduler</code>,
* <code>image</code>, or <code>tileIndices</code> is
* <code>null</code> or if <code>tileIndices</code> is
* zero-length.
*/
Request(TileScheduler scheduler,
PlanarImage image,
Point[] tileIndices,
TileComputationListener[] tileListeners) {
// Save a reference to the scheduler.
if(scheduler == null) {
throw new IllegalArgumentException(); // Internal error - no message.
}
this.scheduler = scheduler;
// Save a reference to the image.
if(image == null) {
throw new IllegalArgumentException(); // Internal error - no message.
}
this.image = image;
// Ensure there is at least one tile in the request.
if(tileIndices == null || tileIndices.length == 0) {
// If this happens it is an internal programming error.
throw new IllegalArgumentException(); // Internal error - no message.
}
// Save the tile indices.
indices = Arrays.asList(tileIndices);
// Save references to the listeners, if any.
if(tileListeners != null) {
int numListeners = tileListeners.length;
if(numListeners > 0) {
listeners = new HashSet(numListeners);
for(int i = 0; i < numListeners; i++) {
listeners.add(tileListeners[i]);
}
} else {
listeners = null;
}
} else {
listeners = null;
}
// Initialize status table.
tileStatus = new Hashtable(tileIndices.length);
}
// --- TileRequest implementation ---
public PlanarImage getImage() {
return image;
}
public Point[] getTileIndices() {
return (Point[])indices.toArray(new Point[0]);
}
public TileComputationListener[] getTileListeners() {
return (TileComputationListener[])
listeners.toArray(new TileComputationListener[0]);
}
public boolean isStatusAvailable() {
return true;
}
public int getTileStatus(int tileX, int tileY) {
Point p = new Point(tileX, tileY);
int status;
if(tileStatus.containsKey(p)) {
status = ((Integer)tileStatus.get(p)).intValue();
} else {
status = TileRequest.TILE_STATUS_PENDING;
}
return status;
}
public void cancelTiles(Point[] tileIndices) {
// Forward the call to the scheduler.
scheduler.cancelTiles(this, tileIndices);
}
}
/** A job to put in a job queue. */
interface Job {
/** Computes the job required. */
void compute();
/** Returns <code>true</code> if the job is not done. */
boolean notDone();
/** Returns the image for which tiles are being computed. */
PlanarImage getOwner();
/**
* Returns <code>true</code> if and only if the job should block the
* thread which processes it. In this case the scheduler and the
* processing thread must communicate using <code>wait()</code> and
* <code>notify()</code>.
*/
boolean isBlocking();
/** Returns the first exception encountered or <code>null</code>. */
Exception getException();
}
/**
* A <code>Job</code> which computes a single tile at a time for a
* non-prefetch background job queued by the version of scheduleTiles()
* which returns a <code>TileRequest</code>. This <code>Job</code>
* notifies all <code>TileComputationListener</code>s of all
* <code>TileRequest</code>s with which this tile is associated of
* whether the tile was computed or the computation failed.
*/
final class RequestJob implements Job {
final LCTileScheduler scheduler; // the TileScheduler
final PlanarImage owner; // the image this tile belongs to
final int tileX; // tile's X index
final int tileY; // tile's Y index
final Raster[] tiles; // the computed tiles
final int offset; // offset into arrays
boolean done = false; // flag indicating completion status
Exception exception = null; // Any exception that might have occured
// during computeTile
/** Constructor. */
RequestJob(LCTileScheduler scheduler,
PlanarImage owner, int tileX, int tileY,
Raster[] tiles, int offset) {
this.scheduler = scheduler;
this.owner = owner;
this.tileX = tileX;
this.tileY = tileY;
this.tiles = tiles;
this.offset = offset;
}
/**
* Tile computation. Does the actual call to getTile().
*/
public void compute() {
// Get the Request List.
List reqList;
synchronized(scheduler.tileRequests) {
// Initialize the tile ID.
Object tileID = LCTileScheduler.tileKey(owner, tileX, tileY);
// Remove the List of Requests from the request Map.
reqList = (List)scheduler.tileRequests.remove(tileID);
// Remove the tile Job from the job Map.
scheduler.tileJobs.remove(tileID);
}
// Check whether reqList is valid in case job was cancelled while
// blocking on the tileRequests Map above.
// XXX Do not need empty check in next line?
if(reqList != null && !reqList.isEmpty()) {
// Update tile status to "processing".
Point p = new Point(tileX, tileY);
Integer tileStatus = new Integer(TileRequest.TILE_STATUS_PROCESSING);
Iterator reqIter = reqList.iterator();
while(reqIter.hasNext()) {
Request r = (Request)reqIter.next();
r.tileStatus.put(p, tileStatus);
}
try {
tiles[offset] = owner.getTile(tileX, tileY);
} catch (Exception e) {
exception = e;
} catch (Error e) {
exception = new Exception(e);
} finally {
// Extract the Set of all TileComputationListeners.
int numReq = reqList.size();
Set listeners = LCTileScheduler.getListeners(reqList);
// XXX Do not need empty check in next line.
if(listeners != null && !listeners.isEmpty()) {
// Get TileRequests as an array for later use.
TileRequest[] requests =
(TileRequest[])reqList.toArray(new TileRequest[0]);
// Update tile status as needed.
tileStatus = new Integer(exception == null ?
TileRequest.TILE_STATUS_COMPUTED :
TileRequest.TILE_STATUS_FAILED);
for(int i = 0; i < numReq; i++) {
((Request)requests[i]).tileStatus.put(p, tileStatus);
}
// Create an Iterator over the listeners.
Iterator iter = listeners.iterator();
// Notify listeners.
if(exception == null) {
// Tile computation successful.
while(iter.hasNext()) {
TileComputationListener listener =
(TileComputationListener)iter.next();
listener.tileComputed(scheduler, requests,
owner, tileX, tileY,
tiles[offset]);
}
} else {
// Tile computation unsuccessful.
while(iter.hasNext()) {
TileComputationListener listener =
(TileComputationListener)iter.next();
listener.tileComputationFailure(scheduler, requests,
owner, tileX, tileY,
exception);
}
}
}
}
}
// Set the flag indicating job completion.
done = true;
}
/**
* Returns <code>true</code> if the job is not done; that is,
* the tile is not computed and no exceptions have occurred.
*/
public boolean notDone() {
return !done;
}
/** Returns the image for which the tile is being computed. */
public PlanarImage getOwner() {
return owner;
}
/** Always returns <code>true</code>. */
public boolean isBlocking() {
// Big Change: this should prevent enqueueing of new tiles while an image is being processed
return true;
}
/** Returns any encountered exception or <code>null</code>. */
public Exception getException() {
return exception;
}
/** Returns a string representation of the class object. */
public String toString() {
String tString = "null";
if (tiles[offset] != null) {
tString = tiles[offset].toString();
}
return getClass().getName() + "@" + Integer.toHexString(hashCode()) +
": owner = " + owner.toString() +
" tileX = " + Integer.toString(tileX) +
" tileY = " + Integer.toString(tileY) +
" tile = " + tString;
}
}
/**
* A <code>Job</code> which computes one or more tiles at a time for either
* a prefetch job or a blocking job.
*/
final class TileJob implements Job {
final LCTileScheduler scheduler; // the TileScheduler
final boolean isBlocking; // whether the job is blocking
final PlanarImage owner; // the image this tile belongs to
final Point[] tileIndices; // the tile indices
final Raster[] tiles; // the computed tiles
final int offset; // offset into arrays
final int numTiles; // number of elements to use in indices array
boolean done = false; // flag indicating completion status
Exception exception = null; // The first exception that might have
// occured during computeTile
/** Constructor. */
TileJob(LCTileScheduler scheduler, boolean isBlocking,
PlanarImage owner, Point[] tileIndices,
Raster[] tiles, int offset, int numTiles) {
this.scheduler = scheduler;
this.isBlocking = isBlocking;
this.owner = owner;
this.tileIndices = tileIndices;
this.tiles = tiles;
this.offset = offset;
this.numTiles = numTiles;
}
/**
* Tile computation. Does the actual calls to getTile().
*/
public void compute() {
exception = scheduler.compute(owner, tileIndices, tiles,
offset, numTiles, null);
done = true;
}
/**
* Returns <code>true</code> if the job is not done; that is,
* the tile is not computed and no exceptions have occurred.
*/
public boolean notDone() {
return !done;
}
/** Returns the image for which tiles are being computed. */
public PlanarImage getOwner() {
return owner;
}
/** Returns <code>true</code> if and only if there is a listener. */
public boolean isBlocking() {
return isBlocking;
}
/** Returns any encountered exception or <code>null</code>. */
public Exception getException() {
return exception;
}
}
/**
* Worker thread that takes jobs from the tile computation queue and does
* the actual computation.
*/
class WorkerThread extends Thread {
/** <code>Object</code> indicating the the thread should exit. */
public static final Object TERMINATE = new Object();
/** The scheduler that spawned this thread. */
LCTileScheduler scheduler;
/** Whether this is a prefetch thread. */
boolean isPrefetch;
/** Constructor. */
public WorkerThread(ThreadGroup group,
LCTileScheduler scheduler,
boolean isPrefetch) {
super(group, group.getName() + group.activeCount());
this.scheduler = scheduler;
this.isPrefetch = isPrefetch;
setDaemon(true);
start();
}
/** Does the tile computation. */
public void run() {
LinkedList jobQueue = scheduler.getQueue(isPrefetch);
while(true) {
Object dequeuedObject = null;
// Check the job queue.
synchronized(jobQueue) {
if(jobQueue.size() > 0) {
// Remove the first job.
dequeuedObject = jobQueue.removeFirst();
} else {
try {
// Wait for a notify() on the queue.
jobQueue.wait();
continue;
} catch(InterruptedException ie) {
// Ignore: should never happen.
}
}
}
if(dequeuedObject == TERMINATE ||
getThreadGroup() == null || getThreadGroup().isDestroyed()) {
// Remove WorkerThread from appropriate Vector.
Vector threads;
synchronized(threads = scheduler.getWorkers(isPrefetch)) {
threads.remove(this);
}
// Exit the thread.
return;
}
Job job = (Job)dequeuedObject;
// Execute tile job.
if (job != null) {
job.compute();
// Notify the scheduler only if the Job is blocking.
if(job.isBlocking()) {
synchronized(scheduler) {
scheduler.notify();
}
}
}
} // infinite loop
}
}
/**
* This is Sun Microsystems' reference implementation of the
* <code>com.lightcrafts.mediax.jai.TileScheduler</code> interface. It provides
* a mechanism for scheduling tile calculation. Multi-threading is
* used whenever possible.
*
* @see com.lightcrafts.mediax.jai.TileScheduler
*/
public final class LCTileScheduler implements TileScheduler {
/** The default number of worker threads. */
private static final int NUM_THREADS_DEFAULT = 2;
/** The default number of worker threads. */
private static final int NUM_PREFETCH_THREADS_DEFAULT = 1;
/** The instance counter. It is used to compose the name of the
* ThreadGroup.
*/
private static int numInstances = 0;
/** The tile schedular name. It is used to compose the name of the
* ThreadGroup.
*/
private static String name = "LCTileSchedulerName";
/** The root ThreadGroup, which holds two sub-groups:
* the ThreadGroup for the standard jobs, and the ThreadGroup for
* the prefetch jobs.
*/
private ThreadGroup rootGroup;
/** The ThreadGroup contains all the standard jobs. */
private ThreadGroup standardGroup;
/** The ThreadGroup contains all the prefetch jobs. */
private ThreadGroup prefetchGroup;
/** The worker thread parallelism. */
private int parallelism = NUM_THREADS_DEFAULT;
/** The processing thread parallelism. */
private int prefetchParallelism = NUM_PREFETCH_THREADS_DEFAULT;
/** The worker thread priority. */
private int priority = Thread.NORM_PRIORITY;
/** The prefetch thread priority. */
private int prefetchPriority = Thread.MIN_PRIORITY;
/** A job queue for tiles waiting to be computed by the worker threads. */
private LinkedList queue = null;
/** A job queue for tiles waiting to be computed by prefetch workers. */
private LinkedList prefetchQueue = null;
/**
* A <code>Vector</code> of <code>WorkerThread</code>s that persist
* to do the actual tile computation for normal processing. This
* variable should never be set to <code>null</code>.
*/
private Vector workers = new Vector();
/**
* A <code>Vector</code> of <code>WorkerThread</code>s that persist
* to do the actual tile computation for prefetch processing. This
* variable should never be set to <code>null</code>.
*/
private Vector prefetchWorkers = new Vector();
/**
* The effective number of worker threads; may differ from
* <code>workers.size()</code> due to latency. This value should
* equal the size of <code>workers</code> less the number of
* <code>WorkerThread.TERMINATE</code>s in <code>queue</code>.
*/
private int numWorkerThreads = 0;
/**
* The effective number of prefetch worker threads; may differ from
* <code>prefetchWorkers.size()</code> due to latency. This value should
* equal the size of <code>prefetchWorkers</code> less the number of
* <code>WorkerThread.TERMINATE</code>s in <code>prefetchQueue</code>.
*/
private int numPrefetchThreads = 0;
/**
* <code>Map</code> of tiles currently being computed. The key is
* created from the image and tile indices by the <code>tileKey()</code>
* method. Each key is mapped to an <code>Object[1]</code> which may
* contain <code>null</code>, a <code>Raster</code>, or an indefinite
* <code>Object</code> which represent, respectively, that the tile is
* being computed, the tile itself, and that the tile computation failed.
*/
private Map tilesInProgress = new HashMap();
/**
* <code>Map</code> of tiles to <code>Request</code>s. The key is
* created from the image and tile indices by the <code>tileKey()</code>
* method. Each key is mapped to a <code>List</code> of
* <code>Request</code> for the tile. If there is no mapping for the
* tile, then there are no current requests. If a mapping exists, it
* should always be non-null and the <code>List</code> value should
* have size of at least unity.
*/
Map tileRequests = new HashMap();
/**
* <code>Map</code> of tiles to <code>Job</code>s.The key is
* created from the image and tile indices by the <code>tileKey()</code>
* method. Each key is mapped to a <code>Job</code> for the tile. If
* there is no mapping for the tile, then there is no enqueued
* <code>RequestJob</code>.
*/
Map tileJobs = new HashMap();
/** The name of this instance. */
private String nameOfThisInstance;
/**
* Returns the hash table "key" as a <code>Object</code> for this
* tile. For <code>PlanarImage</code> and
* <code>SerializableRenderedImage</code>, the key is generated by
* the method <code>ImageUtilgenerateID(Object) </code>. For the
* other cases, a <code>Long</code> object is returned.
* The upper 32 bits for this <code>Long</code> is the tile owner's
* hash code, and the lower 32 bits is the tile's index.
*/
static Object tileKey(PlanarImage owner, int tileX, int tileY) {
long idx = tileY * (long)owner.getNumXTiles() + tileX;
BigInteger imageID = (BigInteger)owner.getImageID();
byte[] buf = imageID.toByteArray();
int length = buf.length;
byte[] buf1 = new byte[length + 8];
System.arraycopy(buf, 0, buf1, 0, length);
for (int i = 7, j = 0; i >= 0; i--, j += 8)
buf1[length++] = (byte)(idx >> j);
return new BigInteger(buf1);
}
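// Illustrative sketch (not part of the original code): for a hypothetical
// owner whose getNumXTiles() == 4 and whose image ID encodes to the single
// byte 0x0A, the key for tile (tileX=2, tileY=3) uses idx = 3 * 4 + 2 = 14
// and becomes the BigInteger over the bytes
// { 0x0A, 0x0E, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, i.e. the image
// ID followed by the eight bytes of idx, least-significant byte first.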
/**
* Returns all <code>TileComputationListener</code>s for the supplied
* <code>List</code> of <code>Request</code>s.
*/
static Set getListeners(List reqList) {
// Extract the Set of all TileComputationListeners.
int numReq = reqList.size();
HashSet listeners = null;
for(int j = 0; j < numReq; j++) {
Request req = (Request)reqList.get(j);
// XXX Do not need empty check in next line.
if(req.listeners != null && !req.listeners.isEmpty()) {
if(listeners == null) {
listeners = new HashSet();
}
listeners.addAll(req.listeners);
}
}
return listeners;
}
/**
* Converts the supplied <code>Exception</code>'s stack trace
* to a <code>String</code>.
*/
private static String getStackTraceString(Throwable e) {
ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
PrintStream printStream = new PrintStream(byteStream);
e.printStackTrace(printStream);
printStream.flush();
String stackTraceString = byteStream.toString();
printStream.close();
return stackTraceString;
}
/**
* Constructor.
*
* @param parallelism The number of worker threads to do tile computation.
* If this number is less than 1, no multi-threading is used.
* @param priority The priority of worker threads.
* @param prefetchParallelism The number of threads to do prefetching.
* If this number is less than 1, no multi-threading is used.
* @param prefetchPriority The priority of prefetch threads.
*/
public LCTileScheduler(int parallelism, int priority,
int prefetchParallelism, int prefetchPriority) {
// Create queues and set parallelism and priority to default values.
this();
setParallelism(parallelism);
setPriority(priority);
setPrefetchParallelism(prefetchParallelism);
setPrefetchPriority(prefetchPriority);
}
/**
* Constructor. Processing and prefetch queues are created and all
* parallelism and priority values are set to default values.
*/
public LCTileScheduler() {
queue = new LinkedList();
prefetchQueue = new LinkedList();
nameOfThisInstance = name + numInstances;
rootGroup = new ThreadGroup(nameOfThisInstance);
rootGroup.setDaemon(true);
standardGroup = new ThreadGroup(rootGroup,
nameOfThisInstance + "Standard");
standardGroup.setDaemon(true);
prefetchGroup = new ThreadGroup(rootGroup,
nameOfThisInstance + "Prefetch");
prefetchGroup.setDaemon(true);
numInstances++;
}
/**
* Tile computation. Does the actual calls to getTile().
*/
Exception compute(PlanarImage owner, Point[] tileIndices,
Raster[] tiles, int offset, int numTiles,
Request request) {
Exception exception = null;
int j = offset;
if(request == null || request.listeners == null) {
for(int i = 0; i < numTiles; i++, j++) {
Point p = tileIndices[j];
try {
tiles[j] = owner.getTile(p.x, p.y);
} catch (Exception e) {
exception = e;
// Abort the remaining tiles in the job.
break;
}
}
} else { // listeners present
Request[] reqs = new Request[] {request};
for(int i = 0; i < numTiles; i++, j++) {
Point p = tileIndices[j];
// Update tile status to "processing".
Integer tileStatus =
new Integer(TileRequest.TILE_STATUS_PROCESSING);
request.tileStatus.put(p, tileStatus);
try {
tiles[j] = owner.getTile(p.x, p.y);
Iterator iter = request.listeners.iterator();
while(iter.hasNext()) {
// Update tile status to "computed".
tileStatus =
new Integer(TileRequest.TILE_STATUS_COMPUTED);
request.tileStatus.put(p, tileStatus);
TileComputationListener listener =
(TileComputationListener)iter.next();
listener.tileComputed(this,
reqs,
owner,
p.x, p.y,
tiles[j]);
}
} catch (Exception e) {
exception = e;
// Abort the remaining tiles in the job.
break;
}
/* XXX
try {
List reqList;
synchronized(tileRequests) {
Long tileID = tileKey(owner, p.x, p.y);
reqList = (List)tileRequests.remove(tileID);
tileJobs.remove(tileID);
}
if(reqList != null) {
tiles[j] = owner.getTile(p.x, p.y);
TileRequest[] reqs =
(TileRequest[])reqList.toArray(new TileRequest[0]);
Set listeners = getListeners(reqList);
if(listeners != null) {
Iterator iter = listeners.iterator();
while(iter.hasNext()) {
TileComputationListener listener =
(TileComputationListener)iter.next();
listener.tileComputed(this,
reqs,
owner,
p.x, p.y,
tiles[j]);
}
}
}
} catch (Exception e) {
exception = e;
// Abort the remaining tiles in the job.
break;
}
*/
}
}
// If an exception occurred, notify listeners that all remaining
// tiles in the job have failed.
if(exception != null && request != null && request.listeners != null) {
int lastOffset = j;
int numFailed = numTiles - (lastOffset - offset);
// Mark all tiles starting with the one which generated the
// Exception as "failed".
for(int i = 0, k = lastOffset; i < numFailed; i++) {
Integer tileStatus =
new Integer(TileRequest.TILE_STATUS_FAILED);
request.tileStatus.put(tileIndices[k++], tileStatus);
}
// Notify listeners.
Request[] reqs = new Request[] {request};
for(int i = 0, k = lastOffset; i < numFailed; i++) {
Point p = tileIndices[k++];
Iterator iter = request.listeners.iterator();
while(iter.hasNext()) {
TileComputationListener listener =
(TileComputationListener)iter.next();
listener.tileComputationFailure(this, reqs,
owner, p.x, p.y,
exception);
}
}
}
/* XXX
if(exception != null) {
int numFailed = numTiles - (j - offset);
for(int i = 0; i < numFailed; i++) {
Point p = tileIndices[j++];
Long tileID = tileKey(owner, p.x, p.y);
List reqList = (List)tileRequests.remove(tileID);
tileJobs.remove(tileID);
if(reqList != null) {
TileRequest[] reqs =
(TileRequest[])reqList.toArray(new TileRequest[0]);
Set listeners = getListeners(reqList);
if(listeners != null) {
Iterator iter = listeners.iterator();
while(iter.hasNext()) {
TileComputationListener listener =
(TileComputationListener)iter.next();
listener.tileComputationFailure(this, reqs,
owner, p.x, p.y,
exception);
}
}
}
}
}
*/
return exception;
}
/**
* Schedules a single tile for computation.
*
* @param owner The image the tiles belong to.
* @param tileX The tile's X index.
* @param tileY The tile's Y index.
*
* @exception IllegalArgumentException if <code>owner</code> is
* <code>null</code>.
*
* @return The computed tile
*/
//
// This method blocks on the 'tilesInProgress' Map to avoid simultaneous
// computation of the same tile in two or more different threads. The idea
// is to release the resources of all but one thread so that the computation
// occurs more quickly. The synchronization variable is an Object[] of length
// unity. The computed tile is passed from the computing thread to the
// waiting threads via the contents of this Object[]. Thus this method does
// not depend on the TileCache to transfer the data.
//
public Raster scheduleTile(OpImage owner,
int tileX,
int tileY) {
if (owner == null) {
throw new IllegalArgumentException("Null owner");
}
// Eventual tile to be returned.
Raster tile = null;
// Get the tile's unique ID.
Object tileID = tileKey(owner, tileX, tileY);
// Set the computation flag and initialize or retrieve the tile cache.
boolean computeTile = false;
Object[] cache = null;
synchronized(tilesInProgress) {
if(computeTile = !tilesInProgress.containsKey(tileID)) {
// Computing: add tile ID to the map.
tilesInProgress.put(tileID, cache = new Object[1]);
} else {
// Waiting: get tile cache from the Map.
cache = (Object[])tilesInProgress.get(tileID);
}
}
if(computeTile) {
try {
try {
// Attempt to compute the tile.
tile = owner.computeTile(tileX, tileY);
} catch (OutOfMemoryError e) {
// Free some space in cache
TileCache tileCache = owner.getTileCache();
if(tileCache != null) {
tileCache.removeTiles(owner);
}
try {
// Re-attempt to compute the tile.
tile = owner.computeTile(tileX, tileY);
} catch (OutOfMemoryError e1) {
// Empty the cache
if(tileCache != null) {
tileCache.flush();
}
}
// Re-attempt to compute the tile.
tile = owner.computeTile(tileX, tileY);
}
} catch(Throwable e) {
// Re-throw the Error or Exception.
if(e instanceof Error) {
throw (Error)e;
} else if(e instanceof RuntimeException) {
sendExceptionToListener("RuntimeException", e);
// throw (RuntimeException)e;
} else {
String message = "Tile Scheduler Exception";
sendExceptionToListener(message,
new ImagingException(message, e));
/*
throw new RuntimeException(e.getMessage()+"\n"+
getStackTraceString(e));
*/
}
} finally {
synchronized(cache) {
// Always set the cached tile to a non-null value.
cache[0] = tile != null ? tile : new Object();
// Notify the thread(s).
cache.notifyAll();
synchronized(tilesInProgress) {
// Remove the tile ID from the Map.
tilesInProgress.remove(tileID);
}
}
}
} else {
synchronized(cache) {
// Check the cache: a null value indicates computation is
// still in progress.
if(cache[0] == null) {
// Wait for the computation to complete.
try {
cache.wait(); // XXX Should there be a timeout?
} catch(Exception e) {
// XXX What response here?
}
}
// Set the result only if cache contains a Raster.
if(cache[0] instanceof Raster) {
tile = (Raster)cache[0];
} else {
throw new RuntimeException("Not a Raster instance?");
}
}
}
return tile;
}
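// Illustrative usage sketch (assumption, not part of the original file):
//
//   LCTileScheduler scheduler =
//       new LCTileScheduler(2, Thread.NORM_PRIORITY, 1, Thread.MIN_PRIORITY);
//   Raster tile = scheduler.scheduleTile(someOpImage, 0, 0);
//
// Two threads requesting the same (image, tileX, tileY) share one
// computation: the first computes the tile while the second waits on the
// shared Object[1] stored in 'tilesInProgress' and receives the same Raster.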
/**
* General purpose method for job creation and queueing. Note that
* the returned value should be ignored if the <code>listeners</code>
* parameter is non-<code>null</code>.
*
* @param owner The image for which tile computation jobs will be queued.
* @param tileIndices The indices of the tiles to be computed.
* @param isBlocking Whether to wait until all queued tiles are computed.
* @param isPrefetch Whether the operation is a prefetch.
* @param listeners The <code>TileComputationListener</code>s to notify
* of tile processing. May be <code>null</code>.
*
* @return The computed tiles. This value is meaningless if
* <code>listeners</code> is non-<code>null</code>.
*/
// The allowable arguments are constrained as follows:
// A) owner and tileIndices non-null.
// B) (isBlocking,isPrefetch) in {(true,false),(false,false),(false,true)}
// C) listeners != null <=> (isBlocking,isPrefetch) == (false,false)
// The returned value is one of:
// Raster[] <=> (isBlocking,isPrefetch) == (true,false)
// Request <=> (isBlocking,isPrefetch) == (false,false)
// Raster[] (ignored) <=> (isBlocking,isPrefetch) == (false,true)
private Object scheduleJob(PlanarImage owner,
Point[] tileIndices,
boolean isBlocking,
boolean isPrefetch,
TileComputationListener[] listeners) {
if(owner == null || tileIndices == null) {
// null parameters
throw new IllegalArgumentException(); // coding error - no message
} else if((isBlocking || isPrefetch) && listeners != null) {
// listeners for blocking or prefetch job
throw new IllegalArgumentException(); // coding error - no message
} else if(isBlocking && isPrefetch) {
throw new IllegalArgumentException(); // coding error - no message
}
int numTiles = tileIndices.length;
Raster[] tiles = new Raster[numTiles];
Object returnValue = tiles;
int numThreads = 0;
Job[] jobs = null;
int numJobs = 0;
synchronized(getWorkers(isPrefetch)) {
numThreads = getNumThreads(isPrefetch);
if(numThreads > 0) { // worker threads exist
if(numTiles <= numThreads || // no more tiles than threads
(!isBlocking && !isPrefetch)) { // non-blocking, non-prefetch
jobs = new Job[numTiles];
if(!isBlocking && !isPrefetch) {
Request request =
new Request(this, owner, tileIndices, listeners);
// Override return value.
returnValue = request;
// Queue all tiles as single-tile jobs.
while(numJobs < numTiles) {
Point p = tileIndices[numJobs];
Object tileID = tileKey(owner, p.x, p.y);
synchronized(tileRequests) {
List reqList = null;
if(tileRequests.containsKey(tileID)) {
// This tile is already queued in a
// non-blocking, non-prefetch job.
reqList = (List)tileRequests.get(tileID);
reqList.add(request);
numTiles--;
} else {
// This tile has not yet been queued.
reqList = new ArrayList();
reqList.add(request);
tileRequests.put(tileID, reqList);
jobs[numJobs] =
new RequestJob(this,
owner, p.x, p.y,
tiles, numJobs);
tileJobs.put(tileID, jobs[numJobs]);
addJob(jobs[numJobs++], false);
}
}
}
} else { // numTiles <= numThreads
while(numJobs < numTiles) {
jobs[numJobs] = new TileJob(this,
isBlocking,
owner,
tileIndices,
tiles,
numJobs,
1);
addJob(jobs[numJobs++], isPrefetch);
}
}
} else { // more tiles than worker threads
// Set the fraction of unqueued tiles to be processed by
// each worker thread.
float frac = 1.0F/(2.0F*numThreads);
// Set the minimum number of tiles each thread may process.
// If there is only one thread this will equal the total
// number of tiles.
int minTilesPerThread = numThreads == 1 ? numTiles :
Math.min(Math.max(1, (int)(frac*numTiles/2.0F + 0.5F)),
numTiles);
// Allocate the maximum possible number of multi-tile jobs.
// This will be larger than the actual number of jobs but
// a more precise calculation is not possible and a dynamic
// storage object such as a Collection would not be useful
// since as calculated maxNumJobs = 4*numThreads if the
// preceding values of "frac" and "minTilesPerThread" are
// 1/(2*numThreads) and frac*numTiles/2, respectively.
int maxNumJobs = numThreads == 1 ? 1 :
(int)((float)numTiles/(float)minTilesPerThread+0.5F);
jobs = new TileJob[maxNumJobs];
// Set the number of enqueued tiles and the number left.
int numTilesQueued = 0;
int numTilesLeft = numTiles - numTilesQueued;
// Assign a number of tiles to each thread determined by
// the number of remaining tiles, the fraction of remaining
// tiles to be processed and the minimum chunk size.
while(numTilesLeft > 0) {
// Set the number of tiles to the pre-calculated
// fraction of tiles yet to be computed.
int numTilesInThread = (int)(frac*numTilesLeft + 0.5F);
// Ensure that the number to be processed is at
// least the minimum chunk size.
if(numTilesInThread < minTilesPerThread) {
numTilesInThread = minTilesPerThread;
}
// Clamp number of tiles in thread to number unqueued.
if(numTilesInThread > numTilesLeft) {
numTilesInThread = numTilesLeft;
}
// Decrement the count of remaining tiles. Note that
// this value will be non-negative due to the clamping
// above.
numTilesLeft -= numTilesInThread;
// If the number left is smaller than the minimum chunk
// size then process these tiles in the current job.
if(numTilesLeft < minTilesPerThread) {
numTilesInThread += numTilesLeft;
numTilesLeft = 0;
}
// Create a job to process the number of tiles needed.
jobs[numJobs] = new TileJob(this,
isBlocking,
owner,
tileIndices,
tiles,
numTilesQueued,
numTilesInThread);
// Queue the job and increment the job count.
addJob(jobs[numJobs++], isPrefetch);
// Increment the count of tiles queued.
numTilesQueued += numTilesInThread;
}
} // SingleTile vs. MultiTile Jobs
} // numThreads > 0
} // end synchronized block
if(numThreads != 0) {
// If blocking, wait until all tiles have been computed.
// There is no 'else' block for non-blocking as in that
// case we just want to continue.
if(isBlocking) {
LinkedList jobQueue = getQueue(isPrefetch);
for (int i = 0; i < numJobs; i++) {
synchronized(this) {
while (jobs[i].notDone()) {
try {
wait();
} catch(InterruptedException ie) {
// Ignore: should never happen.
}
}
}
// XXX: should we re-throw the exception or
// should we reschedule this job ?? krishnag
Exception e = jobs[i].getException();
if (e != null) {
// Throw a RuntimeException with the Exception's
// message concatenated with the stack trace.
String message = "Exception while scheduling tiles: ";
sendExceptionToListener(message,
new ImagingException(message, e));
/*
throw new RuntimeException(e.getMessage()+"\n"+
getStackTraceString(e));
*/
}
}
}
} else { // numThreads == 0
Request request = null;
if(!isBlocking && !isPrefetch) {
request = new Request(this, owner, tileIndices, listeners);
returnValue = request;
}
// no workers; sequentially compute tiles in main thread
Exception e = compute(owner, tileIndices, tiles, 0, numTiles,
request);
// Throw a RuntimeException with the Exception's
// message concatenated with the stack trace.
if(e != null) {
String message = "Exception while scheduling tiles: ";
sendExceptionToListener(message,
new ImagingException(message, e));
/*
throw new RuntimeException(e.getMessage()+"\n"+
getStackTraceString(e));
*/
}
}
return returnValue;
}
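// Worked example of the multi-tile chunking above (illustrative numbers):
// with numThreads = 2 and numTiles = 20, frac = 1/(2*2) = 0.25 and
// minTilesPerThread = round(0.25 * 20 / 2) = 3, so the loop queues
// TileJobs covering 5, 4, 3, 3 and 5 tiles respectively (the final job
// absorbs the remainder because fewer than minTilesPerThread tiles are left).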
/**
* Schedules multiple tiles of an image for computation.
*
* @param owner The image the tiles belong to.
* @param tileIndices An array of tile X and Y indices.
*
* @return An array of computed tiles.
*/
public Raster[] scheduleTiles(OpImage owner,
Point tileIndices[]) {
if (owner == null || tileIndices == null) {
throw new IllegalArgumentException("Null owner or TileIndices");
}
return (Raster[])scheduleJob(owner, tileIndices, true, false, null);
}
/**
* Schedule a list of tiles for computation. The supplied listeners
* will be notified after each tile has been computed. This
* method ideally should be non-blocking. If the <code>TileScheduler</code>
* implementation uses multithreading, it is at the discretion of the
* implementation which thread invokes the
* <code>TileComputationListener</code> methods.
*/
public TileRequest scheduleTiles(PlanarImage target, Point[] tileIndices,
TileComputationListener[] tileListeners) {
if (target == null || tileIndices == null) {
throw new IllegalArgumentException("Null owner or TileIndices");
}
return (TileRequest)scheduleJob(target, tileIndices, false, false,
tileListeners);
}
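// Illustrative non-blocking usage (sketch; the listener body is an assumption):
//
//   TileComputationListener tcl = new TileComputationListener() {
//       public void tileComputed(Object source, TileRequest[] reqs,
//                                PlanarImage img, int tx, int ty, Raster tile) {
//           // consume the freshly computed tile asynchronously
//       }
//       public void tileCancelled(Object source, TileRequest[] reqs,
//                                 PlanarImage img, int tx, int ty) { }
//       public void tileComputationFailure(Object source, TileRequest[] reqs,
//                                          PlanarImage img, int tx, int ty,
//                                          Throwable cause) { }
//   };
//   TileRequest req =
//       scheduler.scheduleTiles(image, indices,
//                               new TileComputationListener[] { tcl });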
/**
* Issues an advisory cancellation request to the
* <code>TileScheduler</code> stating that the indicated tiles of the
* specified image should not be processed. The handling of this request
* is at the discretion of the scheduler which may cancel tile processing
* in progress and remove tiles from its internal queue, remove tiles from
* the queue but not terminate current processing, or simply do nothing.
*
* <p> In the Sun Microsystems reference implementation of
* <code>TileScheduler</code> the second tile cancellation option is
* implemented, i.e., tiles are removed from the internal queue but
* computation already in progress is not terminated. If there is at
* least one worker thread this method should be non-blocking. Any tiles
* allowed to complete computation subsequent to this call are complete
* and will be treated as if they had not been cancelled, e.g., with
* respect to caching, notification of registered listeners, etc.
* Furthermore, cancelling a tile request in no way invalidates the tile
* as a candidate for future recomputation.
*/
public void cancelTiles(TileRequest request, Point[] tileIndices) {
if(request == null) {
throw new IllegalArgumentException("Null TileRequest");
}
Request req = (Request)request;
synchronized(tileRequests) {
// Save the list of all tile indices in this request.
List reqIndexList = req.indices;
// Initialize the set of tile indices to cancel.
Point[] indices;
if(tileIndices != null && tileIndices.length > 0) {
// Create a modifiable List from the supplied indices.
// (Arrays.asList() alone yields a fixed-size list, so the
// retainAll() call below could throw UnsupportedOperationException.)
List tileIndexList = new ArrayList(Arrays.asList(tileIndices));
// Retain only indices which were actually in the request.
tileIndexList.retainAll(reqIndexList);
indices = (Point[])tileIndexList.toArray(new Point[0]);
} else {
indices = (Point[])reqIndexList.toArray(new Point[0]);
}
// Cache the count.
int numTiles = indices.length;
// Cache status value.
Integer tileStatus = new Integer(TileRequest.TILE_STATUS_CANCELLED);
// Loop over tile indices to be cancelled.
for(int i = 0; i < numTiles; i++) {
Point p = indices[i];
// Get the tile's ID.
Object tileID = tileKey(req.image, p.x, p.y);
// Get the list of requests for this tile.
List reqList = (List)tileRequests.get(tileID);
// If there are none, proceed to next index.
if(reqList == null) {
continue;
}
// Remove this Request from the Request List for this tile.
reqList.remove(req);
// If the request list is now empty, dequeue the job and
// remove the tile from the hashes.
if(reqList.isEmpty()) {
synchronized(queue) {
Object job = tileJobs.remove(tileID);
if(job != null) {
queue.remove(job);
}
}
tileRequests.remove(tileID);
}
// Update tile status to "cancelled".
req.tileStatus.put(p, tileStatus);
// Notify any listeners.
if(req.listeners != null) {
TileRequest[] reqArray = new TileRequest[] {req};
Iterator iter = req.listeners.iterator();
while(iter.hasNext()) {
TileComputationListener listener =
(TileComputationListener)iter.next();
listener.tileCancelled(this, reqArray,
req.image, p.x, p.y);
}
}
}
}
}
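// Illustrative sketch of cancelling part of a pending request
// (variable names are assumptions):
//
//   TileRequest req = scheduler.scheduleTiles(image, indices, listeners);
//   scheduler.cancelTiles(req, new Point[] { indices[0] }); // drop one tile
//   scheduler.cancelTiles(req, null);                       // drop the rest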
/**
* Prefetches a list of tiles of an image.
*
* @param owner The image the tiles belong to.
* @param tileIndices An array of tile X and Y indices.
*/
public void prefetchTiles(PlanarImage owner,
Point[] tileIndices) {
if (owner == null || tileIndices == null) {
throw new IllegalArgumentException("Null owner or TileIndices");
}
scheduleJob(owner, tileIndices, false, true, null);
}
/**
* Suggests to the scheduler the degree of parallelism to use in
* processing invocations of <code>scheduleTiles()</code>. For
* example, this might set the number of threads to spawn. It is
* legal to implement this method as a no-op.
*
* <p> In the Sun Microsystems reference implementation of TileScheduler
* this method sets the number of worker threads actually used for tile
* computation. Ideally this number should equal the number of processors
* actually available on the system. It is the responsibility of the
* application to set this value as the number of processors is not
* available via the virtual machine. A parallelism value of zero
* indicates that all tile computation will be effected in the primary
* thread. A parallelism value of <i>N</i> indicates that there will be
* <i>N</i> worker threads in addition to the primary scheduler thread.
* In JAI the parallelism defaults to a value of 2 unless explicitly set
* by the application.
*
* @param parallelism The suggested degree of parallelism.
* @throws IllegalArgumentException if <code>parallelism</code>
* is negative.
*/
public void setParallelism(int parallelism) {
if (parallelism < 0) {
throw new IllegalArgumentException("Negative Parallelism?");
}
this.parallelism = parallelism;
}
/**
* Returns the degree of parallelism of the scheduler.
*/
public int getParallelism() {
return parallelism;
}
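// Illustrative configuration sketch (values are assumptions):
//
//   int cpus = Runtime.getRuntime().availableProcessors();
//   scheduler.setParallelism(cpus);      // worker threads for scheduleTiles()
//   scheduler.setPrefetchParallelism(1); // worker threads for prefetchTiles()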
/**
* Identical to <code>setParallelism()</code> but applies only to
* <code>prefetchTiles()</code>.
*/
public void setPrefetchParallelism(int parallelism) {
if (parallelism < 0) {
throw new IllegalArgumentException("Negative Parallelism?");
}
prefetchParallelism = parallelism;
}
/**
* Identical to <code>getParallelism()</code> but applies only to
* <code>prefetchTiles()</code>.
*/
public int getPrefetchParallelism() {
return prefetchParallelism;
}
/**
* Suggests to the scheduler the priority to assign to processing
* effected by <code>scheduleTiles()</code>. For example, this might
* set thread priority. Values outside of the accepted priority range
* will be clamped to the nearest extremum. An implementation may clamp
* the prefetch priority to less than the scheduling priority. It is
* legal to implement this method as a no-op.
*
* <p> In the Sun Microsystems reference implementation of TileScheduler
* this method sets the priority of the worker threads used for tile
* computation. Its initial value is <code>Thread.NORM_PRIORITY</code>.
*
* @param priority The suggested priority.
*/
public void setPriority(int priority) {
this.priority = Math.max(Math.min(priority, Thread.MAX_PRIORITY),
Thread.MIN_PRIORITY);
}
/**
* Returns the priority of <code>scheduleTiles()</code> processing.
*/
public int getPriority() {
return priority;
}
/**
* Identical to <code>setPriority()</code> but applies only to
* <code>prefetchTiles()</code>.
*
* <p> In the Sun Microsystems reference implementation of
* <code>TileScheduler</code>, this method sets the priority of any threads
* spawned to prefetch tiles. Its initial value is
* <code>Thread.MIN_PRIORITY</code>.
*/
public void setPrefetchPriority(int priority) {
prefetchPriority = Math.max(Math.min(priority, Thread.MAX_PRIORITY),
Thread.MIN_PRIORITY);
}
/**
* Identical to <code>getPriority()</code> but applies only to
* <code>prefetchTiles()</code>.
*/
public int getPrefetchPriority() {
return prefetchPriority;
}
/** Recreate the <code>ThreadGroup</code>s and <code>WorkerThread</code>s.
* This is needed in the applet case: the Java plugin outlives the applet,
* so JAI and this LCTileScheduler survive as well, but their
* <code>ThreadGroup</code>s are destroyed. The old workers must therefore
* be terminated and new <code>ThreadGroup</code>s and workers created.
*/
// private synchronized void createThreadGroup(boolean isPrefetch) {
private void createThreadGroup(boolean isPrefetch) {
if (rootGroup == null || rootGroup.isDestroyed()) {
rootGroup = new ThreadGroup(nameOfThisInstance);
rootGroup.setDaemon(true);
}
if (isPrefetch &&
(prefetchGroup == null || prefetchGroup.isDestroyed())) {
prefetchGroup = new ThreadGroup(rootGroup,
nameOfThisInstance + "Prefetch");
prefetchGroup.setDaemon(true);
}
if (!isPrefetch &&
(standardGroup == null || standardGroup.isDestroyed())) {
standardGroup = new ThreadGroup(rootGroup,
nameOfThisInstance + "Standard");
standardGroup.setDaemon(true);
}
Vector thr = getWorkers(isPrefetch);
int size = thr.size();
for(int i = size - 1; i >= 0; i--) {
Thread t = (Thread)thr.get(i);
if (!t.isAlive())
thr.remove(t);
}
if (isPrefetch)
numPrefetchThreads = thr.size();
else
numWorkerThreads = thr.size();
}
/**
* Returns the effective number of threads of the specified type.
* This method also updates the number and priority of threads of
* the specified type according to the global settings. This method
* may add <code>WorkerThread.TERMINATE</code>s to the appropriate
* queue if there are too many effective threads.
*/
private int getNumThreads(boolean isPrefetch) {
createThreadGroup(isPrefetch);
// Local variables.
Vector thr = getWorkers(isPrefetch);
int nthr;
int prll;
int prty;
// Set local variables depending on the thread type.
if(isPrefetch) {
nthr = numPrefetchThreads;
prll = prefetchParallelism;
prty = prefetchPriority;
} else {
nthr = numWorkerThreads;
prll = parallelism;
prty = priority;
}
// Update priority if it has changed.
if(nthr > 0 &&
((Thread)thr.get(0)).getPriority() != prty) {
int size = thr.size();
for(int i = 0; i < size; i++) {
Thread t = (Thread)thr.get(i);
if (t != null && t.getThreadGroup() != null) {
t.setPriority(prty);
}
}
}
if(nthr < prll) {
// Not enough processing threads.
// Add more threads at current priority.
while(nthr < prll) {
Thread t =
new WorkerThread(isPrefetch ? prefetchGroup : standardGroup,
this, isPrefetch);
t.setPriority(prty);
thr.add(t);
nthr++;
}
} else {
// Too many processing threads: queue WorkerThread.TERMINATEs.
// WorkerThread will remove itself later from the appropriate
// Vector.
while(nthr > prll) {
addJob(WorkerThread.TERMINATE, isPrefetch);
nthr--;
}
}
// Update the number of effective threads.
if(isPrefetch) {
numPrefetchThreads = nthr;
} else {
numWorkerThreads = nthr;
}
return nthr;
}
/** Returns the appropriate worker list. */
Vector getWorkers(boolean isPrefetch) {
return isPrefetch ? workers : prefetchWorkers;
}
/** Returns the appropriate queue. */
LinkedList getQueue(boolean isPrefetch) {
return isPrefetch ? prefetchQueue : queue;
}
/** Append a job to the appropriate queue. */
private void addJob(Object job, boolean isPrefetch) {
if(job == null ||
(job != WorkerThread.TERMINATE && !(job instanceof Job))) {
// Programming error: deliberately no message.
throw new IllegalArgumentException();
}
LinkedList jobQueue;
synchronized(jobQueue = getQueue(isPrefetch)) {
if(isPrefetch ||
jobQueue.isEmpty() ||
job instanceof RequestJob) {
// Append job to queue.
jobQueue.addLast(job);
} else {
// The queue is non-empty and the job is not a RequestJob:
// insert it after the last TileJob in the queue (or at the
// front if there is none) so it is not queued behind
// pending RequestJobs.
boolean inserted = false;
for(int idx = jobQueue.size() - 1; idx >= 0; idx--) {
if(jobQueue.get(idx) instanceof TileJob) {
jobQueue.add(idx+1, job);
inserted = true;
break;
}
}
if(!inserted) {
jobQueue.addFirst(job);
}
}
jobQueue.notify();
}
}
/** Queue WorkerThread.TERMINATEs to all workers. */
protected void finalize() throws Throwable {
terminateAll(false);
terminateAll(true);
super.finalize();
}
/** Queue WorkerThread.TERMINATEs to all appropriate workers. */
private void terminateAll(boolean isPrefetch) {
synchronized(getWorkers(isPrefetch)) {
int numThreads = isPrefetch ?
numPrefetchThreads : numWorkerThreads;
for(int i = 0; i < numThreads; i++) {
addJob(WorkerThread.TERMINATE, isPrefetch);
if(isPrefetch) {
numPrefetchThreads--;
} else {
numWorkerThreads--;
}
}
}
}
void sendExceptionToListener(String message, Throwable e) {
ImagingListener listener =
ImageUtil.getImagingListener((RenderingHints)null);
listener.errorOccurred(message, e, this, false);
}
}
|
Refactor LCTileScheduler
|
lightcrafts/src/com/lightcrafts/jai/utils/LCTileScheduler.java
|
Refactor LCTileScheduler
|
|
Java
|
bsd-3-clause
|
f7256b0555442528f7b1edc6ef998c8ac361090e
| 0
|
dhis2/dhis2-android-sdk,dhis2/dhis2-android-sdk,dhis2/dhis2-android-sdk
|
/*
* Copyright (c) 2017, University of Oslo
*
* All rights reserved.
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hisp.dhis.android.core.user;
import org.hisp.dhis.android.core.common.ModelBuilder;
import org.hisp.dhis.android.core.organisationunit.OrganisationUnit;
import org.hisp.dhis.android.core.organisationunit.OrganisationUnitModel;
import java.util.Set;
import static org.hisp.dhis.android.core.organisationunit.OrganisationUnitTree.findRoots;
public class UserOrganisationUnitLinkModelBuilder
extends ModelBuilder<OrganisationUnit, UserOrganisationUnitLinkModel> {
private final UserOrganisationUnitLinkModel.Builder builder;
private final User user;
public UserOrganisationUnitLinkModelBuilder(OrganisationUnitModel.Scope scope, User user) {
this.user = user;
this.builder = UserOrganisationUnitLinkModel.builder()
.organisationUnitScope(scope.name())
.user(user.uid());
}
@Override
public UserOrganisationUnitLinkModel buildModel(OrganisationUnit organisationUnit) {
Set<String> rootOrgUnitUids = findRoots(user.organisationUnits());
boolean root = rootOrgUnitUids.contains(organisationUnit.uid());
return builder
.organisationUnit(organisationUnit.uid())
.root(root)
.build();
}
}
|
core/src/main/java/org/hisp/dhis/android/core/user/UserOrganisationUnitLinkModelBuilder.java
|
/*
* Copyright (c) 2017, University of Oslo
*
* All rights reserved.
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hisp.dhis.android.core.user;
import org.hisp.dhis.android.core.common.ModelBuilder;
import org.hisp.dhis.android.core.organisationunit.OrganisationUnit;
import org.hisp.dhis.android.core.organisationunit.OrganisationUnitModel;
public class UserOrganisationUnitLinkModelBuilder
extends ModelBuilder<OrganisationUnit, UserOrganisationUnitLinkModel> {
private final UserOrganisationUnitLinkModel.Builder builder;
public UserOrganisationUnitLinkModelBuilder(OrganisationUnitModel.Scope scope, User user) {
this.builder = UserOrganisationUnitLinkModel.builder()
.organisationUnitScope(scope.name())
.user(user.uid());
}
@Override
public UserOrganisationUnitLinkModel buildModel(OrganisationUnit organisationUnit) {
return builder
.organisationUnit(organisationUnit.uid())
.build();
}
}
|
n-teis-call: adapt UserOrganisationUnitLinkModelBuilder
|
core/src/main/java/org/hisp/dhis/android/core/user/UserOrganisationUnitLinkModelBuilder.java
|
n-teis-call: adapt UserOrganisationUnitLinkModelBuilder
|
|
Java
|
bsd-3-clause
|
cb850fca9ff546be429ecb55a559ca9de9e7ea8b
| 0
|
TheGreenMachine/Zephyr-Java
|
package com.edinarobotics.zephyr.parts;
import edu.wpi.first.wpilibj.Jaguar;
import edu.wpi.first.wpilibj.Relay;
/**
*The wrapper for the shooter components, contains the 2 jaguars driving the shooter
* along with the rotating jaguar and the piston.
*/
public class ShooterComponents {
private Jaguar shooterLeftJaguar;
private Jaguar shooterRightJaguar;
private Jaguar shooterRotator;
private Relay ballLoadPiston;
/*
* Constructs shooterLeftJaguar, shooterRightJaguar, shooterRotator and ballLoadPiston
* with leftJaguar, rightJaguar, rotator and piston respectively.
*/
public ShooterComponents(int leftJaguar, int rightJaguar, int rotator, int piston){
shooterLeftJaguar = new Jaguar(leftJaguar);
shooterRightJaguar = new Jaguar(rightJaguar);
shooterRotator = new Jaguar(rotator);
ballLoadPiston = new Relay(piston);
}
/*
* Sets shooterLeftJaguar to -speed and shooterRightJaguar to speed.
*/
public void setSpeed(double speed){
shooterLeftJaguar.set(-speed);
shooterRightJaguar.set(speed);
}
/*
* Sets the rotator to speed
*/
public void rotate(double speed){
shooterRotator.set(speed);
}
/*
* Sets the piston up if position is true, else it lowers it.
*/
public void firePiston(boolean position){
ballLoadPiston.set((position ? Relay.Value.kForward :Relay.Value.kReverse));
}
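/*
* Illustrative usage sketch (channel numbers are assumptions):
* ShooterComponents shooter = new ShooterComponents(1, 2, 3, 4);
* shooter.setSpeed(0.75); // spin up the launch wheels in opposite directions
* shooter.firePiston(true); // kForward raises the ball-loading piston
*/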
}
|
src/com/edinarobotics/zephyr/parts/ShooterComponents.java
|
package com.edinarobotics.zephyr.parts;
import edu.wpi.first.wpilibj.Jaguar;
import edu.wpi.first.wpilibj.Relay;
/**
*The wrapper for the shooter components, contains the 2 jaguars driving the shooter
* along with the rotating jaguar and the piston.
*/
public class ShooterComponents {
private Jaguar shooterLeftJaguar;
private Jaguar shooterRightJaguar;
private Jaguar shooterRotator;
private Relay ballLoadPiston;
/*
* Constructs shooterLeftJaguar, shooterRightJaguar, shooterRotator and ballLoadPiston
* with leftJaguar, rightJaguar, rotator and piston respectively.
*/
public ShooterComponents(int leftJaguar, int rightJaguar, int rotator, int piston){
shooterLeftJaguar = new Jaguar(leftJaguar);
shooterRightJaguar = new Jaguar(rightJaguar);
shooterRotator = new Jaguar(rotator);
ballLoadPiston = new Relay(piston);
}
/*
* Sets shooterLeftJaguar to -speed and shooterRightJaguar to speed.
*/
public void setSpeed(double speed){
shooterLeftJaguar.set(-speed);
shooterRightJaguar.set(speed);
}
/*
* Sets the rotator to speed
*/
public void rotate(double speed){
shooterRotator.set(speed);
}
/*
* Sets the piston up if position is true, else it lowers it.
*/
public void firePiston(boolean position){
ballLoadPiston.set((position ? Relay.Value.kReverse :Relay.Value.kForward));
}
}
|
Fix relay values for ball loader piston in ShooterComponents.
|
src/com/edinarobotics/zephyr/parts/ShooterComponents.java
|
Fix relay values for ball loader piston in ShooterComponents.
|
|
Java
|
bsd-3-clause
|
06cd5380bc390edd6b879da2663598aca2ff8613
| 0
|
vivo-project/Vitro,vivo-project/Vitro,vivo-project/Vitro,vivo-project/Vitro
|
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.controller.freemarker;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.Map;
import java.util.Map.Entry;
import javax.servlet.RequestDispatcher;
import javax.servlet.ServletException;
import javax.servlet.UnavailableException;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.fileupload.FileItem;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import edu.cornell.mannlib.vedit.beans.LoginStatusBean;
import edu.cornell.mannlib.vitro.webapp.ConfigurationProperties;
import edu.cornell.mannlib.vitro.webapp.auth.AuthorizationHelper;
import edu.cornell.mannlib.vitro.webapp.auth.requestedAction.ifaces.RequestActionConstants;
import edu.cornell.mannlib.vitro.webapp.auth.requestedAction.ifaces.RequestedAction;
import edu.cornell.mannlib.vitro.webapp.auth.requestedAction.propstmt.AddDataPropStmt;
import edu.cornell.mannlib.vitro.webapp.auth.requestedAction.propstmt.DropObjectPropStmt;
import edu.cornell.mannlib.vitro.webapp.auth.requestedAction.propstmt.EditObjPropStmt;
import edu.cornell.mannlib.vitro.webapp.beans.Individual;
import edu.cornell.mannlib.vitro.webapp.controller.Controllers;
import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.ExceptionResponseValues;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.ForwardResponseValues;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.RedirectResponseValues;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.ResponseValues;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.TemplateResponseValues;
import edu.cornell.mannlib.vitro.webapp.dao.VitroVocabulary;
import edu.cornell.mannlib.vitro.webapp.filestorage.backend.FileStorage;
import edu.cornell.mannlib.vitro.webapp.filestorage.backend.FileStorageSetup;
import edu.cornell.mannlib.vitro.webapp.filestorage.model.FileInfo;
import edu.cornell.mannlib.vitro.webapp.filestorage.model.ImageInfo;
import edu.cornell.mannlib.vitro.webapp.filestorage.uploadrequest.FileUploadServletRequest;
import edu.cornell.mannlib.vitro.webapp.filters.VitroRequestPrep;
/**
* Handle adding, replacing or deleting the main image on an Individual.
*/
public class ImageUploadController extends FreemarkerHttpServlet {
private static final long serialVersionUID = 1L;
private static final Log log = LogFactory
.getLog(ImageUploadController.class);
private static final String ATTRIBUTE_REFERRING_PAGE = "ImageUploadController.referringPage";
private static final String DEFAULT_NAMESPACE = ConfigurationProperties
.getProperty("Vitro.defaultNamespace");
public static final String DUMMY_THUMBNAIL_URL = "/images/placeholders/person.thumbnail.jpg";
/** Limit file size to 6 megabytes. */
public static final int MAXIMUM_FILE_SIZE = 6 * 1024 * 1024;
/** Generated thumbnails will be this big. */
public static final int THUMBNAIL_HEIGHT = 200;
public static final int THUMBNAIL_WIDTH = 200;
/** The form field that tells what we are doing: uploading? deleting? */
public static final String PARAMETER_ACTION = "action";
/** The form field that identifies the Individual. */
public static final String PARAMETER_ENTITY_URI = "entityUri";
/** The form field of the uploaded file; use as a key to the FileItem map. */
public static final String PARAMETER_UPLOADED_FILE = "datafile";
/** Here is the main image file. Hold on to it. */
public static final String ACTION_UPLOAD = "upload";
/** Here is the cropping info; we're ready to save the image. */
public static final String ACTION_SAVE = "save";
/** A request to delete the file and return to the referring page. */
public static final String ACTION_DELETE = "delete";
/** A request to delete the file and return to the "new image" screen. */
public static final String ACTION_DELETE_EDIT = "deleteEdit";
public static final String BODY_TITLE = "title";
public static final String BODY_ENTITY_NAME = "entityName";
public static final String BODY_MAIN_IMAGE_URL = "imageUrl";
public static final String BODY_MAIN_IMAGE_HEIGHT = "imageHeight";
public static final String BODY_MAIN_IMAGE_WIDTH = "imageWidth";
public static final String BODY_THUMBNAIL_URL = "thumbnailUrl";
public static final String BODY_CANCEL_URL = "cancelUrl";
public static final String BODY_DELETE_URL = "deleteUrl";
public static final String BODY_FORM_ACTION = "formAction";
public static final String BODY_ERROR_MESSAGE = "errorMessage";
public static final String TEMPLATE_NEW = "imageUpload-newImage.ftl";
public static final String TEMPLATE_REPLACE = "imageUpload-replaceImage.ftl";
public static final String TEMPLATE_CROP = "imageUpload-cropImage.ftl";
public static final String TEMPLATE_ERROR = "error-standard.ftl";
private static final String URL_HERE = UrlBuilder.getUrl("/uploadImages");
private FileStorage fileStorage;
/**
* When initialized, get a reference to the File Storage system. Without
* that, we can do nothing.
*/
@Override
public void init() throws ServletException {
super.init();
Object o = getServletContext().getAttribute(
FileStorageSetup.ATTRIBUTE_NAME);
if (o instanceof FileStorage) {
fileStorage = (FileStorage) o;
} else if (o == null) {
throw new UnavailableException(this.getClass().getSimpleName()
+ " could not initialize. Attribute '"
+ FileStorageSetup.ATTRIBUTE_NAME
+ "' was not set in the servlet context.");
} else {
throw new UnavailableException(this.getClass().getSimpleName()
+ " could not initialize. Attribute '"
+ FileStorageSetup.ATTRIBUTE_NAME
+ "' in the servlet context contained an instance of '"
+ o.getClass().getName() + "' instead of '"
+ FileStorage.class.getName() + "'");
}
}
/**
* <p>
* Parse the multi-part request, process the request, and produce the
* output.
* </p>
* <p>
* If the request was a multi-part file upload, it will parse to a
* normal-looking request with a "file_item_map" attribute.
* </p>
* <p>
* The processing will produce a {@link ResponseValues} object, which
* represents either a request for a FreeMarker template or a forwarding
* operation.
* <ul>
* <li>If a FreeMarker template, we emulate the actions that
* FreeMarkerHttpServlet would have taken to produce the output.</li>
* <li>If a forwarding operation, we create a {@link RequestDispatcher} to
* do the forwarding.</li>
* </ul>
* </p>
*/
@Override
protected ResponseValues processRequest(VitroRequest vreq) {
try {
// Parse the multi-part request.
FileUploadServletRequest request = FileUploadServletRequest
.parseRequest(vreq, MAXIMUM_FILE_SIZE);
if (log.isTraceEnabled()) {
dumpRequestDetails(vreq);
}
// If they aren't authorized to do this, send them to login.
if (!checkAuthorized(vreq)) {
String loginPage = request.getContextPath() + Controllers.LOGIN;
return new RedirectResponseValues(loginPage);
}
return buildTheResponse(vreq);
} catch (Exception e) {
// log.error("Could not produce response page", e);
return new ExceptionResponseValues(e);
}
}
/**
* Handle the different actions. If not specified, the default action is to
* show the intro screen.
*/
private ResponseValues buildTheResponse(VitroRequest vreq) {
String action = vreq.getParameter(PARAMETER_ACTION);
try {
Individual entity = validateEntityUri(vreq);
if (ACTION_UPLOAD.equals(action)) {
return doUploadImage(vreq, entity);
} else if (ACTION_SAVE.equals(action)) {
return doCreateThumbnail(vreq, entity);
} else if (ACTION_DELETE.equals(action)) {
captureReferringUrl(vreq);
return doDeleteImage(vreq, entity);
} else if (ACTION_DELETE_EDIT.equals(action)) {
return doDeleteThenEdit(vreq, entity);
} else {
captureReferringUrl(vreq);
return doIntroScreen(vreq, entity);
}
} catch (UserMistakeException e) {
// Can't find the entity? Complain.
return showAddImagePageWithError(vreq, null, e.getMessage());
} catch (Exception e) {
// We weren't expecting this - log it, and apologize to the user.
return new ExceptionResponseValues(e);
}
}
/**
* We are just starting the upload process. Record where we came from, so if
* they hit "cancel" we know where to send them. If we have problems, just
* clear it.
*/
private void captureReferringUrl(VitroRequest vreq) {
String referrer = vreq.getHeader("Referer");
if (referrer == null) {
vreq.getSession().removeAttribute(ATTRIBUTE_REFERRING_PAGE);
} else {
vreq.getSession().setAttribute(ATTRIBUTE_REFERRING_PAGE, referrer);
}
}
/**
* Show the first screen in the upload process: Add or Replace.
*/
private ResponseValues doIntroScreen(VitroRequest vreq, Individual entity) {
ImageInfo imageInfo = ImageInfo.instanceFromEntityUri(
vreq.getFullWebappDaoFactory(), entity);
if (imageInfo == null) {
return showAddImagePage(vreq, entity);
} else {
return showReplaceImagePage(vreq, entity, imageInfo);
}
}
/**
* The user has selected their main image file. Remove any previous main
* image (and thumbnail), and attach the new main image.
*/
private ResponseValues doUploadImage(VitroRequest vreq, Individual entity) {
ImageUploadHelper helper = new ImageUploadHelper(fileStorage,
vreq.getFullWebappDaoFactory());
try {
// Did they provide a file to upload? If not, show an error.
FileItem fileItem = helper.validateImageFromRequest(vreq);
// Put it in the file system, and store a reference in the session.
FileInfo fileInfo = helper.storeNewImage(fileItem, vreq);
// How big is the new image? If not big enough, show an error.
Dimensions size = helper.getNewImageSize(fileInfo);
// Go to the cropping page.
return showCropImagePage(vreq, entity,
fileInfo.getBytestreamAliasUrl(), size);
} catch (UserMistakeException e) {
return showErrorMessage(vreq, entity, e.getMessage());
}
}
/**
* Are we writing the error message to the "Add" page or to the "Replace"
* page?
*/
private ResponseValues showErrorMessage(VitroRequest vreq,
Individual entity, String message) {
ImageInfo imageInfo = ImageInfo.instanceFromEntityUri(
vreq.getFullWebappDaoFactory(), entity);
if (imageInfo == null) {
return showAddImagePageWithError(vreq, entity, message);
} else {
return showReplaceImagePageWithError(vreq, entity, imageInfo,
message);
}
}
/**
* The user has specified how to crop the thumbnail. Crop it and attach it
* to the main image.
*/
private ResponseValues doCreateThumbnail(VitroRequest vreq,
Individual entity) {
ImageUploadHelper helper = new ImageUploadHelper(fileStorage,
vreq.getFullWebappDaoFactory());
try {
CropRectangle crop = validateCropCoordinates(vreq);
FileInfo newImage = helper.getNewImageInfo(vreq);
FileInfo thumbnail = helper.generateThumbnail(crop, newImage);
helper.removeExistingImage(entity);
helper.storeImageFiles(entity, newImage, thumbnail);
return showExitPage(vreq, entity);
} catch (UserMistakeException e) {
return showErrorMessage(vreq, entity, e.getMessage());
}
}
/**
* Delete the main image and the thumbnail, and go back to the referring
* page.
*/
private ResponseValues doDeleteImage(VitroRequest vreq, Individual entity) {
ImageUploadHelper helper = new ImageUploadHelper(fileStorage,
vreq.getFullWebappDaoFactory());
helper.removeExistingImage(entity);
return showExitPage(vreq, entity);
}
/**
* Delete the main image and the thumbnail, and go to the "add image"
* screen.
*/
private ResponseValues doDeleteThenEdit(VitroRequest vreq, Individual entity) {
ImageUploadHelper helper = new ImageUploadHelper(fileStorage,
vreq.getFullWebappDaoFactory());
helper.removeExistingImage(entity);
return showAddImagePage(vreq, entity);
}
/**
* We need to be talking about an actual Individual here.
*/
private Individual validateEntityUri(VitroRequest vreq)
throws UserMistakeException {
String entityUri = vreq.getParameter(PARAMETER_ENTITY_URI);
if (entityUri == null) {
throw new UserMistakeException("No entity URI was provided");
}
Individual entity = vreq.getFullWebappDaoFactory().getIndividualDao()
.getIndividualByURI(entityUri);
if (entity == null) {
throw new UserMistakeException(
"This URI is not recognized as belonging to anyone: '"
+ entityUri + "'");
}
return entity;
}
/**
* Did we get the cropping coordinates?
*/
private CropRectangle validateCropCoordinates(VitroRequest vreq) {
int x = getIntegerParameter(vreq, "x", 0);
int y = getIntegerParameter(vreq, "y", 0);
int h = getIntegerParameter(vreq, "h", THUMBNAIL_HEIGHT);
int w = getIntegerParameter(vreq, "w", THUMBNAIL_WIDTH);
return new CropRectangle(x, y, h, w);
}
/**
* We need this parameter on the request, and it must be a valid integer.
*/
private int getIntegerParameter(HttpServletRequest req, String key,
int defaultValue) {
String string = req.getParameter(key);
if ((string == null) || (string.isEmpty())) {
log.debug("No value for '" + key + "'; using default value = "
+ defaultValue);
return defaultValue;
}
try {
return Integer.parseInt(string);
} catch (NumberFormatException e) {
log.warn("Value for '" + key + "' was not a valid integer: '"
+ string + "'; using default value = " + defaultValue);
return defaultValue;
}
}
/**
* The individual has no image - go to the Add Image page.
*
* @param entity
* if this is null, then all URLs lead to the welcome page.
*/
private TemplateResponseValues showAddImagePage(VitroRequest vreq,
Individual entity) {
String formAction = (entity == null) ? "" : formAction(entity.getURI(),
ACTION_UPLOAD);
String cancelUrl = (entity == null) ? "" : exitPageUrl(vreq,
entity.getURI());
TemplateResponseValues rv = new TemplateResponseValues(TEMPLATE_NEW);
rv.put(BODY_THUMBNAIL_URL, UrlBuilder.getUrl(DUMMY_THUMBNAIL_URL));
rv.put(BODY_FORM_ACTION, formAction);
rv.put(BODY_CANCEL_URL, cancelUrl);
rv.put(BODY_TITLE, "Upload image" + forName(entity));
return rv;
}
/**
* The individual has no image, but the user did something wrong.
*/
private TemplateResponseValues showAddImagePageWithError(VitroRequest vreq,
Individual entity, String message) {
return showAddImagePage(vreq, entity).put(BODY_ERROR_MESSAGE, message);
}
/**
* The individual has an image - go to the Replace Image page.
*/
private TemplateResponseValues showReplaceImagePage(VitroRequest vreq,
Individual entity, ImageInfo imageInfo) {
TemplateResponseValues rv = new TemplateResponseValues(TEMPLATE_REPLACE);
rv.put(BODY_THUMBNAIL_URL, UrlBuilder.getUrl(imageInfo.getThumbnail()
.getBytestreamAliasUrl()));
rv.put(BODY_DELETE_URL, formAction(entity.getURI(), ACTION_DELETE_EDIT));
rv.put(BODY_FORM_ACTION, formAction(entity.getURI(), ACTION_UPLOAD));
rv.put(BODY_CANCEL_URL, exitPageUrl(vreq, entity.getURI()));
rv.put(BODY_TITLE, "Replace image" + forName(entity));
return rv;
}
/**
* The individual has an image, but the user did something wrong.
*/
private TemplateResponseValues showReplaceImagePageWithError(
VitroRequest vreq, Individual entity, ImageInfo imageInfo,
String message) {
TemplateResponseValues rv = showReplaceImagePage(vreq, entity,
imageInfo);
rv.put(BODY_ERROR_MESSAGE, message);
return rv;
}
/**
* We got their main image - go to the Crop Image page.
*/
private TemplateResponseValues showCropImagePage(VitroRequest vreq,
Individual entity, String imageUrl, Dimensions dimensions) {
TemplateResponseValues rv = new TemplateResponseValues(TEMPLATE_CROP);
rv.put(BODY_MAIN_IMAGE_URL, UrlBuilder.getUrl(imageUrl));
rv.put(BODY_MAIN_IMAGE_HEIGHT, dimensions.height);
rv.put(BODY_MAIN_IMAGE_WIDTH, dimensions.width);
rv.put(BODY_FORM_ACTION, formAction(entity.getURI(), ACTION_SAVE));
rv.put(BODY_CANCEL_URL, exitPageUrl(vreq, entity.getURI()));
rv.put(BODY_TITLE, "Crop Photo" + forName(entity));
return rv;
}
/**
* All done - go to the individual display page.
*/
private ForwardResponseValues showExitPage(VitroRequest vreq,
Individual entity) {
return new ForwardResponseValues(exitPageUrl(vreq, entity.getURI()));
}
/**
* When we complete the process, by success or by cancellation, go to the
* initial referring page. If there wasn't one, go to the individual display
* page.
*/
private String exitPageUrl(VitroRequest vreq, String entityUri) {
String referrer = (String) vreq.getSession().getAttribute(
ATTRIBUTE_REFERRING_PAGE);
if (referrer != null) {
return referrer;
}
if (DEFAULT_NAMESPACE == null) {
return "";
} else if (!entityUri.startsWith(DEFAULT_NAMESPACE)) {
return "";
} else {
String tail = entityUri.substring(DEFAULT_NAMESPACE.length());
if (!tail.startsWith("/")) {
tail = "/" + tail;
}
return "display" + tail;
}
}
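// Example (hypothetical URIs): with DEFAULT_NAMESPACE set to
// "http://vivo.example.edu/individual/", an entity URI of
// "http://vivo.example.edu/individual/n1234" falls back to "display/n1234"
// when no referring page was stored in the session.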
/**
* The "action" parameter on the HTML "form" tag should include the path
* back to this controller, along with the desired action and the Entity
* URI.
*/
private String formAction(String entityUri, String action) {
UrlBuilder.ParamMap params = new UrlBuilder.ParamMap(
PARAMETER_ENTITY_URI, entityUri, PARAMETER_ACTION, action);
return UrlBuilder.getPath(URL_HERE, params);
}
/**
* Format the entity's name for display as part of the page title.
*/
private String forName(Individual entity) {
if (entity != null) {
String name = entity.getName();
if (name != null) {
return " for " + name;
}
}
return "";
}
/**
* Holds an error message to use as a complaint to the user.
*/
static class UserMistakeException extends Exception {
UserMistakeException(String message) {
super(message);
}
}
/**
* Holds the coordinates that we use to crop the main image.
*/
static class CropRectangle {
final int x;
final int y;
final int height;
final int width;
CropRectangle(int x, int y, int height, int width) {
this.x = x;
this.y = y;
this.height = height;
this.width = width;
}
/** Produce a new crop rectangle that compensates for scaling. */
public CropRectangle unscale(float scale) {
int newX = (int) (x / scale);
int newY = (int) (y / scale);
int newHeight = (int) (height / scale);
int newWidth = (int) (width / scale);
return new CropRectangle(newX, newY, newHeight, newWidth);
}
@Override
public String toString() {
return "CropRectangle[x=" + x + ", y=" + y + ", w=" + width
+ ", h=" + height + "]";
}
}
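// Worked example (illustrative): if a 400x400 upload is displayed at
// scale 0.5 and the on-screen selection is
// CropRectangle(x=50, y=40, height=100, width=100), then unscale(0.5f)
// yields the full-resolution rectangle (x=100, y=80, height=200, width=200).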
/**
* For debugging, dump all sorts of information about the request.
*
* WARNING: if "req" represents a Multi-part request which has not yet been
* parsed, then reading these parameters will consume them.
*/
@SuppressWarnings("unchecked")
private void dumpRequestDetails(HttpServletRequest req) {
log.trace("Request is " + req.getClass().getName());
Map<String, String[]> parms = req.getParameterMap();
for (Entry<String, String[]> entry : parms.entrySet()) {
log.trace("Parameter '" + entry.getKey() + "'="
+ Arrays.deepToString(entry.getValue()));
}
Enumeration<String> attrs = req.getAttributeNames();
while (attrs.hasMoreElements()) {
String key = attrs.nextElement();
String valueString = String.valueOf(req.getAttribute(key));
String valueOneLine = valueString.replace("\n", " | ");
log.trace("Attribute '" + key + "'=" + valueOneLine);
}
}
static class Dimensions {
final int width;
final int height;
Dimensions(int width, int height) {
this.width = width;
this.height = height;
}
@Override
public String toString() {
return "Dimensions[width=" + width + ", height=" + height + "]";
}
}
/**
* If they are logged in as an Editor or better, they can do whatever they
* want.
*
* Otherwise, they will need to be self-editing, and will need to have
* authorization for this specific operation they are requesting.
*/
private boolean checkAuthorized(VitroRequest vreq)
throws UserMistakeException {
if (LoginStatusBean.getBean(vreq).isLoggedInAtLeast(
LoginStatusBean.EDITOR)) {
log.debug("Authorized because logged in as Editor");
return true;
}
if (!VitroRequestPrep.isSelfEditing(vreq)) {
log.debug("Not Authorized because not self-editing");
return false;
}
String action = vreq.getParameter(PARAMETER_ACTION);
Individual entity = validateEntityUri(vreq);
String imageUri = entity.getMainImageUri();
// What are we trying to do? Check if authorized.
RequestedAction ra;
if (ACTION_DELETE.equals(action) || ACTION_DELETE_EDIT.equals(action)) {
ra = new DropObjectPropStmt(entity.getURI(),
VitroVocabulary.IND_MAIN_IMAGE, imageUri);
} else if (imageUri != null) {
ra = new EditObjPropStmt(entity.getURI(),
VitroVocabulary.IND_MAIN_IMAGE, imageUri);
} else {
ra = new AddDataPropStmt(entity.getURI(),
VitroVocabulary.IND_MAIN_IMAGE,
RequestActionConstants.SOME_LITERAL, null, null);
}
AuthorizationHelper helper = new AuthorizationHelper(vreq);
boolean authorized = helper.isAuthorizedForRequestedAction(ra);
log.debug((authorized ? "" : "Not ") + "Authorized for '" + action
+ "' as self-editor; requested action = " + ra);
return authorized;
}
}
|
webapp/src/edu/cornell/mannlib/vitro/webapp/controller/freemarker/ImageUploadController.java
|
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.controller.freemarker;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.Map;
import java.util.Map.Entry;
import javax.servlet.RequestDispatcher;
import javax.servlet.ServletException;
import javax.servlet.UnavailableException;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.fileupload.FileItem;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import edu.cornell.mannlib.vedit.beans.LoginStatusBean;
import edu.cornell.mannlib.vitro.webapp.ConfigurationProperties;
import edu.cornell.mannlib.vitro.webapp.auth.AuthorizationHelper;
import edu.cornell.mannlib.vitro.webapp.auth.requestedAction.ifaces.RequestActionConstants;
import edu.cornell.mannlib.vitro.webapp.auth.requestedAction.ifaces.RequestedAction;
import edu.cornell.mannlib.vitro.webapp.auth.requestedAction.propstmt.AddDataPropStmt;
import edu.cornell.mannlib.vitro.webapp.auth.requestedAction.propstmt.DropObjectPropStmt;
import edu.cornell.mannlib.vitro.webapp.auth.requestedAction.propstmt.EditObjPropStmt;
import edu.cornell.mannlib.vitro.webapp.beans.Individual;
import edu.cornell.mannlib.vitro.webapp.controller.Controllers;
import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.ExceptionResponseValues;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.ForwardResponseValues;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.RedirectResponseValues;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.ResponseValues;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.TemplateResponseValues;
import edu.cornell.mannlib.vitro.webapp.dao.VitroVocabulary;
import edu.cornell.mannlib.vitro.webapp.filestorage.backend.FileStorage;
import edu.cornell.mannlib.vitro.webapp.filestorage.backend.FileStorageSetup;
import edu.cornell.mannlib.vitro.webapp.filestorage.model.FileInfo;
import edu.cornell.mannlib.vitro.webapp.filestorage.model.ImageInfo;
import edu.cornell.mannlib.vitro.webapp.filestorage.uploadrequest.FileUploadServletRequest;
import edu.cornell.mannlib.vitro.webapp.filters.VitroRequestPrep;
/**
* Handle adding, replacing or deleting the main image on an Individual.
*/
public class ImageUploadController extends FreemarkerHttpServlet {
private static final long serialVersionUID = 1L;
private static final Log log = LogFactory
.getLog(ImageUploadController.class);
private static final String ATTRIBUTE_REFERRING_PAGE = "ImageUploadController.referringPage";
private static final String DEFAULT_NAMESPACE = ConfigurationProperties
.getProperty("Vitro.defaultNamespace");
public static final String DUMMY_THUMBNAIL_URL = "/images/placeholders/person.thumbnail.jpg";
/** Limit file size to 6 megabytes. */
public static final int MAXIMUM_FILE_SIZE = 6 * 1024 * 1024;
/** Generated thumbnails will be this big. */
public static final int THUMBNAIL_HEIGHT = 115;
public static final int THUMBNAIL_WIDTH = 115;
/** The form field that tells what we are doing: uploading? deleting? */
public static final String PARAMETER_ACTION = "action";
/** The form field that identifies the Individual. */
public static final String PARAMETER_ENTITY_URI = "entityUri";
/** The form field of the uploaded file; use as a key to the FileItem map. */
public static final String PARAMETER_UPLOADED_FILE = "datafile";
/** Here is the main image file. Hold on to it. */
public static final String ACTION_UPLOAD = "upload";
/** Here is the cropping info; we're ready to save the image. */
public static final String ACTION_SAVE = "save";
/** A request to delete the file and return to the referring page. */
public static final String ACTION_DELETE = "delete";
/** A request to delete the file and return to the "new image" screen. */
public static final String ACTION_DELETE_EDIT = "deleteEdit";
public static final String BODY_TITLE = "title";
public static final String BODY_ENTITY_NAME = "entityName";
public static final String BODY_MAIN_IMAGE_URL = "imageUrl";
public static final String BODY_MAIN_IMAGE_HEIGHT = "imageHeight";
public static final String BODY_MAIN_IMAGE_WIDTH = "imageWidth";
public static final String BODY_THUMBNAIL_URL = "thumbnailUrl";
public static final String BODY_CANCEL_URL = "cancelUrl";
public static final String BODY_DELETE_URL = "deleteUrl";
public static final String BODY_FORM_ACTION = "formAction";
public static final String BODY_ERROR_MESSAGE = "errorMessage";
public static final String TEMPLATE_NEW = "imageUpload-newImage.ftl";
public static final String TEMPLATE_REPLACE = "imageUpload-replaceImage.ftl";
public static final String TEMPLATE_CROP = "imageUpload-cropImage.ftl";
public static final String TEMPLATE_ERROR = "error-standard.ftl";
private static final String URL_HERE = UrlBuilder.getUrl("/uploadImages");
private FileStorage fileStorage;
/**
* When initialized, get a reference to the File Storage system. Without
* that, we can do nothing.
*/
@Override
public void init() throws ServletException {
super.init();
Object o = getServletContext().getAttribute(
FileStorageSetup.ATTRIBUTE_NAME);
if (o instanceof FileStorage) {
fileStorage = (FileStorage) o;
} else if (o == null) {
throw new UnavailableException(this.getClass().getSimpleName()
+ " could not initialize. Attribute '"
+ FileStorageSetup.ATTRIBUTE_NAME
+ "' was not set in the servlet context.");
} else {
throw new UnavailableException(this.getClass().getSimpleName()
+ " could not initialize. Attribute '"
+ FileStorageSetup.ATTRIBUTE_NAME
+ "' in the servlet context contained an instance of '"
+ o.getClass().getName() + "' instead of '"
+ FileStorage.class.getName() + "'");
}
}
/**
* <p>
* Parse the multi-part request, process the request, and produce the
* output.
* </p>
* <p>
* If the request was a multi-part file upload, it will parse to a
* normal-looking request with a "file_item_map" attribute.
* </p>
* <p>
* The processing will produce a {@link ResponseValues} object, which
* represents either a request for a FreeMarker template or a forwarding
* operation.
* <ul>
* <li>If a FreeMarker template, we emulate the actions that
* FreeMarkerHttpServlet would have taken to produce the output.</li>
* <li>If a forwarding operation, we create a {@link RequestDispatcher} to
* do the forwarding.</li>
* </ul>
* </p>
*/
@Override
protected ResponseValues processRequest(VitroRequest vreq) {
try {
// Parse the multi-part request.
FileUploadServletRequest request = FileUploadServletRequest
.parseRequest(vreq, MAXIMUM_FILE_SIZE);
if (log.isTraceEnabled()) {
dumpRequestDetails(vreq);
}
// If they aren't authorized to do this, send them to login.
if (!checkAuthorized(vreq)) {
String loginPage = request.getContextPath() + Controllers.LOGIN;
return new RedirectResponseValues(loginPage);
}
return buildTheResponse(vreq);
} catch (Exception e) {
// log.error("Could not produce response page", e);
return new ExceptionResponseValues(e);
}
}
/**
* Handle the different actions. If not specified, the default action is to
* show the intro screen.
*/
private ResponseValues buildTheResponse(VitroRequest vreq) {
String action = vreq.getParameter(PARAMETER_ACTION);
try {
Individual entity = validateEntityUri(vreq);
if (ACTION_UPLOAD.equals(action)) {
return doUploadImage(vreq, entity);
} else if (ACTION_SAVE.equals(action)) {
return doCreateThumbnail(vreq, entity);
} else if (ACTION_DELETE.equals(action)) {
captureReferringUrl(vreq);
return doDeleteImage(vreq, entity);
} else if (ACTION_DELETE_EDIT.equals(action)) {
return doDeleteThenEdit(vreq, entity);
} else {
captureReferringUrl(vreq);
return doIntroScreen(vreq, entity);
}
} catch (UserMistakeException e) {
// Can't find the entity? Complain.
return showAddImagePageWithError(vreq, null, e.getMessage());
} catch (Exception e) {
// We weren't expecting this - log it, and apologize to the user.
return new ExceptionResponseValues(e);
}
}
/**
* We are just starting the upload process. Record where we came from, so if
* they hit "cancel" we know where to send them. If the request carries no
* referrer, just clear any previously stored value.
*/
private void captureReferringUrl(VitroRequest vreq) {
String referrer = vreq.getHeader("Referer");
if (referrer == null) {
vreq.getSession().removeAttribute(ATTRIBUTE_REFERRING_PAGE);
} else {
vreq.getSession().setAttribute(ATTRIBUTE_REFERRING_PAGE, referrer);
}
}
/**
* Show the first screen in the upload process: Add or Replace.
*/
private ResponseValues doIntroScreen(VitroRequest vreq, Individual entity) {
ImageInfo imageInfo = ImageInfo.instanceFromEntityUri(
vreq.getFullWebappDaoFactory(), entity);
if (imageInfo == null) {
return showAddImagePage(vreq, entity);
} else {
return showReplaceImagePage(vreq, entity, imageInfo);
}
}
/**
* The user has selected their main image file. Remove any previous main
* image (and thumbnail), and attach the new main image.
*/
private ResponseValues doUploadImage(VitroRequest vreq, Individual entity) {
ImageUploadHelper helper = new ImageUploadHelper(fileStorage,
vreq.getFullWebappDaoFactory());
try {
// Did they provide a file to upload? If not, show an error.
FileItem fileItem = helper.validateImageFromRequest(vreq);
// Put it in the file system, and store a reference in the session.
FileInfo fileInfo = helper.storeNewImage(fileItem, vreq);
// How big is the new image? If not big enough, show an error.
Dimensions size = helper.getNewImageSize(fileInfo);
// Go to the cropping page.
return showCropImagePage(vreq, entity,
fileInfo.getBytestreamAliasUrl(), size);
} catch (UserMistakeException e) {
return showErrorMessage(vreq, entity, e.getMessage());
}
}
/**
* Are we writing the error message to the "Add" page or to the "Replace"
* page?
*/
private ResponseValues showErrorMessage(VitroRequest vreq,
Individual entity, String message) {
ImageInfo imageInfo = ImageInfo.instanceFromEntityUri(
vreq.getFullWebappDaoFactory(), entity);
if (imageInfo == null) {
return showAddImagePageWithError(vreq, entity, message);
} else {
return showReplaceImagePageWithError(vreq, entity, imageInfo,
message);
}
}
/**
* The user has specified how to crop the thumbnail. Crop it and attach it
* to the main image.
*/
private ResponseValues doCreateThumbnail(VitroRequest vreq,
Individual entity) {
ImageUploadHelper helper = new ImageUploadHelper(fileStorage,
vreq.getFullWebappDaoFactory());
try {
CropRectangle crop = validateCropCoordinates(vreq);
FileInfo newImage = helper.getNewImageInfo(vreq);
FileInfo thumbnail = helper.generateThumbnail(crop, newImage);
helper.removeExistingImage(entity);
helper.storeImageFiles(entity, newImage, thumbnail);
return showExitPage(vreq, entity);
} catch (UserMistakeException e) {
return showErrorMessage(vreq, entity, e.getMessage());
}
}
/**
* Delete the main image and the thumbnail, and go back to the referring
* page.
*/
private ResponseValues doDeleteImage(VitroRequest vreq, Individual entity) {
ImageUploadHelper helper = new ImageUploadHelper(fileStorage,
vreq.getFullWebappDaoFactory());
helper.removeExistingImage(entity);
return showExitPage(vreq, entity);
}
/**
* Delete the main image and the thumbnail, and go to the "add image"
* screen.
*/
private ResponseValues doDeleteThenEdit(VitroRequest vreq, Individual entity) {
ImageUploadHelper helper = new ImageUploadHelper(fileStorage,
vreq.getFullWebappDaoFactory());
helper.removeExistingImage(entity);
return showAddImagePage(vreq, entity);
}
/**
* We need to be talking about an actual Individual here.
*/
private Individual validateEntityUri(VitroRequest vreq)
throws UserMistakeException {
String entityUri = vreq.getParameter(PARAMETER_ENTITY_URI);
if (entityUri == null) {
throw new UserMistakeException("No entity URI was provided");
}
Individual entity = vreq.getFullWebappDaoFactory().getIndividualDao()
.getIndividualByURI(entityUri);
if (entity == null) {
throw new UserMistakeException(
"This URI is not recognized as belonging to anyone: '"
+ entityUri + "'");
}
return entity;
}
/**
* Did we get the cropping coordinates?
*/
private CropRectangle validateCropCoordinates(VitroRequest vreq) {
int x = getIntegerParameter(vreq, "x", 0);
int y = getIntegerParameter(vreq, "y", 0);
int h = getIntegerParameter(vreq, "h", THUMBNAIL_HEIGHT);
int w = getIntegerParameter(vreq, "w", THUMBNAIL_WIDTH);
return new CropRectangle(x, y, h, w);
}
/**
* Read an integer parameter from the request. If it is missing or not a valid
* integer, fall back to the supplied default value.
*/
private int getIntegerParameter(HttpServletRequest req, String key,
int defaultValue) {
String string = req.getParameter(key);
if ((string == null) || (string.isEmpty())) {
log.debug("No value for '" + key + "'; using default value = "
+ defaultValue);
return defaultValue;
}
try {
return Integer.parseInt(string);
} catch (NumberFormatException e) {
log.warn("Value for '" + key + "' was not a valid integer: '"
+ string + "'; using default value = " + defaultValue);
return defaultValue;
}
}
/**
* The individual has no image - go to the Add Image page.
*
* @param entity
* if this is null, then all URLs lead to the welcome page.
*/
private TemplateResponseValues showAddImagePage(VitroRequest vreq,
Individual entity) {
String formAction = (entity == null) ? "" : formAction(entity.getURI(),
ACTION_UPLOAD);
String cancelUrl = (entity == null) ? "" : exitPageUrl(vreq,
entity.getURI());
TemplateResponseValues rv = new TemplateResponseValues(TEMPLATE_NEW);
rv.put(BODY_THUMBNAIL_URL, UrlBuilder.getUrl(DUMMY_THUMBNAIL_URL));
rv.put(BODY_FORM_ACTION, formAction);
rv.put(BODY_CANCEL_URL, cancelUrl);
rv.put(BODY_TITLE, "Upload image" + forName(entity));
return rv;
}
/**
* The individual has no image, but the user did something wrong.
*/
private TemplateResponseValues showAddImagePageWithError(VitroRequest vreq,
Individual entity, String message) {
return showAddImagePage(vreq, entity).put(BODY_ERROR_MESSAGE, message);
}
/**
* The individual has an image - go to the Replace Image page.
*/
private TemplateResponseValues showReplaceImagePage(VitroRequest vreq,
Individual entity, ImageInfo imageInfo) {
TemplateResponseValues rv = new TemplateResponseValues(TEMPLATE_REPLACE);
rv.put(BODY_THUMBNAIL_URL, UrlBuilder.getUrl(imageInfo.getThumbnail()
.getBytestreamAliasUrl()));
rv.put(BODY_DELETE_URL, formAction(entity.getURI(), ACTION_DELETE_EDIT));
rv.put(BODY_FORM_ACTION, formAction(entity.getURI(), ACTION_UPLOAD));
rv.put(BODY_CANCEL_URL, exitPageUrl(vreq, entity.getURI()));
rv.put(BODY_TITLE, "Replace image" + forName(entity));
return rv;
}
/**
* The individual has an image, but the user did something wrong.
*/
private TemplateResponseValues showReplaceImagePageWithError(
VitroRequest vreq, Individual entity, ImageInfo imageInfo,
String message) {
TemplateResponseValues rv = showReplaceImagePage(vreq, entity,
imageInfo);
rv.put(BODY_ERROR_MESSAGE, message);
return rv;
}
/**
* We got their main image - go to the Crop Image page.
*/
private TemplateResponseValues showCropImagePage(VitroRequest vreq,
Individual entity, String imageUrl, Dimensions dimensions) {
TemplateResponseValues rv = new TemplateResponseValues(TEMPLATE_CROP);
rv.put(BODY_MAIN_IMAGE_URL, UrlBuilder.getUrl(imageUrl));
rv.put(BODY_MAIN_IMAGE_HEIGHT, dimensions.height);
rv.put(BODY_MAIN_IMAGE_WIDTH, dimensions.width);
rv.put(BODY_FORM_ACTION, formAction(entity.getURI(), ACTION_SAVE));
rv.put(BODY_CANCEL_URL, exitPageUrl(vreq, entity.getURI()));
rv.put(BODY_TITLE, "Crop Photo" + forName(entity));
return rv;
}
/**
* All done - go to the individual display page.
*/
private ForwardResponseValues showExitPage(VitroRequest vreq,
Individual entity) {
return new ForwardResponseValues(exitPageUrl(vreq, entity.getURI()));
}
/**
* When we complete the process, by success or by cancellation, go to the
* initial referring page. If there wasn't one, go to the individual display
* page.
*/
private String exitPageUrl(VitroRequest vreq, String entityUri) {
String referrer = (String) vreq.getSession().getAttribute(
ATTRIBUTE_REFERRING_PAGE);
if (referrer != null) {
return referrer;
}
if (DEFAULT_NAMESPACE == null) {
return "";
} else if (!entityUri.startsWith(DEFAULT_NAMESPACE)) {
return "";
} else {
String tail = entityUri.substring(DEFAULT_NAMESPACE.length());
if (!tail.startsWith("/")) {
tail = "/" + tail;
}
return "display" + tail;
}
}
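/**
 * Worked example (illustrative only, not part of the original source): assuming a
 * DEFAULT_NAMESPACE of "http://vivo.example.edu/individual/" and no referring page
 * stored in the session, exitPageUrl() maps
 * "http://vivo.example.edu/individual/n1234" to "display/n1234", while a URI
 * outside the default namespace yields "" and the caller falls back accordingly.
 */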
/**
* The "action" parameter on the HTML "form" tag should include the path
* back to this controller, along with the desired action and the Entity
* URI.
*/
private String formAction(String entityUri, String action) {
UrlBuilder.ParamMap params = new UrlBuilder.ParamMap(
PARAMETER_ENTITY_URI, entityUri, PARAMETER_ACTION, action);
return UrlBuilder.getPath(URL_HERE, params);
}
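/**
 * Illustrative sketch only: the exact encoding is up to UrlBuilder, but the
 * resulting form action is expected to look roughly like
 * ".../uploadImages?entityUri=<encoded-entity-uri>&action=upload".
 */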
/**
* Format the entity's name for display as part of the page title.
*/
private String forName(Individual entity) {
if (entity != null) {
String name = entity.getName();
if (name != null) {
return " for " + name;
}
}
return "";
}
/**
* Holds an error message to use as a complaint to the user.
*/
static class UserMistakeException extends Exception {
UserMistakeException(String message) {
super(message);
}
}
/**
* Holds the coordinates that we use to crop the main image.
*/
static class CropRectangle {
final int x;
final int y;
final int height;
final int width;
CropRectangle(int x, int y, int height, int width) {
this.x = x;
this.y = y;
this.height = height;
this.width = width;
}
/** Produce a new crop rectangle that compensates for scaling. */
public CropRectangle unscale(float scale) {
int newX = (int) (x / scale);
int newY = (int) (y / scale);
int newHeight = (int) (height / scale);
int newWidth = (int) (width / scale);
return new CropRectangle(newX, newY, newHeight, newWidth);
}
@Override
public String toString() {
return "CropRectangle[x=" + x + ", y=" + y + ", w=" + width
+ ", h=" + height + "]";
}
}
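/**
 * Illustrative sketch only (not part of the original controller): shows how
 * unscale() maps crop coordinates chosen on a browser-scaled image back to the
 * full-size image. The 0.5f scale factor is an assumed example value.
 */
@SuppressWarnings("unused")
private static CropRectangle exampleUnscaledCrop() {
CropRectangle onScreen = new CropRectangle(50, 40, 115, 115);
// 50 / 0.5 = 100, 40 / 0.5 = 80, 115 / 0.5 = 230
return onScreen.unscale(0.5f); // CropRectangle[x=100, y=80, w=230, h=230]
}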
/**
* For debugging, dump all sorts of information about the request.
*
* WARNING: if "req" represents a Multi-part request which has not yet been
* parsed, then reading these parameters will consume them.
*/
@SuppressWarnings("unchecked")
private void dumpRequestDetails(HttpServletRequest req) {
log.trace("Request is " + req.getClass().getName());
Map<String, String[]> parms = req.getParameterMap();
for (Entry<String, String[]> entry : parms.entrySet()) {
log.trace("Parameter '" + entry.getKey() + "'="
+ Arrays.deepToString(entry.getValue()));
}
Enumeration<String> attrs = req.getAttributeNames();
while (attrs.hasMoreElements()) {
String key = attrs.nextElement();
String valueString = String.valueOf(req.getAttribute(key));
String valueOneLine = valueString.replace("\n", " | ");
log.trace("Attribute '" + key + "'=" + valueOneLine);
}
}
static class Dimensions {
final int width;
final int height;
Dimensions(int width, int height) {
this.width = width;
this.height = height;
}
@Override
public String toString() {
return "Dimensions[width=" + width + ", height=" + height + "]";
}
}
/**
* If they are logged in as an Editor or better, they can do whatever they
* want.
*
* Otherwise, they will need to be self-editing, and will need to have
* authorization for the specific operation they are requesting.
*/
private boolean checkAuthorized(VitroRequest vreq)
throws UserMistakeException {
if (LoginStatusBean.getBean(vreq).isLoggedInAtLeast(
LoginStatusBean.EDITOR)) {
log.debug("Authorized because logged in as Editor");
return true;
}
if (!VitroRequestPrep.isSelfEditing(vreq)) {
log.debug("Not Authorized because not self-editing");
return false;
}
String action = vreq.getParameter(PARAMETER_ACTION);
Individual entity = validateEntityUri(vreq);
String imageUri = entity.getMainImageUri();
// What are we trying to do? Check if authorized.
RequestedAction ra;
if (ACTION_DELETE.equals(action) || ACTION_DELETE_EDIT.equals(action)) {
ra = new DropObjectPropStmt(entity.getURI(),
VitroVocabulary.IND_MAIN_IMAGE, imageUri);
} else if (imageUri != null) {
ra = new EditObjPropStmt(entity.getURI(),
VitroVocabulary.IND_MAIN_IMAGE, imageUri);
} else {
ra = new AddDataPropStmt(entity.getURI(),
VitroVocabulary.IND_MAIN_IMAGE,
RequestActionConstants.SOME_LITERAL, null, null);
}
AuthorizationHelper helper = new AuthorizationHelper(vreq);
boolean authorized = helper.isAuthorizedForRequestedAction(ra);
log.debug((authorized ? "" : "Not ") + "Authorized for '" + action
+ "' as self-editor; requested action = " + ra);
return authorized;
}
}
|
NIHVIVO-1692 Image thumbnails are now generated and saved at 200px x 200px
|
webapp/src/edu/cornell/mannlib/vitro/webapp/controller/freemarker/ImageUploadController.java
|
NIHVIVO-1692 Image thumbnails are now generated and saved at 200px x 200px
|
|
Java
|
mit
|
2285c453dad9c4148fda473720e0e03df932fe52
| 0
|
MylesIsCool/ViaVersion
|
package us.myles.ViaVersion.protocols.protocol1_13to1_12_2.storage;
import us.myles.ViaVersion.api.Via;
import us.myles.ViaVersion.api.data.StoredObject;
import us.myles.ViaVersion.api.data.UserConnection;
import us.myles.ViaVersion.api.minecraft.Position;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
public class BlockConnectionStorage extends StoredObject {
private Map<Long, short[]> blockStorage = createLongObjectMap();
private static short[] short4096 = new short[4096];
private static Constructor<?> fastUtilLongObjectHashMap;
static {
try {
fastUtilLongObjectHashMap = Class.forName("it.unimi.dsi.fastutil.longs.Long2ObjectOpenHashMap").getConstructor();
Via.getPlatform().getLogger().info("Using FastUtil Long2ObjectOpenHashMap for block connections");
} catch (ClassNotFoundException | NoSuchMethodException ignored) {
}
}
public BlockConnectionStorage(UserConnection user) {
super(user);
}
public void store(Position position, int blockState) {
long pair = getChunkSectionIndex(position);
short[] map = getChunkSection(pair);
map[encodeBlockPos(position)] = (short) blockState;
}
public int get(Position position) {
long pair = getChunkSectionIndex(position);
short[] map = blockStorage.get(pair);
if (map == null) return 0;
short blockPosition = encodeBlockPos(position);
return map[blockPosition];
}
public void remove(Position position) {
long pair = getChunkSectionIndex(position);
short[] map = getChunkSection(pair);
map[encodeBlockPos(position)] = 0;
if (Arrays.equals(short4096, map)) {
blockStorage.remove(pair);
}
}
public void clear() {
blockStorage.clear();
}
public void unloadChunk(int x, int z) {
for (int y = 0; y < 256; y += 16) {
blockStorage.remove(getChunkSectionIndex(x, y, z));
}
}
private short[] getChunkSection(long index) {
short[] map = blockStorage.get(index);
if (map == null) {
map = new short[4096];
blockStorage.put(index, map);
}
return map;
}
private long getChunkSectionIndex(int x, int y, int z) {
return (((x >> 4) & 0x3FFFFFFL) << 38) | (((y >> 4) & 0xFFFL) << 26) | ((z >> 4) & 0x3FFFFFFL);
}
private long getChunkSectionIndex(Position position) {
return getChunkSectionIndex(position.getX().intValue(), position.getY().intValue(), position.getZ().intValue());
}
private short encodeBlockPos(int x, int y, int z) {
return (short) (((y & 0xF) << 8) | ((x & 0xF) << 4) | (z & 0xF));
}
private short encodeBlockPos(Position pos) {
return encodeBlockPos(pos.getX().intValue(), pos.getY().intValue(), pos.getZ().intValue());
}
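/**
 * Illustrative sketch only (not part of the original class): getChunkSectionIndex
 * packs the chunk-section coordinates (x >> 4, y >> 4, z >> 4) into one long key,
 * and encodeBlockPos packs the within-section coordinates into a 12-bit array
 * index. For example, for (x=5, y=18, z=12): (18 & 0xF) << 8 = 512,
 * (5 & 0xF) << 4 = 80, 12 & 0xF = 12, so the index is 512 + 80 + 12 = 604.
 */
private short exampleEncodeBlockPos() {
return encodeBlockPos(5, 18, 12); // == 604
}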
private <T> Map<Long, T> createLongObjectMap() {
if (fastUtilLongObjectHashMap != null) {
try {
return (Map<Long, T>) fastUtilLongObjectHashMap.newInstance();
} catch (IllegalAccessException | InstantiationException | InvocationTargetException e) {
e.printStackTrace();
}
}
return new HashMap<>();
}
}
|
common/src/main/java/us/myles/ViaVersion/protocols/protocol1_13to1_12_2/storage/BlockConnectionStorage.java
|
package us.myles.ViaVersion.protocols.protocol1_13to1_12_2.storage;
import us.myles.ViaVersion.api.Via;
import us.myles.ViaVersion.api.data.StoredObject;
import us.myles.ViaVersion.api.data.UserConnection;
import us.myles.ViaVersion.api.minecraft.Position;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.HashMap;
import java.util.Map;
public class BlockConnectionStorage extends StoredObject {
private Map<Long, Map<Short, Short>> blockStorage = createLongObjectMap();
private static Constructor<?> fastUtilLongObjectHashMap;
private static Constructor<?> fastUtilShortShortHashMap;
static {
try {
fastUtilLongObjectHashMap = Class.forName("it.unimi.dsi.fastutil.longs.Long2ObjectOpenHashMap").getConstructor();
Via.getPlatform().getLogger().info("Using FastUtil Long2ObjectOpenHashMap for block connections");
} catch (ClassNotFoundException | NoSuchMethodException ignored) {
}
try {
fastUtilShortShortHashMap = Class.forName("it.unimi.dsi.fastutil.shorts.Short2ShortOpenHashMap").getConstructor();
Via.getPlatform().getLogger().info("Using FastUtil Short2ShortOpenHashMap for block connections");
} catch (ClassNotFoundException | NoSuchMethodException ignored) {
}
}
public BlockConnectionStorage(UserConnection user) {
super(user);
}
public void store(Position position, int blockState) {
long pair = getChunkIndex(position);
Map<Short, Short> map = getChunkMap(pair);
map.put(encodeBlockPos(position), (short) blockState);
}
public int get(Position position) {
long pair = getChunkIndex(position);
Map<Short, Short> map = getChunkMap(pair);
short blockPosition = encodeBlockPos(position);
return map.containsKey(blockPosition) ? map.get(blockPosition) : 0;
}
public void remove(Position position) {
long pair = getChunkIndex(position);
Map<Short, Short> map = getChunkMap(pair);
map.remove(encodeBlockPos(position));
if (map.isEmpty()) {
blockStorage.remove(pair);
}
}
public void clear() {
blockStorage.clear();
}
public void unloadChunk(int x, int z) {
blockStorage.remove(getChunkIndex(x, z));
}
private Map<Short, Short> getChunkMap(long index) {
Map<Short, Short> map = blockStorage.get(index);
if (map == null) {
map = createShortShortMap();
blockStorage.put(index, map);
}
return map;
}
private long getChunkIndex(int x, int z) {
return (long) x << 32 | (z & 0xFFFFFFFFL);
}
private long getChunkIndex(Position position) {
return getChunkIndex(position.getX().intValue(), position.getZ().intValue());
}
private short encodeBlockPos(int x, int y, int z) {
return (short) (y << 8 | x & 0xF << 4 | z & 0xF);
}
private short encodeBlockPos(Position pos) {
return encodeBlockPos(pos.getX().intValue(), pos.getY().intValue(), pos.getZ().intValue());
}
private <T> Map<Long, T> createLongObjectMap() {
if (fastUtilLongObjectHashMap != null) {
try {
return (Map<Long, T>) fastUtilLongObjectHashMap.newInstance();
} catch (IllegalAccessException | InstantiationException | InvocationTargetException e) {
e.printStackTrace();
}
}
return new HashMap<>();
}
private Map<Short, Short> createShortShortMap() {
if (fastUtilShortShortHashMap != null) {
try {
return (Map<Short, Short>) fastUtilShortShortHashMap.newInstance();
} catch (IllegalAccessException | InstantiationException | InvocationTargetException e) {
e.printStackTrace();
}
}
return new HashMap<>();
}
}
|
fixed encodeblockpos, use map<long, short[]>
|
common/src/main/java/us/myles/ViaVersion/protocols/protocol1_13to1_12_2/storage/BlockConnectionStorage.java
|
fixed encodeblockpos, use map<long, short[]>
|
|
Java
|
mit
|
e8bedd8bc5bbc6e3cdf53d950353476ebe47850b
| 0
|
takenspc/validator,validator/validator,takenspc/validator,takenspc/validator,validator/validator,validator/validator,takenspc/validator,takenspc/validator,validator/validator,validator/validator
|
/*
* Copyright (c) 2012-2018 Mozilla Foundation
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
package nu.validator.servlet;
import java.io.IOException;
import java.text.DecimalFormat;
import javax.servlet.http.HttpServletResponse;
import nu.validator.htmlparser.sax.HtmlSerializer;
import nu.validator.xml.EmptyAttributes;
import org.xml.sax.ContentHandler;
import org.xml.sax.SAXException;
public class Statistics {
public static final Statistics STATISTICS;
private static final char[] VALIDATOR_STATISTICS = "Validator statistics".toCharArray();
private static final char[] COUNTER_NAME = "Counter".toCharArray();
private static final char[] COUNTER_VALUE = "Value".toCharArray();
private static final char[] COUNTER_PROPORTION = "Proportion".toCharArray();
private static final char[] TOTAL_VALIDATIONS = "Total number of validations".toCharArray();
private static final char[] UPTIME_DAYS = "Uptime in days".toCharArray();
private static final char[] VALIDATIONS_PER_SECOND = "Validations per second".toCharArray();
private static final char[] SORT_LANGS_SCRIPT = (""
+ " var rows = document.querySelectorAll('tr');"
+ " var langRows = new Array();"
+ " for (var i=0; i < rows.length; i++) { var row = rows[i];"
+ " if (row.textContent.indexOf('Detected language') > -1) {"
+ " var sortnr = parseInt(row.cells[1].textContent"
+ " || row.cells[0].innerText);"
+ " if (sortnr == 0) {"
+ " row.remove();"
+ " } else if (!isNaN(sortnr)) {"
+ " langRows.push([sortnr, row]);"
+ " }"
+ " }"
+ " } langRows.sort(function(x,y) { return x[0] - y[0]; });"
+ " langRows.reverse();"
+ " for (var i=0; i<langRows.length; i++) {"
+ " document.querySelector('tbody').appendChild(langRows[i][1]);"
+ " }"
+ " var langValRows = new Array();"
+ " for (var i=0; i < rows.length; i++) { var row = rows[i];"
+ " if (row.textContent.indexOf('<html lang>: ') > -1) {"
+ " var sortnr = parseInt(row.cells[1].textContent"
+ " || row.cells[0].innerText);"
+ " if (sortnr == 0) {"
+ " row.remove();"
+ " } else if (!isNaN(sortnr)) {"
+ " langValRows.push([sortnr, row]);"
+ " }"
+ " }"
+ " } langValRows.sort(function(x,y) { return x[0] - y[0]; });"
+ " langValRows.reverse();"
+ " for (var i=0; i<langValRows.length; i++) {"
+ " document.querySelector('tbody').appendChild(langValRows[i][1]);"
+ " }").toCharArray();
private static final char[] STYLESHEET = (""
+ " body { font-family: sans-serif; }"
+ " td { padding: 4px 8px 4px 8px; }"
+ " tr:nth-child(even) { background-color: #cde; }").toCharArray();
public enum Field {
// @formatter:off
INPUT_GET("Input: GET-based"), //
INPUT_POST("Input: POST-based"), //
INPUT_ENTITY_BODY("\u2514 Entity-body input"), //
INPUT_TEXT_FIELD("\u2514 Text-field input"), //
INPUT_FILE_UPLOAD("\u2514 File-upload input"), //
INPUT_HTML("Input: text/html"), //
INPUT_CSS("Input: text/css"), //
INPUT_UNSUPPORTED("Input: unsupported content type (error)"), //
INPUT_XML("Input: application/xhtml+xml or other XML content type"), //
OUTPUT_XML("Output: XML"), //
OUTPUT_HTML("Output: HTML"), //
OUTPUT_JSON("Output: JSON"), //
OUTPUT_GNU("Output: GNU"), //
OUTPUT_TEXT("Output: Text"), //
OUTPUT_XHTML("Output: XHTML"), //
SHOW_SOURCE("Show: source"), //
SHOW_OUTLINE("Show: outline"), //
IMAGE_REPORT("Show: image report"), //
HTML5_SCHEMA("Schema: HTML5 schema"), //
HTML5_RDFA_LITE_SCHEMA("Schema: HTML5+RDFa Lite schema"), //
XHTML1_DOCTYPE("Doctype: XHTML1"), //
HTML4_DOCTYPE("Doctype: HTML4"), //
ABOUT_LEGACY_COMPAT("Doctype: about:legacy-compat"), //
AUTO_SCHEMA("Schema: automatically chosen"), //
PRESET_SCHEMA("Schema: preset"), //
HTML4_STRICT_SCHEMA("Schema: legacy Strict"), //
HTML4_TRANSITIONAL_SCHEMA("Schema: legacy Transitional"), //
HTML4_FRAMESET_SCHEMA("Schema: legacy Frameset"), //
XHTML1_COMPOUND_SCHEMA("Schema: legacy XHTML+SVG+MathML"), //
SVG_SCHEMA("Schema: SVG"), //
BUILT_IN_NON_PRESET("Schema: custom combined from built-ins"), //
EXTERNAL_SCHEMA_NON_SCHEMATRON("Schema: non-schematron custom"), //
EXTERNAL_SCHEMA_SCHEMATRON("Schema: schematron custom"), //
LAX_TYPE("Content type: being lax"), //
CUSTOM_ENC("Encoding: manually set"), //
PARSER_XML_EXTERNAL("Parser: set to XML with external entities"), //
XMLNS_FILTER("Parser: XMLNS filter set"), //
LOGIC_ERROR("Logic errors in schema stats"), //
H1_MULTIPLE_FOUND("<h1> multiple found"), //
H1_MULTIPLE_WITH_SECTION_ANCESTOR_FOUND("<h1> multiple with <section> ancestor found"), //
H1_MULTIPLE_WITH_ARTICLE_ANCESTOR_FOUND("<h1> multiple with <article> ancestor found"), //
H1_MULTIPLE_WITH_ASIDE_ANCESTOR_FOUND("<h1> multiple with <aside> ancestor found"), //
H1_MULTIPLE_WITH_NAV_ANCESTOR_FOUND("<h1> multiple with <nav> ancestor found"), //
HGROUP_FOUND("<hgroup> found"), //
STYLE_ATTRIBUTE_FOUND("style attribute found"), //
STYLE_IN_BODY_FOUND("<style> in <body> found"), //
MAIN_FOUND("<main> found"), //
MAIN_MULTIPLE_VISIBLE_FOUND("<main> multiple visible found"), //
LANG_FOUND("<html lang> found"), //
LANG_WRONG("<html lang> does not match detected language"), //
LANG_EMPTY("<html lang>: empty (lang=\"\")"), //
LANG_OTHER("<html lang>: other (unrecognized/invalid)"), //
LANG_AF("<html lang>: af"), //
LANG_AM("<html lang>: am"), //
LANG_AR("<html lang>: ar"), //
LANG_AR_AA("<html lang>: ar-aa"), //
LANG_AR_AE("<html lang>: ar-ae"), //
LANG_AR_AR("<html lang>: ar-ar"), //
LANG_AR_DZ("<html lang>: ar-dz"), //
LANG_AR_EG("<html lang>: ar-eg"), //
LANG_AR_JO("<html lang>: ar-jo"), //
LANG_AR_KW("<html lang>: ar-kw"), //
LANG_AR_OM("<html lang>: ar-om"), //
LANG_AR_QA("<html lang>: ar-qa"), //
LANG_AR_SA("<html lang>: ar-sa"), //
LANG_AR_SY("<html lang>: ar-sy"), //
LANG_AT("<html lang>: at"), //
LANG_AZ("<html lang>: az"), //
LANG_AZ_AZ("<html lang>: az-az"), //
LANG_BA("<html lang>: ba"), //
LANG_BE("<html lang>: be"), //
LANG_BG("<html lang>: bg"), //
LANG_BG_BG("<html lang>: bg-bg"), //
LANG_BN("<html lang>: bn"), //
LANG_BN_BD("<html lang>: bn-bd"), //
LANG_BR("<html lang>: br"), //
LANG_BS("<html lang>: bs"), //
LANG_BS_BA("<html lang>: bs-ba"), //
LANG_BS_LATN("<html lang>: bs-latn"), //
LANG_CA("<html lang>: ca"), //
LANG_CA_ES("<html lang>: ca-es"), //
LANG_CE("<html lang>: ce"), //
LANG_CH("<html lang>: ch"), //
LANG_CHR("<html lang>: chr"), //
LANG_CKB("<html lang>: ckb"), //
LANG_CN("<html lang>: cn"), //
LANG_CR("<html lang>: cr"), //
LANG_CS("<html lang>: cs"), //
LANG_CS_CZ("<html lang>: cs-cz"), //
LANG_CY("<html lang>: cy"), //
LANG_CZ("<html lang>: cz"), //
LANG_DA("<html lang>: da"), //
LANG_DA_DK("<html lang>: da-dk"), //
LANG_DE("<html lang>: de"), //
LANG_DE_AT("<html lang>: de-at"), //
LANG_DE_CH("<html lang>: de-ch"), //
LANG_DE_DE("<html lang>: de-de"), //
LANG_DE__DE("<html lang>: de_de"), //
LANG_DK("<html lang>: dk"), //
LANG_EL("<html lang>: el"), //
LANG_EL_GR("<html lang>: el-gr"), //
LANG_EN("<html lang>: en"), //
LANG_ENG("<html lang>: eng"), //
LANG_EN_AE("<html lang>: en-ae"), //
LANG_EN_AU("<html lang>: en-au"), //
LANG_EN__AU("<html lang>: en_au"), //
LANG_EN_CA("<html lang>: en-ca"), //
LANG_EN__CA("<html lang>: en_ca"), //
LANG_EN_DE("<html lang>: en-de"), //
LANG_EN_EG("<html lang>: en-eg"), //
LANG_EN_EN("<html lang>: en-en"), //
LANG_EN__EN("<html lang>: en_en"), //
LANG_EN_EU("<html lang>: en-eu"), //
LANG_EN_GB("<html lang>: en-gb"), //
LANG_EN__GB("<html lang>: en_gb"), //
LANG_EN_HK("<html lang>: en-hk"), //
LANG_EN_ID("<html lang>: en-id"), //
LANG_EN_IE("<html lang>: en-ie"), //
LANG_EN_IN("<html lang>: en-in"), //
LANG_EN_MY("<html lang>: en-my"), //
LANG_EN_NG("<html lang>: en-ng"), //
LANG_EN_NZ("<html lang>: en-nz"), //
LANG_EN_PH("<html lang>: en-ph"), //
LANG_EN_PK("<html lang>: en-pk"), //
LANG_EN_SG("<html lang>: en-sg"), //
LANG_EN_UK("<html lang>: en-uk"), //
LANG_EN_US("<html lang>: en-us"), //
LANG_EN__US("<html lang>: en_us"), //
LANG_EN_VN("<html lang>: en-vn"), //
LANG_EN_ZA("<html lang>: en-za"), //
LANG_ES("<html lang>: es"), //
LANG_ES_AR("<html lang>: es-ar"), //
LANG_ES__AR("<html lang>: es_ar"), //
LANG_ES_CL("<html lang>: es-cl"), //
LANG_ES_CO("<html lang>: es-co"), //
LANG_ES_DO("<html lang>: es-do"), //
LANG_ES_EC("<html lang>: es-ec"), //
LANG_ES_ES("<html lang>: es-es"), //
LANG_ES__ES("<html lang>: es_es"), //
LANG_ES_LA("<html lang>: es-la"), //
LANG_ES_MX("<html lang>: es-mx"), //
LANG_ES_PE("<html lang>: es-pe"), //
LANG_ES_PR("<html lang>: es-pr"), //
LANG_ES_US("<html lang>: es-us"), //
LANG_ES_VE("<html lang>: es-ve"), //
LANG_ET("<html lang>: et"), //
LANG_ET_EE("<html lang>: et-ee"), //
LANG_EU("<html lang>: eu"), //
LANG_FA("<html lang>: fa"), //
LANG_FA_IR("<html lang>: fa-ir"), //
LANG_FA__IR("<html lang>: fa_ir"), //
LANG_FI("<html lang>: fi"), //
LANG_FI_FI("<html lang>: fi-fi"), //
LANG_FO("<html lang>: fo"), //
LANG_FR("<html lang>: fr"), //
LANG_FR_BE("<html lang>: fr-be"), //
LANG_FR_CA("<html lang>: fr-ca"), //
LANG_FR_CH("<html lang>: fr-ch"), //
LANG_FR_FR("<html lang>: fr-fr"), //
LANG_FR__FR("<html lang>: fr_fr"), //
LANG_FR_MA("<html lang>: fr-ma"), //
LANG_FR_US("<html lang>: fr-us"), //
LANG_GA("<html lang>: ga"), //
LANG_GB("<html lang>: gb"), //
LANG_GE("<html lang>: ge"), //
LANG_GL("<html lang>: gl"), //
LANG_GL_ES("<html lang>: gl-es"), //
LANG_GR("<html lang>: gr"), //
LANG_GU("<html lang>: gu"), //
LANG_HE("<html lang>: he"), //
LANG_HE_IL("<html lang>: he-il"), //
LANG_HI("<html lang>: hi"), //
LANG_HR_HR("<html lang>: hr-hr"), //
LANG_HU("<html lang>: hu"), //
LANG_HU_HU("<html lang>: hu-hu"), //
LANG_HY("<html lang>: hy"), //
LANG_HY_AM("<html lang>: hy-am"), //
LANG_ID("<html lang>: id"), //
LANG_ID_ID("<html lang>: id-id"), //
LANG_IG("<html lang>: ig"), //
LANG_IN("<html lang>: in"), //
LANG_IN_ID("<html lang>: in-id"), //
LANG_IR("<html lang>: ir"), //
LANG_IS("<html lang>: is"), //
LANG_IT("<html lang>: it"), //
LANG_IT_IT("<html lang>: it-it"), //
LANG_IT__IT("<html lang>: it_it"), //
LANG_IU("<html lang>: iu"), //
LANG_IW("<html lang>: iw"), //
LANG_JA("<html lang>: ja"), //
LANG_JA_JP("<html lang>: ja-jp"), //
LANG_JA__JP("<html lang>: ja_jp"), //
LANG_JP("<html lang>: jp"), //
LANG_JV("<html lang>: jv"), //
LANG_KA("<html lang>: ka"), //
LANG_KA_GE("<html lang>: ka-ge"), //
LANG_KK("<html lang>: kk"), //
LANG_KK_KK("<html lang>: kk-kk"), //
LANG_KK_KZ("<html lang>: kk-kz"), //
LANG_KM("<html lang>: km"), //
LANG_KN("<html lang>: kn"), //
LANG_KO("<html lang>: ko"), //
LANG_KO_KR("<html lang>: ko-kr"), //
LANG_KR("<html lang>: kr"), //
LANG_KU("<html lang>: ku"), //
LANG_KY("<html lang>: ky"), //
LANG_KZ("<html lang>: kz"), //
LANG_KZ_KZ("<html lang>: kz-kz"), //
LANG_LANG("<html lang>: lang"), //
LANG_LO("<html lang>: lo"), //
LANG_LT("<html lang>: lt"), //
LANG_LT_LT("<html lang>: lt-lt"), //
LANG_LV("<html lang>: lv"), //
LANG_LV_LV("<html lang>: lv-lv"), //
LANG_MG("<html lang>: mg"), //
LANG_MHR("<html lang>: mhr"), //
LANG_MI("<html lang>: mi"), //
LANG_MK("<html lang>: mk"), //
LANG_MK_MK("<html lang>: mk-mk"), //
LANG_ML("<html lang>: ml"), //
LANG_MN("<html lang>: mn"), //
LANG_MN_MN("<html lang>: mn-mn"), //
LANG_MR("<html lang>: mr"), //
LANG_MRJ("<html lang>: mrj"), //
LANG_MR_IN("<html lang>: mr-in"), //
LANG_MS("<html lang>: ms"), //
LANG_MS_MY("<html lang>: ms-my"), //
LANG_MT("<html lang>: mt"), //
LANG_MUL("<html lang>: mul"), //
LANG_MX("<html lang>: mx"), //
LANG_MY("<html lang>: my"), //
LANG_NAH("<html lang>: nah"), //
LANG_NB_NO("<html lang>: nb-no"), //
LANG_NE("<html lang>: ne"), //
LANG_NL("<html lang>: nl"), //
LANG_NL_BE("<html lang>: nl-be"), //
LANG_NL_NL("<html lang>: nl-nl"), //
LANG_NL__NL("<html lang>: nl_nl"), //
LANG_NN("<html lang>: nn"), //
LANG_NO("<html lang>: no"), //
LANG_NO_NB("<html lang>: no-nb"), //
LANG_NO_NO("<html lang>: no-no"), //
LANG_NY("<html lang>: ny"), //
LANG_OC("<html lang>: oc"), //
LANG_OR("<html lang>: or"), //
LANG_OS("<html lang>: os"), //
LANG_PA("<html lang>: pa"), //
LANG_PL("<html lang>: pl"), //
LANG_PL_PL("<html lang>: pl-pl"), //
LANG_PL__PL("<html lang>: pl_pl"), //
LANG_PNB("<html lang>: pnb"), //
LANG_PS("<html lang>: ps"), //
LANG_PT("<html lang>: pt"), //
LANG_PT_BR("<html lang>: pt-br"), //
LANG_PT__BR("<html lang>: pt_br"), //
LANG_PT_PT("<html lang>: pt-pt"), //
LANG_RO("<html lang>: ro"), //
LANG_RO_RO("<html lang>: ro-ro"), //
LANG_RO__RO("<html lang>: ro_ro"), //
LANG_RS("<html lang>: rs"), //
LANG_RU("<html lang>: ru"), //
LANG_RU_RU("<html lang>: ru-ru"), //
LANG_RU__RU("<html lang>: ru_ru"), //
LANG_RU_UA("<html lang>: ru-ua"), //
LANG_RW("<html lang>: rw"), //
LANG_SAH("<html lang>: sah"), //
LANG_SD("<html lang>: sd"), //
LANG_SE("<html lang>: se"), //
LANG_SH("<html lang>: sh"), //
LANG_SI("<html lang>: si"), //
LANG_SK("<html lang>: sk"), //
LANG_SK_SK("<html lang>: sk-sk"), //
LANG_SL("<html lang>: sl"), //
LANG_SL_SI("<html lang>: sl-si"), //
LANG_SN("<html lang>: sn"), //
LANG_SP("<html lang>: sp"), //
LANG_SQ("<html lang>: sq"), //
LANG_SQ_AL("<html lang>: sq-al"), //
LANG_SR("<html lang>: sr"), //
LANG_SR_LATN("<html lang>: sr-latn"), //
LANG_SR_RS("<html lang>: sr-rs"), //
LANG_SR_SR("<html lang>: sr-sr"), //
LANG_SR_YU("<html lang>: sr-yu"), //
LANG_ST("<html lang>: st"), //
LANG_SU("<html lang>: su"), //
LANG_SV("<html lang>: sv"), //
LANG_SV_SE("<html lang>: sv-se"), //
LANG_SW("<html lang>: sw"), //
LANG_TA("<html lang>: ta"), //
LANG_TE("<html lang>: te"), //
LANG_TG("<html lang>: tg"), //
LANG_TH("<html lang>: th"), //
LANG_TH_TH("<html lang>: th-th"), //
LANG_TL("<html lang>: tl"), //
LANG_TR("<html lang>: tr"), //
LANG_TR_TR("<html lang>: tr-tr"), //
LANG_TR__TR("<html lang>: tr_tr"), //
LANG_TT("<html lang>: tt"), //
LANG_TW("<html lang>: tw"), //
LANG_UA("<html lang>: ua"), //
LANG_UG("<html lang>: ug"), //
LANG_UK("<html lang>: uk"), //
LANG_UK_UA("<html lang>: uk-ua"), //
LANG_UR("<html lang>: ur"), //
LANG_UR_PK("<html lang>: ur-pk"), //
LANG_US("<html lang>: us"), //
LANG_US_EN("<html lang>: us-en"), //
LANG_UZ("<html lang>: uz"), //
LANG_VI("<html lang>: vi"), //
LANG_VI_VN("<html lang>: vi-vn"), //
LANG_VI__VN("<html lang>: vi_vn"), //
LANG_VN("<html lang>: vn"), //
LANG_XH("<html lang>: xh"), //
LANG_ZH("<html lang>: zh"), //
LANG_ZH_CH("<html lang>: zh-ch"), //
LANG_ZH_CMN("<html lang>: zh-cmn"), //
LANG_ZH_CMN_HANS("<html lang>: zh-cmn-hans"), //
LANG_ZH_CMN_HANT("<html lang>: zh-cmn-hant"), //
LANG_ZH_CN("<html lang>: zh-cn"), //
LANG_ZH__CN("<html lang>: zh_cn"), //
LANG_ZH_HANS("<html lang>: zh-hans"), //
LANG_ZH_HANS_CN("<html lang>: zh-hans-cn"), //
LANG_ZH_HANT("<html lang>: zh-hant"), //
LANG_ZH_HANT_HK("<html lang>: zh-hant-hk"), //
LANG_ZH_HANT_TW("<html lang>: zh-hant-tw"), //
LANG_ZH_HK("<html lang>: zh-hk"), //
LANG_ZH__HK("<html lang>: zh_hk"), //
LANG_ZH_TW("<html lang>: zh-tw"), //
LANG_ZH__TW("<html lang>: zh_tw"), //
LANG_ZU("<html lang>: zu"), //
LANG_ZXX("<html lang>: zxx"), //
DETECTEDLANG_AF("Detected language: Afrikaans"), //
DETECTEDLANG_AZB("Detected language: South Azerbaijani"), //
DETECTEDLANG_AM("Detected language: Amharic"), //
DETECTEDLANG_AR("Detected language: Arabic"), //
DETECTEDLANG_AZ("Detected language: Azerbaijani"), //
DETECTEDLANG_BA("Detected language: Bashkir"), //
DETECTEDLANG_BE("Detected language: Belarusian"), //
DETECTEDLANG_BN("Detected language: Bengali"), //
DETECTEDLANG_BO("Detected language: Tibetan"), //
DETECTEDLANG_BS("Detected language: Bosnian"), //
DETECTEDLANG_CA("Detected language: Catalan"), //
DETECTEDLANG_CE("Detected language: Chechen"), //
DETECTEDLANG_CHR("Detected language: Cherokee"), //
DETECTEDLANG_CKB("Detected language: Sorani Kurdish"), //
DETECTEDLANG_CR("Detected language: Cree"), //
DETECTEDLANG_CS("Detected language: Czech"), //
DETECTEDLANG_CY("Detected language: Welsh"), //
DETECTEDLANG_DA("Detected language: Danish"), //
DETECTEDLANG_DE("Detected language: German"), //
DETECTEDLANG_DV("Detected language: Divehi"), //
DETECTEDLANG_EL("Detected language: Greek"), //
DETECTEDLANG_EN("Detected language: English"), //
DETECTEDLANG_ES("Detected language: Spanish"), //
DETECTEDLANG_ET("Detected language: Estonian"), //
DETECTEDLANG_EU("Detected language: Basque"), //
DETECTEDLANG_FA("Detected language: Persian"), //
DETECTEDLANG_FI("Detected language: Finnish"), //
DETECTEDLANG_FO("Detected language: Faroese"), //
DETECTEDLANG_FR("Detected language: French"), //
DETECTEDLANG_GA("Detected language: Irish"), //
DETECTEDLANG_GU("Detected language: Gujarati"), //
DETECTEDLANG_HA("Detected language: Hausa"), //
DETECTEDLANG_HE("Detected language: Hebrew"), //
DETECTEDLANG_HI("Detected language: Hindi"), //
DETECTEDLANG_HR("Detected language: Croatian"), //
DETECTEDLANG_HU("Detected language: Hungarian"), //
DETECTEDLANG_HY("Detected language: Armenian"), //
DETECTEDLANG_ID("Detected language: Indonesian"), //
DETECTEDLANG_IG("Detected language: Igbo"), //
DETECTEDLANG_IS("Detected language: Icelandic"), //
DETECTEDLANG_IT("Detected language: Italian"), //
DETECTEDLANG_IU("Detected language: Inuktitut"), //
DETECTEDLANG_JA("Detected language: Japanese"), //
DETECTEDLANG_JV("Detected language: Javanese"), //
DETECTEDLANG_KA("Detected language: Georgian"), //
DETECTEDLANG_KM("Detected language: Khmer"), //
DETECTEDLANG_KK("Detected language: Kazakh"), //
DETECTEDLANG_KN("Detected language: Kannada"), //
DETECTEDLANG_KO("Detected language: Korean"), //
DETECTEDLANG_KU("Detected language: Kurdish"), //
DETECTEDLANG_KY("Detected language: Kyrgyz"), //
DETECTEDLANG_LO("Detected language: Lao"), //
DETECTEDLANG_LT("Detected language: Lithuanian"), //
DETECTEDLANG_LV("Detected language: Latvian"), //
DETECTEDLANG_MG("Detected language: Malagasy"), //
DETECTEDLANG_MHR("Detected language: Meadow Mari"), //
DETECTEDLANG_MI("Detected language: Maori"), //
DETECTEDLANG_MK("Detected language: Macedonian"), //
DETECTEDLANG_ML("Detected language: Malayalam"), //
DETECTEDLANG_MN("Detected language: Mongolian"), //
DETECTEDLANG_MR("Detected language: Marathi"), //
DETECTEDLANG_MRJ("Detected language: Hill Mari"), //
DETECTEDLANG_MS("Detected language: Malay"), //
DETECTEDLANG_MT("Detected language: Maltese"), //
DETECTEDLANG_MY("Detected language: Burmese"), //
DETECTEDLANG_NAH("Detected language: Nahuatl"), //
DETECTEDLANG_NE("Detected language: Nepali"), //
DETECTEDLANG_NL("Detected language: Dutch"), //
DETECTEDLANG_NO("Detected language: Norwegian"), //
DETECTEDLANG_NY("Detected language: Nyanja"), //
DETECTEDLANG_OC("Detected language: Occitan"), //
DETECTEDLANG_OM("Detected language: Oromo"), //
DETECTEDLANG_OR("Detected language: Oriya"), //
DETECTEDLANG_OS("Detected language: Ossetian"), //
DETECTEDLANG_PA("Detected language: Punjabi"), //
DETECTEDLANG_PL("Detected language: Polish"), //
DETECTEDLANG_PNB("Detected language: Western Panjabi"), //
DETECTEDLANG_PS("Detected language: Pashto"), //
DETECTEDLANG_PT("Detected language: Portuguese"), //
DETECTEDLANG_RO("Detected language: Romanian"), //
DETECTEDLANG_RU("Detected language: Russian"), //
DETECTEDLANG_RW("Detected language: Kinyarwanda"), //
DETECTEDLANG_SAH("Detected language: Sakha"), //
DETECTEDLANG_SD("Detected language: Sindhi"), //
DETECTEDLANG_SH("Detected language: Croatian, Serbian, or Bosnian"), //
DETECTEDLANG_SI("Detected language: Sinhala"), //
DETECTEDLANG_SK("Detected language: Slovak"), //
DETECTEDLANG_SL("Detected language: Slovenian"), //
DETECTEDLANG_SN("Detected language: Shona"), //
DETECTEDLANG_SO("Detected language: Somali"), //
DETECTEDLANG_SQ("Detected language: Albanian"), //
DETECTEDLANG_SR_CYRL("Detected language: Serbian (Cyrillic)"), //
DETECTEDLANG_SR_LATN("Detected language: Serbian (Latin)"), //
DETECTEDLANG_ST("Detected language: Southern Sotho"), //
DETECTEDLANG_SU("Detected language: Sundanese"), //
DETECTEDLANG_SV("Detected language: Swedish"), //
DETECTEDLANG_SW("Detected language: Swahili"), //
DETECTEDLANG_TA("Detected language: Tamil"), //
DETECTEDLANG_TE("Detected language: Telugu"), //
DETECTEDLANG_TG("Detected language: Tajik"), //
DETECTEDLANG_TH("Detected language: Thai"), //
DETECTEDLANG_TI("Detected language: Tigrinya"), //
DETECTEDLANG_TL("Detected language: Tagalog"), //
DETECTEDLANG_TR("Detected language: Turkish"), //
DETECTEDLANG_TT("Detected language: Tatar"), //
DETECTEDLANG_UG("Detected language: Uyghur"), //
DETECTEDLANG_UK("Detected language: Ukrainian"), //
DETECTEDLANG_UR("Detected language: Urdu"), //
DETECTEDLANG_UZ_CYRL("Detected language: Uzbek (Cyrillic)"), //
DETECTEDLANG_UZ_LATN("Detected language: Uzbek (Latin)"), //
DETECTEDLANG_VI("Detected language: Vietnamese"), //
DETECTEDLANG_XH("Detected language: Xhosa"), //
DETECTEDLANG_ZH_HANS("Detected language: Simplified Chinese"), //
DETECTEDLANG_ZH_HANT("Detected language: Traditional Chinese"), //
DETECTEDLANG_ZU("Detected language: Zulu"), //
DETECTEDLANG_ZXX("Detected language: Lorem ipsum text"); //
// @formatter:on
Field(String description) {
this.description = description;
}
private final String description;
/**
* @see java.lang.Enum#toString()
*/
@Override
public String toString() {
return description;
}
}
public Field getFieldFromName(String name) {
for (Field field : Field.class.getEnumConstants()) {
if (field.name().equals(name)) {
return field;
}
}
throw new IllegalArgumentException(
"No statistics field with name " + name);
}
static {
if ("1".equals(System.getProperty("nu.validator.servlet.statistics"))) {
STATISTICS = new Statistics();
} else {
STATISTICS = null;
}
}
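// Illustrative note: statistics collection is enabled only when the JVM is started
// with this system property set, e.g. "-Dnu.validator.servlet.statistics=1"
// (assumed invocation; any mechanism that sets the property to "1" works).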
private final long startTime = System.currentTimeMillis();
private long total = 0;
private final long[] counters;
private Statistics() {
counters = new long[Field.values().length];
}
public void incrementTotal() {
total++;
}
public void incrementField(Field field) {
counters[field.ordinal()]++;
}
public void writeToResponse(HttpServletResponse response)
throws IOException {
try {
long totalCopy;
long[] countersCopy = new long[counters.length];
synchronized (this) {
totalCopy = total;
System.arraycopy(counters, 0, countersCopy, 0, counters.length);
}
double totalDouble = totalCopy;
double uptimeMillis = System.currentTimeMillis() - startTime;
response.setContentType("text/html; charset=utf-8");
ContentHandler ch = new HtmlSerializer(response.getOutputStream());
try {
ch.startDocument();
startElement(ch, "html");
startElement(ch, "head");
startElement(ch, "title");
characters(ch, VALIDATOR_STATISTICS);
endElement(ch, "title");
startElement(ch, "style");
characters(ch, STYLESHEET);
endElement(ch, "style");
endElement(ch, "head");
startElement(ch, "body");
startElement(ch, "h1");
characters(ch, VALIDATOR_STATISTICS);
endElement(ch, "h1");
startElement(ch, "dl");
startElement(ch, "dt");
characters(ch, TOTAL_VALIDATIONS);
endElement(ch, "dt");
startElement(ch, "dd");
characters(ch, totalCopy);
endElement(ch, "dd");
startElement(ch, "dt");
characters(ch, UPTIME_DAYS);
endElement(ch, "dt");
startElement(ch, "dd");
characters(ch, uptimeMillis / (1000 * 60 * 60 * 24));
endElement(ch, "dd");
startElement(ch, "dt");
characters(ch, VALIDATIONS_PER_SECOND);
endElement(ch, "dt");
startElement(ch, "dd");
characters(ch, totalDouble / (uptimeMillis / 1000.0));
endElement(ch, "dd");
endElement(ch, "dl");
startElement(ch, "table");
startElement(ch, "thead");
startElement(ch, "tr");
startElement(ch, "th");
characters(ch, COUNTER_NAME);
endElement(ch, "th");
startElement(ch, "th");
characters(ch, COUNTER_VALUE);
endElement(ch, "th");
startElement(ch, "th");
characters(ch, COUNTER_PROPORTION);
endElement(ch, "th");
endElement(ch, "tr");
endElement(ch, "thead");
startElement(ch, "tbody");
for (int i = 0; i < countersCopy.length; i++) {
long count = countersCopy[i];
startElement(ch, "tr");
startElement(ch, "td");
characters(ch, Field.values()[i].toString());
endElement(ch, "td");
startElement(ch, "td");
characters(ch, count);
endElement(ch, "td");
startElement(ch, "td");
characters(ch, count / totalDouble);
endElement(ch, "td");
endElement(ch, "tr");
}
endElement(ch, "tbody");
endElement(ch, "table");
startElement(ch, "script");
characters(ch, SORT_LANGS_SCRIPT);
endElement(ch, "script");
endElement(ch, "body");
endElement(ch, "html");
} finally {
ch.endDocument();
}
} catch (SAXException e) {
throw new IOException(e);
}
}
private void characters(ContentHandler ch, double d) throws SAXException {
// Let's just create a new DecimalFormat each time to avoid the
// complexity of recycling an instance correctly without threading
// hazards.
DecimalFormat df = new DecimalFormat("#,###,##0.000000");
characters(ch, df.format(d));
}
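// Possible alternative, included only as an illustration (not used by the original
// code): DecimalFormat is not thread-safe, so a per-thread instance would avoid
// both the per-call allocation and the hazards of sharing one instance.
@SuppressWarnings("unused")
private static final ThreadLocal<DecimalFormat> ILLUSTRATIVE_FORMAT =
ThreadLocal.withInitial(() -> new DecimalFormat("#,###,##0.000000"));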
private void characters(ContentHandler ch, long l) throws SAXException {
characters(ch, Long.toString(l));
}
private void characters(ContentHandler ch, String str) throws SAXException {
characters(ch, str.toCharArray());
}
private void characters(ContentHandler ch, char[] cs) throws SAXException {
ch.characters(cs, 0, cs.length);
}
private void endElement(ContentHandler ch, String name)
throws SAXException {
ch.endElement("http://www.w3.org/1999/xhtml", name, name);
}
private void startElement(ContentHandler ch, String name)
throws SAXException {
ch.startElement("http://www.w3.org/1999/xhtml", name, name,
EmptyAttributes.EMPTY_ATTRIBUTES);
}
}
|
src/nu/validator/servlet/Statistics.java
|
/*
* Copyright (c) 2012-2018 Mozilla Foundation
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
package nu.validator.servlet;
import java.io.IOException;
import java.text.DecimalFormat;
import javax.servlet.http.HttpServletResponse;
import nu.validator.htmlparser.sax.HtmlSerializer;
import nu.validator.xml.EmptyAttributes;
import org.xml.sax.ContentHandler;
import org.xml.sax.SAXException;
public class Statistics {
public static final Statistics STATISTICS;
private static final char[] VALIDATOR_STATISTICS = "Validator statistics".toCharArray();
private static final char[] COUNTER_NAME = "Counter".toCharArray();
private static final char[] COUNTER_VALUE = "Value".toCharArray();
private static final char[] COUNTER_PROPORTION = "Proportion".toCharArray();
private static final char[] TOTAL_VALIDATIONS = "Total number of validations".toCharArray();
private static final char[] UPTIME_DAYS = "Uptime in days".toCharArray();
private static final char[] VALIDATIONS_PER_SECOND = "Validations per second".toCharArray();
private static final char[] SORT_LANGS_SCRIPT = (""
+ " var rows = document.querySelectorAll('tr');"
+ " var langRows = new Array();"
+ " for (var i=0; i < rows.length; i++) { var row = rows[i];"
+ " if (row.textContent.indexOf('Detected language') > -1) {"
+ " var sortnr = parseInt(row.cells[1].textContent"
+ " || row.cells[0].innerText);"
+ " if (sortnr == 0) {"
+ " row.remove();"
+ " } else if (!isNaN(sortnr)) {"
+ " langRows.push([sortnr, row]);"
+ " }"
+ " }"
+ " } langRows.sort(function(x,y) { return x[0] - y[0]; });"
+ " langRows.reverse();"
+ " for (var i=0; i<langRows.length; i++) {"
+ " document.querySelector('tbody').appendChild(langRows[i][1]);"
+ " }"
+ " var langValRows = new Array();"
+ " for (var i=0; i < rows.length; i++) { var row = rows[i];"
+ " if (row.textContent.indexOf('<html lang>: ') > -1) {"
+ " var sortnr = parseInt(row.cells[1].textContent"
+ " || row.cells[0].innerText);"
+ " if (sortnr == 0) {"
+ " row.remove();"
+ " } else if (!isNaN(sortnr)) {"
+ " langValRows.push([sortnr, row]);"
+ " }"
+ " }"
+ " } langValRows.sort(function(x,y) { return x[0] - y[0]; });"
+ " langValRows.reverse();"
+ " for (var i=0; i<langValRows.length; i++) {"
+ " document.querySelector('tbody').appendChild(langValRows[i][1]);"
+ " }").toCharArray();
private static final char[] STYLESHEET = (""
+ " body { font-family: sans-serif; }"
+ " td { padding: 4px 8px 4px 8px; }"
+ " tr:nth-child(even) { background-color: #cde; }").toCharArray();
public enum Field {
// @formatter:off
INPUT_GET("Input: GET-based"), //
INPUT_POST("Input: POST-based"), //
INPUT_ENTITY_BODY("\u2514 Entity-body input"), //
INPUT_TEXT_FIELD("\u2514 Text-field input"), //
INPUT_FILE_UPLOAD("\u2514 File-upload input"), //
INPUT_HTML("Input: text/html"), //
INPUT_CSS("Input: text/css"), //
INPUT_UNSUPPORTED("Input: unsupported content type (error)"), //
INPUT_XML("Input: application/xhtml+xml or other XML content type"), //
OUTPUT_XML("Output: XML"), //
OUTPUT_HTML("Output: HTML"), //
OUTPUT_JSON("Output: JSON"), //
OUTPUT_GNU("Output: GNU"), //
OUTPUT_TEXT("Output: Text"), //
OUTPUT_XHTML("Output: XHTML"), //
SHOW_SOURCE("Show: source"), //
SHOW_OUTLINE("Show: outline"), //
IMAGE_REPORT("Show: image report"), //
HTML5_SCHEMA("Schema: HTML5 schema"), //
HTML5_RDFA_LITE_SCHEMA("Schema: HTML5+RDFa Lite schema"), //
XHTML1_DOCTYPE("Doctype: XHTML1"), //
HTML4_DOCTYPE("Doctype: HTML4"), //
ABOUT_LEGACY_COMPAT("Doctype: about:legacy-compat"), //
AUTO_SCHEMA("Schema: automatically chosen"), //
PRESET_SCHEMA("Schema: preset"), //
HTML4_STRICT_SCHEMA("Schema: legacy Strict"), //
HTML4_TRANSITIONAL_SCHEMA("Schema: legacy Transitional"), //
HTML4_FRAMESET_SCHEMA("Schema: legacy Frameset"), //
XHTML1_COMPOUND_SCHEMA("Schema: legacy XHTML+SVG+MathML"), //
SVG_SCHEMA("Schema: SVG"), //
BUILT_IN_NON_PRESET("Schema: custom combined from built-ins"), //
EXTERNAL_SCHEMA_NON_SCHEMATRON("Schema: non-schematron custom"), //
EXTERNAL_SCHEMA_SCHEMATRON("Schema: schematron custom"), //
LAX_TYPE("Content type: being lax"), //
CUSTOM_ENC("Encoding: manually set"), //
PARSER_XML_EXTERNAL("Parser: set to XML with external entities"), //
XMLNS_FILTER("Parser: XMLNS filter set"), //
LOGIC_ERROR("Logic errors in schema stats"), //
H1_MULTIPLE_FOUND("<h1> multiple found"), //
H1_MULTIPLE_WITH_SECTION_ANCESTOR_FOUND("<h1> multiple with <section> ancestor found"), //
H1_MULTIPLE_WITH_ARTICLE_ANCESTOR_FOUND("<h1> multiple with <article> ancestor found"), //
H1_MULTIPLE_WITH_ASIDE_ANCESTOR_FOUND("<h1> multiple with <aside> ancestor found"), //
H1_MULTIPLE_WITH_NAV_ANCESTOR_FOUND("<h1> multiple with <nav> ancestor found"), //
HGROUP_FOUND("<hgroup> found"), //
SECTION_NO_HEADING_FOUND("<section> with no heading found"), //
ARTICLE_NO_HEADING_FOUND("<article> with no heading found"), //
STYLE_ATTRIBUTE_FOUND("style attribute found"), //
STYLE_IN_BODY_FOUND("<style> in <body> found"), //
MAIN_FOUND("<main> found"), //
MAIN_MULTIPLE_VISIBLE_FOUND("<main> multiple visible found"), //
LANG_FOUND("<html lang> found"), //
LANG_WRONG("<html lang> does not match detected language"), //
LANG_EMPTY("<html lang>: empty (lang=\"\")"), //
LANG_OTHER("<html lang>: other (unrecognized/invalid)"), //
LANG_AF("<html lang>: af"), //
LANG_AM("<html lang>: am"), //
LANG_AR("<html lang>: ar"), //
LANG_AR_AA("<html lang>: ar-aa"), //
LANG_AR_AE("<html lang>: ar-ae"), //
LANG_AR_AR("<html lang>: ar-ar"), //
LANG_AR_DZ("<html lang>: ar-dz"), //
LANG_AR_EG("<html lang>: ar-eg"), //
LANG_AR_JO("<html lang>: ar-jo"), //
LANG_AR_KW("<html lang>: ar-kw"), //
LANG_AR_OM("<html lang>: ar-om"), //
LANG_AR_QA("<html lang>: ar-qa"), //
LANG_AR_SA("<html lang>: ar-sa"), //
LANG_AR_SY("<html lang>: ar-sy"), //
LANG_AT("<html lang>: at"), //
LANG_AZ("<html lang>: az"), //
LANG_AZ_AZ("<html lang>: az-az"), //
LANG_BA("<html lang>: ba"), //
LANG_BE("<html lang>: be"), //
LANG_BG("<html lang>: bg"), //
LANG_BG_BG("<html lang>: bg-bg"), //
LANG_BN("<html lang>: bn"), //
LANG_BN_BD("<html lang>: bn-bd"), //
LANG_BR("<html lang>: br"), //
LANG_BS("<html lang>: bs"), //
LANG_BS_BA("<html lang>: bs-ba"), //
LANG_BS_LATN("<html lang>: bs-latn"), //
LANG_CA("<html lang>: ca"), //
LANG_CA_ES("<html lang>: ca-es"), //
LANG_CE("<html lang>: ce"), //
LANG_CH("<html lang>: ch"), //
LANG_CHR("<html lang>: chr"), //
LANG_CKB("<html lang>: ckb"), //
LANG_CN("<html lang>: cn"), //
LANG_CR("<html lang>: cr"), //
LANG_CS("<html lang>: cs"), //
LANG_CS_CZ("<html lang>: cs-cz"), //
LANG_CY("<html lang>: cy"), //
LANG_CZ("<html lang>: cz"), //
LANG_DA("<html lang>: da"), //
LANG_DA_DK("<html lang>: da-dk"), //
LANG_DE("<html lang>: de"), //
LANG_DE_AT("<html lang>: de-at"), //
LANG_DE_CH("<html lang>: de-ch"), //
LANG_DE_DE("<html lang>: de-de"), //
LANG_DE__DE("<html lang>: de_de"), //
LANG_DK("<html lang>: dk"), //
LANG_EL("<html lang>: el"), //
LANG_EL_GR("<html lang>: el-gr"), //
LANG_EN("<html lang>: en"), //
LANG_ENG("<html lang>: eng"), //
LANG_EN_AE("<html lang>: en-ae"), //
LANG_EN_AU("<html lang>: en-au"), //
LANG_EN__AU("<html lang>: en_au"), //
LANG_EN_CA("<html lang>: en-ca"), //
LANG_EN__CA("<html lang>: en_ca"), //
LANG_EN_DE("<html lang>: en-de"), //
LANG_EN_EG("<html lang>: en-eg"), //
LANG_EN_EN("<html lang>: en-en"), //
LANG_EN__EN("<html lang>: en_en"), //
LANG_EN_EU("<html lang>: en-eu"), //
LANG_EN_GB("<html lang>: en-gb"), //
LANG_EN__GB("<html lang>: en_gb"), //
LANG_EN_HK("<html lang>: en-hk"), //
LANG_EN_ID("<html lang>: en-id"), //
LANG_EN_IE("<html lang>: en-ie"), //
LANG_EN_IN("<html lang>: en-in"), //
LANG_EN_MY("<html lang>: en-my"), //
LANG_EN_NG("<html lang>: en-ng"), //
LANG_EN_NZ("<html lang>: en-nz"), //
LANG_EN_PH("<html lang>: en-ph"), //
LANG_EN_PK("<html lang>: en-pk"), //
LANG_EN_SG("<html lang>: en-sg"), //
LANG_EN_UK("<html lang>: en-uk"), //
LANG_EN_US("<html lang>: en-us"), //
LANG_EN__US("<html lang>: en_us"), //
LANG_EN_VN("<html lang>: en-vn"), //
LANG_EN_ZA("<html lang>: en-za"), //
LANG_ES("<html lang>: es"), //
LANG_ES_AR("<html lang>: es-ar"), //
LANG_ES__AR("<html lang>: es_ar"), //
LANG_ES_CL("<html lang>: es-cl"), //
LANG_ES_CO("<html lang>: es-co"), //
LANG_ES_DO("<html lang>: es-do"), //
LANG_ES_EC("<html lang>: es-ec"), //
LANG_ES_ES("<html lang>: es-es"), //
LANG_ES__ES("<html lang>: es_es"), //
LANG_ES_LA("<html lang>: es-la"), //
LANG_ES_MX("<html lang>: es-mx"), //
LANG_ES_PE("<html lang>: es-pe"), //
LANG_ES_PR("<html lang>: es-pr"), //
LANG_ES_US("<html lang>: es-us"), //
LANG_ES_VE("<html lang>: es-ve"), //
LANG_ET("<html lang>: et"), //
LANG_ET_EE("<html lang>: et-ee"), //
LANG_EU("<html lang>: eu"), //
LANG_FA("<html lang>: fa"), //
LANG_FA_IR("<html lang>: fa-ir"), //
LANG_FA__IR("<html lang>: fa_ir"), //
LANG_FI("<html lang>: fi"), //
LANG_FI_FI("<html lang>: fi-fi"), //
LANG_FO("<html lang>: fo"), //
LANG_FR("<html lang>: fr"), //
LANG_FR_BE("<html lang>: fr-be"), //
LANG_FR_CA("<html lang>: fr-ca"), //
LANG_FR_CH("<html lang>: fr-ch"), //
LANG_FR_FR("<html lang>: fr-fr"), //
LANG_FR__FR("<html lang>: fr_fr"), //
LANG_FR_MA("<html lang>: fr-ma"), //
LANG_FR_US("<html lang>: fr-us"), //
LANG_GA("<html lang>: ga"), //
LANG_GB("<html lang>: gb"), //
LANG_GE("<html lang>: ge"), //
LANG_GL("<html lang>: gl"), //
LANG_GL_ES("<html lang>: gl-es"), //
LANG_GR("<html lang>: gr"), //
LANG_GU("<html lang>: gu"), //
LANG_HE("<html lang>: he"), //
LANG_HE_IL("<html lang>: he-il"), //
LANG_HI("<html lang>: hi"), //
LANG_HR_HR("<html lang>: hr-hr"), //
LANG_HU("<html lang>: hu"), //
LANG_HU_HU("<html lang>: hu-hu"), //
LANG_HY("<html lang>: hy"), //
LANG_HY_AM("<html lang>: hy-am"), //
LANG_ID("<html lang>: id"), //
LANG_ID_ID("<html lang>: id-id"), //
LANG_IG("<html lang>: ig"), //
LANG_IN("<html lang>: in"), //
LANG_IN_ID("<html lang>: in-id"), //
LANG_IR("<html lang>: ir"), //
LANG_IS("<html lang>: is"), //
LANG_IT("<html lang>: it"), //
LANG_IT_IT("<html lang>: it-it"), //
LANG_IT__IT("<html lang>: it_it"), //
LANG_IU("<html lang>: iu"), //
LANG_IW("<html lang>: iw"), //
LANG_JA("<html lang>: ja"), //
LANG_JA_JP("<html lang>: ja-jp"), //
LANG_JA__JP("<html lang>: ja_jp"), //
LANG_JP("<html lang>: jp"), //
LANG_JV("<html lang>: jv"), //
LANG_KA("<html lang>: ka"), //
LANG_KA_GE("<html lang>: ka-ge"), //
LANG_KK("<html lang>: kk"), //
LANG_KK_KK("<html lang>: kk-kk"), //
LANG_KK_KZ("<html lang>: kk-kz"), //
LANG_KM("<html lang>: km"), //
LANG_KN("<html lang>: kn"), //
LANG_KO("<html lang>: ko"), //
LANG_KO_KR("<html lang>: ko-kr"), //
LANG_KR("<html lang>: kr"), //
LANG_KU("<html lang>: ku"), //
LANG_KY("<html lang>: ky"), //
LANG_KZ("<html lang>: kz"), //
LANG_KZ_KZ("<html lang>: kz-kz"), //
LANG_LANG("<html lang>: lang"), //
LANG_LO("<html lang>: lo"), //
LANG_LT("<html lang>: lt"), //
LANG_LT_LT("<html lang>: lt-lt"), //
LANG_LV("<html lang>: lv"), //
LANG_LV_LV("<html lang>: lv-lv"), //
LANG_MG("<html lang>: mg"), //
LANG_MHR("<html lang>: mhr"), //
LANG_MI("<html lang>: mi"), //
LANG_MK("<html lang>: mk"), //
LANG_MK_MK("<html lang>: mk-mk"), //
LANG_ML("<html lang>: ml"), //
LANG_MN("<html lang>: mn"), //
LANG_MN_MN("<html lang>: mn-mn"), //
LANG_MR("<html lang>: mr"), //
LANG_MRJ("<html lang>: mrj"), //
LANG_MR_IN("<html lang>: mr-in"), //
LANG_MS("<html lang>: ms"), //
LANG_MS_MY("<html lang>: ms-my"), //
LANG_MT("<html lang>: mt"), //
LANG_MUL("<html lang>: mul"), //
LANG_MX("<html lang>: mx"), //
LANG_MY("<html lang>: my"), //
LANG_NAH("<html lang>: nah"), //
LANG_NB_NO("<html lang>: nb-no"), //
LANG_NE("<html lang>: ne"), //
LANG_NL("<html lang>: nl"), //
LANG_NL_BE("<html lang>: nl-be"), //
LANG_NL_NL("<html lang>: nl-nl"), //
LANG_NL__NL("<html lang>: nl_nl"), //
LANG_NN("<html lang>: nn"), //
LANG_NO("<html lang>: no"), //
LANG_NO_NB("<html lang>: no-nb"), //
LANG_NO_NO("<html lang>: no-no"), //
LANG_NY("<html lang>: ny"), //
LANG_OC("<html lang>: oc"), //
LANG_OR("<html lang>: or"), //
LANG_OS("<html lang>: os"), //
LANG_PA("<html lang>: pa"), //
LANG_PL("<html lang>: pl"), //
LANG_PL_PL("<html lang>: pl-pl"), //
LANG_PL__PL("<html lang>: pl_pl"), //
LANG_PNB("<html lang>: pnb"), //
LANG_PS("<html lang>: ps"), //
LANG_PT("<html lang>: pt"), //
LANG_PT_BR("<html lang>: pt-br"), //
LANG_PT__BR("<html lang>: pt_br"), //
LANG_PT_PT("<html lang>: pt-pt"), //
LANG_RO("<html lang>: ro"), //
LANG_RO_RO("<html lang>: ro-ro"), //
LANG_RO__RO("<html lang>: ro_ro"), //
LANG_RS("<html lang>: rs"), //
LANG_RU("<html lang>: ru"), //
LANG_RU_RU("<html lang>: ru-ru"), //
LANG_RU__RU("<html lang>: ru_ru"), //
LANG_RU_UA("<html lang>: ru-ua"), //
LANG_RW("<html lang>: rw"), //
LANG_SAH("<html lang>: sah"), //
LANG_SD("<html lang>: sd"), //
LANG_SE("<html lang>: se"), //
LANG_SH("<html lang>: sh"), //
LANG_SI("<html lang>: si"), //
LANG_SK("<html lang>: sk"), //
LANG_SK_SK("<html lang>: sk-sk"), //
LANG_SL("<html lang>: sl"), //
LANG_SL_SI("<html lang>: sl-si"), //
LANG_SN("<html lang>: sn"), //
LANG_SP("<html lang>: sp"), //
LANG_SQ("<html lang>: sq"), //
LANG_SQ_AL("<html lang>: sq-al"), //
LANG_SR("<html lang>: sr"), //
LANG_SR_LATN("<html lang>: sr-latn"), //
LANG_SR_RS("<html lang>: sr-rs"), //
LANG_SR_SR("<html lang>: sr-sr"), //
LANG_SR_YU("<html lang>: sr-yu"), //
LANG_ST("<html lang>: st"), //
LANG_SU("<html lang>: su"), //
LANG_SV("<html lang>: sv"), //
LANG_SV_SE("<html lang>: sv-se"), //
LANG_SW("<html lang>: sw"), //
LANG_TA("<html lang>: ta"), //
LANG_TE("<html lang>: te"), //
LANG_TG("<html lang>: tg"), //
LANG_TH("<html lang>: th"), //
LANG_TH_TH("<html lang>: th-th"), //
LANG_TL("<html lang>: tl"), //
LANG_TR("<html lang>: tr"), //
LANG_TR_TR("<html lang>: tr-tr"), //
LANG_TR__TR("<html lang>: tr_tr"), //
LANG_TT("<html lang>: tt"), //
LANG_TW("<html lang>: tw"), //
LANG_UA("<html lang>: ua"), //
LANG_UG("<html lang>: ug"), //
LANG_UK("<html lang>: uk"), //
LANG_UK_UA("<html lang>: uk-ua"), //
LANG_UR("<html lang>: ur"), //
LANG_UR_PK("<html lang>: ur-pk"), //
LANG_US("<html lang>: us"), //
LANG_US_EN("<html lang>: us-en"), //
LANG_UZ("<html lang>: uz"), //
LANG_VI("<html lang>: vi"), //
LANG_VI_VN("<html lang>: vi-vn"), //
LANG_VI__VN("<html lang>: vi_vn"), //
LANG_VN("<html lang>: vn"), //
LANG_XH("<html lang>: xh"), //
LANG_ZH("<html lang>: zh"), //
LANG_ZH_CH("<html lang>: zh-ch"), //
LANG_ZH_CMN("<html lang>: zh-cmn"), //
LANG_ZH_CMN_HANS("<html lang>: zh-cmn-hans"), //
LANG_ZH_CMN_HANT("<html lang>: zh-cmn-hant"), //
LANG_ZH_CN("<html lang>: zh-cn"), //
LANG_ZH__CN("<html lang>: zh_cn"), //
LANG_ZH_HANS("<html lang>: zh-hans"), //
LANG_ZH_HANS_CN("<html lang>: zh-hans-cn"), //
LANG_ZH_HANT("<html lang>: zh-hant"), //
LANG_ZH_HANT_HK("<html lang>: zh-hant-hk"), //
LANG_ZH_HANT_TW("<html lang>: zh-hant-tw"), //
LANG_ZH_HK("<html lang>: zh-hk"), //
LANG_ZH__HK("<html lang>: zh_hk"), //
LANG_ZH_TW("<html lang>: zh-tw"), //
LANG_ZH__TW("<html lang>: zh_tw"), //
LANG_ZU("<html lang>: zu"), //
LANG_ZXX("<html lang>: zxx"), //
DETECTEDLANG_AF("Detected language: Afrikaans"), //
DETECTEDLANG_AZB("Detected language: South Azerbaijani"), //
DETECTEDLANG_AM("Detected language: Amharic"), //
DETECTEDLANG_AR("Detected language: Arabic"), //
DETECTEDLANG_AZ("Detected language: Azerbaijani"), //
DETECTEDLANG_BA("Detected language: Bashkir"), //
DETECTEDLANG_BE("Detected language: Belarusian"), //
DETECTEDLANG_BN("Detected language: Bengali"), //
DETECTEDLANG_BO("Detected language: Tibetan"), //
DETECTEDLANG_BS("Detected language: Bosnian"), //
DETECTEDLANG_CA("Detected language: Catalan"), //
DETECTEDLANG_CE("Detected language: Chechen"), //
DETECTEDLANG_CHR("Detected language: Cherokee"), //
DETECTEDLANG_CKB("Detected language: Sorani Kurdish"), //
DETECTEDLANG_CR("Detected language: Cree"), //
DETECTEDLANG_CS("Detected language: Czech"), //
DETECTEDLANG_CY("Detected language: Welsh"), //
DETECTEDLANG_DA("Detected language: Danish"), //
DETECTEDLANG_DE("Detected language: German"), //
DETECTEDLANG_DV("Detected language: Divehi"), //
DETECTEDLANG_EL("Detected language: Greek"), //
DETECTEDLANG_EN("Detected language: English"), //
DETECTEDLANG_ES("Detected language: Spanish"), //
DETECTEDLANG_ET("Detected language: Estonian"), //
DETECTEDLANG_EU("Detected language: Basque"), //
DETECTEDLANG_FA("Detected language: Persian"), //
DETECTEDLANG_FI("Detected language: Finnish"), //
DETECTEDLANG_FO("Detected language: Faroese"), //
DETECTEDLANG_FR("Detected language: French"), //
DETECTEDLANG_GA("Detected language: Irish"), //
DETECTEDLANG_GU("Detected language: Gujarati"), //
DETECTEDLANG_HA("Detected language: Hausa"), //
DETECTEDLANG_HE("Detected language: Hebrew"), //
DETECTEDLANG_HI("Detected language: Hindi"), //
DETECTEDLANG_HR("Detected language: Croatian"), //
DETECTEDLANG_HU("Detected language: Hungarian"), //
DETECTEDLANG_HY("Detected language: Armenian"), //
DETECTEDLANG_ID("Detected language: Indonesian"), //
DETECTEDLANG_IG("Detected language: Igbo"), //
DETECTEDLANG_IS("Detected language: Icelandic"), //
DETECTEDLANG_IT("Detected language: Italian"), //
DETECTEDLANG_IU("Detected language: Inuktitut"), //
DETECTEDLANG_JA("Detected language: Japanese"), //
DETECTEDLANG_JV("Detected language: Javanese"), //
DETECTEDLANG_KA("Detected language: Georgian"), //
DETECTEDLANG_KM("Detected language: Khmer"), //
DETECTEDLANG_KK("Detected language: Kazakh"), //
DETECTEDLANG_KN("Detected language: Kannada"), //
DETECTEDLANG_KO("Detected language: Korean"), //
DETECTEDLANG_KU("Detected language: Kurdish"), //
DETECTEDLANG_KY("Detected language: Kyrgyz"), //
DETECTEDLANG_LO("Detected language: Lao"), //
DETECTEDLANG_LT("Detected language: Lithuanian"), //
DETECTEDLANG_LV("Detected language: Latvian"), //
DETECTEDLANG_MG("Detected language: Malagasy"), //
DETECTEDLANG_MHR("Detected language: Meadow Mari"), //
DETECTEDLANG_MI("Detected language: Maori"), //
DETECTEDLANG_MK("Detected language: Macedonian"), //
DETECTEDLANG_ML("Detected language: Malayalam"), //
DETECTEDLANG_MN("Detected language: Mongolian"), //
DETECTEDLANG_MR("Detected language: Marathi"), //
DETECTEDLANG_MRJ("Detected language: Hill Mari"), //
DETECTEDLANG_MS("Detected language: Malay"), //
DETECTEDLANG_MT("Detected language: Maltese"), //
DETECTEDLANG_MY("Detected language: Burmese"), //
DETECTEDLANG_NAH("Detected language: Nahuatl"), //
DETECTEDLANG_NE("Detected language: Nepali"), //
DETECTEDLANG_NL("Detected language: Dutch"), //
DETECTEDLANG_NO("Detected language: Norwegian"), //
DETECTEDLANG_NY("Detected language: Nyanja"), //
DETECTEDLANG_OC("Detected language: Occitan"), //
DETECTEDLANG_OM("Detected language: Oromo"), //
DETECTEDLANG_OR("Detected language: Oriya"), //
DETECTEDLANG_OS("Detected language: Ossetian"), //
DETECTEDLANG_PA("Detected language: Punjabi"), //
DETECTEDLANG_PL("Detected language: Polish"), //
DETECTEDLANG_PNB("Detected language: Western Panjabi"), //
DETECTEDLANG_PS("Detected language: Pashto"), //
DETECTEDLANG_PT("Detected language: Portuguese"), //
DETECTEDLANG_RO("Detected language: Romanian"), //
DETECTEDLANG_RU("Detected language: Russian"), //
DETECTEDLANG_RW("Detected language: Kinyarwanda"), //
DETECTEDLANG_SAH("Detected language: Sakha"), //
DETECTEDLANG_SD("Detected language: Sindhi"), //
DETECTEDLANG_SH("Detected language: Croatian, Serbian, or Bosnian"), //
DETECTEDLANG_SI("Detected language: Sinhala"), //
DETECTEDLANG_SK("Detected language: Slovak"), //
DETECTEDLANG_SL("Detected language: Slovenian"), //
DETECTEDLANG_SN("Detected language: Shona"), //
DETECTEDLANG_SO("Detected language: Somali"), //
DETECTEDLANG_SQ("Detected language: Albanian"), //
DETECTEDLANG_SR_CYRL("Detected language: Serbian (Cyrillic)"), //
DETECTEDLANG_SR_LATN("Detected language: Serbian (Latin)"), //
DETECTEDLANG_ST("Detected language: Southern Sotho"), //
DETECTEDLANG_SU("Detected language: Sundanese"), //
DETECTEDLANG_SV("Detected language: Swedish"), //
DETECTEDLANG_SW("Detected language: Swahili"), //
DETECTEDLANG_TA("Detected language: Tamil"), //
DETECTEDLANG_TE("Detected language: Telugu"), //
DETECTEDLANG_TG("Detected language: Tajik"), //
DETECTEDLANG_TH("Detected language: Thai"), //
DETECTEDLANG_TI("Detected language: Tigrinya"), //
DETECTEDLANG_TL("Detected language: Tagalog"), //
DETECTEDLANG_TR("Detected language: Turkish"), //
DETECTEDLANG_TT("Detected language: Tatar"), //
DETECTEDLANG_UG("Detected language: Uyghur"), //
DETECTEDLANG_UK("Detected language: Ukrainian"), //
DETECTEDLANG_UR("Detected language: Urdu"), //
DETECTEDLANG_UZ_CYRL("Detected language: Uzbek (Cyrillic)"), //
DETECTEDLANG_UZ_LATN("Detected language: Uzbek (Latin)"), //
DETECTEDLANG_VI("Detected language: Vietnamese"), //
DETECTEDLANG_XH("Detected language: Xhosa"), //
DETECTEDLANG_ZH_HANS("Detected language: Simplified Chinese"), //
DETECTEDLANG_ZH_HANT("Detected language: Traditional Chinese"), //
DETECTEDLANG_ZU("Detected language: Zulu"), //
DETECTEDLANG_ZXX("Detected language: Lorem ipsum text"); //
// @formatter:on
Field(String description) {
this.description = description;
}
private final String description;
/**
* @see java.lang.Enum#toString()
*/
@Override
public String toString() {
return description;
}
}
public Field getFieldFromName(String name) {
for (Field field : Field.class.getEnumConstants()) {
if (field.name().equals(name)) {
return field;
}
}
throw new IllegalArgumentException(
"No statistics field with name " + name);
}
static {
if ("1".equals(System.getProperty("nu.validator.servlet.statistics"))) {
STATISTICS = new Statistics();
} else {
STATISTICS = null;
}
}
private final long startTime = System.currentTimeMillis();
private long total = 0;
private final long[] counters;
private Statistics() {
counters = new long[Field.values().length];
}
public void incrementTotal() {
total++;
}
public void incrementField(Field field) {
counters[field.ordinal()]++;
}
public void writeToResponse(HttpServletResponse response)
throws IOException {
try {
long totalCopy;
long[] countersCopy = new long[counters.length];
synchronized (this) {
totalCopy = total;
System.arraycopy(counters, 0, countersCopy, 0, counters.length);
}
double totalDouble = totalCopy;
double uptimeMillis = System.currentTimeMillis() - startTime;
response.setContentType("text/html; charset=utf-8");
ContentHandler ch = new HtmlSerializer(response.getOutputStream());
try {
ch.startDocument();
startElement(ch, "html");
startElement(ch, "head");
startElement(ch, "title");
characters(ch, VALIDATOR_STATISTICS);
endElement(ch, "title");
startElement(ch, "style");
characters(ch, STYLESHEET);
endElement(ch, "style");
endElement(ch, "head");
startElement(ch, "body");
startElement(ch, "h1");
characters(ch, VALIDATOR_STATISTICS);
endElement(ch, "h1");
startElement(ch, "dl");
startElement(ch, "dt");
characters(ch, TOTAL_VALIDATIONS);
endElement(ch, "dt");
startElement(ch, "dd");
characters(ch, totalCopy);
endElement(ch, "dd");
startElement(ch, "dt");
characters(ch, UPTIME_DAYS);
endElement(ch, "dt");
startElement(ch, "dd");
characters(ch, uptimeMillis / (1000 * 60 * 60 * 24));
endElement(ch, "dd");
startElement(ch, "dt");
characters(ch, VALIDATIONS_PER_SECOND);
endElement(ch, "dt");
startElement(ch, "dd");
characters(ch, totalDouble / (uptimeMillis / 1000.0));
endElement(ch, "dd");
endElement(ch, "dl");
startElement(ch, "table");
startElement(ch, "thead");
startElement(ch, "tr");
startElement(ch, "th");
characters(ch, COUNTER_NAME);
endElement(ch, "th");
startElement(ch, "th");
characters(ch, COUNTER_VALUE);
endElement(ch, "th");
startElement(ch, "th");
characters(ch, COUNTER_PROPORTION);
endElement(ch, "th");
endElement(ch, "tr");
endElement(ch, "thead");
startElement(ch, "tbody");
for (int i = 0; i < countersCopy.length; i++) {
long count = countersCopy[i];
startElement(ch, "tr");
startElement(ch, "td");
characters(ch, Field.values()[i].toString());
endElement(ch, "td");
startElement(ch, "td");
characters(ch, count);
endElement(ch, "td");
startElement(ch, "td");
characters(ch, count / totalDouble);
endElement(ch, "td");
endElement(ch, "tr");
}
endElement(ch, "tbody");
endElement(ch, "table");
startElement(ch, "script");
characters(ch, SORT_LANGS_SCRIPT);
endElement(ch, "script");
endElement(ch, "body");
endElement(ch, "html");
} finally {
ch.endDocument();
}
} catch (SAXException e) {
throw new IOException(e);
}
}
private void characters(ContentHandler ch, double d) throws SAXException {
// Let's just create a new DecimalFormat each time to avoid the
// complexity of recycling an instance correctly without threading
// hazards.
DecimalFormat df = new DecimalFormat("#,###,##0.000000");
characters(ch, df.format(d));
}
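    // Hypothetical alternative (sketch only, not the approach this class uses): confine
    // one reusable formatter to each thread instead of allocating a DecimalFormat per
    // call. Assumes Java 8+ for ThreadLocal.withInitial; the field and method names are
    // illustrative, not taken from the original file.
    private static final ThreadLocal<DecimalFormat> PROPORTION_FORMAT =
            ThreadLocal.withInitial(() -> new DecimalFormat("#,###,##0.000000"));
    private String formatProportion(double d) {
        return PROPORTION_FORMAT.get().format(d);
    }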
private void characters(ContentHandler ch, long l) throws SAXException {
characters(ch, Long.toString(l));
}
private void characters(ContentHandler ch, String str) throws SAXException {
characters(ch, str.toCharArray());
}
private void characters(ContentHandler ch, char[] cs) throws SAXException {
ch.characters(cs, 0, cs.length);
}
private void endElement(ContentHandler ch, String name)
throws SAXException {
ch.endElement("http://www.w3.org/1999/xhtml", name, name);
}
private void startElement(ContentHandler ch, String name)
throws SAXException {
ch.startElement("http://www.w3.org/1999/xhtml", name, name,
EmptyAttributes.EMPTY_ATTRIBUTES);
}
}
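// --- Illustrative usage sketch (not part of the original file) ---
// How the Statistics singleton is typically driven: a servlet increments counters per
// request and, when statistics are enabled via -Dnu.validator.servlet.statistics=1,
// renders the report for a stats endpoint. The recordValidation/renderReport split
// below is an assumption for illustration only.
class StatisticsUsageExample {
    static void recordValidation(boolean getRequest) {
        Statistics stats = Statistics.STATISTICS;
        if (stats == null) {
            return; // statistics collection disabled
        }
        stats.incrementTotal();
        stats.incrementField(getRequest ? Statistics.Field.INPUT_GET : Statistics.Field.INPUT_POST);
    }
    static void renderReport(HttpServletResponse response) throws IOException {
        if (Statistics.STATISTICS != null) {
            Statistics.STATISTICS.writeToResponse(response);
        }
    }
}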
|
Remove some cruft from statistics output
|
src/nu/validator/servlet/Statistics.java
|
Remove some cruft from statistics output
|
|
Java
|
mit
|
722c209a74b9d32dde89b0dc9df2b807c3e9d670
| 0
|
anagav/TinkerRocks
|
package com.tinkerrocks.storage;
import com.google.common.base.Preconditions;
import com.tinkerrocks.structure.ByteUtil;
import org.apache.tinkerpop.gremlin.structure.Element;
import org.rocksdb.*;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* Created by ashishn on 8/13/15.
*/
public class IndexDB extends StorageAbstractClass {
public enum INDEX_COLUMNS {
INDEX_KEYS("INDEX_KEYS");
String value;
INDEX_COLUMNS(String value) {
this.value = value;
}
public String getValue() {
return value;
}
}
RocksDB rocksDB;
List<ColumnFamilyHandle> columnFamilyHandleList;
List<ColumnFamilyDescriptor> columnFamilyDescriptors;
Set<String> indexes = new HashSet<>();
public IndexDB() throws RocksDBException {
columnFamilyDescriptors = new ArrayList<>(INDEX_COLUMNS.values().length);
columnFamilyHandleList = new ArrayList<>(INDEX_COLUMNS.values().length);
columnFamilyDescriptors.add(new ColumnFamilyDescriptor(RocksDB.DEFAULT_COLUMN_FAMILY));
for (INDEX_COLUMNS vertex_columns : INDEX_COLUMNS.values()) {
columnFamilyDescriptors.add(new ColumnFamilyDescriptor(vertex_columns.getValue().getBytes(),
StorageConfigFactory.getColumnFamilyOptions()));
}
rocksDB = RocksDB.open(StorageConfigFactory.getDBOptions(), StorageConstants.DATABASE_PREFIX + "/indexes", columnFamilyDescriptors, columnFamilyHandleList);
}
void put(byte[] key, byte[] value) throws RocksDBException {
this.put(null, key, value);
}
void put(ColumnFamilyHandle columnFamilyHandle, byte[] key, byte[] value) throws RocksDBException {
if (columnFamilyHandle != null)
this.rocksDB.put(columnFamilyHandle, StorageConfigFactory.getWriteOptions(), key, value);
else
this.rocksDB.put(StorageConfigFactory.getWriteOptions(), key, value);
}
public ColumnFamilyHandle getColumn(INDEX_COLUMNS edge_column) {
return columnFamilyHandleList.get(edge_column.ordinal() + 1);
}
public void close() {
this.rocksDB.close();
}
public <T extends Element> void putIndex(Class<T> indexClass, String key, Object value, byte[] id) throws RocksDBException {
Preconditions.checkNotNull(indexClass);
Preconditions.checkNotNull(id);
Preconditions.checkNotNull(key);
String className = indexClass.getName();
byte[] key1 = (className +
StorageConstants.PROPERTY_SEPERATOR + key + StorageConstants.PROPERTY_SEPERATOR + value).getBytes();
key1 = ByteUtil.merge(key1, StorageConstants.PROPERTY_SEPERATOR.getBytes(), id);
put(getColumn(INDEX_COLUMNS.INDEX_KEYS), (className +
StorageConstants.PROPERTY_SEPERATOR + key).getBytes(), "".getBytes());
put(key1, id);
}
public <T extends Element> List<byte[]> getIndex(Class<T> indexClass, String key, Object value) {
List<byte[]> results = new ArrayList<>();
RocksIterator iterator = this.rocksDB.newIterator();
byte[] seek_key = (indexClass.getName() + StorageConstants.PROPERTY_SEPERATOR + key +
StorageConstants.PROPERTY_SEPERATOR + value + StorageConstants.PROPERTY_SEPERATOR).getBytes();
iterator.seek(seek_key);
while (iterator.isValid() && ByteUtil.startsWith(iterator.key(), 0, seek_key)) {
results.add(ByteUtil.slice(iterator.key(), seek_key.length));
iterator.next();
}
return results;
}
public <T extends Element> void dropIndex(Class<T> indexClass, String key) throws RocksDBException {
byte[] seek_key = (indexClass.getName() + StorageConstants.PROPERTY_SEPERATOR + key).getBytes();
this.rocksDB.remove(getColumn(INDEX_COLUMNS.INDEX_KEYS), seek_key);
}
public <T extends Element> Set<String> getIndexedKeys(Class<T> indexClass) {
RocksIterator iterator = this.rocksDB.newIterator(getColumn(INDEX_COLUMNS.INDEX_KEYS));
byte[] seek_key = (indexClass.getName() + StorageConstants.PROPERTY_SEPERATOR).getBytes();
iterator.seek(seek_key);
for (; iterator.isValid() && ByteUtil.startsWith(iterator.key(), 0, seek_key); iterator.next()) {
indexes.add(indexClass.getName() + StorageConstants.PROPERTY_SEPERATOR
+ new String(ByteUtil.slice(iterator.key(), seek_key.length)));
}
return indexes;
}
}
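// --- Illustrative usage sketch (not part of the original file) ---
// Why getIndex() appends a trailing PROPERTY_SEPERATOR to seek_key: without it, a
// prefix scan for value 1 would also match keys written for value 10, since both share
// the byte prefix "...<sep>1". The property name and ids below are illustrative
// assumptions, not taken from the repository.
class IndexDBUsageExample {
    static List<byte[]> lookup(IndexDB index) throws RocksDBException {
        index.putIndex(org.apache.tinkerpop.gremlin.structure.Vertex.class, "age", 1, "vertex-1".getBytes());
        index.putIndex(org.apache.tinkerpop.gremlin.structure.Vertex.class, "age", 10, "vertex-10".getBytes());
        // With the trailing separator this returns only the id stored for age=1.
        return index.getIndex(org.apache.tinkerpop.gremlin.structure.Vertex.class, "age", 1);
    }
}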
|
src/main/java/com/tinkerrocks/storage/IndexDB.java
|
package com.tinkerrocks.storage;
import com.google.common.base.Preconditions;
import com.tinkerrocks.structure.ByteUtil;
import org.apache.tinkerpop.gremlin.structure.Element;
import org.rocksdb.*;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* Created by ashishn on 8/13/15.
*/
public class IndexDB extends StorageAbstractClass {
public enum INDEX_COLUMNS {
INDEX_KEYS("INDEX_KEYS");
String value;
INDEX_COLUMNS(String value) {
this.value = value;
}
public String getValue() {
return value;
}
}
RocksDB rocksDB;
List<ColumnFamilyHandle> columnFamilyHandleList;
List<ColumnFamilyDescriptor> columnFamilyDescriptors;
Set<String> indexes = new HashSet<>();
public IndexDB() throws RocksDBException {
columnFamilyDescriptors = new ArrayList<>(INDEX_COLUMNS.values().length);
columnFamilyHandleList = new ArrayList<>(INDEX_COLUMNS.values().length);
columnFamilyDescriptors.add(new ColumnFamilyDescriptor(RocksDB.DEFAULT_COLUMN_FAMILY));
for (INDEX_COLUMNS vertex_columns : INDEX_COLUMNS.values()) {
columnFamilyDescriptors.add(new ColumnFamilyDescriptor(vertex_columns.getValue().getBytes(),
StorageConfigFactory.getColumnFamilyOptions()));
}
rocksDB = RocksDB.open(StorageConfigFactory.getDBOptions(), StorageConstants.DATABASE_PREFIX + "/indexes", columnFamilyDescriptors, columnFamilyHandleList);
}
void put(byte[] key, byte[] value) throws RocksDBException {
this.put(null, key, value);
}
void put(ColumnFamilyHandle columnFamilyHandle, byte[] key, byte[] value) throws RocksDBException {
if (columnFamilyHandle != null)
this.rocksDB.put(columnFamilyHandle, StorageConfigFactory.getWriteOptions(), key, value);
else
this.rocksDB.put(StorageConfigFactory.getWriteOptions(), key, value);
}
public ColumnFamilyHandle getColumn(INDEX_COLUMNS edge_column) {
return columnFamilyHandleList.get(edge_column.ordinal() + 1);
}
public void close() {
this.rocksDB.close();
}
public <T extends Element> void putIndex(Class<T> indexClass, String key, Object value, byte[] id) throws RocksDBException {
Preconditions.checkNotNull(indexClass);
Preconditions.checkNotNull(id);
Preconditions.checkNotNull(key);
String className = indexClass.getName();
byte[] key1 = (className +
StorageConstants.PROPERTY_SEPERATOR + key + StorageConstants.PROPERTY_SEPERATOR + value).getBytes();
key1 = ByteUtil.merge(key1, StorageConstants.PROPERTY_SEPERATOR.getBytes(), id);
put(getColumn(INDEX_COLUMNS.INDEX_KEYS), (className +
StorageConstants.PROPERTY_SEPERATOR + key).getBytes(), "".getBytes());
put(key1, id);
}
public <T extends Element> List<byte[]> getIndex(Class<T> indexClass, String key, Object value) {
List<byte[]> results = new ArrayList<>();
RocksIterator iterator = this.rocksDB.newIterator();
byte[] seek_key = (indexClass.getName() + StorageConstants.PROPERTY_SEPERATOR + key +
StorageConstants.PROPERTY_SEPERATOR + value).getBytes();
iterator.seek(seek_key);
while (iterator.isValid() && ByteUtil.startsWith(iterator.key(), 0, seek_key)) {
results.add(ByteUtil.slice(iterator.key(), seek_key.length));
iterator.next();
}
return results;
}
public <T extends Element> void dropIndex(Class<T> indexClass, String key) throws RocksDBException {
byte[] seek_key = (indexClass.getName() + StorageConstants.PROPERTY_SEPERATOR + key).getBytes();
this.rocksDB.remove(getColumn(INDEX_COLUMNS.INDEX_KEYS), seek_key);
}
public <T extends Element> Set<String> getIndexedKeys(Class<T> indexClass) {
RocksIterator iterator = this.rocksDB.newIterator(getColumn(INDEX_COLUMNS.INDEX_KEYS));
byte[] seek_key = (indexClass.getName() + StorageConstants.PROPERTY_SEPERATOR).getBytes();
iterator.seek(seek_key);
for (; iterator.isValid() && ByteUtil.startsWith(iterator.key(), 0, seek_key); iterator.next()) {
indexes.add(indexClass.getName() + StorageConstants.PROPERTY_SEPERATOR
+ new String(ByteUtil.slice(iterator.key(), seek_key.length)));
}
return indexes;
}
}
|
optimize
|
src/main/java/com/tinkerrocks/storage/IndexDB.java
|
optimize
|
|
Java
|
mit
|
ea8a6e3f190a862b8defa0128e6c606ed7377f65
| 0
|
allout58/WebSiteUtils
|
package allout58.util.SiteUtils.builtin.brokenlinks;
import allout58.util.SiteUtils.Utils;
import allout58.util.SiteUtils.api.IModule;
import joptsimple.OptionParser;
import joptsimple.OptionSet;
import joptsimple.OptionSpec;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.jsoup.Connection;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import javax.swing.*;
import java.awt.*;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
/**
* Created by James Hollowell on 3/16/2015.
*/
public class BrokenLinksModule implements IModule
{ /*
* Taken from org.jsoup.helper.HttpConnection.Response
*
* For example {@code application/atom+xml;charset=utf-8}.
* Stepping through it: start with {@code "application/"}, follow with word
* characters up to a {@code "+xml"}, and then maybe more ({@code .*}).
*/
//private static final Pattern xmlContentTypeRxp = Pattern.compile("application/\\w+\\+xml.*");
private static final Logger logger = LogManager.getLogger("BrokenLinksModule");
private List<String> visitedPages = new ArrayList<>();
private List<BrokenLink> brokenLinks = new ArrayList<>();
private LinkedList<Link> pageQueue = new LinkedList<>();
private String root;
private boolean followRedirect = false;
private boolean doImages = false;
private OptionSpec followRedirectOpt;
private OptionSpec<File> siteMapOutOpt;
private OptionSpec doImagesOpt;
private OptionSpec<File> brokenLinksOutOpt;
@Override
public JPanel getPanel()
{
JPanel mainPanel = new JPanel();
mainPanel.setLayout(new BoxLayout(mainPanel, BoxLayout.PAGE_AXIS));
JPanel optionsPanel = new JPanel(new GridBagLayout());
JTable results = new JTable(new ResultsTableModel(brokenLinks));
JScrollPane resultsPane = new JScrollPane(results);
return mainPanel;
}
@Override
public void addOptionAcceptors(OptionParser parser)
{
followRedirectOpt = parser.acceptsAll(Arrays.asList("followRedirect", "r"), "Follow the redirection");
siteMapOutOpt = parser.acceptsAll(Arrays.asList("siteMapOut", "s"), "Output file for a list of all discovered pages (and images).").withRequiredArg().ofType(File.class);
brokenLinksOutOpt = parser.acceptsAll(Arrays.asList("brokenLinksOut", "b"), "Output file for the list of broken links.").withRequiredArg().ofType(File.class).required();
doImagesOpt = parser.accepts("doImages", "Check images for availability");
}
@Override
public void parseOptions(OptionSet optionSet)
{
try
{
System.out.println("Beginning Broken Links Check.");
if (!optionSet.has("site"))
{
System.out.println("Site option required");
return;
}
root = (String) optionSet.valueOf("site");
followRedirect = optionSet.has(followRedirectOpt);
doImages = optionSet.has(doImagesOpt);
long timeStart = System.nanoTime();
beginTraversal(root);
long timeEnd = System.nanoTime();
System.out.println();
try
{
File out = optionSet.valueOf(brokenLinksOutOpt);
if (!out.exists() && !out.createNewFile())
logger.error("Error creating broken links output file.");
BufferedWriter bw = new BufferedWriter(new FileWriter(out));
for (BrokenLink b : brokenLinks)
{
bw.write(b.toString());
bw.newLine();
}
bw.close();
}
catch (IOException e)
{
logger.error("Error writing broken links to file", e);
}
if (optionSet.has(siteMapOutOpt))
{
try
{
File out = optionSet.valueOf(siteMapOutOpt);
if (!out.exists() && !out.createNewFile())
logger.error("Error creating site map file.");
BufferedWriter bw = new BufferedWriter(new FileWriter(out));
for (String map : visitedPages)
{
bw.write(map);
bw.newLine();
}
bw.close();
}
catch (IOException e)
{
logger.error("Error writing sitemap to file", e);
}
}
System.out.println("======Statistics:======");
System.out.println(String.format("%-25s:%10d", "Number of Visited Pages", visitedPages.size()));
System.out.println(String.format("%-25s:%10d", "Number of Broken Links", brokenLinks.size()));
System.out.println(String.format("%-25s:%10.2f", "Total Time", ((double) (timeEnd - timeStart)) / 1000000000.0));
}
catch (MalformedURLException mue)
{
System.err.println("Error! Bad URL!");
mue.printStackTrace();
}
catch (IOException ioe)
{
System.err.println("Error reading site.");
ioe.printStackTrace();
}
}
private void beginTraversal(String url) throws IOException
{
pageQueue.add(new Link(url, null));
travers();
}
private void travers() throws IOException
{
while (pageQueue.size() > 0)
{
try
{
Link link = pageQueue.getFirst();
if (!visitedPages.contains(link.getUrl()))
{
visitedPages.add(link.getUrl());
if (visitedPages.size() % 50 == 0)
System.out.println("Current number of visited pages: " + visitedPages.size());
int statusCode = pingAddr(new URL(link.getUrl()));
boolean validEnding = true;
for (String ending : Utils.blockedExtensions)
validEnding &= !link.getUrl().endsWith(ending);
if (statusCode == 200)
{
if (validEnding)
{
Connection connect = Jsoup.connect(link.getUrl())
.userAgent(Utils.chromeUA)
.timeout(3000);
connect.execute();
Document document = connect.get();
Elements elements = document.select("a");
for (Element el : elements)
{
String href = el.attr("href");
//Ignore get parameters
/*if (href.contains("?"))
{
href = href.substring(0, href.indexOf("?"));
}*/
//Ignore id links
if (href.contains("#"))
{
href = href.substring(0, href.indexOf("#"));
}
//Ignore relative links and only do links on this site
if (!"".equals(href.trim())
&& (href.contains(root) || !href.startsWith("http"))
&& (href.startsWith("http") || !href.contains(":"))
)
{
pageQueue.add(new Link(new URL(new URL(link.getUrl()), href).toExternalForm(), el));
}
}
if (doImages)
{
elements = document.select("img");
for (Element el : elements)
{
String href = el.attr("src");
if (!"".equals(href.trim())
&& (href.contains(root) || !href.startsWith("http"))
&& (href.startsWith("http") || !href.contains(":"))
)
{
pageQueue.add(new Link(new URL(new URL(link.getUrl()), href).toExternalForm(), el));
}
}
}
}
}
else
{
brokenLinks.add(new BrokenLink(link.getUrl(), link.getContext() == null ? "" : link.getContext().outerHtml(), link.getContext() == null ? "" : link.getContext().baseUri(), statusCode));
}
}
pageQueue.removeFirst();
}
catch (Exception e)
{
e.printStackTrace();
}
}
}
private int pingAddr(URL url) throws IOException
{
HttpURLConnection urlConnection = (HttpURLConnection) url.openConnection();
urlConnection.setRequestMethod("HEAD");
urlConnection.setRequestProperty("User-Agent", Utils.chromeUA);
urlConnection.setInstanceFollowRedirects(followRedirect);
urlConnection.connect();
return urlConnection.getResponseCode();
}
}
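// --- Illustrative usage sketch (not part of the original file) ---
// With this change, --brokenLinksOut (-b) is a required option, so CLI callers must
// supply an output file for the broken-link report. A minimal sketch of driving the
// module directly with joptsimple; registering the "site" option here is an
// assumption, since that option is declared elsewhere in the project.
class BrokenLinksModuleUsageExample {
    public static void main(String[] args) {
        OptionParser parser = new OptionParser();
        parser.accepts("site", "Root of the site to crawl").withRequiredArg();
        BrokenLinksModule module = new BrokenLinksModule();
        module.addOptionAcceptors(parser);
        OptionSet options = parser.parse(
                "--site", "https://example.com", "-b", "broken-links.txt", "-s", "sitemap.txt");
        module.parseOptions(options);
    }
}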
|
src/main/java/allout58/util/SiteUtils/builtin/brokenlinks/BrokenLinksModule.java
|
package allout58.util.SiteUtils.builtin.brokenlinks;
import allout58.util.SiteUtils.Utils;
import allout58.util.SiteUtils.api.IModule;
import joptsimple.OptionParser;
import joptsimple.OptionSet;
import joptsimple.OptionSpec;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.jsoup.Connection;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import javax.swing.*;
import java.awt.*;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
/**
* Created by James Hollowell on 3/16/2015.
*/
public class BrokenLinksModule implements IModule
{ /*
* Taken from org.jsoup.helper.HttpConnection.Response
*
* For example {@code application/atom+xml;charset=utf-8}.
* Stepping through it: start with {@code "application/"}, follow with word
* characters up to a {@code "+xml"}, and then maybe more ({@code .*}).
*/
//private static final Pattern xmlContentTypeRxp = Pattern.compile("application/\\w+\\+xml.*");
private static final Logger logger = LogManager.getLogger("BrokenLinksModule");
private List<String> visitedPages = new ArrayList<>();
private List<BrokenLink> brokenLinks = new ArrayList<>();
private LinkedList<Link> pageQueue = new LinkedList<>();
private String root;
private boolean followRedirect = false;
private boolean doImages = false;
private OptionSpec followRedirectOpt;
private OptionSpec<File> siteMapOutOpt;
private OptionSpec doImagesOpt;
@Override
public JPanel getPanel()
{
JPanel mainPanel = new JPanel();
mainPanel.setLayout(new BoxLayout(mainPanel, BoxLayout.PAGE_AXIS));
JPanel optionsPanel = new JPanel(new GridBagLayout());
JTable results = new JTable(new ResultsTableModel(brokenLinks));
JScrollPane resultsPane = new JScrollPane(results);
return mainPanel;
}
@Override
public void addOptionAcceptors(OptionParser parser)
{
followRedirectOpt = parser.acceptsAll(Arrays.asList("followRedirect", "r"), "Follow the redirection");
siteMapOutOpt = parser.acceptsAll(Arrays.asList("siteMapOut", "s"), "Output file for a list of all discovered pages (and images).").withRequiredArg().ofType(File.class);
doImagesOpt = parser.accepts("doImages", "Check images for availability");
}
@Override
public void parseOptions(OptionSet optionSet)
{
try
{
System.out.println("Beginning Broken Links Check.");
if (!optionSet.has("site"))
{
System.out.println("Site option required");
return;
}
root = (String) optionSet.valueOf("site");
followRedirect = optionSet.has(followRedirectOpt);
doImages = optionSet.has(doImagesOpt);
long timeStart = System.nanoTime();
beginTraversal(root);
long timeEnd = System.nanoTime();
System.out.println();
System.out.println("Broken Links:");
for (BrokenLink b : brokenLinks)
System.out.println(b);
if (optionSet.has(siteMapOutOpt))
{
try
{
File out = optionSet.valueOf(siteMapOutOpt);
if (!out.exists() && !out.createNewFile())
logger.error("Error creating file.");
BufferedWriter bw = new BufferedWriter(new FileWriter(out));
for (String map : visitedPages)
{
bw.write(map);
bw.newLine();
}
}
catch (IOException e)
{
logger.error("Error writing sitemap to file", e);
}
}
System.out.println("======Statistics:======");
System.out.println(String.format("%-30s:%10d", "Number of Visited Pages", visitedPages.size()));
System.out.println(String.format("%-30s:%10.2f", "Total Time", ((double) (timeEnd - timeStart)) / 1000000000.0));
}
catch (MalformedURLException mue)
{
System.err.println("Error! Bad URL!");
mue.printStackTrace();
}
catch (IOException ioe)
{
System.err.println("Error reading site.");
ioe.printStackTrace();
}
}
private void beginTraversal(String url) throws IOException
{
pageQueue.add(new Link(url, null));
travers();
}
private void travers() throws IOException
{
while (pageQueue.size() > 0)
{
try
{
Link link = pageQueue.getFirst();
if (!visitedPages.contains(link.getUrl()))
{
visitedPages.add(link.getUrl());
if (visitedPages.size() % 50 == 0)
System.out.println("Current number of visited pages: " + visitedPages.size());
int statusCode = pingAddr(new URL(link.getUrl()));
boolean validEnding = true;
for (String ending : Utils.blockedExtensions)
validEnding &= !link.getUrl().endsWith(ending);
if (statusCode == 200)
{
if (validEnding)
{
Connection connect = Jsoup.connect(link.getUrl())
.userAgent(Utils.chromeUA)
.timeout(3000);
connect.execute();
Document document = connect.get();
Elements elements = document.select("a");
for (Element el : elements)
{
String href = el.attr("href");
//Ignore get parameters
/*if (href.contains("?"))
{
href = href.substring(0, href.indexOf("?"));
}*/
//Ignore id links
if (href.contains("#"))
{
href = href.substring(0, href.indexOf("#"));
}
//Ignore relative links and only do links on this site
if (!"".equals(href.trim())
&& (href.contains(root) || !href.startsWith("http"))
&& (href.startsWith("http") || !href.contains(":"))
)
{
pageQueue.add(new Link(new URL(new URL(link.getUrl()), href).toExternalForm(), el));
}
}
if (doImages)
{
elements = document.select("img");
for (Element el : elements)
{
String href = el.attr("src");
if (!"".equals(href.trim())
&& (href.contains(root) || !href.startsWith("http"))
&& (href.startsWith("http") || !href.contains(":"))
)
{
pageQueue.add(new Link(new URL(new URL(link.getUrl()), href).toExternalForm(), el));
}
}
}
}
}
else
{
brokenLinks.add(new BrokenLink(link.getUrl(), link.getContext() == null ? "" : link.getContext().outerHtml(), link.getContext() == null ? "" : link.getContext().baseUri(), statusCode));
}
}
pageQueue.removeFirst();
}
catch (Exception e)
{
e.printStackTrace();
}
}
}
private int pingAddr(URL url) throws IOException
{
HttpURLConnection urlConnection = (HttpURLConnection) url.openConnection();
urlConnection.setRequestMethod("HEAD");
urlConnection.setRequestProperty("User-Agent", Utils.chromeUA);
urlConnection.setInstanceFollowRedirects(followRedirect);
urlConnection.connect();
return urlConnection.getResponseCode();
}
}
|
Require outputting broken links to file for CLI.
|
src/main/java/allout58/util/SiteUtils/builtin/brokenlinks/BrokenLinksModule.java
|
Require outputting broken links to file for CLI.
|
|
Java
|
mit
|
05df64392feda4276287d1f4d3cc5ec89bdd45ba
| 0
|
PrinceOfAmber/CyclicMagic,PrinceOfAmber/Cyclic
|
/*******************************************************************************
* The MIT License (MIT)
*
* Copyright (C) 2014-2018 Sam Bassett (aka Lothrazar)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
******************************************************************************/
package com.lothrazar.cyclicmagic.core.util;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.HashMap;
import java.util.HashSet;
import javax.annotation.Nonnull;
import com.google.common.io.Files;
import com.lothrazar.cyclicmagic.ModCyclic;
import com.lothrazar.cyclicmagic.playerupgrade.storage.InventoryPlayerExtended;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.CompressedStreamTools;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraftforge.event.entity.player.PlayerEvent;
/*Thank you so much for the help azanor
* for basically writing this class and releasing it open source
*
* https://github.com/Azanor/Baubles
*
* which is under Attribution-NonCommercial-ShareAlike 3.0 Unported (CC BY-NC-SA 3.0) license.
* so i was able to use parts of that to make this
* **/
public class UtilPlayerInventoryFilestorage {
public static HashSet<Integer> playerEntityIds = new HashSet<Integer>();
private static HashMap<String, InventoryPlayerExtended> playerItems = new HashMap<String, InventoryPlayerExtended>();
public static void playerSetupOnLoad(PlayerEvent.LoadFromFile event) {
EntityPlayer player = event.getEntityPlayer();
clearPlayerInventory(player);
File playerFile = getPlayerFileName(event.getPlayerDirectory(), event.getEntityPlayer());
if (!playerFile.exists()) {
//file does not exist, create new
File fileNew = event.getPlayerFile(legacyExt);
      //and copy in the basics
if (fileNew.exists()) {
try {
Files.copy(fileNew, playerFile);
fileNew.delete();
// File fb = event.getPlayerFile(extback);
// if (fb.exists())
// fb.delete();
}
catch (IOException e) {}
}
}
loadPlayerInventory(event.getEntityPlayer(), playerFile);//, getPlayerFile(extback, event.getPlayerDirectory(), event.getEntityPlayer().getDisplayNameString())
playerEntityIds.add(event.getEntityPlayer().getEntityId());
}
private static void clearPlayerInventory(EntityPlayer player) {
playerItems.remove(player.getDisplayNameString());
}
public static InventoryPlayerExtended getPlayerInventory(EntityPlayer player) {
if (!playerItems.containsKey(player.getDisplayNameString())) {
InventoryPlayerExtended inventory = new InventoryPlayerExtended(player);
playerItems.put(player.getDisplayNameString(), inventory);
}
return playerItems.get(player.getDisplayNameString());
}
public static ItemStack getPlayerInventoryStack(EntityPlayer player, int slot) {
return getPlayerInventory(player).getStackInSlot(slot);
}
public static void setPlayerInventoryStack(EntityPlayer player, int slot, ItemStack itemStack) {
// UtilPlayerInventoryFilestorage.getPlayerInventory(player).setInventorySlotContents(slot, itemStack);
getPlayerInventory(player).inv.set(slot, itemStack);
}
public static void setPlayerInventory(EntityPlayer player, InventoryPlayerExtended inventory) {
playerItems.put(player.getDisplayNameString(), inventory);
}
public static void loadPlayerInventory(EntityPlayer player, File file1) {
if (player != null && !player.getEntityWorld().isRemote) {
try {
NBTTagCompound data = null;
boolean save = false;
if (file1 != null && file1.exists()) {
try {
FileInputStream fileinputstream = new FileInputStream(file1);
data = CompressedStreamTools.readCompressed(fileinputstream);
fileinputstream.close();
}
catch (Exception e) {
e.printStackTrace();
}
}
if (file1 == null || !file1.exists() || data == null || data.hasNoTags()) {
ModCyclic.logger.error("Data not found for " + player.getDisplayNameString());//+ ". Trying to load backup data."
// if (file2 != null && file2.exists()) {
// try {
// FileInputStream fileinputstream = new FileInputStream(file2);
// data = CompressedStreamTools.readCompressed(fileinputstream);
// fileinputstream.close();
// save = true;
// }
// catch (Exception e) {
// e.printStackTrace();
// }
// }
}
if (data != null) {
InventoryPlayerExtended inventory = new InventoryPlayerExtended(player);
inventory.readNBT(data);
playerItems.put(player.getDisplayNameString(), inventory);
if (save)
savePlayerItems(player, file1);
}
}
catch (Exception e) {
ModCyclic.logger.error("Error loading player extended inventory");
e.printStackTrace();
}
}
}
public static void savePlayerItems(@Nonnull EntityPlayer player, File playerDirectory) {
if (!player.getEntityWorld().isRemote) {
try {
File fileToSave = getPlayerFileName(playerDirectory, player);
if (fileToSave != null) {
InventoryPlayerExtended inventory = getPlayerInventory(player);
NBTTagCompound data = new NBTTagCompound();
inventory.saveNBT(data);
FileOutputStream fileoutputstream = new FileOutputStream(fileToSave);
CompressedStreamTools.writeCompressed(data, fileoutputstream);
fileoutputstream.close();
        ModCyclic.logger.error("Successfully saved for player " + fileToSave.getName());
}
else {
ModCyclic.logger.error("Could not save file for player " + player.getDisplayNameString());
}
//if original fails to save, ID version will not be overwritten!
File fileToSaveID = getPlayerFileID(playerDirectory, player);
if (fileToSaveID != null) {
InventoryPlayerExtended inventory = getPlayerInventory(player);
NBTTagCompound data = new NBTTagCompound();
inventory.saveNBT(data);
FileOutputStream fileoutputstream = new FileOutputStream(fileToSaveID);
CompressedStreamTools.writeCompressed(data, fileoutputstream);
fileoutputstream.close();
        ModCyclic.logger.error("Successfully saved for player " + fileToSaveID.getName());
}
else {
ModCyclic.logger.error("Could not save file for player " + player.getDisplayNameString());
}
}
catch (Exception e) {
ModCyclic.logger.error("Could not save file for player " + player.getDisplayNameString(), e);
}
}
}
public static final String legacyExt = "invo";
public static final String newExtension = "cyclicinvo";
// public static final String extback = "backup";
public static final String regex = "[^a-zA-Z0-9_]";
private static File getPlayerFileName(File playerDirectory, EntityPlayer player) {
// some other mods/servers/plugins add things like "[Owner] > " prefix to player names
//which are invalid filename chars. https://github.com/PrinceOfAmber/Cyclic/issues/188
//mojang username rules https://help.mojang.com/customer/en/portal/articles/928638-minecraft-usernames
String playernameFiltered = player.getDisplayNameString().replaceAll(regex, "");
return new File(playerDirectory, "_" + playernameFiltered + "." + legacyExt);
}
private static File getPlayerFileID(File playerDirectory, EntityPlayer player) {
return new File(playerDirectory, player.getUniqueID() + "." + newExtension);
}
public static void syncItems(EntityPlayer player) {
int size = InventoryPlayerExtended.ICOL * InventoryPlayerExtended.IROW + 20;//+20 somehow magically fixes bottom row
for (int a = 0; a < size; a++) {
getPlayerInventory(player).syncSlotToClients(a);
}
}
public static void putDataIntoInventory(InventoryPlayerExtended inventory, EntityPlayer player) {
inventory.inv = getPlayerInventory(player).inv;
}
}
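// --- Illustrative note (not part of the original file) ---
// The regex "[^a-zA-Z0-9_]" strips characters that some servers/plugins prepend to
// display names, so the legacy per-name file stays a valid filename, while the new
// UUID-keyed file (<uuid>.cyclicinvo) survives renames and prefixes entirely.
// A small sketch of the sanitization step; the example display name is hypothetical.
class FilenameSanitizationExample {
    static String sanitize(String displayName) {
        // "[Owner] > Steve" becomes "OwnerSteve" under the same filter used in getPlayerFileName(...)
        return displayName.replaceAll(UtilPlayerInventoryFilestorage.regex, "");
    }
}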
|
src/main/java/com/lothrazar/cyclicmagic/core/util/UtilPlayerInventoryFilestorage.java
|
/*******************************************************************************
* The MIT License (MIT)
*
* Copyright (C) 2014-2018 Sam Bassett (aka Lothrazar)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
******************************************************************************/
package com.lothrazar.cyclicmagic.core.util;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.HashMap;
import java.util.HashSet;
import javax.annotation.Nonnull;
import com.google.common.io.Files;
import com.lothrazar.cyclicmagic.ModCyclic;
import com.lothrazar.cyclicmagic.playerupgrade.storage.InventoryPlayerExtended;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.CompressedStreamTools;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraftforge.event.entity.player.PlayerEvent;
/*Thank you so much for the help azanor
* for basically writing this class and releasing it open source
*
* https://github.com/Azanor/Baubles
*
* which is under Attribution-NonCommercial-ShareAlike 3.0 Unported (CC BY-NC-SA 3.0) license.
* so i was able to use parts of that to make this
* **/
public class UtilPlayerInventoryFilestorage {
public static HashSet<Integer> playerEntityIds = new HashSet<Integer>();
private static HashMap<String, InventoryPlayerExtended> playerItems = new HashMap<String, InventoryPlayerExtended>();
public static void playerSetupOnLoad(PlayerEvent.LoadFromFile event) {
EntityPlayer player = event.getEntityPlayer();
clearPlayerInventory(player);
File playerFile = getPlayerFile(event.getPlayerDirectory(), event.getEntityPlayer());
if (!playerFile.exists()) {
//file does not exist, create new
File fileNew = event.getPlayerFile(ext);
      //and copy in the basics
if (fileNew.exists()) {
try {
Files.copy(fileNew, playerFile);
fileNew.delete();
// File fb = event.getPlayerFile(extback);
// if (fb.exists())
// fb.delete();
}
catch (IOException e) {}
}
}
loadPlayerInventory(event.getEntityPlayer(), playerFile);//, getPlayerFile(extback, event.getPlayerDirectory(), event.getEntityPlayer().getDisplayNameString())
playerEntityIds.add(event.getEntityPlayer().getEntityId());
}
private static void clearPlayerInventory(EntityPlayer player) {
playerItems.remove(player.getDisplayNameString());
}
public static InventoryPlayerExtended getPlayerInventory(EntityPlayer player) {
if (!playerItems.containsKey(player.getDisplayNameString())) {
InventoryPlayerExtended inventory = new InventoryPlayerExtended(player);
playerItems.put(player.getDisplayNameString(), inventory);
}
return playerItems.get(player.getDisplayNameString());
}
public static ItemStack getPlayerInventoryStack(EntityPlayer player, int slot) {
return getPlayerInventory(player).getStackInSlot(slot);
}
public static void setPlayerInventoryStack(EntityPlayer player, int slot, ItemStack itemStack) {
// UtilPlayerInventoryFilestorage.getPlayerInventory(player).setInventorySlotContents(slot, itemStack);
getPlayerInventory(player).inv.set(slot, itemStack);
}
public static void setPlayerInventory(EntityPlayer player, InventoryPlayerExtended inventory) {
playerItems.put(player.getDisplayNameString(), inventory);
}
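// Server side only: reads the compressed NBT file (if any) into the in-memory cache for this player.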
public static void loadPlayerInventory(EntityPlayer player, File file1) {
if (player != null && !player.getEntityWorld().isRemote) {
try {
NBTTagCompound data = null;
boolean save = false;
if (file1 != null && file1.exists()) {
try {
FileInputStream fileinputstream = new FileInputStream(file1);
data = CompressedStreamTools.readCompressed(fileinputstream);
fileinputstream.close();
}
catch (Exception e) {
e.printStackTrace();
}
}
if (file1 == null || !file1.exists() || data == null || data.hasNoTags()) {
ModCyclic.logger.error("Data not found for " + player.getDisplayNameString());//+ ". Trying to load backup data."
// if (file2 != null && file2.exists()) {
// try {
// FileInputStream fileinputstream = new FileInputStream(file2);
// data = CompressedStreamTools.readCompressed(fileinputstream);
// fileinputstream.close();
// save = true;
// }
// catch (Exception e) {
// e.printStackTrace();
// }
// }
}
if (data != null) {
InventoryPlayerExtended inventory = new InventoryPlayerExtended(player);
inventory.readNBT(data);
playerItems.put(player.getDisplayNameString(), inventory);
if (save)
savePlayerItems(player, file1);
}
}
catch (Exception e) {
ModCyclic.logger.error("Error loading player extended inventory");
e.printStackTrace();
}
}
}
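// Server side only: writes the cached inventory to a compressed NBT file in the player directory, deleting a partially written file if the save fails.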
public static void savePlayerItems(@Nonnull EntityPlayer player, File playerDirectory) {
if (player != null && !player.getEntityWorld().isRemote) {
try {
File fileToSave = getPlayerFile(playerDirectory, player);
// if (file1 != null && file1.exists()) {
// try {
// Files.copy(file1, file2);
// }
// catch (Exception e) {
// ModCyclic.logger.error("Could not backup old file for player " + player.getDisplayNameString());
// }
// }
try {
if (fileToSave != null) {
InventoryPlayerExtended inventory = getPlayerInventory(player);
NBTTagCompound data = new NBTTagCompound();
inventory.saveNBT(data);
FileOutputStream fileoutputstream = new FileOutputStream(fileToSave);
CompressedStreamTools.writeCompressed(data, fileoutputstream);
fileoutputstream.close();
ModCyclic.logger.error("Successs saved for player " + player.getDisplayNameString());
}
else {
ModCyclic.logger.error("Could not save file for player " + player.getDisplayNameString());
}
}
catch (Exception e) {
ModCyclic.logger.error("Could not save file for player " + player.getDisplayNameString());
e.printStackTrace();
if (fileToSave.exists()) {
try {
fileToSave.delete();
}
catch (Exception e2) {}
}
}
}
catch (Exception exception1) {
ModCyclic.logger.error("Error saving inventory");
exception1.printStackTrace();
}
}
}
public static final String ext = "invo";
// public static final String extback = "backup";
public static final String regex = "[^a-zA-Z0-9_]";
private static File getPlayerFile(File playerDirectory, EntityPlayer player) {
// some other mods/servers/plugins add things like "[Owner] > " prefix to player names
//which are invalid filename chars. https://github.com/PrinceOfAmber/Cyclic/issues/188
//mojang username rules https://help.mojang.com/customer/en/portal/articles/928638-minecraft-usernames
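// e.g. a decorated name like "[Owner] > Steve" becomes "OwnerSteve" before being used in the file name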
String playernameFiltered = player.getDisplayNameString().replaceAll(regex, "");
return new File(playerDirectory, "_" + playernameFiltered + "." + ext);
}
public static void syncItems(EntityPlayer player) {
int size = InventoryPlayerExtended.ICOL * InventoryPlayerExtended.IROW + 20;//+20 somehow magically fixes bottom row
for (int a = 0; a < size; a++) {
getPlayerInventory(player).syncSlotToClients(a);
}
}
public static void putDataIntoInventory(InventoryPlayerExtended inventory, EntityPlayer player) {
inventory.inv = getPlayerInventory(player).inv;
}
}
|
1.15.3 save backup file using player's UUID
|
src/main/java/com/lothrazar/cyclicmagic/core/util/UtilPlayerInventoryFilestorage.java
|
1.15.3 save backup file using player's UUID
|
|
Java
|
mit
|
8bcaf98343f509abd1bc5b1296ccae730250491c
| 0
|
flintproject/Flint,flintproject/Flint
|
/* -*- Mode: Java; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- vim:set ts=4 sw=4 sts=4 et: */
package jp.oist.flint.filesystem;
import org.apache.log4j.Logger;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.channels.FileChannel;
import java.nio.channels.FileLock;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.UUID;
public class Workspace {
private static final String LOCK_FILE_NAME = "flint.lock";
private static final String LOG_FILE_NAME = "flint.log";
private final static String WORKSPACE_NAME;
private static File mLockFile;
private static FileLock mFileLock;
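// Set up this process's private workspace under ~/.flint/<random UUID>: create the directory, take its lock file, purge stale workspaces, and register a shutdown hook for cleanup.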
static {
WORKSPACE_NAME = new File(".flint", UUID.randomUUID().toString()).getPath();
try {
File dir = getFile();
dir.mkdirs();
mLockFile = new File(dir, LOCK_FILE_NAME);
if (!mLockFile.exists()) mLockFile.createNewFile();
mLockFile.deleteOnExit();
FileOutputStream lockStream = new FileOutputStream(mLockFile);
FileChannel lockChannel = lockStream.getChannel();
mFileLock = lockChannel.tryLock();
clear();
} catch (IOException ioe) {
// ignored.
} finally {
if (mFileLock == null) {
Logger.getRootLogger().fatal("could not get lock: " + mLockFile.toString());
System.exit(1);
}
}
Runtime.getRuntime().addShutdownHook(new ShutdownHook());
}
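// Removes workspaces left over from previous runs, skipping the current workspace and any directory whose own lock file is still held by a live instance.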
private static void clear() throws IOException {
File lockFile = null;
FileLock lock = null;
FileChannel lockChannel = null;
try {
File flintWorkspace = getFile().getParentFile();
lockFile = new File(flintWorkspace, "flint.lock");
if (!lockFile.exists())
lockFile.createNewFile();
FileOutputStream lockStream = new FileOutputStream(lockFile);
lockChannel = lockStream.getChannel();
lock = lockChannel.tryLock();
if (lock == null)
throw new IOException("could not create the lock file");
for (File child : flintWorkspace.listFiles()) {
if (getFile().equals(child) || lockFile.equals(child))
continue;
FileChannel instanceChannel = null;
try {
File instanceLockFile = new File(child, "flint.lock");
if (instanceLockFile.exists()) {
FileLock instanceLock = null;
try {
FileOutputStream instanceStream
= new FileOutputStream(instanceLockFile);
instanceChannel = instanceStream.getChannel();
instanceLock = instanceChannel.tryLock();
if (instanceLock == null)
continue;
} finally {
if (instanceLock != null)
instanceLock.release();
if (instanceChannel != null)
instanceChannel.close();
}
}
recursiveDelete(child);
} catch (IOException ex) {
Logger.getRootLogger().error(ex.getMessage());
} finally {
}
}
} catch (IOException ex) {
Logger.getRootLogger().error("could not create the lock file.");
return;
} finally {
if (lock != null)
lock.release();
if (lockChannel != null)
lockChannel.close();
if (lockFile != null && lockFile.exists())
lockFile.delete();
}
}
private static File getFile() throws IOException {
return new File(getPath());
}
// TODO: This method name becomes a misnomer, since Java 7 introduces java.nio.file.Path.
public static String getPath() throws IOException {
return System.getProperty("user.home") + File.separator + WORKSPACE_NAME;
}
public static File createTempFile(String prefix, String suffix) throws IOException {
// Prefix must be at least three characters long
// See
// http://docs.oracle.com/javase/7/docs/api/java/nio/file/Files.html#createTempFile(java.nio.file.Path,%20java.lang.String,%20java.lang.String,%20java.nio.file.attribute.FileAttribute...)
// and
// http://docs.oracle.com/javase/7/docs/api/java/io/File.html#createTempFile(java.lang.String,%20java.lang.String,%20java.io.File)
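// e.g. a prefix of "x" is padded to "x__" before delegating to Files.createTempFile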
while (prefix.length() < 3) {
prefix += "_";
}
Path path = Files.createTempFile(getFile().toPath(), prefix, suffix);
return path.toFile();
}
public static File createTempDirectory(String prefix) throws IOException {
Path path = Files.createTempDirectory(getFile().toPath(), prefix);
return path.toFile();
}
public static void publishFile(File source, File target) throws IOException {
Files.copy(source.toPath(), target.toPath(), StandardCopyOption.REPLACE_EXISTING);
}
public static void recursiveDelete (File target) {
if (target == null)
return;
if (target.isDirectory()) {
for (File f : target.listFiles())
recursiveDelete (f);
}
target.delete();
}
private static class ShutdownHook extends Thread {
@Override
public void run() {
if (mFileLock != null) {
try (FileChannel channel = mFileLock.channel()) {
Workspace.recursiveDelete(Workspace.getFile());
mFileLock.release();
} catch (IOException ioe) {
// ignored
}
}
}
}
}
|
flint/src/jp/oist/flint/filesystem/Workspace.java
|
/* -*- Mode: Java; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- vim:set ts=4 sw=4 sts=4 et: */
package jp.oist.flint.filesystem;
import org.apache.log4j.Logger;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.channels.FileChannel;
import java.nio.channels.FileLock;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.UUID;
public class Workspace {
private static final String LOCK_FILE_NAME = "flint.lock";
private static final String LOG_FILE_NAME = "flint.log";
private final static String WORKSPACE_NAME;
private static File mLockFile;
private static FileLock mFileLock;
static {
WORKSPACE_NAME = new File(".flint", UUID.randomUUID().toString()).getPath();
try {
File dir = getFile();
dir.mkdirs();
mLockFile = new File(dir, LOCK_FILE_NAME);
if (!mLockFile.exists()) mLockFile.createNewFile();
mLockFile.deleteOnExit();
FileOutputStream lockStream = new FileOutputStream(mLockFile);
FileChannel lockChannel = lockStream.getChannel();
mFileLock = lockChannel.tryLock();
clear();
} catch (IOException ioe) {
// ignored.
} finally {
if (mFileLock == null) {
Logger.getRootLogger().fatal("could not get lock: " + mLockFile.toString());
System.exit(1);
}
}
Runtime.getRuntime().addShutdownHook(new Thread () {
@Override
public void run() {
if (mFileLock != null) {
try {
FileChannel channel = mFileLock.channel();
mFileLock.release();
channel.close();
Workspace.recursiveDelete(Workspace.getFile());
} catch (IOException ex) {
Logger.getRootLogger().error(ex.getMessage());
}
}
}
});
}
private static void clear() throws IOException {
File lockFile = null;
FileLock lock = null;
FileChannel lockChannel = null;
try {
File flintWorkspace = getFile().getParentFile();
lockFile = new File(flintWorkspace, "flint.lock");
if (!lockFile.exists())
lockFile.createNewFile();
FileOutputStream lockStream = new FileOutputStream(lockFile);
lockChannel = lockStream.getChannel();
lock = lockChannel.tryLock();
if (lock == null)
throw new IOException("could not create the lock file");
for (File child : flintWorkspace.listFiles()) {
if (getFile().equals(child) || lockFile.equals(child))
continue;
FileChannel instanceChannel = null;
try {
File instanceLockFile = new File(child, "flint.lock");
if (instanceLockFile.exists()) {
FileLock instanceLock = null;
try {
FileOutputStream instanceStream
= new FileOutputStream(instanceLockFile);
instanceChannel = instanceStream.getChannel();
instanceLock = instanceChannel.tryLock();
if (instanceLock == null)
continue;
} finally {
if (instanceLock != null)
instanceLock.release();
if (instanceChannel != null)
instanceChannel.close();
}
}
recursiveDelete(child);
} catch (IOException ex) {
Logger.getRootLogger().error(ex.getMessage());
} finally {
}
}
} catch (IOException ex) {
Logger.getRootLogger().error("could not create the lock file.");
return;
} finally {
if (lock != null)
lock.release();
if (lockChannel != null)
lockChannel.close();
if (lockFile != null && lockFile.exists())
lockFile.delete();
}
}
private static File getFile() throws IOException {
return new File(getPath());
}
// TODO: This method name becomes a misnomer, since Java 7 introduces java.nio.file.Path.
public static String getPath() throws IOException {
return System.getProperty("user.home") + File.separator + WORKSPACE_NAME;
}
public static File createTempFile(String prefix, String suffix) throws IOException {
// Prefix must be at least three characters long
// See
// http://docs.oracle.com/javase/7/docs/api/java/nio/file/Files.html#createTempFile(java.nio.file.Path,%20java.lang.String,%20java.lang.String,%20java.nio.file.attribute.FileAttribute...)
// and
// http://docs.oracle.com/javase/7/docs/api/java/io/File.html#createTempFile(java.lang.String,%20java.lang.String,%20java.io.File)
while (prefix.length() < 3) {
prefix += "_";
}
Path path = Files.createTempFile(getFile().toPath(), prefix, suffix);
return path.toFile();
}
public static File createTempDirectory(String prefix) throws IOException {
Path path = Files.createTempDirectory(getFile().toPath(), prefix);
return path.toFile();
}
public static void publishFile(File source, File target) throws IOException {
Files.copy(source.toPath(), target.toPath(), StandardCopyOption.REPLACE_EXISTING);
}
public static void recursiveDelete (File target) {
if (target == null)
return;
if (target.isDirectory()) {
for (File f : target.listFiles())
recursiveDelete (f);
}
target.delete();
}
public static class ShutdownHook implements Runnable {
private ShutdownHook() { }
@Override
public void run() {
if (mFileLock != null) {
try {
FileChannel channel = mFileLock.channel();
mFileLock.release();
channel.close();
Workspace.recursiveDelete(Workspace.getFile());
} catch (IOException ex) {
// ignored.
}
}
}
}
}
|
remove duplicate code
This also employs the try-with-resources pattern
|
flint/src/jp/oist/flint/filesystem/Workspace.java
|
remove duplicate code
|
|
Java
|
mit
|
3df0194b5b48bf12e788ef2e842f91b7d9cab0f9
| 0
|
Sometrik/framework,Sometrik/framework,Sometrik/framework
|
package com.sometrik.framework;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.List;
import com.android.trivialdrivesample.util.IabHelper;
import com.android.trivialdrivesample.util.IabHelper.IabAsyncInProgressException;
import com.android.trivialdrivesample.util.IabResult;
import com.android.trivialdrivesample.util.Inventory;
import com.android.trivialdrivesample.util.Purchase;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.DialogInterface.OnCancelListener;
import android.content.res.AssetManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.text.Editable;
import android.text.Html;
import android.text.InputType;
import android.text.TextWatcher;
import android.text.method.LinkMovementMethod;
import android.view.Gravity;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.CheckBox;
import android.widget.CompoundButton;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.PopupMenu;
import android.widget.PopupMenu.OnMenuItemClickListener;
import android.widget.ScrollView;
import android.widget.Switch;
import android.widget.TableLayout;
import android.widget.TextView;
public class NativeCommand {
private int internalId = 0;
private int childInternalId = 0;
private int value = 0;
private int flags = 0;
private String textValue = "";
private String textValue2 = "";
private CommandType command;
private String key;
private FrameWork frame;
private ArrayList<PopupMenu> menuList = new ArrayList<PopupMenu>();
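// Bit flags packed into the 'flags' word passed to the constructor; tested with isSet().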
private final int FLAG_PADDING_LEFT = 1;
private final int FLAG_PADDING_RIGHT = 2;
private final int FLAG_PADDING_TOP = 4;
private final int FLAG_PADDING_BOTTOM = 8;
private final int FLAG_PASSWORD = 16;
private final int FLAG_NUMERIC = 32;
private final int FLAG_HYPERLINK = 64;
private final int FLAG_USE_PURCHASES_API = 128;
public enum CommandType {
CREATE_PLATFORM,
CREATE_APPLICATION,
CREATE_FORMVIEW,
CREATE_OPENGL_VIEW,
CREATE_TEXTFIELD, // For viewing single value
CREATE_LISTVIEW, // For viewing lists
CREATE_GRIDVIEW, // For viewing tables
CREATE_BUTTON,
CREATE_SWITCH,
CREATE_PICKER, // called Spinner in Android
CREATE_LINEAR_LAYOUT,
CREATE_TABLE_LAYOUT,
CREATE_AUTO_COLUMN_LAYOUT,
CREATE_HEADING_TEXT,
CREATE_TEXT,
CREATE_DIALOG, // For future
CREATE_IMAGE_ELEMENT,
CREATE_ACTION_SHEET,
CREATE_CHECKBOX,
CREATE_RADIO_GROUP,
CREATE_SEPARATOR,
CREATE_SLIDER,
CREATE_SCROLLVIEW,
DELETE_ELEMENT,
SHOW_VIEW,
SHOW_MESSAGE_DIALOG,
SHOW_INPUT_DIALOG,
SHOW_ACTION_SHEET,
LAUNCH_BROWSER,
POST_NOTIFICATION,
SET_INT_VALUE, // Sets value of radio groups, checkboxes and pickers
SET_TEXT_VALUE, // Sets value of textfields, labels and images
SET_LABEL, // Sets label for buttons and checkboxes
SET_ENABLED,
UPDATE_PREFERENCE,
ADD_OPTION,
QUIT_APP,
// Timers
CREATE_TIMER,
// In-app purchases
LIST_PRODUCTS,
BUY_PRODUCT,
LIST_PURCHASES,
CONSUME_PURCHASE
}
public NativeCommand(FrameWork frame, int messageTypeId, int internalId, int childInternalId, int value, byte[] textValue, byte[] textValue2, int flags){
this.frame = frame;
command = CommandType.values()[messageTypeId];
this.internalId = internalId;
this.childInternalId = childInternalId;
this.value = value;
this.flags = flags;
if (textValue != null) {
this.textValue = new String(textValue, Charset.forName("UTF-8"));
}
if (textValue2 != null) {
this.textValue2 = new String(textValue2, Charset.forName("UTF-8"));
}
}
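// Dispatches this command to the given handler: builds native widgets, updates values, or shows dialogs depending on the command type.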
public void apply(NativeCommandHandler view) {
System.out.println("Processing message " + command + " id: " + internalId + " Child id: " + getChildInternalId());
if (view == null){
System.out.println("View was null");
return;
}
switch (command) {
case CREATE_FORMVIEW:
System.out.println("creating formView " + getChildInternalId());
createFormView();
break;
case CREATE_SCROLLVIEW:
FWScrollView scrollView = new FWScrollView(frame);
scrollView.setId(getChildInternalId());
view.addChild(scrollView);
break;
case CREATE_LINEAR_LAYOUT:
FWLayout layout = createLinearLayout();
view.addChild(layout);
break;
case CREATE_AUTO_COLUMN_LAYOUT:
AutoColumnLayout autoLayout = new AutoColumnLayout(frame);
autoLayout.setId(getChildInternalId());
view.addChild(autoLayout);
break;
case CREATE_TABLE_LAYOUT:
FWTable table = createTableLayout();
view.addChild(table);
break;
case CREATE_BUTTON:
Button button = createButton();
view.addChild(button);
break;
case CREATE_PICKER:
FWPicker picker = createSpinner();
view.addChild(picker);
break;
case CREATE_SWITCH:
Switch click = new Switch(frame);
click.setId(childInternalId);
if (textValue != "") {
click.setTextOn(textValue);
if (textValue2 == "") click.setTextOff(textValue);
}
if (textValue2 != "") click.setTextOff(textValue2);
// TODO: add listener
view.addChild(click);
break;
case CREATE_CHECKBOX:
CheckBox checkBox = new CheckBox(frame);
checkBox.setId(childInternalId);
if (textValue != "") {
checkBox.setText(textValue);
}
view.addChild(checkBox);
checkBox.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton box, boolean isChecked) {
frame.intChangedEvent(System.currentTimeMillis() / 1000.0, childInternalId, isChecked ? 1 : 0);
}
});
break;
case CREATE_OPENGL_VIEW:
NativeSurface surface = frame.createNativeOpenGLView(childInternalId);
surface.showView();
break;
case CREATE_TEXTFIELD:
EditText editText = createEditText();
view.addChild(editText);
break;
case CREATE_RADIO_GROUP:
FWRadioGroup radioGroup = new FWRadioGroup(frame);
radioGroup.setId(childInternalId);
break;
case CREATE_HEADING_TEXT:
case CREATE_TEXT:
TextView textView = createTextView();
view.addChild(textView);
break;
case CREATE_IMAGE_ELEMENT:
ImageView imageView = createImageView();
view.addChild(imageView);
break;
case SHOW_VIEW:
frame.disableDraw();
view.showView();
break;
case ADD_OPTION:
// Forward Command to FWPicker
view.addOption(getValue(), getTextValue());
break;
case POST_NOTIFICATION:
frame.createNotification(getTextValue(), getTextValue2());
break;
case CREATE_APPLICATION:
frame.setAppId(getInternalId());
frame.setSharedPreferences(textValue);
if (isSet(FLAG_USE_PURCHASES_API)) {
System.out.println("Initializing purchaseHelper");
frame.initializePurchaseHelper(textValue2, new IabHelper.OnIabSetupFinishedListener() {
@Override
public void onIabSetupFinished(IabResult result) {
if (result.isSuccess()) {
System.out.println("PurchaseHelper successfully setup");
sendInventory(frame.getPurchaseHelperInventory());
} else {
System.out.println("PurchaseHelper failed to setup");
}
}
});
}
break;
case SET_INT_VALUE:
view.setValue(getValue());
break;
case SET_TEXT_VALUE:
view.setValue(getTextValue());
break;
case SET_ENABLED:
if (value > 0){
view.setEnabled(true);
} else {
view.setEnabled(false);
}
break;
case LAUNCH_BROWSER:
frame.launchBrowser(getTextValue());
break;
case SHOW_MESSAGE_DIALOG:
showMessageDialog(textValue, textValue2);
break;
case SHOW_INPUT_DIALOG:
showInputDialog(textValue, textValue2);
break;
case CREATE_ACTION_SHEET:
createActionSheet();
break;
case QUIT_APP:
// TODO
frame.finish();
break;
case UPDATE_PREFERENCE:
//Now stores String value to string key.
frame.getPreferencesEditor().putString(textValue, textValue2);
frame.getPreferencesEditor().apply();
break;
case DELETE_ELEMENT:
view.removeChild(childInternalId);
break;
case BUY_PRODUCT:
try {
launchPurchase("com.sometrik.formtest.coin");
} catch (IabAsyncInProgressException e) {
e.printStackTrace();
System.out.println("Error on launchPurchase with message: " + e.getMessage());
}
break;
default:
System.out.println("Message couldn't be handled");
break;
}
}
private void createActionSheet(){
PopupMenu menu = new PopupMenu(frame, null);
menu.setOnMenuItemClickListener(new OnMenuItemClickListener(){
@Override
public boolean onMenuItemClick(MenuItem item) {
return false;
}
});
menuList.add(menu);
}
private FWTable createTableLayout(){
FWTable table = new FWTable(frame);
table.setId(getChildInternalId());
table.setColumnCount(value);
FrameWork.addToViewList(table);
return table;
}
private ImageView createImageView() {
ImageView imageView = new ImageView(frame);
imageView.setId(childInternalId);
try {
InputStream is = frame.getAssets().open(textValue);
Bitmap bitmap = BitmapFactory.decodeStream(is);
imageView.setImageBitmap(bitmap);
return imageView;
} catch (IOException e) {
e.printStackTrace();
System.out.println("error loading asset file to imageView");
System.exit(1);
}
return null;
}
private FWLayout createLinearLayout() {
FWLayout layout = new FWLayout(frame);
LinearLayout.LayoutParams params = new LinearLayout.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT);
params.weight = 1.0f;
params.gravity = Gravity.FILL;
layout.setBaselineAligned(false);
layout.setLayoutParams(params);
layout.setId(getChildInternalId());
FrameWork.addToViewList(layout);
if (getValue() == 2) {
layout.setOrientation(LinearLayout.HORIZONTAL);
} else {
layout.setOrientation(LinearLayout.VERTICAL);
}
return layout;
}
private FWButton createButton() {
FWButton button = new FWButton(frame);
button.setId(getInternalId());
button.setText(getTextValue());
button.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View arg0) {
System.out.println("Java: my button was clicked with id " + getChildInternalId());
frame.intChangedEvent(System.currentTimeMillis() / 1000.0, getChildInternalId(), 1);
}
});
return button;
}
private EditText createEditText(){
final EditText editText = new EditText(frame);
editText.setId(getChildInternalId());
editText.setText(getTextValue());
TableLayout.LayoutParams params = new TableLayout.LayoutParams(TableLayout.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.MATCH_PARENT);
params.weight = 1.0f;
params.gravity = Gravity.FILL;
editText.setLayoutParams(params);
editText.setMinimumWidth(120000 / (int) frame.getScreenWidth());
if (isSet(FLAG_PASSWORD) && isSet(FLAG_NUMERIC)){
editText.setInputType(InputType.TYPE_NUMBER_VARIATION_PASSWORD);
} else if (isSet(FLAG_PASSWORD)) {
editText.setInputType(InputType.TYPE_TEXT_VARIATION_PASSWORD);
} else if (isSet(FLAG_NUMERIC)){
editText.setInputType(InputType.TYPE_CLASS_NUMBER);
}
editText.addTextChangedListener(new TextWatcher() {
public void afterTextChanged(Editable editable) {
frame.textChangedEvent(System.currentTimeMillis() / 1000.0, getChildInternalId(), editable.toString());
}
public void beforeTextChanged(CharSequence s, int start, int count, int after) {}
public void onTextChanged(CharSequence s, int start, int before, int count) {}
});
return editText;
}
private FWPicker createSpinner(){
FWPicker picker = new FWPicker(frame);
picker.setId(getChildInternalId());
FrameWork.views.put(getChildInternalId(), picker);
return picker;
}
private TextView createTextView() {
TextView textView = new TextView(frame);
textView.setId(getChildInternalId());
TableLayout.LayoutParams params = new TableLayout.LayoutParams(TableLayout.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.MATCH_PARENT);
params.weight = 1.0f;
params.gravity = Gravity.FILL;
textView.setLayoutParams(params);
if (isSet(FLAG_HYPERLINK)) {
textView.setMovementMethod(LinkMovementMethod.getInstance());
String text = "<a href='" + textValue2 + "'>" + textValue + "</a>";
textView.setText(Html.fromHtml(text));
} else {
textView.setText(textValue);
}
return textView;
}
private void createFormView(){
FWLayout layout = new FWLayout(frame);
layout.setId(getChildInternalId());
FrameWork.views.put(getChildInternalId(), layout);
ScrollView scrollView = new ScrollView(frame);
scrollView.addView(layout);
}
// Create dialog with user text input
private void showInputDialog(String title, String message) {
System.out.println("Creating input dialog");
AlertDialog.Builder builder;
builder = new AlertDialog.Builder(frame);
// Building an alert
builder.setTitle(title);
builder.setMessage(message);
builder.setCancelable(true);
final EditText input = new EditText(frame);
input.setInputType(InputType.TYPE_CLASS_TEXT);
builder.setView(input);
builder.setOnCancelListener(new OnCancelListener(){
@Override
public void onCancel(DialogInterface arg0) {
frame.endModal(System.currentTimeMillis() / 1000.0, 0, null);
}
});
// Negative button listener
builder.setNegativeButton("Cancel", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
frame.endModal(System.currentTimeMillis() / 1000.0, 0, null);
dialog.cancel();
}
});
// Positive button listener
builder.setPositiveButton("OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
String inputText = String.valueOf(input.getText());
byte[] b = inputText.getBytes(Charset.forName("UTF-8"));
frame.endModal(System.currentTimeMillis() / 1000.0, 1, b);
dialog.cancel();
}
});
// Create and show the alert
AlertDialog alert = builder.create();
alert.show();
}
// create Message dialog
private void showMessageDialog(String title, String message) {
System.out.println("creating message dialog");
AlertDialog.Builder builder;
builder = new AlertDialog.Builder(frame);
// Building an alert
builder.setTitle(title);
builder.setMessage(message);
builder.setCancelable(true);
builder.setOnCancelListener(new OnCancelListener(){
@Override
public void onCancel(DialogInterface arg0) {
frame.endModal(System.currentTimeMillis() / 1000.0, 0, null);
}
});
// Positive button listener
builder.setPositiveButton("OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
frame.endModal(System.currentTimeMillis() / 1000.0, 1, null);
dialog.dismiss();
}
});
// Create and show the alert
AlertDialog alert = builder.create();
alert.show();
System.out.println("message dialog created");
}
private void launchPurchase(final String productId) throws IabAsyncInProgressException {
// Sku = product id from google account
frame.getPurchaseHelper().launchPurchaseFlow(frame, productId, IabHelper.ITEM_TYPE_INAPP, null, 1, new IabHelper.OnIabPurchaseFinishedListener() {
@Override
public void onIabPurchaseFinished(IabResult result, Purchase info) {
if (result.isSuccess()) {
System.out.println("Purchase of product id " + productId + " completed");
FrameWork.onPurchaseEvent(info.getPurchaseTime() / 1000.0, info.getSku(), true);
// TODO
} else {
System.out.println("Purchase of product id " + productId + " failed");
// TODO
}
}
}, "");
}
private void sendInventory(Inventory inventory){
List <Purchase> purchaseList = inventory.getAllPurchases();
System.out.println("getting purchase history. Purchase list size: " + purchaseList.size());
for (Purchase purchase : inventory.getAllPurchases()){
FrameWork.onPurchaseEvent(purchase.getPurchaseTime() / 1000.0, purchase.getSku(), false);
}
}
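// True if the given flag bit is set in this command's flags word.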
private Boolean isSet(int flag) {
return (flags & flag) != 0;
}
public String getKey() {
return key;
}
public void setKey(String key) {
this.key = key;
}
public int getInternalId() {
return internalId;
}
public int getChildInternalId() {
return childInternalId;
}
public String getTextValue() {
return textValue;
}
public String getTextValue2() {
return textValue2;
}
public CommandType getCommand() {
return command;
}
public int getValue() {
return value;
}
}
|
android/java/com/sometrik/framework/NativeCommand.java
|
package com.sometrik.framework;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.List;
import com.android.trivialdrivesample.util.IabHelper;
import com.android.trivialdrivesample.util.IabHelper.IabAsyncInProgressException;
import com.android.trivialdrivesample.util.IabResult;
import com.android.trivialdrivesample.util.Inventory;
import com.android.trivialdrivesample.util.Purchase;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.DialogInterface.OnCancelListener;
import android.content.res.AssetManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.text.Editable;
import android.text.Html;
import android.text.InputType;
import android.text.TextWatcher;
import android.text.method.LinkMovementMethod;
import android.view.Gravity;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.CheckBox;
import android.widget.CompoundButton;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.PopupMenu;
import android.widget.PopupMenu.OnMenuItemClickListener;
import android.widget.ScrollView;
import android.widget.Switch;
import android.widget.TableLayout;
import android.widget.TextView;
public class NativeCommand {
private int internalId = 0;
private int childInternalId = 0;
private int value = 0;
private int flags = 0;
private String textValue = "";
private String textValue2 = "";
private CommandType command;
private String key;
private FrameWork frame;
private ArrayList<PopupMenu> menuList = new ArrayList<PopupMenu>();
private final int FLAG_PADDING_LEFT = 1;
private final int FLAG_PADDING_RIGHT = 2;
private final int FLAG_PADDING_TOP = 4;
private final int FLAG_PADDING_BOTTOM = 8;
private final int FLAG_PASSWORD = 16;
private final int FLAG_NUMERIC = 32;
private final int FLAG_HYPERLINK = 64;
private final int FLAG_USE_PURCHASES_API = 128;
public enum CommandType {
CREATE_PLATFORM,
CREATE_APPLICATION,
CREATE_FORMVIEW,
CREATE_OPENGL_VIEW,
CREATE_TEXTFIELD, // For viewing single value
CREATE_LISTVIEW, // For viewing lists
CREATE_GRIDVIEW, // For viewing tables
CREATE_BUTTON,
CREATE_SWITCH,
CREATE_PICKER, // called Spinner in Android
CREATE_LINEAR_LAYOUT,
CREATE_TABLE_LAYOUT,
CREATE_AUTO_COLUMN_LAYOUT,
CREATE_HEADING_TEXT,
CREATE_TEXT,
CREATE_DIALOG, // For future
CREATE_IMAGE_ELEMENT,
CREATE_ACTION_SHEET,
CREATE_CHECKBOX,
CREATE_RADIO_GROUP,
CREATE_SEPARATOR,
CREATE_SLIDER,
CREATE_SCROLLVIEW,
DELETE_ELEMENT,
SHOW_VIEW,
SHOW_MESSAGE_DIALOG,
SHOW_INPUT_DIALOG,
SHOW_ACTION_SHEET,
LAUNCH_BROWSER,
POST_NOTIFICATION,
SET_INT_VALUE, // Sets value of radio groups, checkboxes and pickers
SET_TEXT_VALUE, // Sets value of textfields, labels and images
SET_LABEL, // Sets label for buttons and checkboxes
SET_ENABLED,
UPDATE_PREFERENCE,
ADD_OPTION,
QUIT_APP,
// Timers
CREATE_TIMER,
// In-app purchases
LIST_PRODUCTS,
BUY_PRODUCT,
LIST_PURCHASES,
CONSUME_PURCHASE
}
public NativeCommand(FrameWork frame, int messageTypeId, int internalId, int childInternalId, int value, byte[] textValue, byte[] textValue2, int flags){
this.frame = frame;
command = CommandType.values()[messageTypeId];
this.internalId = internalId;
this.childInternalId = childInternalId;
this.value = value;
this.flags = flags;
if (textValue != null) {
this.textValue = new String(textValue, Charset.forName("UTF-8"));
}
if (textValue2 != null) {
this.textValue2 = new String(textValue2, Charset.forName("UTF-8"));
}
}
public void apply(NativeCommandHandler view) {
System.out.println("Processing message " + command + " id: " + internalId + " Child id: " + getChildInternalId());
if (view == null){
System.out.println("View was null");
return;
}
switch (command) {
case CREATE_FORMVIEW:
System.out.println("creating formView " + getChildInternalId());
createFormView();
break;
case CREATE_SCROLLVIEW:
ScrollView scrollView = new ScrollView(frame);
scrollView.setId(getChildInternalId());
view.addChild(scrollView);
break;
case CREATE_LINEAR_LAYOUT:
FWLayout layout = createLinearLayout();
view.addChild(layout);
break;
case CREATE_AUTO_COLUMN_LAYOUT:
AutoColumnLayout autoLayout = new AutoColumnLayout(frame);
autoLayout.setId(getChildInternalId());
view.addChild(autoLayout);
break;
case CREATE_TABLE_LAYOUT:
FWTable table = createTableLayout();
view.addChild(table);
break;
case CREATE_BUTTON:
Button button = createButton();
view.addChild(button);
break;
case CREATE_PICKER:
FWPicker picker = createSpinner();
view.addChild(picker);
break;
case CREATE_SWITCH:
Switch click = new Switch(frame);
click.setId(childInternalId);
if (textValue != "") {
click.setTextOn(textValue);
if (textValue2 == "") click.setTextOff(textValue);
}
if (textValue2 != "") click.setTextOff(textValue2);
// TODO: add listener
view.addChild(click);
break;
case CREATE_CHECKBOX:
CheckBox checkBox = new CheckBox(frame);
checkBox.setId(childInternalId);
if (textValue != "") {
checkBox.setText(textValue);
}
view.addChild(checkBox);
checkBox.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton box, boolean isChecked) {
frame.intChangedEvent(System.currentTimeMillis() / 1000.0, childInternalId, isChecked ? 1 : 0);
}
});
break;
case CREATE_OPENGL_VIEW:
NativeSurface surface = frame.createNativeOpenGLView(childInternalId);
surface.showView();
break;
case CREATE_TEXTFIELD:
EditText editText = createEditText();
view.addChild(editText);
break;
case CREATE_RADIO_GROUP:
FWRadioGroup radioGroup = new FWRadioGroup(frame);
radioGroup.setId(childInternalId);
break;
case CREATE_HEADING_TEXT:
case CREATE_TEXT:
TextView textView = createTextView();
view.addChild(textView);
break;
case CREATE_IMAGE_ELEMENT:
ImageView imageView = createImageView();
view.addChild(imageView);
break;
case SHOW_VIEW:
frame.disableDraw();
view.showView();
break;
case ADD_OPTION:
// Forward Command to FWPicker
view.addOption(getValue(), getTextValue());
break;
case POST_NOTIFICATION:
frame.createNotification(getTextValue(), getTextValue2());
break;
case CREATE_APPLICATION:
frame.setAppId(getInternalId());
frame.setSharedPreferences(textValue);
if (isSet(FLAG_USE_PURCHASES_API)) {
System.out.println("Initializing purchaseHelper");
frame.initializePurchaseHelper(textValue2, new IabHelper.OnIabSetupFinishedListener() {
@Override
public void onIabSetupFinished(IabResult result) {
if (result.isSuccess()) {
System.out.println("PurchaseHelper successfully setup");
sendInventory(frame.getPurchaseHelperInventory());
} else {
System.out.println("PurchaseHelper failed to setup");
}
}
});
}
break;
case SET_INT_VALUE:
view.setValue(getValue());
break;
case SET_TEXT_VALUE:
view.setValue(getTextValue());
break;
case SET_ENABLED:
if (value > 0){
view.setEnabled(true);
} else {
view.setEnabled(false);
}
break;
case LAUNCH_BROWSER:
frame.launchBrowser(getTextValue());
break;
case SHOW_MESSAGE_DIALOG:
showMessageDialog(textValue, textValue2);
break;
case SHOW_INPUT_DIALOG:
showInputDialog(textValue, textValue2);
break;
case CREATE_ACTION_SHEET:
createActionSheet();
break;
case QUIT_APP:
// TODO
frame.finish();
break;
case UPDATE_PREFERENCE:
//Now stores String value to string key.
frame.getPreferencesEditor().putString(textValue, textValue2);
frame.getPreferencesEditor().apply();
break;
case DELETE_ELEMENT:
view.removeChild(childInternalId);
break;
case BUY_PRODUCT:
try {
launchPurchase("com.sometrik.formtest.coin");
} catch (IabAsyncInProgressException e) {
e.printStackTrace();
System.out.println("Error on launchPurchase with message: " + e.getMessage());
}
default:
System.out.println("Message couldn't be handled");
break;
}
}
private void createActionSheet(){
PopupMenu menu = new PopupMenu(frame, null);
menu.setOnMenuItemClickListener(new OnMenuItemClickListener(){
@Override
public boolean onMenuItemClick(MenuItem item) {
return false;
}
});
menuList.add(menu);
}
private FWTable createTableLayout(){
FWTable table = new FWTable(frame);
table.setId(getChildInternalId());
table.setColumnCount(value);
FrameWork.addToViewList(table);
return table;
}
private ImageView createImageView() {
ImageView imageView = new ImageView(frame);
imageView.setId(childInternalId);
try {
InputStream is = frame.getAssets().open(textValue);
Bitmap bitmap = BitmapFactory.decodeStream(is);
imageView.setImageBitmap(bitmap);
return imageView;
} catch (IOException e) {
e.printStackTrace();
System.out.println("error loading asset file to imageView");
System.exit(1);
}
return null;
}
private FWLayout createLinearLayout() {
FWLayout layout = new FWLayout(frame);
LinearLayout.LayoutParams params = new LinearLayout.LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT);
params.weight = 1.0f;
params.gravity = Gravity.FILL;
layout.setBaselineAligned(false);
layout.setLayoutParams(params);
layout.setId(getChildInternalId());
FrameWork.addToViewList(layout);
if (getValue() == 2) {
layout.setOrientation(LinearLayout.HORIZONTAL);
} else {
layout.setOrientation(LinearLayout.VERTICAL);
}
return layout;
}
private FWButton createButton() {
FWButton button = new FWButton(frame);
button.setId(getInternalId());
button.setText(getTextValue());
button.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View arg0) {
System.out.println("Java: my button was clicked with id " + getChildInternalId());
frame.intChangedEvent(System.currentTimeMillis() / 1000.0, getChildInternalId(), 1);
}
});
return button;
}
private EditText createEditText(){
final EditText editText = new EditText(frame);
editText.setId(getChildInternalId());
editText.setText(getTextValue());
TableLayout.LayoutParams params = new TableLayout.LayoutParams(TableLayout.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.MATCH_PARENT);
params.weight = 1.0f;
params.gravity = Gravity.FILL;
editText.setLayoutParams(params);
editText.setMinimumWidth(120000 / (int) frame.getScreenWidth());
if (isSet(FLAG_PASSWORD) && isSet(FLAG_NUMERIC)){
editText.setInputType(InputType.TYPE_NUMBER_VARIATION_PASSWORD);
} else if (isSet(FLAG_PASSWORD)) {
editText.setInputType(InputType.TYPE_TEXT_VARIATION_PASSWORD);
} else if (isSet(FLAG_NUMERIC)){
editText.setInputType(InputType.TYPE_CLASS_NUMBER);
}
editText.addTextChangedListener(new TextWatcher() {
public void afterTextChanged(Editable editable) {
frame.textChangedEvent(System.currentTimeMillis() / 1000.0, getChildInternalId(), editable.toString());
}
public void beforeTextChanged(CharSequence s, int start, int count, int after) {}
public void onTextChanged(CharSequence s, int start, int before, int count) {}
});
return editText;
}
private FWPicker createSpinner(){
FWPicker picker = new FWPicker(frame);
picker.setId(getChildInternalId());
FrameWork.views.put(getChildInternalId(), picker);
return picker;
}
private TextView createTextView() {
TextView textView = new TextView(frame);
textView.setId(getChildInternalId());
TableLayout.LayoutParams params = new TableLayout.LayoutParams(TableLayout.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.MATCH_PARENT);
params.weight = 1.0f;
params.gravity = Gravity.FILL;
textView.setLayoutParams(params);
if (isSet(FLAG_HYPERLINK)) {
textView.setMovementMethod(LinkMovementMethod.getInstance());
String text = "<a href='" + textValue2 + "'>" + textValue + "</a>";
textView.setText(Html.fromHtml(text));
} else {
textView.setText(textValue);
}
return textView;
}
private void createFormView(){
FWLayout layout = new FWLayout(frame);
layout.setId(getChildInternalId());
FrameWork.views.put(getChildInternalId(), layout);
ScrollView scrollView = new ScrollView(frame);
scrollView.addView(layout);
}
// Create dialog with user text input
private void showInputDialog(String title, String message) {
System.out.println("Creating input dialog");
AlertDialog.Builder builder;
builder = new AlertDialog.Builder(frame);
// Building an alert
builder.setTitle(title);
builder.setMessage(message);
builder.setCancelable(true);
final EditText input = new EditText(frame);
input.setInputType(InputType.TYPE_CLASS_TEXT);
builder.setView(input);
builder.setOnCancelListener(new OnCancelListener(){
@Override
public void onCancel(DialogInterface arg0) {
frame.endModal(System.currentTimeMillis() / 1000.0, 0, null);
}
});
// Negative button listener
builder.setNegativeButton("Cancel", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
frame.endModal(System.currentTimeMillis() / 1000.0, 0, null);
dialog.cancel();
}
});
// Positive button listener
builder.setPositiveButton("OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
String inputText = String.valueOf(input.getText());
byte[] b = inputText.getBytes(Charset.forName("UTF-8"));
frame.endModal(System.currentTimeMillis() / 1000.0, 1, b);
dialog.cancel();
}
});
// Create and show the alert
AlertDialog alert = builder.create();
alert.show();
}
// create Message dialog
private void showMessageDialog(String title, String message) {
System.out.println("creating message dialog");
AlertDialog.Builder builder;
builder = new AlertDialog.Builder(frame);
// Building an alert
builder.setTitle(title);
builder.setMessage(message);
builder.setCancelable(true);
builder.setOnCancelListener(new OnCancelListener(){
@Override
public void onCancel(DialogInterface arg0) {
frame.endModal(System.currentTimeMillis() / 1000.0, 0, null);
}
});
// Positive button listener
builder.setPositiveButton("OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
frame.endModal(System.currentTimeMillis() / 1000.0, 1, null);
dialog.dismiss();
}
});
// Create and show the alert
AlertDialog alert = builder.create();
alert.show();
System.out.println("message dialog created");
}
private void launchPurchase(final String productId) throws IabAsyncInProgressException {
// Sku = product id from google account
frame.getPurchaseHelper().launchPurchaseFlow(frame, productId, IabHelper.ITEM_TYPE_INAPP, null, 1, new IabHelper.OnIabPurchaseFinishedListener() {
@Override
public void onIabPurchaseFinished(IabResult result, Purchase info) {
if (result.isSuccess()) {
System.out.println("Purchase of product id " + productId + " completed");
FrameWork.onPurchaseEvent(info.getPurchaseTime() / 1000.0, info.getSku(), true);
// TODO
} else {
System.out.println("Purchase of product id " + productId + " failed");
// TODO
}
}
}, "");
}
private void sendInventory(Inventory inventory){
List <Purchase> purchaseList = inventory.getAllPurchases();
System.out.println("getting purchase history. Purchase list size: " + purchaseList.size());
for (Purchase purchase : inventory.getAllPurchases()){
FrameWork.onPurchaseEvent(purchase.getPurchaseTime() / 1000.0, purchase.getSku(), false);
}
}
private Boolean isSet(int flag) {
return (flags & flag) != 0;
}
public String getKey() {
return key;
}
public void setKey(String key) {
this.key = key;
}
public int getInternalId() {
return internalId;
}
public int getChildInternalId() {
return childInternalId;
}
public String getTextValue() {
return textValue;
}
public String getTextValue2() {
return textValue2;
}
public CommandType getCommand() {
return command;
}
public int getValue() {
return value;
}
}
|
Change scrollView creation to FWScrollView
|
android/java/com/sometrik/framework/NativeCommand.java
|
Change scrollView creation to FWScrollView
|
|
Java
|
epl-1.0
|
92050630d988deb050428193b511c0801e028a49
| 0
|
samuelmartingc/optsicom-framework,samuelmartingc/optsicom-framework,codeurjc/optsicom-framework,codeurjc/optsicom-framework,codeurjc/optsicom-framework,samuelmartingc/optsicom-framework,codeurjc/optsicom-framework,codeurjc/optsicom-framework
|
package es.optsicom.lib.approx.improvement.movement;
import es.optsicom.lib.Instance;
import es.optsicom.lib.Solution;
import es.optsicom.lib.approx.improvement.TimeLimitException;
import es.optsicom.lib.util.ArraysUtil;
import es.optsicom.lib.util.BestMode;
import es.optsicom.lib.util.Id;
import es.optsicom.lib.util.MathUtil;
public class TabuVariableTenureImprovementMethod<S extends Solution<I>, I extends Instance>
extends MovementImprovementMethod<S, I> implements MovementManager {
private Mode mode;
private double bestIncrement;
private Object movementAttributes;
private BestMode bestMode;
private float tabuTenure;
private float maxIterWoImpr;
private Object memory;
private TabuProblemAdapter<S, I> tabuAdapter;
private int numIteration;
private int itersWoImpr;
private S bestSolution;
private TenureProblemAdapter<S, I> tenureAdapter;
private double solutionWeight;
private boolean testTabuMovements = true;
private long startTime;
public TabuVariableTenureImprovementMethod(MovementGenerator<S, I> movementGenerator,
Mode mode, float maxIterWoImpr, float tabuTenure,
TabuProblemAdapter<S, I> tabuAdapter,
TenureProblemAdapter<S, I> tenureAdapter) {
super(movementGenerator);
this.mode = mode;
this.tabuAdapter = tabuAdapter;
this.tabuTenure = tabuTenure;
this.tenureAdapter = tenureAdapter;
this.maxIterWoImpr = maxIterWoImpr;
}
@Override
public boolean internalImproveSolution(S solution, long duration) {
//Hack to know when algorithm starts
startTime = System.currentTimeMillis();
return super.internalImproveSolution(solution, duration);
}
@SuppressWarnings("unchecked")
@Override
protected void moreInternalImproveSolution() {
this.memory = tabuAdapter.createMemory(solution);
int maxItersWoImprInt = tabuAdapter.getMaxItersWoImprInt(solution,
maxIterWoImpr);
bestMode = instance.getProblem().getMode();
numIteration = 1;
itersWoImpr = 0;
bestSolution = (S) solution.createCopy();
do {
// System.out.println("NumIteration: "+numIteration);
bestIncrement = 0;
movementAttributes = null;
solutionWeight = solution.getWeight();
try {
movementGenerator.generateMovements(this);
applyMovement();
} catch (FinishGeneratingMovementsException e) {
applyMovement();
}
numIteration++;
tenureAdapter.finishIteration(solution, numIteration, itersWoImpr);
// if(solution.getWeight() != bestSolution.getWeight()){
// System.out.println("NumIterations Wo Impr: "+itersWoImpr+" limit="+maxItersWoImprInt);
// }
if (itersWoImpr >= maxItersWoImprInt) {
break;
}
try{
checkFinishByTime();
} catch(TimeLimitException e) {
break;
}
} while (true);
System.out.println("Iterations: " + numIteration + "; ItersWoImp: " + itersWoImpr);
solution.asSolution(bestSolution);
}
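// Applies the best admissible movement found in this iteration, marks it tabu for the tenure chosen by the tenure adapter, and updates the best solution seen so far.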
private void applyMovement() {
//System.out.println("---- ApplyMovement: " + (solutionWeight + bestIncrement) + "("+ bestIncrement+") "+ArraysUtil.toStringObj(movementAttributes));
boolean newBestSolutionFound = false;
if (bestMode.isBetterThan(solutionWeight + bestIncrement,
bestSolution.getWeight())) {
improvement = true;
itersWoImpr = 0;
newBestSolutionFound = true;
//System.out.println("BestSolFound");
} else {
itersWoImpr++;
}
// if(!bestMode.isImprovement(bestIncrement)){
// System.out.println(".");
// }
//double originalWeight = solutionWeight;
movementGenerator.applyMovement(movementAttributes);
int tenure = tenureAdapter.getTenure(solution, movementAttributes, numIteration, itersWoImpr);
System.out.println(" ##-> "+(System.currentTimeMillis()-startTime)+":"+tenure);
tabuAdapter.markAsTabu(memory, movementAttributes, numIteration,
tenure);
//System.out.println("---- New Solution: " + solution.getWeight());
// System.out.print(".");
// System.out.print(solution.getWeight() + ", ");
// TODO This can be optimized by making the copy when the first
// non-improving
// movement is applied
if (solution.isBetterThan(bestSolution)) {
//System.out.println("New solution found: "+solution.getWeight());
bestSolution = (S) solution.createCopy();
newBestSolutionFound(bestSolution);
//System.out.println(">> "+bestSolution.getWeight());
}
// TODO Correct implementation test. We need to find a good way
// to enable or disable this kind of thing in all code.
// if (!MathUtil.efectiveEquals(originalWeight + bestIncrement,
// solution.getWeight())) {
// throw new RuntimeException(
// "Applying the movement doesn't increment the solution value as expected. It should be "
// + (originalWeight + bestIncrement)
// + " and is "
// + solution.getWeight());
// }
}
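// Callback for each candidate movement: keeps the best one that is either non-tabu, or tabu but better than the best known solution (aspiration criterion).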
@Override
public void testMovement(double increment, Object movementAttributes) {
boolean tabuMovement = tabuAdapter.isMarkedAsTabu(memory,
movementAttributes, numIteration);
boolean aspirationCriteria = false;
if (tabuMovement) {
aspirationCriteria = bestMode.isBetterThan(
increment + solutionWeight, bestSolution.getWeight());
// if(aspirationCriteria){
// System.out.println("AS: "+(increment + solutionWeight)+" > "+bestSolution.getWeight());
// }
}
if (!tabuMovement || aspirationCriteria) {
if (this.movementAttributes == null
|| bestMode.isBetterThan(increment, bestIncrement)) {
this.bestIncrement = increment;
this.movementAttributes = this.movementGenerator
.createCopy(movementAttributes);
// if(!bestMode.isImprovement(increment)){
// System.out.println("Best non-improving movement: "+increment
// + " -> "
// + solution.getWeight() + " [" +
// Arrays.toString((int[])movementAttributes) + "]");
// }
}
if (mode == Mode.FIRST) {
if (bestMode.isImprovement(increment)) {
throw new FinishGeneratingMovementsException();
}
}
}
}
@Override
public void finishMovementGroup() {
if (mode == Mode.MIXED && bestMode.isImprovement(bestIncrement)) {
throw new FinishGeneratingMovementsException();
}
}
@Id
public Mode getMode() {
return mode;
}
@Id
public float getTabuTenure() {
return tabuTenure;
}
@Id
public TabuProblemAdapter<S, I> getTabuAdapter() {
return tabuAdapter;
}
@Id
public TenureProblemAdapter<S, I> getTenureAdapter() {
return tenureAdapter;
}
@Id
public float getMaxIterWoImpr() {
return maxIterWoImpr;
}
public TabuVariableTenureImprovementMethod<S, I> setTestTabuMovements(boolean testTabuMovements) {
this.testTabuMovements = testTabuMovements;
return this;
}
@Override
public boolean canTestMovement(Object movementAttributes) {
if(testTabuMovements){
return true;
} else {
return !tabuAdapter.isMarkedAsTabu(memory, movementAttributes, numIteration);
}
}
}
|
es.optsicom.lib.approx/src/main/java/es/optsicom/lib/approx/improvement/movement/TabuVariableTenureImprovementMethod.java
|
package es.optsicom.lib.approx.improvement.movement;
import es.optsicom.lib.Instance;
import es.optsicom.lib.Solution;
import es.optsicom.lib.approx.improvement.TimeLimitException;
import es.optsicom.lib.util.ArraysUtil;
import es.optsicom.lib.util.BestMode;
import es.optsicom.lib.util.Id;
import es.optsicom.lib.util.MathUtil;
public class TabuVariableTenureImprovementMethod<S extends Solution<I>, I extends Instance>
extends MovementImprovementMethod<S, I> implements MovementManager {
private Mode mode;
private double bestIncrement;
private Object movementAttributes;
private BestMode bestMode;
private float tabuTenure;
private float maxIterWoImpr;
private Object memory;
private TabuProblemAdapter<S, I> tabuAdapter;
private int numIteration;
private int itersWoImpr;
private S bestSolution;
private TenureProblemAdapter<S, I> tenureAdapter;
private double solutionWeight;
private boolean testTabuMovements = true;
public TabuVariableTenureImprovementMethod(MovementGenerator<S, I> movementGenerator,
Mode mode, float maxIterWoImpr, float tabuTenure,
TabuProblemAdapter<S, I> tabuAdapter,
TenureProblemAdapter<S, I> tenureAdapter) {
super(movementGenerator);
this.mode = mode;
this.tabuAdapter = tabuAdapter;
this.tabuTenure = tabuTenure;
this.tenureAdapter = tenureAdapter;
this.maxIterWoImpr = maxIterWoImpr;
}
@SuppressWarnings("unchecked")
@Override
protected void moreInternalImproveSolution() {
this.memory = tabuAdapter.createMemory(solution);
int maxItersWoImprInt = tabuAdapter.getMaxItersWoImprInt(solution,
maxIterWoImpr);
bestMode = instance.getProblem().getMode();
numIteration = 1;
itersWoImpr = 0;
bestSolution = (S) solution.createCopy();
do {
// System.out.println("NumIteration: "+numIteration);
bestIncrement = 0;
movementAttributes = null;
solutionWeight = solution.getWeight();
try {
movementGenerator.generateMovements(this);
applyMovement();
} catch (FinishGeneratingMovementsException e) {
applyMovement();
}
numIteration++;
tenureAdapter.finishIteration(solution, numIteration, itersWoImpr);
// if(solution.getWeight() != bestSolution.getWeight()){
// System.out.println("NumIterations Wo Impr: "+itersWoImpr+" limit="+maxItersWoImprInt);
// }
if (itersWoImpr >= maxItersWoImprInt) {
break;
}
try{
checkFinishByTime();
} catch(TimeLimitException e) {
break;
}
} while (true);
System.out.println("Iterations: " + numIteration + "; ItersWoImp: " + itersWoImpr);
solution.asSolution(bestSolution);
}
private void applyMovement() {
//System.out.println("---- ApplyMovement: " + (solutionWeight + bestIncrement) + "("+ bestIncrement+") "+ArraysUtil.toStringObj(movementAttributes));
boolean newBestSolutionFound = false;
if (bestMode.isBetterThan(solutionWeight + bestIncrement,
bestSolution.getWeight())) {
improvement = true;
itersWoImpr = 0;
newBestSolutionFound = true;
//System.out.println("BestSolFound");
} else {
itersWoImpr++;
}
// if(!bestMode.isImprovement(bestIncrement)){
// System.out.println(".");
// }
//double originalWeight = solutionWeight;
movementGenerator.applyMovement(movementAttributes);
tabuAdapter.markAsTabu(memory, movementAttributes, numIteration,
tenureAdapter.getTenure(solution, movementAttributes, numIteration, itersWoImpr));
//System.out.println("---- New Solution: " + solution.getWeight());
// System.out.print(".");
// System.out.print(solution.getWeight() + ", ");
// TODO This can be optimized by making the copy when the first
// non-improving
// movement is applied
if (solution.isBetterThan(bestSolution)) {
//System.out.println("New solution found: "+solution.getWeight());
bestSolution = (S) solution.createCopy();
newBestSolutionFound(bestSolution);
System.out.println(">> "+bestSolution.getWeight());
}
// TODO Correct implementation test. We need to find a good way
// to enable or disable this kind of thing in all code.
// if (!MathUtil.efectiveEquals(originalWeight + bestIncrement,
// solution.getWeight())) {
// throw new RuntimeException(
// "Applying the movement doesn't increment the solution value as expected. It should be "
// + (originalWeight + bestIncrement)
// + " and is "
// + solution.getWeight());
// }
}
@Override
public void testMovement(double increment, Object movementAttributes) {
boolean tabuMovement = tabuAdapter.isMarkedAsTabu(memory,
movementAttributes, numIteration);
boolean aspirationCriteria = false;
if (tabuMovement) {
aspirationCriteria = bestMode.isBetterThan(
increment + solutionWeight, bestSolution.getWeight());
// if(aspirationCriteria){
// System.out.println("AS: "+(increment + solutionWeight)+" > "+bestSolution.getWeight());
// }
}
if (!tabuMovement || aspirationCriteria) {
if (this.movementAttributes == null
|| bestMode.isBetterThan(increment, bestIncrement)) {
this.bestIncrement = increment;
this.movementAttributes = this.movementGenerator
.createCopy(movementAttributes);
// if(!bestMode.isImprovement(increment)){
// System.out.println("Best non-improving movement: "+increment
// + " -> "
// + solution.getWeight() + " [" +
// Arrays.toString((int[])movementAttributes) + "]");
// }
}
if (mode == Mode.FIRST) {
if (bestMode.isImprovement(increment)) {
throw new FinishGeneratingMovementsException();
}
}
}
}
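// In MIXED mode, movement generation is cut short at the end of a group once an
// improving movement has been found.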
@Override
public void finishMovementGroup() {
if (mode == Mode.MIXED && bestMode.isImprovement(bestIncrement)) {
throw new FinishGeneratingMovementsException();
}
}
@Id
public Mode getMode() {
return mode;
}
@Id
public float getTabuTenure() {
return tabuTenure;
}
@Id
public TabuProblemAdapter<S, I> getTabuAdapter() {
return tabuAdapter;
}
@Id
public TenureProblemAdapter<S, I> getTenureAdapter() {
return tenureAdapter;
}
@Id
public float getMaxIterWoImpr() {
return maxIterWoImpr;
}
public TabuVariableTenureImprovementMethod<S, I> setTestTabuMovements(boolean testTabuMovements) {
this.testTabuMovements = testTabuMovements;
return this;
}
@Override
public boolean canTestMovement(Object movementAttributes) {
if (testTabuMovements) {
return true;
} else {
return !tabuAdapter.isMarkedAsTabu(memory, movementAttributes, numIteration);
}
}
}
|
tenure trace
|
es.optsicom.lib.approx/src/main/java/es/optsicom/lib/approx/improvement/movement/TabuVariableTenureImprovementMethod.java
|
tenure trace
|
|
Java
|
agpl-3.0
|
62a4c956c2dcc0eb13885795d5566f7eed7e670e
| 0
|
musalbas/Nuclibook,musalbas/Nuclibook,musalbas/Nuclibook
|
package nuclibook.models;
import com.j256.ormlite.dao.CloseableIterator;
import com.j256.ormlite.dao.ForeignCollection;
import com.j256.ormlite.field.DataType;
import com.j256.ormlite.field.DatabaseField;
import com.j256.ormlite.field.ForeignCollectionField;
import com.j256.ormlite.table.DatabaseTable;
import nuclibook.server.Renderable;
import org.joda.time.Days;
import org.joda.time.LocalDate;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
/**
* Model representing a booking.
*/
@DatabaseTable(tableName = "bookings")
public class Booking implements Renderable {
@DatabaseField(generatedId = true)
private Integer id;
@DatabaseField(columnName = "patient_id", foreign = true, foreignAutoRefresh = true)
private Patient patient;
@DatabaseField(columnName = "therapy_id", foreign = true, foreignAutoRefresh = true)
private Therapy therapy;
@DatabaseField(columnName = "camera_id", foreign = true, foreignAutoRefresh = true)
private Camera camera;
@DatabaseField(columnName = "tracer_id", foreign = true, foreignAutoRefresh = true)
private Tracer tracer;
@DatabaseField(width = 32, columnName = "tracer_dose")
private String tracerDose;
@ForeignCollectionField(eager = true)
private ForeignCollection<BookingSection> bookingSections;
@ForeignCollectionField(eager = true)
private ForeignCollection<BookingStaff> bookingStaff;
@DatabaseField(width = 16)
private String status;
@DatabaseField(dataType = DataType.LONG_STRING)
private String notes;
/**
* Blank constructor for ORM.
*/
public Booking() {
}
/**
* Get the ID of the booking.
*
* @return The ID of the booking.
*/
public Integer getId() {
return id;
}
/**
* Set the ID of the booking.
*
* @param id The ID of the booking.
*/
public void setId(Integer id) {
this.id = id;
}
/**
* Get the patient.
*
* @return The patient.
*/
public Patient getPatient() {
return patient;
}
/**
* Set the patient.
*
* @param patient The patient.
*/
public void setPatient(Patient patient) {
this.patient = patient;
}
/**
* Get the therapy.
*
* @return The therapy.
*/
public Therapy getTherapy() {
return therapy;
}
/**
* Set the therapy.
*
* @param therapy The therapy.
*/
public void setTherapy(Therapy therapy) {
this.therapy = therapy;
}
/**
* Get the camera.
*
* @return The camera.
*/
public Camera getCamera() {
return camera;
}
/**
* Set the camera
*
* @param camera The camera.
*/
public void setCamera(Camera camera) {
this.camera = camera;
}
/**
* Get the tracer
*
* @return The tracer
*/
public Tracer getTracer() {
return tracer;
}
/**
* Set the tracer
*
* @param tracer The tracer
*/
public void setTracer(Tracer tracer) {
this.tracer = tracer;
}
/**
* Get the tracer dose.
*
* @return The tracer dose.
*/
public String getTracerDose() {
return tracerDose;
}
/**
* Set the tracer dose
*
* @param tracerDose The tracer dose.
*/
public void setTracerDose(String tracerDose) {
this.tracerDose = tracerDose;
}
/**
* Get the list of booking sections for this booking.
*
* @return The list of booking sections for this booking.
*/
public List<BookingSection> getBookingSections() {
ArrayList<BookingSection> output = new ArrayList<>();
try {
bookingSections.refreshCollection();
} catch (SQLException | NullPointerException e) {
return output;
}
CloseableIterator<BookingSection> iterator = bookingSections.closeableIterator();
try {
BookingSection bs;
while (iterator.hasNext()) {
bs = iterator.next();
if (bs != null) output.add(bs);
}
} finally {
iterator.closeQuietly();
}
// sort by date
output.sort(new Comparator<BookingSection>() {
@Override
public int compare(BookingSection o1, BookingSection o2) {
return o1.getStart().compareTo(o2.getStart());
}
});
return output;
}
/**
* Get the list of staff for this booking.
*
* @return The list of staff for this booking.
*/
public List<Staff> getStaff() {
ArrayList<Staff> output = new ArrayList<>();
try {
bookingStaff.refreshCollection();
} catch (SQLException | NullPointerException e) {
return output;
}
CloseableIterator<BookingStaff> iterator = bookingStaff.closeableIterator();
try {
BookingStaff bs;
while (iterator.hasNext()) {
bs = iterator.next();
if (bs != null) output.add(bs.getStaff());
}
} finally {
iterator.closeQuietly();
}
return output;
}
/**
* Get the list of staff for this booking.
*
* @return The list of staff for this booking.
*/
public List<BookingStaff> getBookingStaff() {
ArrayList<BookingStaff> output = new ArrayList<>();
try {
bookingStaff.refreshCollection();
} catch (SQLException | NullPointerException e) {
return output;
}
CloseableIterator<BookingStaff> iterator = bookingStaff.closeableIterator();
try {
BookingStaff bs;
while (iterator.hasNext()) {
bs = iterator.next();
if (bs != null) output.add(bs);
}
} finally {
iterator.closeQuietly();
}
return output;
}
/**
* Get the status of the booking.
*
* @return The status of the booking.
*/
public String getStatus() {
return status;
}
/**
* Set the status of the booking
*
* @param status The status of the booking.
*/
public void setStatus(String status) {
this.status = status;
}
/**
* Get the notes associated with the booking.
*
* @return The notes associated with the booking.
*/
public String getNotes() {
return notes;
}
/**
* Set the notes associated with the booking.
*
* @param notes The notes associated with the booking.
*/
public void setNotes(String notes) {
this.notes = notes;
}
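/**
* Build the map of template values (patient, therapy, camera, tracer, timings, staff and notes) used to render this booking.
*
* @return The map of template values for this booking.
*/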
@Override
public HashMap<String, String> getHashMap() {
return new HashMap<String, String>() {{
put("booking-id", getId().toString());
put("patient-name", getPatient().getName());
put("therapy-name", getTherapy().getName());
put("camera-id", getCamera().getId().toString());
put("camera-type-label", getCamera().getType().getLabel());
put("camera-room-number", getCamera().getRoomNumber());
put("tracer-name", getTracer().getName());
put("tracer-id", getTracer().getId().toString());
put("tracer-dose", getTracerDose());
put("status", getStatus());
// get status label
String statusLabel = "default";
if (getStatus().equals("unconfirmed")) statusLabel = "warning";
if (getStatus().equals("confirmed")) statusLabel = "success";
if (getStatus().equals("rebooked")) statusLabel = "info";
put("status-with-label", "<span class=\"label label-as-badge label-" + statusLabel + "\">" + getStatus() + "</span>");
// get date
List<BookingSection> bookingSections = getBookingSections();
if (bookingSections.isEmpty()) {
put("date", "?");
} else {
put("date", bookingSections.get(0).getStart().toString("YYYY-MM-dd"));
}
// set up booking sections as string for day summary
String bookingSectionsAsString = "";
String bookingSectionsAsStringTimeOnly = "";
if (!bookingSections.isEmpty()) {
for (BookingSection b : bookingSections) {
bookingSectionsAsString += "<li class=\"list-group-item\">\n";
String startTime = b.getStart().toString("HH:mm");
String endTime = b.getEnd().toString("HH:mm");
bookingSectionsAsString += (startTime + " to " + endTime + " \n");
bookingSectionsAsString += "</li>";
bookingSectionsAsStringTimeOnly += (startTime + " - " + endTime + ", ");
}
bookingSectionsAsStringTimeOnly = bookingSectionsAsStringTimeOnly.substring(0, bookingSectionsAsStringTimeOnly.length() - 2);
} else {
bookingSectionsAsString = "<em>None</em>\n";
}
put("booking-sections-as-string", bookingSectionsAsString);
put("booking-sections-as-string-time-only", bookingSectionsAsStringTimeOnly);
// get days until
if (bookingSections.isEmpty()) {
put("days-until", "?");
} else {
int daysUntil = Days.daysBetween(new LocalDate(), bookingSections.get(0).getStart().toLocalDate()).getDays();
if (daysUntil == 0) {
put("days-until", "today");
} else if (daysUntil < 0) {
put("days-until", (daysUntil * -1) + " day" + (daysUntil == -1 ? "" : "s") + " ago");
} else {
put("days-until", "in " + daysUntil + " day" + (daysUntil == -1 ? "" : "s"));
}
}
// get staff
String staff = "";
if (getStaff().isEmpty()) {
staff = "<em>None</em>";
} else {
List<Staff> assignedStaff = getStaff();
for (Staff s : assignedStaff) {
staff += s.getName() + ", ";
}
staff = staff.substring(0, staff.length() - 2);
}
put("staff", staff);
//get staffID
String staffId = "";
if (getStaff().isEmpty()) {
staffId = "<em>None</em>";
} else {
List<Staff> assignedStaff = getStaff();
for (Staff s : assignedStaff) {
staffId += s.getId() + ", ";
}
staffId = staffId.substring(0, staffId.length() - 2);
}
put("staff-id-list", staffId);
// get notes
String notes = getNotes();
if (notes == null || notes.length() == 0) {
notes = "<em>None</em>";
} else {
notes = notes.replace("\n", "<br />");
}
put("notes", notes);
put("notes-unformatted", getNotes());
}};
}
}
|
src/main/java/nuclibook/models/Booking.java
|
package nuclibook.models;
import com.j256.ormlite.dao.CloseableIterator;
import com.j256.ormlite.dao.ForeignCollection;
import com.j256.ormlite.field.DataType;
import com.j256.ormlite.field.DatabaseField;
import com.j256.ormlite.field.ForeignCollectionField;
import com.j256.ormlite.table.DatabaseTable;
import nuclibook.server.Renderable;
import org.joda.time.Days;
import org.joda.time.LocalDate;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
/**
* Model representing a booking.
*/
@DatabaseTable(tableName = "bookings")
public class Booking implements Renderable {
@DatabaseField(generatedId = true)
private Integer id;
@DatabaseField(columnName = "patient_id", foreign = true, foreignAutoRefresh = true)
private Patient patient;
@DatabaseField(columnName = "therapy_id", foreign = true, foreignAutoRefresh = true)
private Therapy therapy;
@DatabaseField(columnName = "camera_id", foreign = true, foreignAutoRefresh = true)
private Camera camera;
@DatabaseField(columnName = "tracer_id", foreign = true, foreignAutoRefresh = true)
private Tracer tracer;
@DatabaseField(width = 32, columnName = "tracer_dose")
private String tracerDose;
@ForeignCollectionField(eager = true)
private ForeignCollection<BookingSection> bookingSections;
@ForeignCollectionField(eager = true)
private ForeignCollection<BookingStaff> bookingStaff;
@DatabaseField(width = 16)
private String status;
@DatabaseField(dataType = DataType.LONG_STRING)
private String notes;
/**
* Blank constructor for ORM.
*/
public Booking() {
}
/**
* Get the ID of the booking.
*
* @return The ID of the booking.
*/
public Integer getId() {
return id;
}
/**
* Set the ID of the booking.
*
* @param id The ID of the booking.
*/
public void setId(Integer id) {
this.id = id;
}
/**
* Get the patient.
*
* @return The patient.
*/
public Patient getPatient() {
return patient;
}
/**
* Set the patient.
*
* @param patient The patient.
*/
public void setPatient(Patient patient) {
this.patient = patient;
}
/**
* Get the therapy.
*
* @return The therapy.
*/
public Therapy getTherapy() {
return therapy;
}
/**
* Set the therapy.
*
* @param therapy The therapy.
*/
public void setTherapy(Therapy therapy) {
this.therapy = therapy;
}
/**
* Get the camera.
*
* @return The camera.
*/
public Camera getCamera() {
return camera;
}
/**
* Set the camera
*
* @param camera The camera.
*/
public void setCamera(Camera camera) {
this.camera = camera;
}
/**
* Get the tracer
*
* @return The tracer
*/
public Tracer getTracer() {
return tracer;
}
/**
* Set the tracer
*
* @param tracer The tracer
*/
public void setTracer(Tracer tracer) {
this.tracer = tracer;
}
/**
* Get the tracer dose.
*
* @return The tracer dose.
*/
public String getTracerDose() {
return tracerDose;
}
/**
* Set the tracer dose
*
* @param tracerDose The tracer dose.
*/
public void setTracerDose(String tracerDose) {
this.tracerDose = tracerDose;
}
/**
* Get the list of booking sections for this booking.
*
* @return The list of booking sections for this booking.
*/
public List<BookingSection> getBookingSections() {
ArrayList<BookingSection> output = new ArrayList<>();
try {
bookingSections.refreshCollection();
} catch (SQLException | NullPointerException e) {
return output;
}
CloseableIterator<BookingSection> iterator = bookingSections.closeableIterator();
try {
BookingSection bs;
while (iterator.hasNext()) {
bs = iterator.next();
if (bs != null) output.add(bs);
}
} finally {
iterator.closeQuietly();
}
// sort by date
output.sort(new Comparator<BookingSection>() {
@Override
public int compare(BookingSection o1, BookingSection o2) {
return o1.getStart().compareTo(o2.getStart());
}
});
return output;
}
/**
* Get the list of staff for this booking.
*
* @return The list of staff for this booking.
*/
public List<Staff> getStaff() {
ArrayList<Staff> output = new ArrayList<>();
try {
bookingStaff.refreshCollection();
} catch (SQLException | NullPointerException e) {
return output;
}
CloseableIterator<BookingStaff> iterator = bookingStaff.closeableIterator();
try {
BookingStaff bs;
while (iterator.hasNext()) {
bs = iterator.next();
if (bs != null) output.add(bs.getStaff());
}
} finally {
iterator.closeQuietly();
}
return output;
}
/**
* Get the list of staff for this booking.
*
* @return The list of staff for this booking.
*/
public List<BookingStaff> getBookingStaff() {
ArrayList<BookingStaff> output = new ArrayList<>();
try {
bookingStaff.refreshCollection();
} catch (SQLException | NullPointerException e) {
return output;
}
CloseableIterator<BookingStaff> iterator = bookingStaff.closeableIterator();
try {
BookingStaff bs;
while (iterator.hasNext()) {
bs = iterator.next();
if (bs != null) output.add(bs);
}
} finally {
iterator.closeQuietly();
}
return output;
}
/**
* Get the status of the booking.
*
* @return The status of the booking.
*/
public String getStatus() {
return status;
}
/**
* Set the status of the booking
*
* @param status The status of the booking.
*/
public void setStatus(String status) {
this.status = status;
}
/**
* Get the notes associated with the booking.
*
* @return The notes associated with the booking.
*/
public String getNotes() {
return notes;
}
/**
* Set the notes associated with the booking.
*
* @param notes The notes associated with the booking.
*/
public void setNotes(String notes) {
this.notes = notes;
}
@Override
public HashMap<String, String> getHashMap() {
return new HashMap<String, String>() {{
put("booking-id", getId().toString());
put("patient-name", getPatient().getName());
put("therapy-name", getTherapy().getName());
put("camera-id", getCamera().getId().toString());
put("camera-type-label", getCamera().getType().getLabel());
put("camera-room-number", getCamera().getRoomNumber());
put("tracer-name", getTracer().getName());
put("tracer-id", getTracer().getId().toString());
put("tracer-dose", getTracerDose());
put("status", getStatus());
// get status label
String statusLabel = "default";
if (getStatus().equals("unconfirmed")) statusLabel = "warning";
if (getStatus().equals("confirmed")) statusLabel = "success";
if (getStatus().equals("rebooked")) statusLabel = "info";
put("status-with-label", "<span class=\"label label-as-badge label-" + statusLabel + "\">" + getStatus() + "</span>");
// get date
List<BookingSection> bookingSections = getBookingSections();
if (bookingSections.isEmpty()) {
put("date", "?");
} else {
put("date", bookingSections.get(0).getStart().toString("YYYY-MM-dd"));
}
// set up booking sections as string for day summary
String bookingSectionsAsString = "";
String bookingSectionsAsStringTimeOnly = "";
if (!bookingSections.isEmpty()) {
for (BookingSection b : bookingSections) {
bookingSectionsAsString += "<li class=\"list-group-item\">\n";
String startTime = b.getStart().toString("HH:mm");
String endTime = b.getEnd().toString("HH:mm");
bookingSectionsAsString += (startTime + " to " + endTime + " \n");
bookingSectionsAsString += "</li>";
bookingSectionsAsStringTimeOnly += (startTime + " - " + endTime + ", ");
}
} else {
bookingSectionsAsString = "<em>None</em>\n";
}
bookingSectionsAsStringTimeOnly = bookingSectionsAsStringTimeOnly.substring(0, bookingSectionsAsStringTimeOnly.length() - 2);
put("booking-sections-as-string", bookingSectionsAsString);
put("booking-sections-as-string-time-only", bookingSectionsAsStringTimeOnly);
// get days until
if (bookingSections.isEmpty()) {
put("days-until", "?");
} else {
int daysUntil = Days.daysBetween(new LocalDate(), bookingSections.get(0).getStart().toLocalDate()).getDays();
if (daysUntil == 0) {
put("days-until", "today");
} else if (daysUntil < 0) {
put("days-until", (daysUntil * -1) + " day" + (daysUntil == -1 ? "" : "s") + " ago");
} else {
put("days-until", "in " + daysUntil + " day" + (daysUntil == -1 ? "" : "s"));
}
}
// get staff
String staff = "";
if (getStaff().isEmpty()) {
staff = "<em>None</em>";
} else {
List<Staff> assignedStaff = getStaff();
for (Staff s : assignedStaff) {
staff += s.getName() + ", ";
}
staff = staff.substring(0, staff.length() - 2);
}
put("staff", staff);
//get staffID
String staffId = "";
if (getStaff().isEmpty()) {
staffId = "<em>None</em>";
} else {
List<Staff> assignedStaff = getStaff();
for (Staff s : assignedStaff) {
staffId += s.getId() + ", ";
}
staffId = staffId.substring(0, staffId.length() - 2);
}
put("staff-id-list", staffId);
// get notes
String notes = getNotes();
if (notes == null || notes.length() == 0) {
notes = "<em>None</em>";
} else {
notes = notes.replace("\n", "<br />");
}
put("notes", notes);
put("notes-unformatted", getNotes());
}};
}
}
|
500 internal fixed
|
src/main/java/nuclibook/models/Booking.java
|
500 internal fixed
|
|
Java
|
lgpl-2.1
|
e1b4099a44d064d16c640437aecb6774997d3746
| 0
|
xwiki/xwiki-platform,xwiki/xwiki-platform,xwiki/xwiki-platform,xwiki/xwiki-platform,xwiki/xwiki-platform
|
/*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.xwiki.test.ui.po.editor;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang3.LocaleUtils;
import org.openqa.selenium.By;
import org.openqa.selenium.Keys;
import org.openqa.selenium.NoSuchElementException;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.support.FindBy;
import org.openqa.selenium.support.ui.Select;
import org.xwiki.test.ui.po.BasePage;
import org.xwiki.test.ui.po.BootstrapSelect;
import org.xwiki.test.ui.po.InlinePage;
import org.xwiki.test.ui.po.ViewPage;
/**
* Represents the common actions possible on all Pages when using the "edit" action.
*
* @version $Id$
* @since 3.2M3
*/
public class EditPage extends BasePage
{
@FindBy(name = "action_saveandcontinue")
protected WebElement saveandcontinue;
@FindBy(name = "action_save")
protected WebElement save;
@FindBy(name = "action_cancel")
protected WebElement cancel;
@FindBy(id = "editcolumn")
protected WebElement currentEditorDiv;
@FindBy(id = "xwikidocsyntaxinput2")
protected WebElement syntaxIdSelect;
@FindBy(name = "parent")
private WebElement parentInput;
@FindBy(id = "xwikidoctitleinput")
private WebElement titleField;
@FindBy(id = "xwikidoclanguageinput2")
private WebElement defaultLanguageField;
/**
* The top floating edit menu bar.
*/
@FindBy(id = "editmenu")
private WebElement editMenuBar;
/**
* The entry on the edit menu bar that displays the current editor and allows us to switch the editor.
*/
@FindBy(id = "tmCurrentEditor")
private WebElement currentEditorMenu;
@FindBy(id = "csrf-warning-modal")
private WebElement csrfWarningModal;
@FindBy(id = "cancel-save-csrf")
private WebElement cancelCSRFWarningButton;
@FindBy(id = "force-save-csrf")
private WebElement forceSaveCSRFButton;
/**
* Enumerates the available editors.
*/
public enum Editor
{
WYSIWYG("WYSIWYG"),
WIKI("Wiki"),
RIGHTS("Access Rights"),
OBJECT("Objects"),
CLASS("Class");
/**
* The mapping between pretty names and editors.
*/
private static final Map<String, Editor> BY_PRETTY_NAME = new HashMap<String, Editor>();
static {
// NOTE: We cannot refer to a static enum field within the initializer because enums are initialized before
// any static initializers are run so we are forced to use a static block to build the map.
for (Editor editor : values()) {
BY_PRETTY_NAME.put(editor.getPrettyName(), editor);
}
}
/**
* The string used to display the name of the editor on the edit menu.
*/
private final String prettyName;
/**
* Defines a new editor with the given pretty name.
*
* @param prettyName the string used to display the name of the editor on the edit menu
*/
Editor(String prettyName)
{
this.prettyName = prettyName;
}
/**
* @return the string used to display the name of the editor on the edit menu
*/
public String getPrettyName()
{
return this.prettyName;
}
/**
* @param prettyName the string used to display the name of the editor on the edit menu
* @return the editor corresponding to the given pretty name, {@code null} if no editor matches the given pretty
* name
*/
public static Editor byPrettyName(String prettyName)
{
return BY_PRETTY_NAME.get(prettyName);
}
}
public void clickSaveAndContinue()
{
this.clickSaveAndContinue(true);
}
/**
* Clicks on the Save and Continue button. Use this instead of {@link #clickSaveAndContinue()} when you want to wait
* for a different message (e.g. an error message).
*
* @param wait {@code true} to wait for the page to be saved, {@code false} otherwise
*/
public void clickSaveAndContinue(boolean wait)
{
this.getSaveAndContinueButton().click();
if (wait) {
// Wait until the page is really saved.
waitForNotificationSuccessMessage("Saved");
}
}
/**
* Use this method instead of {@link #clickSaveAndContinue()} and call {@link WebElement#click()} when you know that
* the next page is not a standard XWiki {@link InlinePage}.
*
* @return the save and continue button used to submit the form.
*/
public WebElement getSaveAndContinueButton()
{
return saveandcontinue;
}
public <T extends ViewPage> T clickSaveAndView()
{
clickSaveAndView(true);
return (T) new ViewPage();
}
/**
* Useful when the save and view operation could fail on the client side and a reload (the view part) might thus not
* take place.
*
* @param wait if we should wait for the page to be reloaded
* @since 7.4M2
*/
public void clickSaveAndView(boolean wait)
{
if (wait) {
getDriver().addPageNotYetReloadedMarker();
}
this.getSaveAndViewButton().click();
if (wait) {
// Since we might have a loading step between clicking Save&View and the view page actually loading
// (specifically when using templates that have child documents associated), we need to wait for the save to
// finish and for the redirect to occur.
getDriver().waitUntilPageIsReloaded();
}
}
/**
* Use this method instead of {@link #clickSaveAndView()} and call {@link WebElement#click()} when you know that the
* next page is not a standard XWiki {@link InlinePage}.
*
* @return the save and view button used to submit the form.
*/
public WebElement getSaveAndViewButton()
{
return save;
}
public ViewPage clickCancel()
{
this.cancel.click();
return new ViewPage();
}
/**
* @return the editor being used on this page
*/
public Editor getEditor()
{
String editor = "";
String[] cssClasses = this.currentEditorDiv.getAttribute("class").split(" ");
for (String cssClass : cssClasses) {
if (cssClass.startsWith("editor-")) {
editor = cssClass.substring(7);
break;
}
}
return Editor.valueOf(editor.toUpperCase());
}
/**
* @return the syntax id of the page
* @since 3.2M3
*/
public String getSyntaxId()
{
return this.syntaxIdSelect.getAttribute("value");
}
/**
* @since 3.2M3
*/
public void setSyntaxId(String syntaxId)
{
Select select = new Select(this.syntaxIdSelect);
select.selectByValue(syntaxId);
}
/**
* @return the value of the parent field.
* @since 7.2M2
*/
public String getParent()
{
return this.parentInput.getAttribute("value");
}
/**
* @since 7.2M2
*/
@Override
public String getDocumentTitle()
{
return this.titleField.getAttribute("value");
}
/**
* @since 7.4M2
*/
@Override
public void waitUntilPageJSIsLoaded()
{
super.waitUntilPageJSIsLoaded();
// Actionbuttons javascript for saving the page.
getDriver().waitUntilJavascriptCondition(
"return XWiki.actionButtons != undefined && " + "XWiki.actionButtons.EditActions != undefined && "
+ "XWiki.actionButtons.AjaxSaveAndContinue != undefined");
}
protected Set<Locale> getExistingLocales(List<WebElement> elements)
{
Set<Locale> locales = new HashSet<>(elements.size());
for (WebElement element : elements) {
locales.add(LocaleUtils.toLocale(element.getText()));
}
return locales;
}
/**
* @return a list of the locales already translated for this document
* @since 9.0RC1
*/
public Set<Locale> getExistingLocales()
{
List<WebElement> elements =
getDriver().findElementsWithoutWaiting(By.xpath("//p[starts-with(text(), 'Existing translations:')]//a"));
return getExistingLocales(elements);
}
/**
* @return a list of the supported locales not yet translated for this document
* @since 9.0RC1
*/
public Set<Locale> getNotExistingLocales()
{
List<WebElement> elements =
getDriver().findElementsWithoutWaiting(By.xpath("//p[starts-with(text(), 'Translate this page in:')]//a"));
return getExistingLocales(elements);
}
/**
* @param locale the locale to translate to
* @return the target locale edit page
* @since 9.0RC1
*/
public WikiEditPage clickTranslate(String locale)
{
WebElement element;
if ("default".equals(locale)) {
element = getDriver().findElementByLinkText("default");
} else {
element = getDriver().findElementWithoutWaiting(
By.xpath("//p[starts-with(text(), 'Translate this page in:')]//a[text()='" + locale + "']"));
}
element.click();
return new WikiEditPage();
}
/**
* Set the default language input field.
*
* @param defaultLanguage the string to fill the input.
* @since 11.3RC1
*/
public void setDefaultLanguage(String defaultLanguage)
{
// Select the parent of the default language field because we're using the Bootstrap select widget.
WebElement parent = this.defaultLanguageField.findElement(By.xpath("./.."));
BootstrapSelect select = new BootstrapSelect(parent, getDriver());
select.selectByValue(defaultLanguage);
}
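/**
* @return the value of the currently selected default language
*/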
public String getDefaultLanguage()
{
return new Select(this.defaultLanguageField).getFirstSelectedOption().getAttribute("value");
}
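/**
* @return {@code true} if the CSRF warning modal is displayed, {@code false} otherwise
*/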
public boolean isCSRFWarningDisplayed()
{
try {
return this.csrfWarningModal.isDisplayed();
} catch (NoSuchElementException e) {
return false;
}
}
public void clickCancelCSRFWarningButton()
{
this.cancelCSRFWarningButton.click();
}
public void clickForceSaveCSRFButton()
{
this.forceSaveCSRFButton.click();
}
/**
* Cancel the edition by using keyboard shortcut.
* @return a new {@link ViewPage}
* @since 11.9RC1
*/
public ViewPage useShortcutKeyForCancellingEdition()
{
getDriver().addPageNotYetReloadedMarker();
getDriver().createActions().keyDown(Keys.ALT).sendKeys("c").keyUp(Keys.ALT).perform();
getDriver().waitUntilPageIsReloaded();
return new ViewPage();
}
}
|
xwiki-platform-core/xwiki-platform-test/xwiki-platform-test-ui/src/main/java/org/xwiki/test/ui/po/editor/EditPage.java
|
/*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.xwiki.test.ui.po.editor;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang3.LocaleUtils;
import org.openqa.selenium.By;
import org.openqa.selenium.Keys;
import org.openqa.selenium.NoSuchElementException;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.support.FindBy;
import org.openqa.selenium.support.ui.Select;
import org.xwiki.test.ui.po.BasePage;
import org.xwiki.test.ui.po.InlinePage;
import org.xwiki.test.ui.po.ViewPage;
/**
* Represents the common actions possible on all Pages when using the "edit" action.
*
* @version $Id$
* @since 3.2M3
*/
public class EditPage extends BasePage
{
@FindBy(name = "action_saveandcontinue")
protected WebElement saveandcontinue;
@FindBy(name = "action_save")
protected WebElement save;
@FindBy(name = "action_cancel")
protected WebElement cancel;
@FindBy(id = "editcolumn")
protected WebElement currentEditorDiv;
@FindBy(id = "xwikidocsyntaxinput2")
protected WebElement syntaxIdSelect;
@FindBy(name = "parent")
private WebElement parentInput;
@FindBy(id = "xwikidoctitleinput")
private WebElement titleField;
@FindBy(id = "xwikidoclanguageinput2")
private WebElement defaultLanguageField;
/**
* The top floating edit menu bar.
*/
@FindBy(id = "editmenu")
private WebElement editMenuBar;
/**
* The entry on the edit menu bar that displays the current editor and allows us to switch the editor.
*/
@FindBy(id = "tmCurrentEditor")
private WebElement currentEditorMenu;
@FindBy(id = "csrf-warning-modal")
private WebElement csrfWarningModal;
@FindBy(id = "cancel-save-csrf")
private WebElement cancelCSRFWarningButton;
@FindBy(id = "force-save-csrf")
private WebElement forceSaveCSRFButton;
/**
* Enumerates the available editors.
*/
public enum Editor
{
WYSIWYG("WYSIWYG"),
WIKI("Wiki"),
RIGHTS("Access Rights"),
OBJECT("Objects"),
CLASS("Class");
/**
* The mapping between pretty names and editors.
*/
private static final Map<String, Editor> BY_PRETTY_NAME = new HashMap<String, Editor>();
static {
// NOTE: We cannot refer to a static enum field within the initializer because enums are initialized before
// any static initializers are run so we are forced to use a static block to build the map.
for (Editor editor : values()) {
BY_PRETTY_NAME.put(editor.getPrettyName(), editor);
}
}
/**
* The string used to display the name of the editor on the edit menu.
*/
private final String prettyName;
/**
* Defines a new editor with the given pretty name.
*
* @param prettyName the string used to display the name of the editor on the edit menu
*/
Editor(String prettyName)
{
this.prettyName = prettyName;
}
/**
* @return the string used to display the name of the editor on the edit menu
*/
public String getPrettyName()
{
return this.prettyName;
}
/**
* @param prettyName the string used to display the name of the editor on the edit menu
* @return the editor corresponding to the given pretty name, {@code null} if no editor matches the given pretty
* name
*/
public static Editor byPrettyName(String prettyName)
{
return BY_PRETTY_NAME.get(prettyName);
}
}
public void clickSaveAndContinue()
{
this.clickSaveAndContinue(true);
}
/**
* Clicks on the Save and Continue button. Use this instead of {@link #clickSaveAndContinue()} when you want to wait
* for a different message (e.g. an error message).
*
* @param wait {@code true} to wait for the page to be saved, {@code false} otherwise
*/
public void clickSaveAndContinue(boolean wait)
{
this.getSaveAndContinueButton().click();
if (wait) {
// Wait until the page is really saved.
waitForNotificationSuccessMessage("Saved");
}
}
/**
* Use this method instead of {@link #clickSaveAndContinue()} and call {@link WebElement#click()} when you know that
* the next page is not a standard XWiki {@link InlinePage}.
*
* @return the save and continue button used to submit the form.
*/
public WebElement getSaveAndContinueButton()
{
return saveandcontinue;
}
public <T extends ViewPage> T clickSaveAndView()
{
clickSaveAndView(true);
return (T) new ViewPage();
}
/**
* Useful when the save and view operation could fail on the client side and a reload (the view part) might thus not
* take place.
*
* @param wait if we should wait for the page to be reloaded
* @since 7.4M2
*/
public void clickSaveAndView(boolean wait)
{
if (wait) {
getDriver().addPageNotYetReloadedMarker();
}
this.getSaveAndViewButton().click();
if (wait) {
// Since we might have a loading step between clicking Save&View and the view page actually loading
// (specifically when using templates that have child documents associated), we need to wait for the save to
// finish and for the redirect to occur.
getDriver().waitUntilPageIsReloaded();
}
}
/**
* Use this method instead of {@link #clickSaveAndView()} and call {@link WebElement#click()} when you know that the
* next page is not a standard XWiki {@link InlinePage}.
*
* @return the save and view button used to submit the form.
*/
public WebElement getSaveAndViewButton()
{
return save;
}
public ViewPage clickCancel()
{
this.cancel.click();
return new ViewPage();
}
/**
* @return the editor being used on this page
*/
public Editor getEditor()
{
String editor = "";
String[] CSSClasses = this.currentEditorDiv.getAttribute("class").split(" ");
for (String cssClasse : CSSClasses) {
if (cssClasse.startsWith("editor-")) {
editor = cssClasse.substring(7);
break;
}
}
return Editor.valueOf(editor.toUpperCase());
}
/**
* @return the syntax id of the page
* @since 3.2M3
*/
public String getSyntaxId()
{
return this.syntaxIdSelect.getAttribute("value");
}
/**
* @since 3.2M3
*/
public void setSyntaxId(String syntaxId)
{
Select select = new Select(this.syntaxIdSelect);
select.selectByValue(syntaxId);
}
/**
* @return the value of the parent field.
* @since 7.2M2
*/
public String getParent()
{
return this.parentInput.getAttribute("value");
}
/**
* @since 7.2M2
*/
@Override
public String getDocumentTitle()
{
return this.titleField.getAttribute("value");
}
/**
* @since 7.4M2
*/
@Override
public void waitUntilPageJSIsLoaded()
{
super.waitUntilPageJSIsLoaded();
// // Actionbuttons javascript for saving the page.
getDriver().waitUntilJavascriptCondition(
"return XWiki.actionButtons != undefined && " + "XWiki.actionButtons.EditActions != undefined && "
+ "XWiki.actionButtons.AjaxSaveAndContinue != undefined");
}
protected Set<Locale> getExistingLocales(List<WebElement> elements)
{
Set<Locale> locales = new HashSet<>(elements.size());
for (WebElement element : elements) {
locales.add(LocaleUtils.toLocale(element.getText()));
}
return locales;
}
/**
* @return a list of the locales already translated for this document
* @since 9.0RC1
*/
public Set<Locale> getExistingLocales()
{
List<WebElement> elements =
getDriver().findElementsWithoutWaiting(By.xpath("//p[starts-with(text(), 'Existing translations:')]//a"));
return getExistingLocales(elements);
}
/**
* @return a list of the supported locales not yet translated for this document
* @since 9.0RC1
*/
public Set<Locale> getNotExistingLocales()
{
List<WebElement> elements =
getDriver().findElementsWithoutWaiting(By.xpath("//p[starts-with(text(), 'Translate this page in:')]//a"));
return getExistingLocales(elements);
}
/**
* @param locale the locale to translate to
* @return the target locale edit page
* @since 9.0RC1
*/
public WikiEditPage clickTranslate(String locale)
{
WebElement element;
if ("default".equals(locale)) {
element = getDriver().findElementByLinkText("default");
} else {
element = getDriver().findElementWithoutWaiting(
By.xpath("//p[starts-with(text(), 'Translate this page in:')]//a[text()='" + locale + "']"));
}
element.click();
return new WikiEditPage();
}
/**
* Set the default language input field.
* @param defaultLanguage the string to fill the input.
* @since 11.3RC1
*/
public void setDefaultLanguage(String defaultLanguage)
{
defaultLanguageField.clear();
defaultLanguageField.sendKeys(defaultLanguage);
}
public String getDefaultLanguage()
{
return defaultLanguageField.getAttribute("value");
}
public boolean isCSRFWarningDisplayed()
{
try {
return this.csrfWarningModal.isDisplayed();
} catch (NoSuchElementException e) {
return false;
}
}
public void clickCancelCSRFWarningButton()
{
this.cancelCSRFWarningButton.click();
}
public void clickForceSaveCSRFButton()
{
this.forceSaveCSRFButton.click();
}
/**
* Cancel the edition by using keyboard shortcut.
* @return a new {@link ViewPage}
* @since 11.9RC1
*/
public ViewPage useShortcutKeyForCancellingEdition()
{
getDriver().addPageNotYetReloadedMarker();
getDriver().createActions().keyDown(Keys.ALT).sendKeys("c").keyUp(Keys.ALT).perform();
getDriver().waitUntilPageIsReloaded();
return new ViewPage();
}
}
|
XWIKI-17201: Extend the page Information tab with content from the edit panels
* Update the test page object because we're now using the Bootstrap select widget to set the default page locale.
|
xwiki-platform-core/xwiki-platform-test/xwiki-platform-test-ui/src/main/java/org/xwiki/test/ui/po/editor/EditPage.java
|
XWIKI-17201: Extend the page Information tab with content from the edit panels * Update the test page object because we're now using the Bootstrap select widget to set the default page locale.
|
|
Java
|
apache-2.0
|
7cebac42d433b5f0439993ef5676da462b262fe0
| 0
|
arquillian/arquillian-cube-q
|
package org.arquillian.cube.q.api;
import java.util.concurrent.TimeUnit;
public interface Q {
Action on(String machine, int port);
public interface Action {
Action down();
Action timeout(TimeoutType timeType);
Action timeout(TimeoutType timeType, ToxicityType toxicityType, ToxicDirectionStream toxicDirectionStream);
Action latency(LatencyType latencyType);
Action latency(LatencyType latencyType, JitterType jitterType, ToxicityType toxicityType, ToxicDirectionStream toxicDirectionStream);
Action bandwidth(RateType rateType);
Action bandwidth(RateType rateType, ToxicityType toxicityType, ToxicDirectionStream toxicDirectionStream);
Action slowClose(DelayType delayType);
Action slowClose(DelayType delayType, ToxicityType toxicityType, ToxicDirectionStream toxicDirectionStream);
Action slice(SliceAverageSizeType sliceAverageSizeType, DelayType delayType);
Action slice(SliceAverageSizeType sliceAverageSizeType, DelayType delayType, SliceSizeVariationType sliceSizeVariationType, ToxicityType toxicityType, ToxicDirectionStream toxicDirectionStream);
void exec() throws Exception;
void exec(Perform perform) throws Exception;
void exec(RunCondition runCondition, Perform perform) throws Exception;
}
public interface Perform {
void execute() throws Exception;
}
public interface RunCondition {
boolean isExecutable();
}
public static enum ToxicDirectionStream {
DOWNSTREAM, UPSTREAM
}
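/**
* Run condition that stays executable until the given duration has elapsed.
*/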
public static class DurationRunCondition implements RunCondition {
protected long finishTime;
protected DurationRunCondition(long duration, TimeUnit unit) {
final long durationInMillis = unit.toMillis(duration);
this.finishTime = System.currentTimeMillis() + durationInMillis;
}
public static DurationRunCondition during(long duration, TimeUnit timeUnit) {
return new DurationRunCondition(duration, timeUnit);
}
@Override
public boolean isExecutable() {
return System.currentTimeMillis() < finishTime;
}
}
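/**
* Run condition that stays executable for a fixed number of iterations.
*/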
public static class IterationRunCondition implements RunCondition {
protected final long iterations;
protected long currentIteration = 0;
protected IterationRunCondition(long iterations) {
this.iterations = iterations;
}
public static IterationRunCondition times(long numberOfIterations) {
return new IterationRunCondition(numberOfIterations);
}
@Override
public boolean isExecutable() {
if (currentIteration < iterations) {
currentIteration++;
return true;
} else {
return false;
}
}
}
public static final class ToxicityType extends FloatType {
protected ToxicityType(float value) {
super(value);
}
public static ToxicityType fullToxic() {
return new ToxicityType(1f);
}
public static ToxicityType noToxic() {
return new ToxicityType(0f);
}
/**
* Toxicity value, expressed as a fraction between 0 and 1.
*
* @param toxicity the toxicity value, where 0 means no toxicity and 1 means full toxicity
* @return a {@code ToxicityType} wrapping the given value
*/
public static ToxicityType toxicity(float toxicity) {
return new ToxicityType(toxicity);
}
}
public static final class SliceSizeVariationType extends LongType {
protected SliceSizeVariationType(long value) {
super(value);
}
public static SliceSizeVariationType sliceSizeVariation(long sizeVariation) {
return new SliceSizeVariationType(sizeVariation);
}
}
public static final class SliceAverageSizeType extends LongType {
protected SliceAverageSizeType(long value) {
super(value);
}
public static SliceAverageSizeType sliceAverageSize(long average) {
return new SliceAverageSizeType(average);
}
}
public static final class DelayType extends LongType {
protected DelayType(long value) {
super(value);
}
public static DelayType delay(long delay) {
return new DelayType(delay);
}
public static DelayType delay(long delay, TimeUnit timeUnit) {
return new DelayType(timeUnit.toMillis(delay));
}
}
public static final class RateType extends LongType {
protected RateType(long value) {
super(value);
}
public static RateType rate(long rate) {
return new RateType(rate);
}
}
public static final class JitterType extends IntegerType {
protected JitterType(int value) {
super(value);
}
public static JitterType jitter(int jitter) {
return new JitterType(jitter);
}
}
public static final class LatencyType extends LongType {
protected LatencyType(long value) {
super(value);
}
public static LatencyType latency(long time) {
return new LatencyType(time);
}
public static LatencyType latency(long time, TimeUnit timeUnit) {
return new LatencyType(timeUnit.toMillis(time));
}
}
public static final class TimeoutType extends LongType {
protected TimeoutType(long value) {
super(value);
}
public static TimeoutType timeout(long time) {
return new TimeoutType(time);
}
public static TimeoutType timeout(long time, TimeUnit timeUnit) {
return new TimeoutType(timeUnit.toMillis(time));
}
}
public static abstract class FloatType {
private float value;
public FloatType(float value) {
this.value = value;
}
public float getValue() {
return value;
}
}
public static abstract class LongType {
private long value;
protected LongType(long value) {
this.value = value;
}
public long getValue() {
return value;
}
}
public static abstract class IntegerType {
private int value;
protected IntegerType(int value) {
this.value = value;
}
public int getValue() {
return value;
}
}
}
|
api/src/main/java/org/arquillian/cube/q/api/Q.java
|
package org.arquillian.cube.q.api;
import java.util.concurrent.TimeUnit;
public interface Q {
Action on(String machine, int port);
public interface Action {
Action down();
Action timeout(TimeoutType timeType);
Action timeout(TimeoutType timeType, ToxicityType toxicityType, ToxicDirectionStream toxicDirectionStream);
Action latency(LatencyType latencyType);
Action latency(LatencyType latencyType, JitterType jitterType, ToxicityType toxicityType, ToxicDirectionStream toxicDirectionStream);
Action bandwidth(RateType rateType);
Action bandwidth(RateType rateType, ToxicityType toxicityType, ToxicDirectionStream toxicDirectionStream);
Action slowClose(DelayType delayType);
Action slowClose(DelayType delayType, ToxicityType toxicityType, ToxicDirectionStream toxicDirectionStream);
Action slice(SliceAverageSizeType sliceAverageSizeType, DelayType delayType);
Action slice(SliceAverageSizeType sliceAverageSizeType, DelayType delayType, SliceSizeVariationType sliceSizeVariationType, ToxicityType toxicityType, ToxicDirectionStream toxicDirectionStream);
void exec() throws Exception;
void exec(Perform perform) throws Exception;
void exec(RunCondition runCondition, Perform perform) throws Exception;
}
public interface Perform {
void execute() throws Exception;
}
public interface RunCondition {
boolean isExecutable();
}
public static enum ToxicDirectionStream {
DOWNSTREAM, UPSTREAM
}
public static class DurationRunCondition implements RunCondition {
protected long finishTime;
protected DurationRunCondition(long duration, TimeUnit unit) {
final long durationInMillis = unit.toMillis(duration);
this.finishTime = System.currentTimeMillis() + durationInMillis;
}
public static DurationRunCondition during(long duration, TimeUnit timeUnit) {
return new DurationRunCondition(duration, timeUnit);
}
@Override
public boolean isExecutable() {
return System.currentTimeMillis() < finishTime;
}
}
public static class IterationRunCondition implements RunCondition {
protected final long iterations;
protected long currentIteration = 0;
protected IterationRunCondition(long iterations) {
this.iterations = iterations;
}
public static IterationRunCondition times(long numberOfIterations) {
return new IterationRunCondition(numberOfIterations);
}
@Override
public boolean isExecutable() {
if (currentIteration < iterations) {
currentIteration++;
return true;
} else {
return false;
}
}
}
public static final class ToxicityType extends FloatType {
protected ToxicityType(float value) {
super(value);
}
public static ToxicityType fullToxic() {
return new ToxicityType(1f);
}
public static ToxicityType noToxic() {
return new ToxicityType(0f);
}
/**
* Toxicity value. It is a percentage between 0 and 1.
* @param toxicity
* @return
*/
public static ToxicityType toxicity(float toxicity) {
return new ToxicityType(toxicity);
}
}
public static final class SliceSizeVariationType extends LongType {
protected SliceSizeVariationType(long value) {
super(value);
}
public static SliceSizeVariationType sliceSizeVariation(long sizeVariation) {
return new SliceSizeVariationType(sizeVariation);
}
}
public static final class SliceAverageSizeType extends LongType {
protected SliceAverageSizeType(long value) {
super(value);
}
public static SliceAverageSizeType sliceAverageSize(long average) {
return new SliceAverageSizeType(average);
}
}
public static final class DelayType extends LongType {
protected DelayType(long value) {
super(value);
}
public static DelayType delay(long delay) {
return new DelayType(delay);
}
}
public static final class RateType extends LongType {
protected RateType(long value) {
super(value);
}
public static RateType rate(long rate) {
return new RateType(rate);
}
}
public static final class JitterType extends IntegerType {
protected JitterType(int value) {
super(value);
}
public static JitterType jitter(int jitter) {
return new JitterType(jitter);
}
}
public static final class LatencyType extends LongType {
protected LatencyType(long value) {
super(value);
}
public static LatencyType latency(long time) {
return new LatencyType(time);
}
}
public static final class TimeoutType extends LongType {
protected TimeoutType(long value) {
super(value);
}
public static TimeoutType timeout(long time) {
return new TimeoutType(time);
}
}
public static abstract class FloatType {
private float value;
public FloatType(float value) {
this.value = value;
}
public float getValue() {
return value;
}
}
public static abstract class LongType {
private long value;
protected LongType(long value) {
this.value = value;
}
public long getValue() {
return value;
}
}
public static abstract class IntegerType {
private int value;
protected IntegerType(int value) {
this.value = value;
}
public int getValue() {
return value;
}
}
}
|
add timeunit support in time toxics
|
api/src/main/java/org/arquillian/cube/q/api/Q.java
|
add timeunit support in time toxics
|
|
Java
|
apache-2.0
|
37e4d4e54772df0f8368d789963083ab5aaa806c
| 0
|
paulstapleton/flowable-engine,yvoswillens/flowable-engine,dbmalkovsky/flowable-engine,lsmall/flowable-engine,lsmall/flowable-engine,flowable/flowable-engine,yvoswillens/flowable-engine,lsmall/flowable-engine,paulstapleton/flowable-engine,yvoswillens/flowable-engine,yvoswillens/flowable-engine,lsmall/flowable-engine,flowable/flowable-engine,flowable/flowable-engine,dbmalkovsky/flowable-engine,paulstapleton/flowable-engine,paulstapleton/flowable-engine,dbmalkovsky/flowable-engine,dbmalkovsky/flowable-engine,flowable/flowable-engine
|
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.compatibility;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ConcurrentMap;
import javax.xml.namespace.QName;
import org.activiti.engine.ActivitiException;
import org.activiti.engine.ProcessEngine;
import org.activiti.engine.impl.bpmn.parser.factory.ActivityBehaviorFactory;
import org.activiti.engine.impl.bpmn.parser.factory.ListenerFactory;
import org.activiti.engine.impl.el.ExpressionManager;
import org.activiti.engine.parse.BpmnParseHandler;
import org.flowable.common.engine.impl.EngineDeployer;
import org.flowable.common.engine.impl.history.HistoryLevel;
import org.flowable.engine.impl.cfg.ProcessEngineConfigurationImpl;
import org.flowable.engine.impl.cfg.StandaloneProcessEngineConfiguration;
import org.flowable.engine.impl.rules.RulesDeployer;
public class DefaultProcessEngineFactory {
/**
* Takes in a Flowable 6 process engine config, gives back a Flowable 5 process engine.
*/
public ProcessEngine buildProcessEngine(ProcessEngineConfigurationImpl flowable6Configuration) {
org.activiti.engine.impl.cfg.ProcessEngineConfigurationImpl flowable5Configuration = null;
if (flowable6Configuration instanceof StandaloneProcessEngineConfiguration) {
flowable5Configuration = new org.activiti.engine.impl.cfg.StandaloneProcessEngineConfiguration();
copyConfigItems(flowable6Configuration, flowable5Configuration);
return flowable5Configuration.buildProcessEngine();
} else {
throw new ActivitiException("Unsupported process engine configuration");
}
}
protected void copyConfigItems(ProcessEngineConfigurationImpl flowable6Configuration, org.activiti.engine.impl.cfg.ProcessEngineConfigurationImpl flowable5Configuration) {
flowable5Configuration.setFlowable5CompatibilityHandler(flowable6Configuration.getFlowable5CompatibilityHandler());
copyJdbcConfig(flowable6Configuration, flowable5Configuration);
copyHistoryConfig(flowable6Configuration, flowable5Configuration);
copyMailConfig(flowable6Configuration, flowable5Configuration);
copyDiagramConfig(flowable6Configuration, flowable5Configuration);
copyAsyncExecutorConfig(flowable6Configuration, flowable5Configuration);
copyJpaConfig(flowable6Configuration, flowable5Configuration);
copyBeans(flowable6Configuration, flowable5Configuration);
copyCaches(flowable6Configuration, flowable5Configuration);
copyActivityBehaviorFactory(flowable6Configuration, flowable5Configuration);
copyExpressionManager(flowable6Configuration, flowable5Configuration);
copyListenerFactory(flowable6Configuration, flowable5Configuration);
convertParseHandlers(flowable6Configuration, flowable5Configuration);
copyCustomMybatisMappers(flowable6Configuration, flowable5Configuration);
copyWsConfig(flowable6Configuration, flowable5Configuration);
flowable5Configuration.setEventDispatcher(flowable6Configuration.getEventDispatcher());
copyPostDeployers(flowable6Configuration, flowable5Configuration);
flowable5Configuration.setBusinessCalendarManager(flowable6Configuration.getBusinessCalendarManager());
copyCustomVariableTypes(flowable6Configuration, flowable5Configuration);
}
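// Copies the data source and any explicitly configured JDBC settings from the Flowable 6 configuration.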
protected void copyJdbcConfig(ProcessEngineConfigurationImpl flowable6Configuration, org.activiti.engine.impl.cfg.ProcessEngineConfigurationImpl flowable5Configuration) {
flowable5Configuration.setDataSource(flowable6Configuration.getDataSource());
if (flowable6Configuration.getJdbcDriver() != null) {
flowable5Configuration.setJdbcDriver(flowable6Configuration.getJdbcDriver());
}
if (flowable6Configuration.getJdbcUrl() != null) {
flowable5Configuration.setJdbcUrl(flowable6Configuration.getJdbcUrl());
}
if (flowable6Configuration.getJdbcUsername() != null) {
flowable5Configuration.setJdbcUsername(flowable6Configuration.getJdbcUsername());
}
if (flowable6Configuration.getJdbcPassword() != null) {
flowable5Configuration.setJdbcPassword(flowable6Configuration.getJdbcPassword());
}
if (flowable6Configuration.getIdBlockSize() > 0) {
flowable5Configuration.setIdBlockSize(flowable6Configuration.getIdBlockSize());
}
if (flowable6Configuration.getJdbcMaxActiveConnections() > 0) {
flowable5Configuration.setJdbcMaxActiveConnections(flowable6Configuration.getJdbcMaxActiveConnections());
}
if (flowable6Configuration.getDatabaseTablePrefix() != null) {
flowable5Configuration.setDatabaseTablePrefix(flowable6Configuration.getDatabaseTablePrefix());
}
}
protected void copyHistoryConfig(ProcessEngineConfigurationImpl flowable6Configuration, org.activiti.engine.impl.cfg.ProcessEngineConfigurationImpl flowable5Configuration) {
flowable5Configuration.setHistoryLevel(HistoryLevel.getHistoryLevelForKey(flowable6Configuration.getHistoryLevel().getKey()));
}
protected void copyDiagramConfig(ProcessEngineConfigurationImpl flowable6Configuration, org.activiti.engine.impl.cfg.ProcessEngineConfigurationImpl flowable5Configuration) {
flowable5Configuration.setCreateDiagramOnDeploy(flowable6Configuration.isCreateDiagramOnDeploy());
flowable5Configuration.setActivityFontName(flowable6Configuration.getActivityFontName());
flowable5Configuration.setLabelFontName(flowable6Configuration.getLabelFontName());
flowable5Configuration.setAnnotationFontName(flowable6Configuration.getAnnotationFontName());
}
protected void copyMailConfig(ProcessEngineConfigurationImpl flowable6Configuration, org.activiti.engine.impl.cfg.ProcessEngineConfigurationImpl flowable5Configuration) {
flowable5Configuration.setMailServerDefaultFrom(flowable6Configuration.getMailServerDefaultFrom());
flowable5Configuration.setMailServerHost(flowable6Configuration.getMailServerHost());
flowable5Configuration.setMailServerPassword(flowable6Configuration.getMailServerPassword());
flowable5Configuration.setMailServerPort(flowable6Configuration.getMailServerPort());
flowable5Configuration.setMailServerUsername(flowable6Configuration.getMailServerUsername());
flowable5Configuration.setMailServerUseSSL(flowable6Configuration.getMailServerUseSSL());
flowable5Configuration.setMailServerUseTLS(flowable6Configuration.getMailServerUseTLS());
if (flowable6Configuration.getMailServers() != null && flowable6Configuration.getMailServers().size() > 0) {
flowable5Configuration.getMailServers().putAll(flowable6Configuration.getMailServers());
}
if (flowable6Configuration.getMailSessionJndi() != null) {
flowable5Configuration.setMailSessionJndi(flowable6Configuration.getMailSessionJndi());
}
}
protected void copyAsyncExecutorConfig(ProcessEngineConfigurationImpl flowable6Configuration, org.activiti.engine.impl.cfg.ProcessEngineConfigurationImpl flowable5Configuration) {
if (flowable6Configuration.isAsyncExecutorActivate()) {
flowable5Configuration.setAsyncExecutorActivate(true);
}
flowable5Configuration.setDefaultFailedJobWaitTime(flowable6Configuration.getDefaultFailedJobWaitTime());
flowable5Configuration.setAsyncFailedJobWaitTime(flowable6Configuration.getAsyncFailedJobWaitTime());
flowable5Configuration.setAsyncExecutor(flowable6Configuration.getAsyncExecutor());
}
protected void copyJpaConfig(ProcessEngineConfigurationImpl flowable6Configuration, org.activiti.engine.impl.cfg.ProcessEngineConfigurationImpl flowable5Configuration) {
flowable5Configuration.setJpaCloseEntityManager(flowable6Configuration.isJpaCloseEntityManager());
flowable5Configuration.setJpaHandleTransaction(flowable6Configuration.isJpaHandleTransaction());
// We want to reuse the entity manager factory between the two engines
if (flowable6Configuration.getJpaEntityManagerFactory() != null) {
flowable5Configuration.setJpaEntityManagerFactory(flowable6Configuration.getJpaEntityManagerFactory());
} else {
flowable5Configuration.setJpaPersistenceUnitName(flowable6Configuration.getJpaPersistenceUnitName());
}
}
protected void copyBeans(ProcessEngineConfigurationImpl flowable6Configuration, org.activiti.engine.impl.cfg.ProcessEngineConfigurationImpl flowable5Configuration) {
if (flowable6Configuration.getBeans() != null) {
flowable5Configuration.setBeans(flowable6Configuration.getBeans());
}
}
protected void copyCaches(ProcessEngineConfigurationImpl flowable6Configuration, org.activiti.engine.impl.cfg.ProcessEngineConfigurationImpl flowable5Configuration) {
flowable5Configuration.setProcessDefinitionCacheLimit(flowable6Configuration.getProcessDefinitionCacheLimit());
flowable5Configuration.setEnableProcessDefinitionInfoCache(flowable6Configuration.isEnableProcessDefinitionInfoCache());
flowable5Configuration.setProcessDefinitionCache(flowable6Configuration.getProcessDefinitionCache());
flowable5Configuration.setKnowledgeBaseCacheLimit(flowable6Configuration.getKnowledgeBaseCacheLimit());
flowable5Configuration.setKnowledgeBaseCache(flowable6Configuration.getKnowledgeBaseCache());
}
protected void copyActivityBehaviorFactory(ProcessEngineConfigurationImpl flowable6Configuration, org.activiti.engine.impl.cfg.ProcessEngineConfigurationImpl flowable5Configuration) {
if (flowable6Configuration.getFlowable5ActivityBehaviorFactory() != null) {
flowable5Configuration.setActivityBehaviorFactory((ActivityBehaviorFactory) flowable6Configuration.getFlowable5ActivityBehaviorFactory());
}
}
protected void copyExpressionManager(ProcessEngineConfigurationImpl flowable6Configuration, org.activiti.engine.impl.cfg.ProcessEngineConfigurationImpl flowable5Configuration) {
if (flowable6Configuration.getFlowable5ExpressionManager() != null) {
flowable5Configuration.setExpressionManager((ExpressionManager) flowable6Configuration.getFlowable5ExpressionManager());
}
}
protected void copyListenerFactory(ProcessEngineConfigurationImpl flowable6Configuration, org.activiti.engine.impl.cfg.ProcessEngineConfigurationImpl flowable5Configuration) {
if (flowable6Configuration.getFlowable5ListenerFactory() != null) {
flowable5Configuration.setListenerFactory((ListenerFactory) flowable6Configuration.getFlowable5ListenerFactory());
}
}
protected void copyCustomMybatisMappers(ProcessEngineConfigurationImpl flowable6Configuration, org.activiti.engine.impl.cfg.ProcessEngineConfigurationImpl flowable5Configuration) {
if (flowable6Configuration.getFlowable5CustomMybatisMappers() != null) {
flowable5Configuration.setCustomMybatisMappers(flowable6Configuration.getFlowable5CustomMybatisMappers());
}
if (flowable6Configuration.getFlowable5CustomMybatisXMLMappers() != null) {
flowable5Configuration.setCustomMybatisXMLMappers(flowable6Configuration.getFlowable5CustomMybatisXMLMappers());
}
}
protected void copyWsConfig(ProcessEngineConfigurationImpl flowable6Configuration, org.activiti.engine.impl.cfg.ProcessEngineConfigurationImpl flowable5Configuration) {
if (flowable6Configuration.getWsSyncFactoryClassName() != null) {
flowable5Configuration.setWsSyncFactoryClassName(flowable6Configuration.getWsSyncFactoryClassName());
}
ConcurrentMap<QName, URL> endpointMap = flowable6Configuration.getWsOverridenEndpointAddresses();
if (endpointMap != null) {
for (QName endpointQName : endpointMap.keySet()) {
flowable5Configuration.addWsEndpointAddress(endpointQName, endpointMap.get(endpointQName));
}
}
}
protected void copyPostDeployers(ProcessEngineConfigurationImpl flowable6Configuration, org.activiti.engine.impl.cfg.ProcessEngineConfigurationImpl flowable5Configuration) {
if (flowable6Configuration.getCustomPostDeployers() != null) {
List<org.activiti.engine.impl.persistence.deploy.Deployer> activiti5Deployers = new ArrayList<>();
for (EngineDeployer deployer : flowable6Configuration.getCustomPostDeployers()) {
if (deployer instanceof RulesDeployer) {
activiti5Deployers.add(new org.activiti.engine.impl.rules.RulesDeployer());
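// A single Activiti 5 rules deployer is enough for the Flowable 5 engine, so stop scanning after the first match.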
break;
}
}
if (activiti5Deployers.size() > 0) {
if (flowable5Configuration.getCustomPostDeployers() != null) {
flowable5Configuration.getCustomPostDeployers().addAll(activiti5Deployers);
} else {
flowable5Configuration.setCustomPostDeployers(activiti5Deployers);
}
}
}
}
protected void copyCustomVariableTypes(ProcessEngineConfigurationImpl flowable6Configuration, org.activiti.engine.impl.cfg.ProcessEngineConfigurationImpl flowable5Configuration) {
flowable5Configuration.setCustomPreVariableTypes(flowable6Configuration.getCustomPreVariableTypes());
flowable5Configuration.setCustomPostVariableTypes(flowable6Configuration.getCustomPostVariableTypes());
flowable5Configuration.setVariableTypes(flowable6Configuration.getVariableTypes());
}
protected void convertParseHandlers(ProcessEngineConfigurationImpl flowable6Configuration, org.activiti.engine.impl.cfg.ProcessEngineConfigurationImpl flowable5Configuration) {
flowable5Configuration.setPreBpmnParseHandlers(convert(flowable6Configuration.getFlowable5PreBpmnParseHandlers()));
flowable5Configuration.setPostBpmnParseHandlers(convert(flowable6Configuration.getFlowable5PostBpmnParseHandlers()));
flowable5Configuration.setCustomDefaultBpmnParseHandlers(convert(flowable6Configuration.getFlowable5CustomDefaultBpmnParseHandlers()));
}
protected List<BpmnParseHandler> convert(List<Object> activiti5BpmnParseHandlers) {
if (activiti5BpmnParseHandlers == null) {
return null;
}
List<BpmnParseHandler> parseHandlers = new ArrayList<>(activiti5BpmnParseHandlers.size());
for (Object activiti6BpmnParseHandler : activiti5BpmnParseHandlers) {
parseHandlers.add((BpmnParseHandler) activiti6BpmnParseHandler);
}
return parseHandlers;
}
}
|
modules/flowable5-compatibility/src/main/java/org/flowable/compatibility/DefaultProcessEngineFactory.java
|
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.compatibility;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ConcurrentMap;
import javax.xml.namespace.QName;
import org.activiti.engine.ActivitiException;
import org.activiti.engine.ProcessEngine;
import org.activiti.engine.impl.bpmn.parser.factory.ActivityBehaviorFactory;
import org.activiti.engine.impl.bpmn.parser.factory.ListenerFactory;
import org.activiti.engine.impl.el.ExpressionManager;
import org.activiti.engine.parse.BpmnParseHandler;
import org.flowable.common.engine.impl.EngineDeployer;
import org.flowable.common.engine.impl.history.HistoryLevel;
import org.flowable.engine.impl.cfg.ProcessEngineConfigurationImpl;
import org.flowable.engine.impl.cfg.StandaloneProcessEngineConfiguration;
import org.flowable.engine.impl.rules.RulesDeployer;
public class DefaultProcessEngineFactory {
/**
* Takes in a Flowable 6 process engine config, gives back a Flowable 5 process engine.
*/
public ProcessEngine buildProcessEngine(ProcessEngineConfigurationImpl flowable6Configuration) {
org.activiti.engine.impl.cfg.ProcessEngineConfigurationImpl flowable5Configuration = null;
if (flowable6Configuration instanceof StandaloneProcessEngineConfiguration) {
flowable5Configuration = new org.activiti.engine.impl.cfg.StandaloneProcessEngineConfiguration();
copyConfigItems(flowable6Configuration, flowable5Configuration);
return flowable5Configuration.buildProcessEngine();
} else {
throw new ActivitiException("Unsupported process engine configuration");
}
}
protected void copyConfigItems(ProcessEngineConfigurationImpl flowable6Configuration, org.activiti.engine.impl.cfg.ProcessEngineConfigurationImpl flowable5Configuration) {
flowable5Configuration.setFlowable5CompatibilityHandler(flowable6Configuration.getFlowable5CompatibilityHandler());
copyJdbcConfig(flowable6Configuration, flowable5Configuration);
copyHistoryConfig(flowable6Configuration, flowable5Configuration);
copyMailConfig(flowable6Configuration, flowable5Configuration);
copyDiagramConfig(flowable6Configuration, flowable5Configuration);
copyAsyncExecutorConfig(flowable6Configuration, flowable5Configuration);
copyJpaConfig(flowable6Configuration, flowable5Configuration);
copyBeans(flowable6Configuration, flowable5Configuration);
copyCaches(flowable6Configuration, flowable5Configuration);
copyActivityBehaviorFactory(flowable6Configuration, flowable5Configuration);
copyExpressionManager(flowable6Configuration, flowable5Configuration);
copyListenerFactory(flowable6Configuration, flowable5Configuration);
convertParseHandlers(flowable6Configuration, flowable5Configuration);
copyCustomMybatisMappers(flowable6Configuration, flowable5Configuration);
copyWsConfig(flowable6Configuration, flowable5Configuration);
flowable5Configuration.setEventDispatcher(flowable6Configuration.getEventDispatcher());
copyPostDeployers(flowable6Configuration, flowable5Configuration);
flowable5Configuration.setBusinessCalendarManager(flowable6Configuration.getBusinessCalendarManager());
}
protected void copyJdbcConfig(ProcessEngineConfigurationImpl flowable6Configuration, org.activiti.engine.impl.cfg.ProcessEngineConfigurationImpl flowable5Configuration) {
flowable5Configuration.setDataSource(flowable6Configuration.getDataSource());
if (flowable6Configuration.getJdbcDriver() != null) {
flowable5Configuration.setJdbcDriver(flowable6Configuration.getJdbcDriver());
}
if (flowable6Configuration.getJdbcUrl() != null) {
flowable5Configuration.setJdbcUrl(flowable6Configuration.getJdbcUrl());
}
if (flowable6Configuration.getJdbcUsername() != null) {
flowable5Configuration.setJdbcUsername(flowable6Configuration.getJdbcUsername());
}
if (flowable6Configuration.getJdbcPassword() != null) {
flowable5Configuration.setJdbcPassword(flowable6Configuration.getJdbcPassword());
}
if (flowable6Configuration.getIdBlockSize() > 0) {
flowable5Configuration.setIdBlockSize(flowable6Configuration.getIdBlockSize());
}
if (flowable6Configuration.getJdbcMaxActiveConnections() > 0) {
flowable5Configuration.setJdbcMaxActiveConnections(flowable6Configuration.getJdbcMaxActiveConnections());
}
if (flowable6Configuration.getDatabaseTablePrefix() != null) {
flowable5Configuration.setDatabaseTablePrefix(flowable6Configuration.getDatabaseTablePrefix());
}
}
protected void copyHistoryConfig(ProcessEngineConfigurationImpl flowable6Configuration, org.activiti.engine.impl.cfg.ProcessEngineConfigurationImpl flowable5Configuration) {
flowable5Configuration.setHistoryLevel(HistoryLevel.getHistoryLevelForKey(flowable6Configuration.getHistoryLevel().getKey()));
}
protected void copyDiagramConfig(ProcessEngineConfigurationImpl flowable6Configuration, org.activiti.engine.impl.cfg.ProcessEngineConfigurationImpl flowable5Configuration) {
flowable5Configuration.setCreateDiagramOnDeploy(flowable6Configuration.isCreateDiagramOnDeploy());
flowable5Configuration.setActivityFontName(flowable6Configuration.getActivityFontName());
flowable5Configuration.setLabelFontName(flowable6Configuration.getLabelFontName());
flowable5Configuration.setAnnotationFontName(flowable6Configuration.getAnnotationFontName());
}
protected void copyMailConfig(ProcessEngineConfigurationImpl flowable6Configuration, org.activiti.engine.impl.cfg.ProcessEngineConfigurationImpl flowable5Configuration) {
flowable5Configuration.setMailServerDefaultFrom(flowable6Configuration.getMailServerDefaultFrom());
flowable5Configuration.setMailServerHost(flowable6Configuration.getMailServerHost());
flowable5Configuration.setMailServerPassword(flowable6Configuration.getMailServerPassword());
flowable5Configuration.setMailServerPort(flowable6Configuration.getMailServerPort());
flowable5Configuration.setMailServerUsername(flowable6Configuration.getMailServerUsername());
flowable5Configuration.setMailServerUseSSL(flowable6Configuration.getMailServerUseSSL());
flowable5Configuration.setMailServerUseTLS(flowable6Configuration.getMailServerUseTLS());
if (flowable6Configuration.getMailServers() != null && flowable6Configuration.getMailServers().size() > 0) {
flowable5Configuration.getMailServers().putAll(flowable6Configuration.getMailServers());
}
if (flowable6Configuration.getMailSessionJndi() != null) {
flowable5Configuration.setMailSessionJndi(flowable6Configuration.getMailSessionJndi());
}
}
protected void copyAsyncExecutorConfig(ProcessEngineConfigurationImpl flowable6Configuration, org.activiti.engine.impl.cfg.ProcessEngineConfigurationImpl flowable5Configuration) {
if (flowable6Configuration.isAsyncExecutorActivate()) {
flowable5Configuration.setAsyncExecutorActivate(true);
}
flowable5Configuration.setDefaultFailedJobWaitTime(flowable6Configuration.getDefaultFailedJobWaitTime());
flowable5Configuration.setAsyncFailedJobWaitTime(flowable6Configuration.getAsyncFailedJobWaitTime());
flowable5Configuration.setAsyncExecutor(flowable6Configuration.getAsyncExecutor());
}
protected void copyJpaConfig(ProcessEngineConfigurationImpl flowable6Configuration, org.activiti.engine.impl.cfg.ProcessEngineConfigurationImpl flowable5Configuration) {
flowable5Configuration.setJpaCloseEntityManager(flowable6Configuration.isJpaCloseEntityManager());
flowable5Configuration.setJpaHandleTransaction(flowable6Configuration.isJpaHandleTransaction());
// We want to reuse the entity manager factory between the two engines
if (flowable6Configuration.getJpaEntityManagerFactory() != null) {
flowable5Configuration.setJpaEntityManagerFactory(flowable6Configuration.getJpaEntityManagerFactory());
} else {
flowable5Configuration.setJpaPersistenceUnitName(flowable6Configuration.getJpaPersistenceUnitName());
}
}
protected void copyBeans(ProcessEngineConfigurationImpl flowable6Configuration, org.activiti.engine.impl.cfg.ProcessEngineConfigurationImpl flowable5Configuration) {
if (flowable6Configuration.getBeans() != null) {
flowable5Configuration.setBeans(flowable6Configuration.getBeans());
}
}
protected void copyCaches(ProcessEngineConfigurationImpl flowable6Configuration, org.activiti.engine.impl.cfg.ProcessEngineConfigurationImpl flowable5Configuration) {
flowable5Configuration.setProcessDefinitionCacheLimit(flowable6Configuration.getProcessDefinitionCacheLimit());
flowable5Configuration.setEnableProcessDefinitionInfoCache(flowable6Configuration.isEnableProcessDefinitionInfoCache());
flowable5Configuration.setProcessDefinitionCache(flowable6Configuration.getProcessDefinitionCache());
flowable5Configuration.setKnowledgeBaseCacheLimit(flowable6Configuration.getKnowledgeBaseCacheLimit());
flowable5Configuration.setKnowledgeBaseCache(flowable6Configuration.getKnowledgeBaseCache());
}
protected void copyActivityBehaviorFactory(ProcessEngineConfigurationImpl flowable6Configuration, org.activiti.engine.impl.cfg.ProcessEngineConfigurationImpl flowable5Configuration) {
if (flowable6Configuration.getFlowable5ActivityBehaviorFactory() != null) {
flowable5Configuration.setActivityBehaviorFactory((ActivityBehaviorFactory) flowable6Configuration.getFlowable5ActivityBehaviorFactory());
}
}
protected void copyExpressionManager(ProcessEngineConfigurationImpl flowable6Configuration, org.activiti.engine.impl.cfg.ProcessEngineConfigurationImpl flowable5Configuration) {
if (flowable6Configuration.getFlowable5ExpressionManager() != null) {
flowable5Configuration.setExpressionManager((ExpressionManager) flowable6Configuration.getFlowable5ExpressionManager());
}
}
protected void copyListenerFactory(ProcessEngineConfigurationImpl flowable6Configuration, org.activiti.engine.impl.cfg.ProcessEngineConfigurationImpl flowable5Configuration) {
if (flowable6Configuration.getFlowable5ListenerFactory() != null) {
flowable5Configuration.setListenerFactory((ListenerFactory) flowable6Configuration.getFlowable5ListenerFactory());
}
}
protected void copyCustomMybatisMappers(ProcessEngineConfigurationImpl flowable6Configuration, org.activiti.engine.impl.cfg.ProcessEngineConfigurationImpl flowable5Configuration) {
if (flowable6Configuration.getFlowable5CustomMybatisMappers() != null) {
flowable5Configuration.setCustomMybatisMappers(flowable6Configuration.getFlowable5CustomMybatisMappers());
}
if (flowable6Configuration.getFlowable5CustomMybatisXMLMappers() != null) {
flowable5Configuration.setCustomMybatisXMLMappers(flowable6Configuration.getFlowable5CustomMybatisXMLMappers());
}
}
protected void copyWsConfig(ProcessEngineConfigurationImpl flowable6Configuration, org.activiti.engine.impl.cfg.ProcessEngineConfigurationImpl flowable5Configuration) {
if (flowable6Configuration.getWsSyncFactoryClassName() != null) {
flowable5Configuration.setWsSyncFactoryClassName(flowable6Configuration.getWsSyncFactoryClassName());
}
ConcurrentMap<QName, URL> endpointMap = flowable6Configuration.getWsOverridenEndpointAddresses();
if (endpointMap != null) {
for (QName endpointQName : endpointMap.keySet()) {
flowable5Configuration.addWsEndpointAddress(endpointQName, endpointMap.get(endpointQName));
}
}
}
protected void copyPostDeployers(ProcessEngineConfigurationImpl flowable6Configuration, org.activiti.engine.impl.cfg.ProcessEngineConfigurationImpl flowable5Configuration) {
if (flowable6Configuration.getCustomPostDeployers() != null) {
List<org.activiti.engine.impl.persistence.deploy.Deployer> activiti5Deployers = new ArrayList<>();
for (EngineDeployer deployer : flowable6Configuration.getCustomPostDeployers()) {
if (deployer instanceof RulesDeployer) {
activiti5Deployers.add(new org.activiti.engine.impl.rules.RulesDeployer());
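// A single Activiti 5 rules deployer is enough for the Flowable 5 engine, so stop scanning after the first match.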
break;
}
}
if (activiti5Deployers.size() > 0) {
if (flowable5Configuration.getCustomPostDeployers() != null) {
flowable5Configuration.getCustomPostDeployers().addAll(activiti5Deployers);
} else {
flowable5Configuration.setCustomPostDeployers(activiti5Deployers);
}
}
}
}
protected void convertParseHandlers(ProcessEngineConfigurationImpl flowable6Configuration, org.activiti.engine.impl.cfg.ProcessEngineConfigurationImpl flowable5Configuration) {
flowable5Configuration.setPreBpmnParseHandlers(convert(flowable6Configuration.getFlowable5PreBpmnParseHandlers()));
flowable5Configuration.setPostBpmnParseHandlers(convert(flowable6Configuration.getFlowable5PostBpmnParseHandlers()));
flowable5Configuration.setCustomDefaultBpmnParseHandlers(convert(flowable6Configuration.getFlowable5CustomDefaultBpmnParseHandlers()));
}
protected List<BpmnParseHandler> convert(List<Object> activiti5BpmnParseHandlers) {
if (activiti5BpmnParseHandlers == null) {
return null;
}
List<BpmnParseHandler> parseHandlers = new ArrayList<>(activiti5BpmnParseHandlers.size());
for (Object activiti6BpmnParseHandler : activiti5BpmnParseHandlers) {
parseHandlers.add((BpmnParseHandler) activiti6BpmnParseHandler);
}
return parseHandlers;
}
}
|
issue 1065 add copy method for custom variable types (#1066)
|
modules/flowable5-compatibility/src/main/java/org/flowable/compatibility/DefaultProcessEngineFactory.java
|
issue 1065 add copy method for custom variable types (#1066)
|
|
Java
|
apache-2.0
|
52f6121544176e71e50e5a9f6225ea6579c284c7
| 0
|
b2ihealthcare/snow-owl,b2ihealthcare/snow-owl,b2ihealthcare/snow-owl,b2ihealthcare/snow-owl
|
/*
* Copyright 2011-2020 B2i Healthcare Pte Ltd, http://b2i.sg
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.b2international.snowowl.internal.eventbus;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CopyOnWriteArraySet;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import org.eclipse.net4j.util.CheckUtil;
import org.eclipse.net4j.util.factory.ProductCreationException;
import org.eclipse.net4j.util.lifecycle.Lifecycle;
import org.eclipse.net4j.util.lifecycle.LifecycleUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.b2international.snowowl.eventbus.IEventBus;
import com.b2international.snowowl.eventbus.IHandler;
import com.b2international.snowowl.eventbus.IMessage;
import com.b2international.snowowl.eventbus.net4j.EventBusConstants;
import com.b2international.snowowl.eventbus.net4j.IEventBusProtocol;
import com.google.common.collect.MapMaker;
import com.google.common.primitives.Ints;
/**
* @since 3.1
*/
public class EventBus extends Lifecycle implements IEventBus {
private static final Logger LOG = LoggerFactory.getLogger(EventBus.class);
private final Set<String> addressBook = new CopyOnWriteArraySet<>();
private final ConcurrentMap<String, ChoosableList<Handler>> protocolMap;
private final ConcurrentMap<String, ChoosableList<Handler>> handlerMap;
private final ConcurrentMap<String, AtomicLong> inQueueMessages;
private final ConcurrentMap<String, AtomicLong> currentlyProcessingMessages;
private final ConcurrentMap<String, AtomicLong> succeededMessages;
private final ConcurrentMap<String, AtomicLong> completedMessages;
private final ConcurrentMap<String, AtomicLong> failedMessages;
private final String description;
private final int maxThreads;
private final ExecutorServiceFactory executorServiceFactory;
private ExecutorService executorService;
public EventBus() {
this(EventBusConstants.GLOBAL_BUS, Runtime.getRuntime().availableProcessors());
}
public EventBus(String description, int maxThreads) {
CheckUtil.checkArg(description, "Description should be specified");
CheckUtil.checkArg(maxThreads >= 0, "Number of workers must not be negative");
this.description = description;
this.maxThreads = maxThreads;
this.executorServiceFactory = maxThreads == 0 ? ExecutorServiceFactory.DIRECT : new WorkerExecutorServiceFactory();
// init stat maps with a concurrency level constrained to the 1-4 range
final int concurrencyLevel = Ints.constrainToRange(maxThreads, 1, 4);
this.protocolMap = new MapMaker().concurrencyLevel(concurrencyLevel).makeMap();
this.handlerMap = new MapMaker().concurrencyLevel(concurrencyLevel).makeMap();
this.inQueueMessages = new MapMaker().concurrencyLevel(concurrencyLevel).makeMap();
this.currentlyProcessingMessages = new MapMaker().concurrencyLevel(concurrencyLevel).makeMap();
this.succeededMessages = new MapMaker().concurrencyLevel(concurrencyLevel).makeMap();
this.completedMessages = new MapMaker().concurrencyLevel(concurrencyLevel).makeMap();
this.failedMessages = new MapMaker().concurrencyLevel(concurrencyLevel).makeMap();
}
@Override
protected void doActivate() throws Exception {
super.doActivate();
executorService = executorServiceFactory.createExecutorService(description, maxThreads);
}
@Override
public IEventBus send(String address, Object message, Map<String, String> headers) {
return send(address, message, IMessage.DEFAULT_TAG, headers);
}
@Override
public IEventBus send(String address, Object message, String tag, Map<String, String> headers) {
return send(address, message, tag, headers, null);
}
@Override
public IEventBus send(String address, Object message, Map<String, String> headers, IHandler<IMessage> handler) {
return send(address, message, IMessage.DEFAULT_TAG, headers, handler);
}
@Override
public IEventBus send(String address, Object message, String tag, Map<String, String> headers, IHandler<IMessage> replyHandler) {
return sendMessageInternal(null, MessageFactory.createMessage(address, message, tag, headers), true, replyHandler);
}
@Override
public IEventBus publish(String address, Object message, Map<String, String> headers) {
return publish(address, message, IMessage.DEFAULT_TAG, headers);
}
@Override
public IEventBus publish(String address, Object message, String tag, Map<String, String> headers) {
return sendMessageInternal(null, MessageFactory.createMessage(address, message, tag, headers), false, null);
}
@Override
public IEventBus receive(IMessage message) {
CheckUtil.checkArg(message instanceof BaseMessage, "Accepts only BaseMessage instances");
receiveMessage((BaseMessage) message);
return this;
}
private void receiveMessage(BaseMessage message) {
final String address = message.address();
message.bus = this;
final ChoosableList<Handler> handlers = handlerMap.get(address);
receiveMessage(handlers, message);
}
private void receiveMessage(ChoosableList<Handler> handlers, BaseMessage message) {
LOG.trace("Received message: {}", message);
if (handlers != null) {
if (message.isSend()) {
final Handler handler = handlers.choose();
if (handler != null) {
doReceive(message, handler);
}
} else {
for (final Handler holder : handlers.list) {
doReceive(message, holder);
}
}
} else {
// TODO send reply to indicate that there is no handler
LOG.trace("No event handler registered to handle message: {}", message);
}
}
private void doReceive(final IMessage message, final Handler holder) {
queue(message);
holder.context.submit(new Runnable() {
@Override
public void run() {
try {
process(message);
holder.handler.handle(message);
} catch (Exception e) {
LOG.error("Exception happened while delivering message", e);
message.fail(e);
} finally {
complete(message);
if (holder.isReplyHandler || !LifecycleUtil.isActive(holder.handler)) {
unregisterHandler(holder.address, holder.handler);
}
}
}
});
}
@Override
public IEventBus registerHandler(String address, IHandler<IMessage> handler) {
if (isActive()) {
registerHandler(address, handler, false, false);
}
return this;
}
@Override
public IEventBus unregisterHandler(String address, IHandler<IMessage> handler) {
if (isActive()) {
MessageFactory.checkAddress(address);
if (handler != null) {
final ConcurrentMap<String, ChoosableList<Handler>> map = handler instanceof IEventBusProtocol ? protocolMap : handlerMap;
final ChoosableList<Handler> handlers = map.get(address);
if (handlers != null) {
synchronized (handlers) {
final int size = handlers.list.size();
// Requires a list traversal. This is tricky to optimise since we can't use a set:
// we need fast ordered traversal for the round robin
for (int i = 0; i < size; i++) {
final Handler entry = handlers.list.get(i);
if (entry.handler == handler) {
handlers.list.remove(i);
if (handlers.list.isEmpty()) {
map.remove(address);
// if this was the last non protocol based handler, send unregistration event
if (!entry.isReplyHandler && !(handler instanceof IEventBusProtocol)) {
addressBook.remove(address);
fireEvent(new HandlerChangedEvent(this, address, false));
}
}
LOG.trace("Unregistered handler {} from address {}", entry.handler, address);
return this;
}
}
}
}
}
}
return this;
}
private void queue(IMessage message) {
final String tag = message.tag();
increment(tag, inQueueMessages);
}
private void process(IMessage message) {
final String tag = message.tag();
decrement(tag, inQueueMessages);
increment(tag, currentlyProcessingMessages);
}
private void complete(IMessage message) {
final String tag = message.tag();
decrement(tag, currentlyProcessingMessages);
if (message.isSucceeded()) {
increment(tag, succeededMessages);
} else {
increment(tag, failedMessages);
}
increment(tag, completedMessages);
}
private void increment(String tag, Map<String, AtomicLong> toIncrement) {
final AtomicLong counter = getOrCreateCounter(tag, toIncrement);
counter.incrementAndGet();
}
private void decrement(String tag, Map<String, AtomicLong> toDecrement) {
final AtomicLong counter = getOrCreateCounter(tag, toDecrement);
counter.decrementAndGet();
}
private AtomicLong getOrCreateCounter(final String tag, final Map<String, AtomicLong> counterMap) {
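// Double-checked creation: synchronize only when the counter is missing so the common read path stays lock-free.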
if (!counterMap.containsKey(tag)) {
synchronized (counterMap) {
if (!counterMap.containsKey(tag)) {
counterMap.put(tag, new AtomicLong(0L));
}
}
}
return counterMap.get(tag);
}
private void registerHandler(String address, IHandler<IMessage> handler, boolean replyHandler, boolean localOnly) {
checkActive();
MessageFactory.checkAddress(address);
if (handler != null) {
ChoosableList<Handler> handlers = null;
if (handler instanceof IEventBusProtocol) {
handlers = protocolMap.get(address);
if (handlers == null) {
handlers = new ChoosableList<Handler>();
final ChoosableList<Handler> previousHandlers = protocolMap.putIfAbsent(address, handlers);
if (previousHandlers != null) {
handlers = previousHandlers;
}
}
} else {
handlers = handlerMap.get(address);
if (handlers == null) {
handlers = new ChoosableList<Handler>();
final ChoosableList<Handler> previousHandlers = handlerMap.putIfAbsent(address, handlers);
if (previousHandlers != null) {
handlers = previousHandlers;
}
}
}
final Handler h = new Handler(address, handler, executorService, replyHandler);
if (!handlers.list.contains(h)) {
handlers.list.add(h);
LOG.trace("Registered handler {} to address {}", handler, address);
if (!replyHandler && handlers.list.size() == 1 && !(handler instanceof IEventBusProtocol)) {
addressBook.add(address);
fireEvent(new HandlerChangedEvent(this, address, true));
}
}
}
}
@Override
public Set<String> getAddressBook() {
return new HashSet<String>(addressBook);
}
@Override
public ExecutorService getExecutorService() {
return executorService;
}
@Override
public long getInQueueMessages(String tag) {
return getOrCreateCounter(tag, inQueueMessages).get();
}
@Override
public long getProcessingMessages(String tag) {
return getOrCreateCounter(tag, currentlyProcessingMessages).get();
}
@Override
public long getFailedMessages(String tag) {
return getOrCreateCounter(tag, failedMessages).get();
}
@Override
public long getCompletedMessages(String tag) {
return getOrCreateCounter(tag, completedMessages).get();
}
@Override
public long getSucceededMessages(String tag) {
return getOrCreateCounter(tag, succeededMessages).get();
}
private IEventBus sendMessageInternal(IEventBusProtocol protocol, BaseMessage message, boolean send, IHandler<IMessage> replyHandler) {
checkActive();
message.send = send;
if (replyHandler != null && send) {
// register a random UUID address to handle result for this send message
final String replyAddress = UUID.randomUUID().toString();
message.replyAddress = replyAddress;
// register reply handler to allow result
registerHandler(replyAddress, replyHandler, true, true);
// TODO set timer with a given timeout to remove this handler
}
if (protocol != null) {
protocol.handle(message);
} else {
final ChoosableList<Handler> handlers = protocolMap.get(message.address());
if (handlers != null) {
receiveMessage(handlers, message);
}
receiveMessage(message);
}
return this;
}
public static class Factory extends org.eclipse.net4j.util.factory.Factory {
public Factory() {
super(EventBusConstants.EVENT_BUS_PRODUCT_GROUP, EventBusConstants.PROTOCOL_NAME);
}
@Override
public Object create(String description) throws ProductCreationException {
final String[] values = description.split(":");
return new EventBus(values[0], Integer.parseInt(values[1]));
}
}
private static class Handler {
final String address;
final IHandler<IMessage> handler;
final boolean isReplyHandler;
ExecutorService context;
public Handler(String address, IHandler<IMessage> handler, ExecutorService context, boolean isReplyHandler) {
this.address = address;
this.handler = handler;
this.context = context;
this.isReplyHandler = isReplyHandler;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((address == null) ? 0 : address.hashCode());
result = prime * result + ((handler == null) ? 0 : handler.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
Handler other = (Handler) obj;
if (address == null) {
if (other.address != null)
return false;
} else if (!address.equals(other.address))
return false;
if (handler == null) {
if (other.handler != null)
return false;
} else if (!handler.equals(other.handler))
return false;
return true;
}
}
private static class ChoosableList<T> {
final List<T> list = new CopyOnWriteArrayList<>();
final AtomicInteger pos = new AtomicInteger(0);
T choose() {
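// Round-robin selection: advance the shared position and wrap around once the end of the list is reached.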
while (true) {
int size = list.size();
if (size == 0) {
return null;
}
int p = pos.getAndIncrement();
if (p >= size - 1) {
pos.set(0);
}
try {
return list.get(p);
} catch (IndexOutOfBoundsException e) {
// Can happen if the list shrank concurrently; reset the position and retry
pos.set(0);
}
}
}
}
/*package*/ IEventBus sendReply(IEventBusProtocol replyProtocol,
BaseMessage message, IHandler<IMessage> replyHandler) {
return sendMessageInternal(replyProtocol, message, true, replyHandler);
}
}
|
net4j/com.b2international.snowowl.eventbus/src/com/b2international/snowowl/internal/eventbus/EventBus.java
|
/*
* Copyright 2011-2020 B2i Healthcare Pte Ltd, http://b2i.sg
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.b2international.snowowl.internal.eventbus;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CopyOnWriteArraySet;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import org.eclipse.net4j.util.CheckUtil;
import org.eclipse.net4j.util.factory.ProductCreationException;
import org.eclipse.net4j.util.lifecycle.Lifecycle;
import org.eclipse.net4j.util.lifecycle.LifecycleUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.b2international.snowowl.eventbus.IEventBus;
import com.b2international.snowowl.eventbus.IHandler;
import com.b2international.snowowl.eventbus.IMessage;
import com.b2international.snowowl.eventbus.net4j.EventBusConstants;
import com.b2international.snowowl.eventbus.net4j.IEventBusProtocol;
import com.google.common.collect.MapMaker;
/**
* @since 3.1
*/
public class EventBus extends Lifecycle implements IEventBus {
private static final Logger LOG = LoggerFactory.getLogger(EventBus.class);
private final Set<String> addressBook = new CopyOnWriteArraySet<>();
private final ConcurrentMap<String, ChoosableList<Handler>> protocolMap;
private final ConcurrentMap<String, ChoosableList<Handler>> handlerMap;
private final ConcurrentMap<String, AtomicLong> inQueueMessages;
private final ConcurrentMap<String, AtomicLong> currentlyProcessingMessages;
private final ConcurrentMap<String, AtomicLong> succeededMessages;
private final ConcurrentMap<String, AtomicLong> completedMessages;
private final ConcurrentMap<String, AtomicLong> failedMessages;
private final String description;
private final int maxThreads;
private final ExecutorServiceFactory executorServiceFactory;
private ExecutorService executorService;
public EventBus() {
this(EventBusConstants.GLOBAL_BUS, Runtime.getRuntime().availableProcessors());
}
public EventBus(String description, int maxThreads) {
CheckUtil.checkArg(description, "Description should be specified");
CheckUtil.checkArg(maxThreads >= 0, "Number of workers must not be negative");
this.description = description;
this.maxThreads = maxThreads;
this.executorServiceFactory = maxThreads == 0 ? ExecutorServiceFactory.DIRECT : new WorkerExecutorServiceFactory();
// init stat maps with at least 1 concurrencyLevel
final int concurrencyLevel = Math.min(4, maxThreads);
this.protocolMap = new MapMaker().concurrencyLevel(concurrencyLevel).makeMap();
this.handlerMap = new MapMaker().concurrencyLevel(concurrencyLevel).makeMap();
this.inQueueMessages = new MapMaker().concurrencyLevel(concurrencyLevel).makeMap();
this.currentlyProcessingMessages = new MapMaker().concurrencyLevel(concurrencyLevel).makeMap();
this.succeededMessages = new MapMaker().concurrencyLevel(concurrencyLevel).makeMap();
this.completedMessages = new MapMaker().concurrencyLevel(concurrencyLevel).makeMap();
this.failedMessages = new MapMaker().concurrencyLevel(concurrencyLevel).makeMap();
}
@Override
protected void doActivate() throws Exception {
super.doActivate();
executorService = executorServiceFactory.createExecutorService(description, maxThreads);
}
@Override
public IEventBus send(String address, Object message, Map<String, String> headers) {
return send(address, message, IMessage.DEFAULT_TAG, headers);
}
@Override
public IEventBus send(String address, Object message, String tag, Map<String, String> headers) {
return send(address, message, tag, headers, null);
}
@Override
public IEventBus send(String address, Object message, Map<String, String> headers, IHandler<IMessage> handler) {
return send(address, message, IMessage.DEFAULT_TAG, headers, handler);
}
@Override
public IEventBus send(String address, Object message, String tag, Map<String, String> headers, IHandler<IMessage> replyHandler) {
return sendMessageInternal(null, MessageFactory.createMessage(address, message, tag, headers), true, replyHandler);
}
@Override
public IEventBus publish(String address, Object message, Map<String, String> headers) {
return publish(address, message, IMessage.DEFAULT_TAG, headers);
}
@Override
public IEventBus publish(String address, Object message, String tag, Map<String, String> headers) {
return sendMessageInternal(null, MessageFactory.createMessage(address, message, tag, headers), false, null);
}
@Override
public IEventBus receive(IMessage message) {
CheckUtil.checkArg(message instanceof BaseMessage, "Accepts only BaseMessage instances");
receiveMessage((BaseMessage) message);
return this;
}
private void receiveMessage(BaseMessage message) {
final String address = message.address();
message.bus = this;
final ChoosableList<Handler> handlers = handlerMap.get(address);
receiveMessage(handlers, message);
}
private void receiveMessage(ChoosableList<Handler> handlers, BaseMessage message) {
LOG.trace("Received message: {}", message);
if (handlers != null) {
if (message.isSend()) {
final Handler handler = handlers.choose();
if (handler != null) {
doReceive(message, handler);
}
} else {
for (final Handler holder : handlers.list) {
doReceive(message, holder);
}
}
} else {
// TODO send reply to indicate that there is no handler
LOG.trace("No event handler registered to handle message: {}", message);
}
}
private void doReceive(final IMessage message, final Handler holder) {
queue(message);
holder.context.submit(new Runnable() {
@Override
public void run() {
try {
process(message);
holder.handler.handle(message);
} catch (Exception e) {
LOG.error("Exception happened while delivering message", e);
message.fail(e);
} finally {
complete(message);
if (holder.isReplyHandler || !LifecycleUtil.isActive(holder.handler)) {
unregisterHandler(holder.address, holder.handler);
}
}
}
});
}
@Override
public IEventBus registerHandler(String address, IHandler<IMessage> handler) {
if (isActive()) {
registerHandler(address, handler, false, false);
}
return this;
}
@Override
public IEventBus unregisterHandler(String address, IHandler<IMessage> handler) {
if (isActive()) {
MessageFactory.checkAddress(address);
if (handler != null) {
final ConcurrentMap<String, ChoosableList<Handler>> map = handler instanceof IEventBusProtocol ? protocolMap : handlerMap;
final ChoosableList<Handler> handlers = map.get(address);
if (handlers != null) {
synchronized (handlers) {
final int size = handlers.list.size();
// Requires a list traversal. This is tricky to optimise since we can't use a set:
// we need fast ordered traversal for the round robin
for (int i = 0; i < size; i++) {
final Handler entry = handlers.list.get(i);
if (entry.handler == handler) {
handlers.list.remove(i);
if (handlers.list.isEmpty()) {
map.remove(address);
// if this was the last non protocol based handler, send unregistration event
if (!entry.isReplyHandler && !(handler instanceof IEventBusProtocol)) {
addressBook.remove(address);
fireEvent(new HandlerChangedEvent(this, address, false));
}
}
LOG.trace("Unregistered handler {} from address {}", entry.handler, address);
return this;
}
}
}
}
}
}
return this;
}
private void queue(IMessage message) {
final String tag = message.tag();
increment(tag, inQueueMessages);
}
private void process(IMessage message) {
final String tag = message.tag();
decrement(tag, inQueueMessages);
increment(tag, currentlyProcessingMessages);
}
private void complete(IMessage message) {
final String tag = message.tag();
decrement(tag, currentlyProcessingMessages);
if (message.isSucceeded()) {
increment(tag, succeededMessages);
} else {
increment(tag, failedMessages);
}
increment(tag, completedMessages);
}
private void increment(String tag, Map<String, AtomicLong> toIncrement) {
final AtomicLong counter = getOrCreateCounter(tag, toIncrement);
counter.incrementAndGet();
}
private void decrement(String tag, Map<String, AtomicLong> toDecrement) {
final AtomicLong counter = getOrCreateCounter(tag, toDecrement);
counter.decrementAndGet();
}
private AtomicLong getOrCreateCounter(final String tag, final Map<String, AtomicLong> counterMap) {
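// Double-checked creation: synchronize only when the counter is missing so the common read path stays lock-free.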
if (!counterMap.containsKey(tag)) {
synchronized (counterMap) {
if (!counterMap.containsKey(tag)) {
counterMap.put(tag, new AtomicLong(0L));
}
}
}
return counterMap.get(tag);
}
private void registerHandler(String address, IHandler<IMessage> handler, boolean replyHandler, boolean localOnly) {
checkActive();
MessageFactory.checkAddress(address);
if (handler != null) {
ChoosableList<Handler> handlers = null;
if (handler instanceof IEventBusProtocol) {
handlers = protocolMap.get(address);
if (handlers == null) {
handlers = new ChoosableList<Handler>();
final ChoosableList<Handler> previousHandlers = protocolMap.putIfAbsent(address, handlers);
if (previousHandlers != null) {
handlers = previousHandlers;
}
}
} else {
handlers = handlerMap.get(address);
if (handlers == null) {
handlers = new ChoosableList<Handler>();
final ChoosableList<Handler> previousHandlers = handlerMap.putIfAbsent(address, handlers);
if (previousHandlers != null) {
handlers = previousHandlers;
}
}
}
final Handler h = new Handler(address, handler, executorService, replyHandler);
if (!handlers.list.contains(h)) {
handlers.list.add(h);
LOG.trace("Registered handler {} to address {}", handler, address);
if (!replyHandler && handlers.list.size() == 1 && !(handler instanceof IEventBusProtocol)) {
addressBook.add(address);
fireEvent(new HandlerChangedEvent(this, address, true));
}
}
}
}
@Override
public Set<String> getAddressBook() {
return new HashSet<String>(addressBook);
}
@Override
public ExecutorService getExecutorService() {
return executorService;
}
@Override
public long getInQueueMessages(String tag) {
return getOrCreateCounter(tag, inQueueMessages).get();
}
@Override
public long getProcessingMessages(String tag) {
return getOrCreateCounter(tag, currentlyProcessingMessages).get();
}
@Override
public long getFailedMessages(String tag) {
return getOrCreateCounter(tag, failedMessages).get();
}
@Override
public long getCompletedMessages(String tag) {
return getOrCreateCounter(tag, completedMessages).get();
}
@Override
public long getSucceededMessages(String tag) {
return getOrCreateCounter(tag, succeededMessages).get();
}
private IEventBus sendMessageInternal(IEventBusProtocol protocol, BaseMessage message, boolean send, IHandler<IMessage> replyHandler) {
checkActive();
message.send = send;
if (replyHandler != null && send) {
// register a random UUID address to handle result for this send message
final String replyAddress = UUID.randomUUID().toString();
message.replyAddress = replyAddress;
// register reply handler to allow result
registerHandler(replyAddress, replyHandler, true, true);
// TODO set timer with a given timeout to remove this handler
}
if (protocol != null) {
protocol.handle(message);
} else {
final ChoosableList<Handler> handlers = protocolMap.get(message.address());
if (handlers != null) {
receiveMessage(handlers, message);
}
receiveMessage(message);
}
return this;
}
public static class Factory extends org.eclipse.net4j.util.factory.Factory {
public Factory() {
super(EventBusConstants.EVENT_BUS_PRODUCT_GROUP, EventBusConstants.PROTOCOL_NAME);
}
@Override
public Object create(String description) throws ProductCreationException {
final String[] values = description.split(":");
return new EventBus(values[0], Integer.parseInt(values[1]));
}
}
private static class Handler {
final String address;
final IHandler<IMessage> handler;
final boolean isReplyHandler;
ExecutorService context;
public Handler(String address, IHandler<IMessage> handler, ExecutorService context, boolean isReplyHandler) {
this.address = address;
this.handler = handler;
this.context = context;
this.isReplyHandler = isReplyHandler;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((address == null) ? 0 : address.hashCode());
result = prime * result + ((handler == null) ? 0 : handler.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
Handler other = (Handler) obj;
if (address == null) {
if (other.address != null)
return false;
} else if (!address.equals(other.address))
return false;
if (handler == null) {
if (other.handler != null)
return false;
} else if (!handler.equals(other.handler))
return false;
return true;
}
}
private static class ChoosableList<T> {
final List<T> list = new CopyOnWriteArrayList<>();
final AtomicInteger pos = new AtomicInteger(0);
T choose() {
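// Round-robin selection: advance the shared position and wrap around once the end of the list is reached.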
while (true) {
int size = list.size();
if (size == 0) {
return null;
}
int p = pos.getAndIncrement();
if (p >= size - 1) {
pos.set(0);
}
try {
return list.get(p);
} catch (IndexOutOfBoundsException e) {
// Can happen if the list shrank concurrently; reset the position and retry
pos.set(0);
}
}
}
}
/*package*/ IEventBus sendReply(IEventBusProtocol replyProtocol,
BaseMessage message, IHandler<IMessage> replyHandler) {
return sendMessageInternal(replyProtocol, message, true, replyHandler);
}
}
|
[core] constrain concurrency level to the 1-4 range
|
net4j/com.b2international.snowowl.eventbus/src/com/b2international/snowowl/internal/eventbus/EventBus.java
|
[core] constrain concurrency level to the 1-4 range
|
|
Java
|
apache-2.0
|
9daddb86eb18be2a0003d7fe5ab8f0a8bb975c28
| 0
|
abimarank/carbon-apimgt,lakmali/carbon-apimgt,lalaji/carbon-apimgt,dewmini/carbon-apimgt,thusithak/carbon-apimgt,dewmini/carbon-apimgt,rswijesena/carbon-apimgt,dewmini/carbon-apimgt,thusithak/carbon-apimgt,ChamNDeSilva/carbon-apimgt,ChamNDeSilva/carbon-apimgt,sambaheerathan/carbon-apimgt,ChamNDeSilva/carbon-apimgt,lakmali/carbon-apimgt,rswijesena/carbon-apimgt,lakmali/carbon-apimgt,rswijesena/carbon-apimgt,Minoli/carbon-apimgt,Minoli/carbon-apimgt,lalaji/carbon-apimgt,Minoli/carbon-apimgt,abimarank/carbon-apimgt,abimarank/carbon-apimgt,sambaheerathan/carbon-apimgt,lalaji/carbon-apimgt,thusithak/carbon-apimgt,sambaheerathan/carbon-apimgt
|
/*
* Copyright (c) 2017, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.apimgt.core.impl;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.wso2.carbon.apimgt.core.api.Broker;
import org.wso2.carbon.apimgt.core.configuration.models.BrokerConfigurations;
import org.wso2.carbon.apimgt.core.configuration.models.JMSConnectionConfiguration;
import org.wso2.carbon.apimgt.core.exception.BrokerException;
import org.wso2.carbon.apimgt.core.exception.ExceptionCodes;
import org.wso2.carbon.apimgt.core.internal.ServiceReferenceHolder;
import org.wso2.carbon.apimgt.core.util.BrokerUtil;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import javax.jms.JMSException;
import javax.jms.TopicConnection;
import javax.jms.TopicConnectionFactory;
/**
* The implementation for APIM broker
*/
public class BrokerImpl implements Broker {
private BrokerConfigurations config;
private TopicConnectionFactory connFactory = null;
private static final Logger log = LoggerFactory.getLogger(BrokerImpl.class);
public BrokerImpl() {
config = ServiceReferenceHolder.getInstance().getAPIMConfiguration().getBrokerConfiguration();
JMSConnectionConfiguration jmsConnectionConfiguration = config.getJmsConnectionConfiguration();
Class<?> clientClass = null;
Constructor<?> construct = null;
Object clientInst = null;
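// The ActiveMQ connection factory is loaded reflectively, presumably to avoid a hard compile-time dependency on the client library.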
try {
clientClass = Class.forName("org.apache.activemq.ActiveMQConnectionFactory");
construct = clientClass.getConstructor(String.class);
clientInst = construct.newInstance(jmsConnectionConfiguration.getTopicConnectionFactoryURL());
connFactory = (TopicConnectionFactory) clientInst;
} catch (ClassNotFoundException | NoSuchMethodException | InstantiationException | IllegalAccessException
| InvocationTargetException e) {
String error = "Could not create a JMS client connection from the class";
log.error(error);
}
}
/**
* {@inheritDoc}
*/
@Override
public TopicConnection getTopicConnection() throws JMSException, BrokerException {
if (connFactory == null) {
String error = "Could not create a new connection to the broker. Connection Factory:[null].";
log.error(error);
throw new BrokerException(error, ExceptionCodes.BROKER_EXCEPTION);
}
return connFactory.createTopicConnection();
}
}
|
components/apimgt/org.wso2.carbon.apimgt.core/src/main/java/org/wso2/carbon/apimgt/core/impl/BrokerImpl.java
|
/*
* Copyright (c) 2017, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.apimgt.core.impl;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.wso2.carbon.apimgt.core.api.Broker;
import org.wso2.carbon.apimgt.core.configuration.models.BrokerConfigurations;
import org.wso2.carbon.apimgt.core.configuration.models.JMSConnectionConfiguration;
import org.wso2.carbon.apimgt.core.exception.BrokerException;
import org.wso2.carbon.apimgt.core.exception.ExceptionCodes;
import org.wso2.carbon.apimgt.core.internal.ServiceReferenceHolder;
import org.wso2.carbon.apimgt.core.util.BrokerUtil;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import javax.jms.JMSException;
import javax.jms.TopicConnection;
import javax.jms.TopicConnectionFactory;
/**
* The implementation for APIM broker
*/
public class BrokerImpl implements Broker {
private BrokerConfigurations config;
private TopicConnectionFactory connFactory = null;
// private static final String CF_NAME_PREFIX = "connectionfactory.";
private static final Logger log = LoggerFactory.getLogger(BrokerImpl.class);
public BrokerImpl() {
config = ServiceReferenceHolder.getInstance().getAPIMConfiguration().getBrokerConfiguration();
JMSConnectionConfiguration jmsConnectionConfiguration = config.getJmsConnectionConfiguration();
Class<?> clientClass = null;
Constructor<?> construct = null;
Object clientInst = null;
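// The ActiveMQ connection factory is loaded reflectively, presumably to avoid a hard compile-time dependency on the client library.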
try {
clientClass = Class.forName("org.apache.activemq.ActiveMQConnectionFactory");
construct = clientClass.getConstructor(String.class);
clientInst = construct.newInstance(jmsConnectionConfiguration.getTopicConnectionFactoryURL());
connFactory = (TopicConnectionFactory) clientInst;
} catch (ClassNotFoundException | NoSuchMethodException | InstantiationException | IllegalAccessException
| InvocationTargetException e) {
String error = "Could not create a JMS client connection from the class";
log.error(error);
}
}
/**
* {@inheritDoc}
*/
@Override
public TopicConnection getTopicConnection() throws JMSException, BrokerException {
if (connFactory == null) {
String error = "Could not create a new connection to the broker. Connection Factory:[null].";
log.error(error);
throw new BrokerException(error, ExceptionCodes.BROKER_EXCEPTION);
}
return connFactory.createTopicConnection();
}
}
|
remove unwanted line
|
components/apimgt/org.wso2.carbon.apimgt.core/src/main/java/org/wso2/carbon/apimgt/core/impl/BrokerImpl.java
|
remove unwanted line
|
|
Java
|
apache-2.0
|
129cd6ace57699c19343bd878d41dd33f7148f37
| 0
|
glyptodon/guacamole-client,mike-jumper/incubator-guacamole-client,mike-jumper/incubator-guacamole-client,mike-jumper/incubator-guacamole-client,glyptodon/guacamole-client,glyptodon/guacamole-client,glyptodon/guacamole-client,mike-jumper/incubator-guacamole-client,glyptodon/guacamole-client
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.guacamole.auth.saml;
import com.google.inject.Inject;
import com.google.inject.Provider;
import com.onelogin.saml2.authn.AuthnRequest;
import com.onelogin.saml2.authn.SamlResponse;
import com.onelogin.saml2.exception.SettingsException;
import com.onelogin.saml2.exception.ValidationError;
import com.onelogin.saml2.settings.Saml2Settings;
import com.onelogin.saml2.util.Util;
import java.io.IOException;
import java.util.Arrays;
import javax.servlet.http.HttpServletRequest;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPathExpressionException;
import org.apache.guacamole.auth.saml.conf.ConfigurationService;
import org.apache.guacamole.auth.saml.form.SAMLRedirectField;
import org.apache.guacamole.auth.saml.user.SAMLAuthenticatedUser;
import org.apache.guacamole.GuacamoleException;
import org.apache.guacamole.form.Field;
import org.apache.guacamole.net.auth.AuthenticatedUser;
import org.apache.guacamole.net.auth.Credentials;
import org.apache.guacamole.net.auth.credentials.CredentialsInfo;
import org.apache.guacamole.net.auth.credentials.GuacamoleInvalidCredentialsException;
import org.apache.guacamole.net.auth.credentials.GuacamoleInsufficientCredentialsException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xml.sax.SAXException;
/**
* Class that provides services for use by the SAMLAuthenticationProvider class.
*/
public class AuthenticationProviderService {
/**
* Logger for this class.
*/
private final Logger logger = LoggerFactory.getLogger(AuthenticationProviderService.class);
/**
* Service for retrieving SAML configuration information.
*/
@Inject
private ConfigurationService confService;
/**
* Provider for AuthenticatedUser objects.
*/
@Inject
private Provider<SAMLAuthenticatedUser> authenticatedUserProvider;
/**
* The map used to track active SAML responses.
*/
@Inject
private SAMLResponseMap samlResponseMap;
/**
* Returns an AuthenticatedUser representing the user authenticated by the
* given credentials.
*
* @param credentials
* The credentials to use for authentication.
*
* @return
* An AuthenticatedUser representing the user authenticated by the
* given credentials.
*
* @throws GuacamoleException
* If an error occurs while authenticating the user, or if access is
* denied.
*/
public AuthenticatedUser authenticateUser(Credentials credentials)
throws GuacamoleException {
HttpServletRequest request = credentials.getRequest();
// Initialize and configure SAML client.
Saml2Settings samlSettings = confService.getSamlSettings();
if (request != null) {
// Look for the SAML Response parameter.
String responseHash = request.getParameter("responseHash");
if (responseHash != null) {
try {
// Generate the response object
if (!samlResponseMap.hasSamlResponse(responseHash))
throw new GuacamoleInvalidCredentialsException("Provided response has not found.",
CredentialsInfo.USERNAME_PASSWORD);
SamlResponse samlResponse = samlResponseMap.getSamlResponse(responseHash);
if (!samlResponse.validateNumAssertions()) {
logger.warn("SAML response contained other than single assertion.");
logger.debug("validateNumAssertions returned false.");
throw new GuacamoleInvalidCredentialsException("Error during SAML login.",
CredentialsInfo.USERNAME_PASSWORD);
}
if (!samlResponse.validateTimestamps()) {
logger.warn("SAML response timestamps were invalid.");
logger.debug("validateTimestamps returned false.");
throw new GuacamoleInvalidCredentialsException("Error during SAML login.",
CredentialsInfo.USERNAME_PASSWORD);
}
// Grab the username, and, if present, finish authentication.
String nameId = samlResponse.getNameId();
if (nameId != null) {
    String username = nameId.toLowerCase();
credentials.setUsername(username);
SAMLAuthenticatedUser authenticatedUser = authenticatedUserProvider.get();
authenticatedUser.init(username, credentials);
return authenticatedUser;
}
}
// Errors are logged and result in a normal username/password login box.
catch (IOException e) {
logger.warn("Error during I/O while parsing SAML response: {}", e.getMessage());
logger.debug("Received IOException when trying to parse SAML response.", e);
throw new GuacamoleInvalidCredentialsException("Error during SAML login.",
CredentialsInfo.USERNAME_PASSWORD);
}
catch (ParserConfigurationException e) {
logger.warn("Error configuring XML parser: {}", e.getMessage());
logger.debug("Received ParserConfigurationException when trying to parse SAML response.", e);
throw new GuacamoleInvalidCredentialsException("Error during SAML login.",
CredentialsInfo.USERNAME_PASSWORD);
}
catch (SAXException e) {
logger.warn("Bad XML when parsing SAML response: {}", e.getMessage());
logger.debug("Received SAXException while parsing SAML response.", e);
throw new GuacamoleInvalidCredentialsException("Error during SAML login.",
CredentialsInfo.USERNAME_PASSWORD);
}
catch (SettingsException e) {
logger.warn("Error with SAML settings while parsing response: {}", e.getMessage());
logger.debug("Received SettingsException while parsing SAML response.", e);
throw new GuacamoleInvalidCredentialsException("Error during SAML login.",
CredentialsInfo.USERNAME_PASSWORD);
}
catch (ValidationError e) {
logger.warn("Error validating SAML response: {}", e.getMessage());
logger.debug("Received ValidationError while parsing SAML response.", e);
throw new GuacamoleInvalidCredentialsException("Error during SAML login.",
CredentialsInfo.USERNAME_PASSWORD);
}
catch (XPathExpressionException e) {
logger.warn("Problem with XML parsing response: {}", e.getMessage());
logger.debug("Received XPathExpressionException while processing SAML response.", e);
throw new GuacamoleInvalidCredentialsException("Error during SAML login.",
CredentialsInfo.USERNAME_PASSWORD);
}
catch (Exception e) {
logger.warn("Exception while getting name from SAML response: {}", e.getMessage());
logger.debug("Received Exception while retrieving name from SAML response.", e);
throw new GuacamoleInvalidCredentialsException("Error during SAML login.",
CredentialsInfo.USERNAME_PASSWORD);
}
}
}
// No SAML Response is present, so generate a request.
AuthnRequest samlReq = new AuthnRequest(samlSettings);
String reqString;
try {
reqString = samlSettings.getIdpSingleSignOnServiceUrl() + "?SAMLRequest=" +
Util.urlEncoder(samlReq.getEncodedAuthnRequest());
}
catch (IOException e) {
logger.error("Error encoding authentication request to string: {}", e.getMessage());
logger.debug("Got IOException encoding authentication request.", e);
throw new GuacamoleInvalidCredentialsException("Error during SAML login.",
CredentialsInfo.USERNAME_PASSWORD);
}
// Redirect to SAML Identity Provider (IdP)
throw new GuacamoleInsufficientCredentialsException("Redirecting to SAML IdP.",
new CredentialsInfo(Arrays.asList(new Field[] {
new SAMLRedirectField(reqString)
}))
);
}
}
|
extensions/guacamole-auth-saml/src/main/java/org/apache/guacamole/auth/saml/AuthenticationProviderService.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.guacamole.auth.saml;
import com.google.inject.Inject;
import com.google.inject.Provider;
import com.onelogin.saml2.authn.AuthnRequest;
import com.onelogin.saml2.authn.SamlResponse;
import com.onelogin.saml2.exception.SettingsException;
import com.onelogin.saml2.exception.ValidationError;
import com.onelogin.saml2.settings.Saml2Settings;
import com.onelogin.saml2.util.Util;
import java.io.IOException;
import java.util.Arrays;
import javax.servlet.http.HttpServletRequest;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPathExpressionException;
import org.apache.guacamole.auth.saml.conf.ConfigurationService;
import org.apache.guacamole.auth.saml.form.SAMLRedirectField;
import org.apache.guacamole.auth.saml.user.SAMLAuthenticatedUser;
import org.apache.guacamole.GuacamoleException;
import org.apache.guacamole.form.Field;
import org.apache.guacamole.net.auth.AuthenticatedUser;
import org.apache.guacamole.net.auth.Credentials;
import org.apache.guacamole.net.auth.credentials.CredentialsInfo;
import org.apache.guacamole.net.auth.credentials.GuacamoleInvalidCredentialsException;
import org.apache.guacamole.net.auth.credentials.GuacamoleInsufficientCredentialsException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xml.sax.SAXException;
/**
* Class that provides services for use by the SAMLAuthenticationProvider class.
*/
public class AuthenticationProviderService {
/**
* Logger for this class.
*/
private final Logger logger = LoggerFactory.getLogger(AuthenticationProviderService.class);
/**
* Service for retrieving SAML configuration information.
*/
@Inject
private ConfigurationService confService;
/**
* Provider for AuthenticatedUser objects.
*/
@Inject
private Provider<SAMLAuthenticatedUser> authenticatedUserProvider;
/**
* The map used to track active SAML responses.
*/
@Inject
private SAMLResponseMap samlResponseMap;
/**
* Returns an AuthenticatedUser representing the user authenticated by the
* given credentials.
*
* @param credentials
* The credentials to use for authentication.
*
* @return
* An AuthenticatedUser representing the user authenticated by the
* given credentials.
*
* @throws GuacamoleException
* If an error occurs while authenticating the user, or if access is
* denied.
*/
public AuthenticatedUser authenticateUser(Credentials credentials)
throws GuacamoleException {
HttpServletRequest request = credentials.getRequest();
// Initialize and configure SAML client.
Saml2Settings samlSettings = confService.getSamlSettings();
if (request != null) {
// Look for the SAML Response parameter.
String responseHash = Util.urlDecoder(request.getParameter("responseHash"));
if (responseHash != null) {
try {
// Generate the response object
if (!samlResponseMap.hasSamlResponse(responseHash))
throw new GuacamoleInvalidCredentialsException("Provided response has not found.",
CredentialsInfo.USERNAME_PASSWORD);
SamlResponse samlResponse = samlResponseMap.getSamlResponse(responseHash);
if (!samlResponse.validateNumAssertions()) {
logger.warn("SAML response contained other than single assertion.");
logger.debug("validateNumAssertions returned false.");
throw new GuacamoleInvalidCredentialsException("Error during SAML login.",
CredentialsInfo.USERNAME_PASSWORD);
}
if (!samlResponse.validateTimestamps()) {
logger.warn("SAML response timestamps were invalid.");
logger.debug("validateTimestamps returned false.");
throw new GuacamoleInvalidCredentialsException("Error during SAML login.",
CredentialsInfo.USERNAME_PASSWORD);
}
// Grab the username, and, if present, finish authentication.
String nameId = samlResponse.getNameId();
if (nameId != null) {
    String username = nameId.toLowerCase();
credentials.setUsername(username);
SAMLAuthenticatedUser authenticatedUser = authenticatedUserProvider.get();
authenticatedUser.init(username, credentials);
return authenticatedUser;
}
}
// Errors are logged and result in a normal username/password login box.
catch (IOException e) {
logger.warn("Error during I/O while parsing SAML response: {}", e.getMessage());
logger.debug("Received IOException when trying to parse SAML response.", e);
throw new GuacamoleInvalidCredentialsException("Error during SAML login.",
CredentialsInfo.USERNAME_PASSWORD);
}
catch (ParserConfigurationException e) {
logger.warn("Error configuring XML parser: {}", e.getMessage());
logger.debug("Received ParserConfigurationException when trying to parse SAML response.", e);
throw new GuacamoleInvalidCredentialsException("Error during SAML login.",
CredentialsInfo.USERNAME_PASSWORD);
}
catch (SAXException e) {
logger.warn("Bad XML when parsing SAML response: {}", e.getMessage());
logger.debug("Received SAXException while parsing SAML response.", e);
throw new GuacamoleInvalidCredentialsException("Error during SAML login.",
CredentialsInfo.USERNAME_PASSWORD);
}
catch (SettingsException e) {
logger.warn("Error with SAML settings while parsing response: {}", e.getMessage());
logger.debug("Received SettingsException while parsing SAML response.", e);
throw new GuacamoleInvalidCredentialsException("Error during SAML login.",
CredentialsInfo.USERNAME_PASSWORD);
}
catch (ValidationError e) {
logger.warn("Error validating SAML response: {}", e.getMessage());
logger.debug("Received ValidationError while parsing SAML response.", e);
throw new GuacamoleInvalidCredentialsException("Error during SAML login.",
CredentialsInfo.USERNAME_PASSWORD);
}
catch (XPathExpressionException e) {
logger.warn("Problem with XML parsing response: {}", e.getMessage());
logger.debug("Received XPathExpressionException while processing SAML response.", e);
throw new GuacamoleInvalidCredentialsException("Error during SAML login.",
CredentialsInfo.USERNAME_PASSWORD);
}
catch (Exception e) {
logger.warn("Exception while getting name from SAML response: {}", e.getMessage());
logger.debug("Received Exception while retrieving name from SAML response.", e);
throw new GuacamoleInvalidCredentialsException("Error during SAML login.",
CredentialsInfo.USERNAME_PASSWORD);
}
}
}
// No SAML Response is present, so generate a request.
AuthnRequest samlReq = new AuthnRequest(samlSettings);
String reqString;
try {
reqString = samlSettings.getIdpSingleSignOnServiceUrl() + "?SAMLRequest=" +
Util.urlEncoder(samlReq.getEncodedAuthnRequest());
}
catch (IOException e) {
logger.error("Error encoding authentication request to string: {}", e.getMessage());
logger.debug("Got IOException encoding authentication request.", e);
throw new GuacamoleInvalidCredentialsException("Error during SAML login.",
CredentialsInfo.USERNAME_PASSWORD);
}
// Redirect to SAML Identity Provider (IdP)
throw new GuacamoleInsufficientCredentialsException("Redirecting to SAML IdP.",
new CredentialsInfo(Arrays.asList(new Field[] {
new SAMLRedirectField(reqString)
}))
);
}
}
|
GUACAMOLE-103: Remove unnecessary parameter decoding
|
extensions/guacamole-auth-saml/src/main/java/org/apache/guacamole/auth/saml/AuthenticationProviderService.java
|
GUACAMOLE-103: Remove unnecessary parameter decoding
|
|
Java
|
apache-2.0
|
cc21c64f26ce3258d3b3d26186fb57ea505e220b
| 0
|
krosenvold/AxonFramework,AxonFramework/AxonFramework
|
/*
* Copyright (c) 2010-2018. Axon Framework
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.axonframework.integrationtests.eventhandling;
import junit.framework.TestCase;
import org.axonframework.common.transaction.NoTransactionManager;
import org.axonframework.common.transaction.Transaction;
import org.axonframework.common.transaction.TransactionManager;
import org.axonframework.eventhandling.*;
import org.axonframework.eventhandling.tokenstore.TokenStore;
import org.axonframework.eventhandling.tokenstore.UnableToClaimTokenException;
import org.axonframework.eventhandling.tokenstore.inmemory.InMemoryTokenStore;
import org.axonframework.eventsourcing.eventstore.EmbeddedEventStore;
import org.axonframework.eventsourcing.eventstore.inmemory.InMemoryEventStorageEngine;
import org.axonframework.integrationtests.utils.MockException;
import org.axonframework.messaging.StreamableMessageSource;
import org.axonframework.serialization.SerializationException;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mockito.InOrder;
import org.mockito.Mockito;
import org.springframework.test.annotation.DirtiesContext;
import java.util.*;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Supplier;
import static java.util.Arrays.asList;
import static java.util.Collections.emptySortedSet;
import static java.util.stream.Collectors.toList;
import static junit.framework.TestCase.*;
import static org.axonframework.eventhandling.EventUtils.asTrackedEventMessage;
import static org.axonframework.integrationtests.utils.AssertUtils.assertWithin;
import static org.axonframework.integrationtests.utils.EventTestUtils.createEvent;
import static org.axonframework.integrationtests.utils.EventTestUtils.createEvents;
import static org.mockito.Mockito.*;
/**
* @author Rene de Waele
* @author Nakul Mishra
*/
public class TrackingEventProcessorTest {
private TrackingEventProcessor testSubject;
private EmbeddedEventStore eventBus;
private TokenStore tokenStore;
private EventHandlerInvoker eventHandlerInvoker;
private EventMessageHandler mockHandler;
private List<Long> sleepInstructions;
private TransactionManager mockTransactionManager;
private Transaction mockTransaction;
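/**
 * Test helper that adapts the given iterator into a TrackingEventStream, buffering a single
 * element so that peek() can be supported.
 */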
static TrackingEventStream trackingEventStreamOf(Iterator<TrackedEventMessage<?>> iterator) {
return new TrackingEventStream() {
private boolean hasPeeked;
private TrackedEventMessage<?> peekEvent;
@Override
public Optional<TrackedEventMessage<?>> peek() {
if (!hasPeeked) {
if (!hasNextAvailable()) {
return Optional.empty();
}
peekEvent = iterator.next();
hasPeeked = true;
}
return Optional.of(peekEvent);
}
@Override
public boolean hasNextAvailable(int timeout, TimeUnit unit) {
return hasPeeked || iterator.hasNext();
}
@Override
public TrackedEventMessage nextAvailable() {
if (!hasPeeked) {
return iterator.next();
}
TrackedEventMessage<?> result = peekEvent;
peekEvent = null;
hasPeeked = false;
return result;
}
@Override
public void close() {
}
};
}
@Before
public void setUp() {
tokenStore = spy(new InMemoryTokenStore());
mockHandler = mock(EventMessageHandler.class);
when(mockHandler.canHandle(any())).thenReturn(true);
when(mockHandler.supportsReset()).thenReturn(true);
eventHandlerInvoker = Mockito.spy(SimpleEventHandlerInvoker.builder().eventHandlers(mockHandler).build());
mockTransaction = mock(Transaction.class);
mockTransactionManager = mock(TransactionManager.class);
when(mockTransactionManager.startTransaction()).thenReturn(mockTransaction);
when(mockTransactionManager.fetchInTransaction(any(Supplier.class))).thenAnswer(i -> {
Supplier s = i.getArgument(0);
return s.get();
});
doAnswer(i -> {
Runnable r = i.getArgument(0);
r.run();
return null;
}).when(mockTransactionManager).executeInTransaction(any(Runnable.class));
eventBus = EmbeddedEventStore.builder().storageEngine(new InMemoryEventStorageEngine()).build();
sleepInstructions = new ArrayList<>();
TrackingEventProcessor.Builder eventProcessorBuilder =
TrackingEventProcessor.builder()
.name("test")
.eventHandlerInvoker(eventHandlerInvoker)
.messageSource(eventBus)
.tokenStore(tokenStore)
.transactionManager(mockTransactionManager);
testSubject = new TrackingEventProcessor(eventProcessorBuilder) {
@Override
protected void doSleepFor(long millisToSleep) {
if (isRunning()) {
sleepInstructions.add(millisToSleep);
}
}
};
}
@After
public void tearDown() {
testSubject.shutDown();
eventBus.shutDown();
}
@Test
public void testPublishedEventsGetPassedToHandler() throws Exception {
CountDownLatch countDownLatch = new CountDownLatch(2);
doAnswer(invocation -> {
countDownLatch.countDown();
return null;
}).when(mockHandler).handle(any());
testSubject.start();
// give it a bit of time to start
Thread.sleep(200);
eventBus.publish(createEvents(2));
assertTrue("Expected Handler to have received 2 published events", countDownLatch.await(5, TimeUnit.SECONDS));
}
@Test
public void testHandlerIsInvokedInTransactionScope() throws Exception {
CountDownLatch countDownLatch = new CountDownLatch(1);
AtomicInteger counter = new AtomicInteger();
AtomicInteger counterAtHandle = new AtomicInteger();
when(mockTransactionManager.startTransaction()).thenAnswer(i -> {
counter.incrementAndGet();
return mockTransaction;
});
doAnswer(i -> counter.decrementAndGet()).when(mockTransaction).rollback();
doAnswer(i -> counter.decrementAndGet()).when(mockTransaction).commit();
doAnswer(invocation -> {
counterAtHandle.set(counter.get());
countDownLatch.countDown();
return null;
}).when(mockHandler).handle(any());
testSubject.start();
// give it a bit of time to start
Thread.sleep(200);
eventBus.publish(createEvents(2));
assertTrue("Expected Handler to have received 2 published events", countDownLatch.await(5, TimeUnit.SECONDS));
assertEquals(1, counterAtHandle.get());
}
@Test
public void testProcessorStopsOnNonTransientExceptionWhenLoadingToken() {
when(tokenStore.fetchToken("test", 0)).thenThrow(new SerializationException("Faking a serialization issue"));
testSubject.start();
assertWithin(
1, TimeUnit.SECONDS,
() -> assertFalse("Expected processor to have stopped", testSubject.isRunning())
);
assertWithin(
1, TimeUnit.SECONDS,
() -> assertTrue("Expected processor to set the error flag", testSubject.isError())
);
assertEquals(Collections.emptyList(), sleepInstructions);
}
@Test
public void testProcessorRetriesOnTransientExceptionWhenLoadingToken() throws Exception {
CountDownLatch countDownLatch = new CountDownLatch(1);
doAnswer(invocation -> {
countDownLatch.countDown();
return null;
}).when(mockHandler).handle(any());
when(tokenStore.fetchToken("test", 0)).thenThrow(new RuntimeException("Faking a recoverable issue"))
.thenCallRealMethod();
testSubject.start();
// give it a bit of time to start
Thread.sleep(200);
eventBus.publish(createEvent());
assertTrue("Expected Handler to have received published event", countDownLatch.await(5, TimeUnit.SECONDS));
assertTrue(testSubject.isRunning());
assertFalse(testSubject.isError());
assertEquals(Collections.singletonList(5000L), sleepInstructions);
}
@Test
public void testTokenIsStoredWhenEventIsRead() throws Exception {
CountDownLatch countDownLatch = new CountDownLatch(1);
testSubject.registerHandlerInterceptor(((unitOfWork, interceptorChain) -> {
unitOfWork.onCleanup(uow -> countDownLatch.countDown());
return interceptorChain.proceed();
}));
testSubject.start();
// give it a bit of time to start
Thread.sleep(200);
eventBus.publish(createEvent());
assertTrue("Expected Unit of Work to have reached clean up phase", countDownLatch.await(5, TimeUnit.SECONDS));
verify(tokenStore).extendClaim(eq(testSubject.getName()), anyInt());
verify(tokenStore).storeToken(any(), any(), anyInt());
assertNotNull(tokenStore.fetchToken(testSubject.getName(), 0));
}
@Test
public void testTokenIsStoredOncePerEventBatch() throws Exception {
testSubject = TrackingEventProcessor.builder()
.name("test")
.eventHandlerInvoker(eventHandlerInvoker)
.messageSource(eventBus)
.tokenStore(tokenStore)
.transactionManager(NoTransactionManager.INSTANCE)
.build();
CountDownLatch countDownLatch = new CountDownLatch(2);
testSubject.registerHandlerInterceptor(((unitOfWork, interceptorChain) -> {
unitOfWork.onCleanup(uow -> countDownLatch.countDown());
return interceptorChain.proceed();
}));
testSubject.start();
// give it a bit of time to start
Thread.sleep(200);
eventBus.publish(createEvents(2));
assertTrue("Expected Unit of Work to have reached clean up phase for 2 messages",
countDownLatch.await(5, TimeUnit.SECONDS));
InOrder inOrder = inOrder(tokenStore);
inOrder.verify(tokenStore, times(1)).extendClaim(eq(testSubject.getName()), anyInt());
inOrder.verify(tokenStore, times(1)).storeToken(any(), any(), anyInt());
assertNotNull(tokenStore.fetchToken(testSubject.getName(), 0));
}
@Test
public void testTokenIsNotStoredWhenUnitOfWorkIsRolledBack() throws Exception {
CountDownLatch countDownLatch = new CountDownLatch(1);
testSubject.registerHandlerInterceptor(((unitOfWork, interceptorChain) -> {
unitOfWork.onCommit(uow -> {
throw new MockException();
});
return interceptorChain.proceed();
}));
testSubject.registerHandlerInterceptor(((unitOfWork, interceptorChain) -> {
unitOfWork.onCleanup(uow -> countDownLatch.countDown());
return interceptorChain.proceed();
}));
testSubject.start();
// give it a bit of time to start
Thread.sleep(200);
eventBus.publish(createEvent());
assertTrue("Expected Unit of Work to have reached clean up phase", countDownLatch.await(5, TimeUnit.SECONDS));
assertNull(tokenStore.fetchToken(testSubject.getName(), 0));
}
@Test
@DirtiesContext
public void testContinueFromPreviousToken() throws Exception {
tokenStore = new InMemoryTokenStore();
eventBus.publish(createEvents(10));
TrackedEventMessage<?> firstEvent = eventBus.openStream(null).nextAvailable();
tokenStore.storeToken(firstEvent.trackingToken(), testSubject.getName(), 0);
assertEquals(firstEvent.trackingToken(), tokenStore.fetchToken(testSubject.getName(), 0));
List<EventMessage<?>> ackedEvents = new ArrayList<>();
CountDownLatch countDownLatch = new CountDownLatch(9);
doAnswer(invocation -> {
ackedEvents.add((EventMessage<?>) invocation.getArguments()[0]);
countDownLatch.countDown();
return null;
}).when(mockHandler).handle(any());
testSubject = TrackingEventProcessor.builder()
.name("test")
.eventHandlerInvoker(eventHandlerInvoker)
.messageSource(eventBus)
.tokenStore(tokenStore)
.transactionManager(NoTransactionManager.INSTANCE)
.build();
testSubject.start();
// give it a bit of time to start
Thread.sleep(200);
assertTrue("Expected 9 invocations on Event Handler by now", countDownLatch.await(5, TimeUnit.SECONDS));
assertEquals(9, ackedEvents.size());
}
@Test(timeout = 10000)
@DirtiesContext
public void testContinueAfterPause() throws Exception {
List<EventMessage<?>> ackedEvents = new ArrayList<>();
CountDownLatch countDownLatch = new CountDownLatch(2);
doAnswer(invocation -> {
ackedEvents.add((EventMessage<?>) invocation.getArguments()[0]);
countDownLatch.countDown();
return null;
}).when(mockHandler).handle(any());
testSubject.start();
// give it a bit of time to start
Thread.sleep(200);
eventBus.publish(createEvents(2));
assertTrue("Expected 2 invocations on Event Handler by now", countDownLatch.await(5, TimeUnit.SECONDS));
assertEquals(2, ackedEvents.size());
testSubject.shutDown();
// The thread may block for 1 second waiting for a next event to pop up
while (testSubject.activeProcessorThreads() > 0) {
Thread.sleep(1);
// wait...
}
CountDownLatch countDownLatch2 = new CountDownLatch(2);
doAnswer(invocation -> {
ackedEvents.add((EventMessage<?>) invocation.getArguments()[0]);
countDownLatch2.countDown();
return null;
}).when(mockHandler).handle(any());
eventBus.publish(createEvents(2));
assertEquals(2, countDownLatch2.getCount());
testSubject.start();
// give it a bit of time to start
Thread.sleep(200);
assertTrue("Expected 4 invocations on Event Handler by now", countDownLatch2.await(5, TimeUnit.SECONDS));
assertEquals(4, ackedEvents.size());
}
@Test
@DirtiesContext
public void testProcessorGoesToRetryModeWhenOpenStreamFails() throws Exception {
eventBus = spy(eventBus);
tokenStore = new InMemoryTokenStore();
eventBus.publish(createEvents(5));
when(eventBus.openStream(any())).thenThrow(new MockException()).thenCallRealMethod();
List<EventMessage<?>> ackedEvents = new ArrayList<>();
CountDownLatch countDownLatch = new CountDownLatch(5);
doAnswer(invocation -> {
ackedEvents.add((EventMessage<?>) invocation.getArguments()[0]);
countDownLatch.countDown();
return null;
}).when(mockHandler).handle(any());
testSubject = TrackingEventProcessor.builder()
.name("test")
.eventHandlerInvoker(eventHandlerInvoker)
.messageSource(eventBus)
.tokenStore(tokenStore)
.transactionManager(NoTransactionManager.INSTANCE)
.build();
testSubject.start();
// give it a bit of time to start
Thread.sleep(200);
assertTrue("Expected 5 invocations on Event Handler by now", countDownLatch.await(10, TimeUnit.SECONDS));
assertEquals(5, ackedEvents.size());
verify(eventBus, times(2)).openStream(any());
}
@Test
public void testFirstTokenIsStoredWhenUnitOfWorkIsRolledBackOnSecondEvent() throws Exception {
List<? extends EventMessage<?>> events = createEvents(2);
CountDownLatch countDownLatch = new CountDownLatch(2);
testSubject.registerHandlerInterceptor(((unitOfWork, interceptorChain) -> {
unitOfWork.onCommit(uow -> {
if (uow.getMessage().equals(events.get(1))) {
throw new MockException();
}
});
return interceptorChain.proceed();
}));
testSubject.registerHandlerInterceptor(((unitOfWork, interceptorChain) -> {
unitOfWork.onCleanup(uow -> countDownLatch.countDown());
return interceptorChain.proceed();
}));
testSubject.start();
// give it a bit of time to start
Thread.sleep(200);
eventBus.publish(events);
assertTrue("Expected Unit of Work to have reached clean up phase", countDownLatch.await(5, TimeUnit.SECONDS));
verify(tokenStore, atLeastOnce()).storeToken(any(), any(), anyInt());
assertNotNull(tokenStore.fetchToken(testSubject.getName(), 0));
}
@Test
@DirtiesContext
@SuppressWarnings("unchecked")
public void testEventsWithTheSameTokenAreProcessedInTheSameBatch() throws Exception {
eventBus.shutDown();
eventBus = mock(EmbeddedEventStore.class);
TrackingToken trackingToken = new GlobalSequenceTrackingToken(0);
List<TrackedEventMessage<?>> events =
createEvents(2).stream().map(event -> asTrackedEventMessage(event, trackingToken)).collect(toList());
when(eventBus.openStream(null)).thenReturn(trackingEventStreamOf(events.iterator()));
testSubject = TrackingEventProcessor.builder()
.name("test")
.eventHandlerInvoker(eventHandlerInvoker)
.messageSource(eventBus)
.tokenStore(tokenStore)
.transactionManager(NoTransactionManager.INSTANCE)
.build();
//noinspection Duplicates
testSubject.registerHandlerInterceptor(((unitOfWork, interceptorChain) -> {
unitOfWork.onCommit(uow -> {
if (uow.getMessage().equals(events.get(1))) {
throw new MockException();
}
});
return interceptorChain.proceed();
}));
CountDownLatch countDownLatch = new CountDownLatch(2);
testSubject.registerHandlerInterceptor(((unitOfWork, interceptorChain) -> {
unitOfWork.onCleanup(uow -> countDownLatch.countDown());
return interceptorChain.proceed();
}));
testSubject.start();
// Give it a bit of time to start
Thread.sleep(200);
assertTrue("Expected Unit of Work to have reached clean up phase", countDownLatch.await(5, TimeUnit.SECONDS));
verify(tokenStore, atLeastOnce()).storeToken(any(), any(), anyInt());
assertNull(tokenStore.fetchToken(testSubject.getName(), 0));
}
@Test
public void testResetCausesEventsToBeReplayed() throws Exception {
when(mockHandler.supportsReset()).thenReturn(true);
final List<String> handled = new CopyOnWriteArrayList<>();
final List<String> handledInRedelivery = new CopyOnWriteArrayList<>();
//noinspection Duplicates
doAnswer(i -> {
EventMessage message = i.getArgument(0);
handled.add(message.getIdentifier());
if (ReplayToken.isReplay(message)) {
handledInRedelivery.add(message.getIdentifier());
}
return null;
}).when(mockHandler).handle(any());
eventBus.publish(createEvents(4));
testSubject.start();
assertWithin(1, TimeUnit.SECONDS, () -> assertEquals(4, handled.size()));
testSubject.shutDown();
testSubject.resetTokens();
// Resetting twice caused problems (see issue #559)
testSubject.resetTokens();
testSubject.start();
assertWithin(1, TimeUnit.SECONDS, () -> assertEquals(8, handled.size()));
assertEquals(handled.subList(0, 4), handled.subList(4, 8));
assertEquals(handled.subList(4, 8), handledInRedelivery);
assertTrue(testSubject.processingStatus().get(0).isReplaying());
eventBus.publish(createEvents(1));
assertWithin(1, TimeUnit.SECONDS, () -> assertFalse(testSubject.processingStatus().get(0).isReplaying()));
}
@Test
public void testResetToPositionCausesCertainEventsToBeReplayed() throws Exception {
when(mockHandler.supportsReset()).thenReturn(true);
final List<String> handled = new CopyOnWriteArrayList<>();
final List<String> handledInRedelivery = new CopyOnWriteArrayList<>();
//noinspection Duplicates
doAnswer(i -> {
EventMessage message = i.getArgument(0);
handled.add(message.getIdentifier());
if (ReplayToken.isReplay(message)) {
handledInRedelivery.add(message.getIdentifier());
}
return null;
}).when(mockHandler).handle(any());
eventBus.publish(createEvents(4));
testSubject.start();
assertWithin(1, TimeUnit.SECONDS, () -> assertEquals(4, handled.size()));
testSubject.shutDown();
testSubject.resetTokens(source -> new GlobalSequenceTrackingToken(1L));
testSubject.start();
assertWithin(1, TimeUnit.SECONDS, () -> assertEquals(6, handled.size()));
assertFalse(handledInRedelivery.contains(handled.get(0)));
assertFalse(handledInRedelivery.contains(handled.get(1)));
assertEquals(handled.subList(2, 4), handled.subList(4, 6));
assertEquals(handled.subList(4, 6), handledInRedelivery);
assertTrue(testSubject.processingStatus().get(0).isReplaying());
eventBus.publish(createEvents(1));
assertWithin(1, TimeUnit.SECONDS, () -> assertFalse(testSubject.processingStatus().get(0).isReplaying()));
}
@Test
public void testResetOnInitializeWithTokenResetToThatToken() throws Exception {
TrackingEventProcessorConfiguration config =
TrackingEventProcessorConfiguration.forSingleThreadedProcessing()
.andInitialTrackingToken(ms -> new GlobalSequenceTrackingToken(1L));
TrackingEventProcessor.Builder eventProcessorBuilder =
TrackingEventProcessor.builder()
.name("test")
.eventHandlerInvoker(eventHandlerInvoker)
.messageSource(eventBus)
.tokenStore(tokenStore)
.transactionManager(NoTransactionManager.INSTANCE)
.trackingEventProcessorConfiguration(config);
testSubject = new TrackingEventProcessor(eventProcessorBuilder) {
@Override
protected void doSleepFor(long millisToSleep) {
if (isRunning()) {
sleepInstructions.add(millisToSleep);
}
}
};
when(mockHandler.supportsReset()).thenReturn(true);
final List<String> handled = new CopyOnWriteArrayList<>();
final List<String> handledInRedelivery = new CopyOnWriteArrayList<>();
//noinspection Duplicates
doAnswer(i -> {
EventMessage message = i.getArgument(0);
handled.add(message.getIdentifier());
if (ReplayToken.isReplay(message)) {
handledInRedelivery.add(message.getIdentifier());
}
return null;
}).when(mockHandler).handle(any());
eventBus.publish(createEvents(4));
testSubject.start();
assertWithin(1, TimeUnit.SECONDS, () -> assertEquals(2, handled.size()));
testSubject.shutDown();
testSubject.resetTokens();
testSubject.start();
assertWithin(1, TimeUnit.SECONDS, () -> assertEquals(4, handled.size()));
assertEquals(handled.subList(0, 2), handled.subList(2, 4));
assertEquals(handled.subList(2, 4), handledInRedelivery);
assertTrue(testSubject.processingStatus().get(0).isReplaying());
eventBus.publish(createEvents(1));
assertWithin(1, TimeUnit.SECONDS, () -> assertFalse(testSubject.processingStatus().get(0).isReplaying()));
}
@Test
public void testResetBeforeStartingPerformsANormalRun() throws Exception {
when(mockHandler.supportsReset()).thenReturn(true);
final List<String> handled = new CopyOnWriteArrayList<>();
final List<String> handledInRedelivery = new CopyOnWriteArrayList<>();
//noinspection Duplicates
doAnswer(i -> {
EventMessage message = i.getArgument(0);
handled.add(message.getIdentifier());
if (ReplayToken.isReplay(message)) {
handledInRedelivery.add(message.getIdentifier());
}
return null;
}).when(mockHandler).handle(any());
testSubject.start();
testSubject.shutDown();
testSubject.resetTokens();
testSubject.start();
eventBus.publish(createEvents(4));
assertWithin(1, TimeUnit.SECONDS, () -> assertEquals(4, handled.size()));
assertEquals(0, handledInRedelivery.size());
assertFalse(testSubject.processingStatus().get(0).isReplaying());
}
@SuppressWarnings("unchecked")
@Test
public void testReplayFlagAvailableWhenReplayInDifferentOrder() throws Exception {
StreamableMessageSource<TrackedEventMessage<?>> stubSource = mock(StreamableMessageSource.class);
testSubject = TrackingEventProcessor.builder()
.name("test")
.eventHandlerInvoker(eventHandlerInvoker)
.messageSource(stubSource)
.tokenStore(tokenStore)
.transactionManager(NoTransactionManager.INSTANCE)
.build();
when(stubSource.openStream(any())).thenReturn(new StubTrackingEventStream(0, 1, 2, 5))
.thenReturn(new StubTrackingEventStream(0, 1, 2, 3, 4, 5, 6, 7));
when(eventHandlerInvoker.supportsReset()).thenReturn(true);
doReturn(true).when(eventHandlerInvoker).canHandle(any(), any());
List<TrackingToken> firstRun = new CopyOnWriteArrayList<>();
List<TrackingToken> replayRun = new CopyOnWriteArrayList<>();
doAnswer(i -> {
firstRun.add(i.<TrackedEventMessage>getArgument(0).trackingToken());
return null;
}).when(eventHandlerInvoker).handle(any(), any());
testSubject.start();
assertWithin(1, TimeUnit.SECONDS, () -> assertEquals(4, firstRun.size()));
testSubject.shutDown();
doAnswer(i -> {
replayRun.add(i.<TrackedEventMessage>getArgument(0).trackingToken());
return null;
}).when(eventHandlerInvoker).handle(any(), any());
testSubject.resetTokens();
testSubject.start();
assertWithin(1, TimeUnit.SECONDS, () -> assertEquals(8, replayRun.size()));
TestCase.assertEquals(GapAwareTrackingToken.newInstance(5, asList(3L, 4L)), firstRun.get(3));
assertTrue(replayRun.get(0) instanceof ReplayToken);
assertTrue(replayRun.get(5) instanceof ReplayToken);
assertEquals(GapAwareTrackingToken.newInstance(6, emptySortedSet()), replayRun.get(6));
}
@Test(expected = IllegalStateException.class)
public void testResetRejectedWhileRunning() {
testSubject.start();
testSubject.resetTokens();
}
@Test
public void testResetNotSupportedWhenInvokerDoesNotSupportReset() {
when(mockHandler.supportsReset()).thenReturn(false);
assertFalse(testSubject.supportsReset());
}
@Test(expected = IllegalStateException.class)
public void testResetRejectedWhenInvokerDoesNotSupportReset() {
when(mockHandler.supportsReset()).thenReturn(false);
testSubject.resetTokens();
}
@Test
public void testResetRejectedIfNotAllTokensCanBeClaimed() {
tokenStore.initializeTokenSegments("test", 4);
when(tokenStore.fetchToken("test", 3)).thenThrow(new UnableToClaimTokenException("Mock"));
try {
testSubject.resetTokens();
fail("Expected exception");
} catch (UnableToClaimTokenException e) {
// expected
}
verify(tokenStore, never()).storeToken(isNull(), anyString(), anyInt());
}
@Test
public void testWhenFailureDuringInit() throws InterruptedException {
when(tokenStore.fetchSegments(anyString()))
.thenThrow(new RuntimeException("Faking issue during fetchSegments"))
.thenReturn(new int[]{})
.thenReturn(new int[]{0});
doThrow(new RuntimeException("Faking issue during initializeTokenSegments"))
// and on further calls
.doNothing()
.when(tokenStore).initializeTokenSegments(anyString(), anyInt());
testSubject.start();
Thread.sleep(2500);
assertEquals(1, testSubject.activeProcessorThreads());
}
@Test
public void testUpdateActiveSegmentsWhenBatchIsEmpty() throws Exception {
StreamableMessageSource<TrackedEventMessage<?>> stubSource = mock(StreamableMessageSource.class);
testSubject = TrackingEventProcessor.builder()
.name("test")
.eventHandlerInvoker(eventHandlerInvoker)
.messageSource(stubSource)
.tokenStore(tokenStore)
.transactionManager(NoTransactionManager.INSTANCE).build();
when(stubSource.openStream(any())).thenReturn(new StubTrackingEventStream(0, 1, 2, 5));
doReturn(true, false).when(eventHandlerInvoker).canHandle(any(), any());
List<TrackingToken> trackingTokens = new CopyOnWriteArrayList<>();
doAnswer(i -> {
trackingTokens.add(i.<TrackedEventMessage>getArgument(0).trackingToken());
return null;
}).when(eventHandlerInvoker).handle(any(), any());
testSubject.start();
// give it a bit of time to start
Thread.sleep(200);
EventTrackerStatus eventTrackerStatus = testSubject.processingStatus().get(0);
assertTrue(eventTrackerStatus.isCaughtUp());
GapAwareTrackingToken expectedToken = GapAwareTrackingToken.newInstance(5, asList(3L, 4L));
TrackingToken lastToken = eventTrackerStatus.getTrackingToken();
assertTrue(lastToken.covers(expectedToken));
}
@Test
public void testReleaseSegment() {
testSubject.start();
assertWithin(1, TimeUnit.SECONDS, () -> assertEquals(1, testSubject.activeProcessorThreads()));
testSubject.releaseSegment(0);
assertWithin(2, TimeUnit.SECONDS, () -> assertEquals(0, testSubject.activeProcessorThreads()));
assertWithin(15, TimeUnit.SECONDS, () -> assertEquals(1, testSubject.activeProcessorThreads()));
}
@Test
public void testHasAvailableSegments() {
assertEquals(1, testSubject.availableProcessorThreads());
testSubject.start();
assertWithin(1, TimeUnit.SECONDS, () -> assertEquals(0, testSubject.availableProcessorThreads()));
testSubject.releaseSegment(0);
assertWithin(2, TimeUnit.SECONDS, () -> assertEquals(1, testSubject.availableProcessorThreads()));
}
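/**
 * Stub TrackingEventStream that emits events carrying GapAwareTrackingTokens for the given
 * sequence numbers; sequence numbers that are skipped show up as gaps in the resulting tokens.
 */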
private static class StubTrackingEventStream implements TrackingEventStream {
private final Queue<TrackedEventMessage<?>> eventMessages;
public StubTrackingEventStream(long... tokens) {
GapAwareTrackingToken lastToken = GapAwareTrackingToken.newInstance(-1, emptySortedSet());
eventMessages = new LinkedList<>();
for (Long seq : tokens) {
lastToken = lastToken.advanceTo(seq, 1000, true);
eventMessages.add(new GenericTrackedEventMessage<>(lastToken, createEvent(seq)));
}
}
@Override
public Optional<TrackedEventMessage<?>> peek() {
if (eventMessages.isEmpty()) {
return Optional.empty();
}
return Optional.of(eventMessages.peek());
}
@Override
public boolean hasNextAvailable(int timeout, TimeUnit unit) {
return !eventMessages.isEmpty();
}
@Override
public TrackedEventMessage<?> nextAvailable() {
return eventMessages.poll();
}
@Override
public void close() {
}
}
}
|
integrationtests/src/test/java/org/axonframework/integrationtests/eventhandling/TrackingEventProcessorTest.java
|
/*
* Copyright (c) 2010-2018. Axon Framework
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.axonframework.integrationtests.eventhandling;
import junit.framework.TestCase;
import org.axonframework.common.transaction.NoTransactionManager;
import org.axonframework.common.transaction.Transaction;
import org.axonframework.common.transaction.TransactionManager;
import org.axonframework.eventhandling.*;
import org.axonframework.eventhandling.tokenstore.TokenStore;
import org.axonframework.eventhandling.tokenstore.UnableToClaimTokenException;
import org.axonframework.eventhandling.tokenstore.inmemory.InMemoryTokenStore;
import org.axonframework.eventsourcing.eventstore.EmbeddedEventStore;
import org.axonframework.eventsourcing.eventstore.inmemory.InMemoryEventStorageEngine;
import org.axonframework.integrationtests.utils.MockException;
import org.axonframework.messaging.StreamableMessageSource;
import org.axonframework.serialization.SerializationException;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mockito.InOrder;
import org.mockito.Mockito;
import org.springframework.test.annotation.DirtiesContext;
import java.util.*;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Supplier;
import static java.util.Arrays.asList;
import static java.util.Collections.emptySortedSet;
import static java.util.stream.Collectors.toList;
import static junit.framework.TestCase.*;
import static org.axonframework.eventhandling.EventUtils.asTrackedEventMessage;
import static org.axonframework.integrationtests.utils.AssertUtils.assertWithin;
import static org.axonframework.integrationtests.utils.EventTestUtils.createEvent;
import static org.axonframework.integrationtests.utils.EventTestUtils.createEvents;
import static org.mockito.Mockito.*;
/**
* @author Rene de Waele
* @author Nakul Mishra
*/
public class TrackingEventProcessorTest {
private TrackingEventProcessor testSubject;
private EmbeddedEventStore eventBus;
private TokenStore tokenStore;
private EventHandlerInvoker eventHandlerInvoker;
private EventMessageHandler mockHandler;
private List<Long> sleepInstructions;
private TransactionManager mockTransactionManager;
private Transaction mockTransaction;
static TrackingEventStream trackingEventStreamOf(Iterator<TrackedEventMessage<?>> iterator) {
return new TrackingEventStream() {
private boolean hasPeeked;
private TrackedEventMessage<?> peekEvent;
@Override
public Optional<TrackedEventMessage<?>> peek() {
if (!hasPeeked) {
if (!hasNextAvailable()) {
return Optional.empty();
}
peekEvent = iterator.next();
hasPeeked = true;
}
return Optional.of(peekEvent);
}
@Override
public boolean hasNextAvailable(int timeout, TimeUnit unit) {
return hasPeeked || iterator.hasNext();
}
@Override
public TrackedEventMessage nextAvailable() {
if (!hasPeeked) {
return iterator.next();
}
TrackedEventMessage<?> result = peekEvent;
peekEvent = null;
hasPeeked = false;
return result;
}
@Override
public void close() {
}
};
}
@Before
public void setUp() {
tokenStore = spy(new InMemoryTokenStore());
mockHandler = mock(EventMessageHandler.class);
when(mockHandler.canHandle(any())).thenReturn(true);
when(mockHandler.supportsReset()).thenReturn(true);
eventHandlerInvoker = Mockito.spy(SimpleEventHandlerInvoker.builder().eventHandlers(mockHandler).build());
mockTransaction = mock(Transaction.class);
mockTransactionManager = mock(TransactionManager.class);
when(mockTransactionManager.startTransaction()).thenReturn(mockTransaction);
when(mockTransactionManager.fetchInTransaction(any(Supplier.class))).thenAnswer(i -> {
Supplier s = i.getArgument(0);
return s.get();
});
doAnswer(i -> {
Runnable r = i.getArgument(0);
r.run();
return null;
}).when(mockTransactionManager).executeInTransaction(any(Runnable.class));
eventBus = EmbeddedEventStore.builder().storageEngine(new InMemoryEventStorageEngine()).build();
sleepInstructions = new ArrayList<>();
TrackingEventProcessor.Builder eventProcessorBuilder =
TrackingEventProcessor.builder()
.name("test")
.eventHandlerInvoker(eventHandlerInvoker)
.messageSource(eventBus)
.tokenStore(tokenStore)
.transactionManager(mockTransactionManager);
testSubject = new TrackingEventProcessor(eventProcessorBuilder) {
@Override
protected void doSleepFor(long millisToSleep) {
if (isRunning()) {
sleepInstructions.add(millisToSleep);
}
}
};
}
@After
public void tearDown() {
testSubject.shutDown();
eventBus.shutDown();
}
@Test
public void testPublishedEventsGetPassedToHandler() throws Exception {
CountDownLatch countDownLatch = new CountDownLatch(2);
doAnswer(invocation -> {
countDownLatch.countDown();
return null;
}).when(mockHandler).handle(any());
testSubject.start();
// give it a bit of time to start
Thread.sleep(200);
eventBus.publish(createEvents(2));
assertTrue("Expected Handler to have received 2 published events", countDownLatch.await(5, TimeUnit.SECONDS));
}
@Test
public void testHandlerIsInvokedInTransactionScope() throws Exception {
CountDownLatch countDownLatch = new CountDownLatch(1);
AtomicInteger counter = new AtomicInteger();
AtomicInteger counterAtHandle = new AtomicInteger();
when(mockTransactionManager.startTransaction()).thenAnswer(i -> {
counter.incrementAndGet();
return mockTransaction;
});
doAnswer(i -> counter.decrementAndGet()).when(mockTransaction).rollback();
doAnswer(i -> counter.decrementAndGet()).when(mockTransaction).commit();
doAnswer(invocation -> {
counterAtHandle.set(counter.get());
countDownLatch.countDown();
return null;
}).when(mockHandler).handle(any());
testSubject.start();
// give it a bit of time to start
Thread.sleep(200);
eventBus.publish(createEvents(2));
assertTrue("Expected Handler to have received 2 published events", countDownLatch.await(5, TimeUnit.SECONDS));
assertEquals(1, counterAtHandle.get());
}
@Test
public void testProcessorStopsOnNonTransientExceptionWhenLoadingToken() {
when(tokenStore.fetchToken("test", 0)).thenThrow(new SerializationException("Faking a serialization issue"));
testSubject.start();
assertWithin(
1, TimeUnit.SECONDS,
() -> assertFalse("Expected processor to have stopped", testSubject.isRunning())
);
assertWithin(
1, TimeUnit.SECONDS,
() -> assertTrue("Expected processor to set the error flag", testSubject.isError())
);
assertEquals(Collections.emptyList(), sleepInstructions);
}
@Test
public void testProcessorRetriesOnTransientExceptionWhenLoadingToken() throws Exception {
CountDownLatch countDownLatch = new CountDownLatch(1);
doAnswer(invocation -> {
countDownLatch.countDown();
return null;
}).when(mockHandler).handle(any());
when(tokenStore.fetchToken("test", 0)).thenThrow(new RuntimeException("Faking a recoverable issue"))
.thenCallRealMethod();
testSubject.start();
// give it a bit of time to start
Thread.sleep(200);
eventBus.publish(createEvent());
assertTrue("Expected Handler to have received published event", countDownLatch.await(5, TimeUnit.SECONDS));
assertTrue(testSubject.isRunning());
assertFalse(testSubject.isError());
assertEquals(Collections.singletonList(5000L), sleepInstructions);
}
@Test
public void testTokenIsStoredWhenEventIsRead() throws Exception {
CountDownLatch countDownLatch = new CountDownLatch(1);
testSubject.registerHandlerInterceptor(((unitOfWork, interceptorChain) -> {
unitOfWork.onCleanup(uow -> countDownLatch.countDown());
return interceptorChain.proceed();
}));
testSubject.start();
// give it a bit of time to start
Thread.sleep(200);
eventBus.publish(createEvent());
assertTrue("Expected Unit of Work to have reached clean up phase", countDownLatch.await(5, TimeUnit.SECONDS));
verify(tokenStore).extendClaim(eq(testSubject.getName()), anyInt());
verify(tokenStore).storeToken(any(), any(), anyInt());
assertNotNull(tokenStore.fetchToken(testSubject.getName(), 0));
}
@Test
public void testTokenIsStoredOncePerEventBatch() throws Exception {
testSubject = TrackingEventProcessor.builder()
.name("test")
.eventHandlerInvoker(eventHandlerInvoker)
.messageSource(eventBus)
.tokenStore(tokenStore)
.transactionManager(NoTransactionManager.INSTANCE)
.build();
CountDownLatch countDownLatch = new CountDownLatch(2);
testSubject.registerHandlerInterceptor(((unitOfWork, interceptorChain) -> {
unitOfWork.onCleanup(uow -> countDownLatch.countDown());
return interceptorChain.proceed();
}));
testSubject.start();
// give it a bit of time to start
Thread.sleep(200);
eventBus.publish(createEvents(2));
assertTrue("Expected Unit of Work to have reached clean up phase for 2 messages",
countDownLatch.await(5, TimeUnit.SECONDS));
InOrder inOrder = inOrder(tokenStore);
inOrder.verify(tokenStore, times(1)).extendClaim(eq(testSubject.getName()), anyInt());
inOrder.verify(tokenStore, times(1)).storeToken(any(), any(), anyInt());
assertNotNull(tokenStore.fetchToken(testSubject.getName(), 0));
}
@Test
public void testTokenIsNotStoredWhenUnitOfWorkIsRolledBack() throws Exception {
CountDownLatch countDownLatch = new CountDownLatch(1);
testSubject.registerHandlerInterceptor(((unitOfWork, interceptorChain) -> {
unitOfWork.onCommit(uow -> {
throw new MockException();
});
return interceptorChain.proceed();
}));
testSubject.registerHandlerInterceptor(((unitOfWork, interceptorChain) -> {
unitOfWork.onCleanup(uow -> countDownLatch.countDown());
return interceptorChain.proceed();
}));
testSubject.start();
// give it a bit of time to start
Thread.sleep(200);
eventBus.publish(createEvent());
assertTrue("Expected Unit of Work to have reached clean up phase", countDownLatch.await(5, TimeUnit.SECONDS));
assertNull(tokenStore.fetchToken(testSubject.getName(), 0));
}
@Test
@DirtiesContext
public void testContinueFromPreviousToken() throws Exception {
tokenStore = new InMemoryTokenStore();
eventBus.publish(createEvents(10));
TrackedEventMessage<?> firstEvent = eventBus.openStream(null).nextAvailable();
tokenStore.storeToken(firstEvent.trackingToken(), testSubject.getName(), 0);
assertEquals(firstEvent.trackingToken(), tokenStore.fetchToken(testSubject.getName(), 0));
List<EventMessage<?>> ackedEvents = new ArrayList<>();
CountDownLatch countDownLatch = new CountDownLatch(9);
doAnswer(invocation -> {
ackedEvents.add((EventMessage<?>) invocation.getArguments()[0]);
countDownLatch.countDown();
return null;
}).when(mockHandler).handle(any());
testSubject = TrackingEventProcessor.builder()
.name("test")
.eventHandlerInvoker(eventHandlerInvoker)
.messageSource(eventBus)
.tokenStore(tokenStore)
.transactionManager(NoTransactionManager.INSTANCE)
.build();
testSubject.start();
// give it a bit of time to start
Thread.sleep(200);
assertTrue("Expected 9 invocations on Event Handler by now", countDownLatch.await(5, TimeUnit.SECONDS));
assertEquals(9, ackedEvents.size());
}
@Test(timeout = 10000)
@DirtiesContext
public void testContinueAfterPause() throws Exception {
List<EventMessage<?>> ackedEvents = new ArrayList<>();
CountDownLatch countDownLatch = new CountDownLatch(2);
doAnswer(invocation -> {
ackedEvents.add((EventMessage<?>) invocation.getArguments()[0]);
countDownLatch.countDown();
return null;
}).when(mockHandler).handle(any());
testSubject.start();
// give it a bit of time to start
Thread.sleep(200);
eventBus.publish(createEvents(2));
assertTrue("Expected 2 invocations on Event Handler by now", countDownLatch.await(5, TimeUnit.SECONDS));
assertEquals(2, ackedEvents.size());
testSubject.shutDown();
// The thread may block for 1 second waiting for a next event to pop up
while (testSubject.activeProcessorThreads() > 0) {
Thread.sleep(1);
// wait...
}
CountDownLatch countDownLatch2 = new CountDownLatch(2);
doAnswer(invocation -> {
ackedEvents.add((EventMessage<?>) invocation.getArguments()[0]);
countDownLatch2.countDown();
return null;
}).when(mockHandler).handle(any());
eventBus.publish(createEvents(2));
assertEquals(2, countDownLatch2.getCount());
testSubject.start();
// give it a bit of time to start
Thread.sleep(200);
assertTrue("Expected 4 invocations on Event Handler by now", countDownLatch2.await(5, TimeUnit.SECONDS));
assertEquals(4, ackedEvents.size());
}
@Test
@DirtiesContext
public void testProcessorGoesToRetryModeWhenOpenStreamFails() throws Exception {
eventBus = spy(eventBus);
tokenStore = new InMemoryTokenStore();
eventBus.publish(createEvents(5));
when(eventBus.openStream(any())).thenThrow(new MockException()).thenCallRealMethod();
List<EventMessage<?>> ackedEvents = new ArrayList<>();
CountDownLatch countDownLatch = new CountDownLatch(5);
doAnswer(invocation -> {
ackedEvents.add((EventMessage<?>) invocation.getArguments()[0]);
countDownLatch.countDown();
return null;
}).when(mockHandler).handle(any());
testSubject = TrackingEventProcessor.builder()
.name("test")
.eventHandlerInvoker(eventHandlerInvoker)
.messageSource(eventBus)
.tokenStore(tokenStore)
.transactionManager(NoTransactionManager.INSTANCE)
.build();
testSubject.start();
// give it a bit of time to start
Thread.sleep(200);
assertTrue("Expected 5 invocations on Event Handler by now", countDownLatch.await(10, TimeUnit.SECONDS));
assertEquals(5, ackedEvents.size());
verify(eventBus, times(2)).openStream(any());
}
@Test
public void testFirstTokenIsStoredWhenUnitOfWorkIsRolledBackOnSecondEvent() throws Exception {
List<? extends EventMessage<?>> events = createEvents(2);
CountDownLatch countDownLatch = new CountDownLatch(2);
testSubject.registerHandlerInterceptor(((unitOfWork, interceptorChain) -> {
unitOfWork.onCommit(uow -> {
if (uow.getMessage().equals(events.get(1))) {
throw new MockException();
}
});
return interceptorChain.proceed();
}));
testSubject.registerHandlerInterceptor(((unitOfWork, interceptorChain) -> {
unitOfWork.onCleanup(uow -> countDownLatch.countDown());
return interceptorChain.proceed();
}));
testSubject.start();
// give it a bit of time to start
Thread.sleep(200);
eventBus.publish(events);
assertTrue("Expected Unit of Work to have reached clean up phase", countDownLatch.await(5, TimeUnit.SECONDS));
verify(tokenStore, atLeastOnce()).storeToken(any(), any(), anyInt());
assertNotNull(tokenStore.fetchToken(testSubject.getName(), 0));
}
@Test
@DirtiesContext
@SuppressWarnings("unchecked")
public void testEventsWithTheSameTokenAreProcessedInTheSameBatch() throws Exception {
eventBus.shutDown();
eventBus = mock(EmbeddedEventStore.class);
TrackingToken trackingToken = new GlobalSequenceTrackingToken(0);
List<TrackedEventMessage<?>> events =
createEvents(2).stream().map(event -> asTrackedEventMessage(event, trackingToken)).collect(toList());
when(eventBus.openStream(null)).thenReturn(trackingEventStreamOf(events.iterator()));
testSubject = TrackingEventProcessor.builder()
.name("test")
.eventHandlerInvoker(eventHandlerInvoker)
.messageSource(eventBus)
.tokenStore(tokenStore)
.transactionManager(NoTransactionManager.INSTANCE)
.build();
//noinspection Duplicates
testSubject.registerHandlerInterceptor(((unitOfWork, interceptorChain) -> {
unitOfWork.onCommit(uow -> {
if (uow.getMessage().equals(events.get(1))) {
throw new MockException();
}
});
return interceptorChain.proceed();
}));
CountDownLatch countDownLatch = new CountDownLatch(2);
testSubject.registerHandlerInterceptor(((unitOfWork, interceptorChain) -> {
unitOfWork.onCleanup(uow -> countDownLatch.countDown());
return interceptorChain.proceed();
}));
testSubject.start();
// Give it a bit of time to start
Thread.sleep(200);
assertTrue("Expected Unit of Work to have reached clean up phase", countDownLatch.await(5, TimeUnit.SECONDS));
verify(tokenStore, atLeastOnce()).storeToken(any(), any(), anyInt());
assertNull(tokenStore.fetchToken(testSubject.getName(), 0));
}
@Test
public void testResetCausesEventsToBeReplayed() throws Exception {
when(mockHandler.supportsReset()).thenReturn(true);
final List<String> handled = new CopyOnWriteArrayList<>();
final List<String> handledInRedelivery = new CopyOnWriteArrayList<>();
//noinspection Duplicates
doAnswer(i -> {
EventMessage message = i.getArgument(0);
handled.add(message.getIdentifier());
if (ReplayToken.isReplay(message)) {
handledInRedelivery.add(message.getIdentifier());
}
return null;
}).when(mockHandler).handle(any());
eventBus.publish(createEvents(4));
testSubject.start();
assertWithin(1, TimeUnit.SECONDS, () -> assertEquals(4, handled.size()));
testSubject.shutDown();
testSubject.resetTokens();
// Resetting twice caused problems (see issue #559)
testSubject.resetTokens();
testSubject.start();
assertWithin(1, TimeUnit.SECONDS, () -> assertEquals(8, handled.size()));
assertEquals(handled.subList(0, 3), handled.subList(4, 7));
assertEquals(handled.subList(4, 7), handledInRedelivery);
assertTrue(testSubject.processingStatus().get(0).isReplaying());
eventBus.publish(createEvents(1));
assertWithin(1, TimeUnit.SECONDS, () -> assertFalse(testSubject.processingStatus().get(0).isReplaying()));
}
@Test
public void testResetToPositionCausesCertainEventsToBeReplayed() throws Exception {
when(mockHandler.supportsReset()).thenReturn(true);
final List<String> handled = new CopyOnWriteArrayList<>();
final List<String> handledInRedelivery = new CopyOnWriteArrayList<>();
//noinspection Duplicates
doAnswer(i -> {
EventMessage message = i.getArgument(0);
handled.add(message.getIdentifier());
if (ReplayToken.isReplay(message)) {
handledInRedelivery.add(message.getIdentifier());
}
return null;
}).when(mockHandler).handle(any());
eventBus.publish(createEvents(4));
testSubject.start();
assertWithin(1, TimeUnit.SECONDS, () -> assertEquals(4, handled.size()));
testSubject.shutDown();
testSubject.resetTokens(source -> new GlobalSequenceTrackingToken(1L));
testSubject.start();
assertWithin(1, TimeUnit.SECONDS, () -> assertEquals(6, handled.size()));
assertFalse(handledInRedelivery.contains(handled.get(0)));
assertFalse(handledInRedelivery.contains(handled.get(1)));
assertEquals(handled.subList(2, 3), handled.subList(4, 5));
assertEquals(handled.subList(4, 5), handledInRedelivery);
assertTrue(testSubject.processingStatus().get(0).isReplaying());
eventBus.publish(createEvents(1));
assertWithin(1, TimeUnit.SECONDS, () -> assertFalse(testSubject.processingStatus().get(0).isReplaying()));
}
@Test
public void testResetOnInitializeWithTokenResetToThatToken() throws Exception {
TrackingEventProcessorConfiguration config =
TrackingEventProcessorConfiguration.forSingleThreadedProcessing()
.andInitialTrackingToken(ms -> new GlobalSequenceTrackingToken(1L));
TrackingEventProcessor.Builder eventProcessorBuilder =
TrackingEventProcessor.builder()
.name("test")
.eventHandlerInvoker(eventHandlerInvoker)
.messageSource(eventBus)
.tokenStore(tokenStore)
.transactionManager(NoTransactionManager.INSTANCE)
.trackingEventProcessorConfiguration(config);
testSubject = new TrackingEventProcessor(eventProcessorBuilder) {
@Override
protected void doSleepFor(long millisToSleep) {
if (isRunning()) {
sleepInstructions.add(millisToSleep);
}
}
};
when(mockHandler.supportsReset()).thenReturn(true);
final List<String> handled = new CopyOnWriteArrayList<>();
final List<String> handledInRedelivery = new CopyOnWriteArrayList<>();
//noinspection Duplicates
doAnswer(i -> {
EventMessage message = i.getArgument(0);
handled.add(message.getIdentifier());
if (ReplayToken.isReplay(message)) {
handledInRedelivery.add(message.getIdentifier());
}
return null;
}).when(mockHandler).handle(any());
eventBus.publish(createEvents(4));
testSubject.start();
assertWithin(1, TimeUnit.SECONDS, () -> assertEquals(2, handled.size()));
testSubject.shutDown();
testSubject.resetTokens();
testSubject.start();
assertWithin(1, TimeUnit.SECONDS, () -> assertEquals(4, handled.size()));
assertEquals(handled.subList(0, 1), handled.subList(2, 3));
assertEquals(handled.subList(2, 3), handledInRedelivery);
assertTrue(testSubject.processingStatus().get(0).isReplaying());
eventBus.publish(createEvents(1));
assertWithin(1, TimeUnit.SECONDS, () -> assertFalse(testSubject.processingStatus().get(0).isReplaying()));
}
@Test
public void testResetBeforeStartingPerformsANormalRun() throws Exception {
when(mockHandler.supportsReset()).thenReturn(true);
final List<String> handled = new CopyOnWriteArrayList<>();
final List<String> handledInRedelivery = new CopyOnWriteArrayList<>();
//noinspection Duplicates
doAnswer(i -> {
EventMessage message = i.getArgument(0);
handled.add(message.getIdentifier());
if (ReplayToken.isReplay(message)) {
handledInRedelivery.add(message.getIdentifier());
}
return null;
}).when(mockHandler).handle(any());
testSubject.start();
testSubject.shutDown();
testSubject.resetTokens();
testSubject.start();
eventBus.publish(createEvents(4));
assertWithin(1, TimeUnit.SECONDS, () -> assertEquals(4, handled.size()));
assertEquals(0, handledInRedelivery.size());
assertFalse(testSubject.processingStatus().get(0).isReplaying());
}
@SuppressWarnings("unchecked")
@Test
public void testReplayFlagAvailableWhenReplayInDifferentOrder() throws Exception {
StreamableMessageSource<TrackedEventMessage<?>> stubSource = mock(StreamableMessageSource.class);
testSubject = TrackingEventProcessor.builder()
.name("test")
.eventHandlerInvoker(eventHandlerInvoker)
.messageSource(stubSource)
.tokenStore(tokenStore)
.transactionManager(NoTransactionManager.INSTANCE)
.build();
when(stubSource.openStream(any())).thenReturn(new StubTrackingEventStream(0, 1, 2, 5))
.thenReturn(new StubTrackingEventStream(0, 1, 2, 3, 4, 5, 6, 7));
when(eventHandlerInvoker.supportsReset()).thenReturn(true);
doReturn(true).when(eventHandlerInvoker).canHandle(any(), any());
List<TrackingToken> firstRun = new CopyOnWriteArrayList<>();
List<TrackingToken> replayRun = new CopyOnWriteArrayList<>();
doAnswer(i -> {
firstRun.add(i.<TrackedEventMessage>getArgument(0).trackingToken());
return null;
}).when(eventHandlerInvoker).handle(any(), any());
testSubject.start();
assertWithin(1, TimeUnit.SECONDS, () -> assertEquals(4, firstRun.size()));
testSubject.shutDown();
doAnswer(i -> {
replayRun.add(i.<TrackedEventMessage>getArgument(0).trackingToken());
return null;
}).when(eventHandlerInvoker).handle(any(), any());
testSubject.resetTokens();
testSubject.start();
assertWithin(1, TimeUnit.SECONDS, () -> assertEquals(8, replayRun.size()));
TestCase.assertEquals(GapAwareTrackingToken.newInstance(5, asList(3L, 4L)), firstRun.get(3));
assertTrue(replayRun.get(0) instanceof ReplayToken);
assertTrue(replayRun.get(5) instanceof ReplayToken);
assertEquals(GapAwareTrackingToken.newInstance(6, emptySortedSet()), replayRun.get(6));
}
@Test(expected = IllegalStateException.class)
public void testResetRejectedWhileRunning() {
testSubject.start();
testSubject.resetTokens();
}
@Test
public void testResetNotSupportedWhenInvokerDoesNotSupportReset() {
when(mockHandler.supportsReset()).thenReturn(false);
assertFalse(testSubject.supportsReset());
}
@Test(expected = IllegalStateException.class)
public void testResetRejectedWhenInvokerDoesNotSupportReset() {
when(mockHandler.supportsReset()).thenReturn(false);
testSubject.resetTokens();
}
@Test
public void testResetRejectedIfNotAllTokensCanBeClaimed() {
tokenStore.initializeTokenSegments("test", 4);
when(tokenStore.fetchToken("test", 3)).thenThrow(new UnableToClaimTokenException("Mock"));
try {
testSubject.resetTokens();
fail("Expected exception");
} catch (UnableToClaimTokenException e) {
// expected
}
verify(tokenStore, never()).storeToken(isNull(), anyString(), anyInt());
}
@Test
public void testWhenFailureDuringInit() throws InterruptedException {
when(tokenStore.fetchSegments(anyString()))
.thenThrow(new RuntimeException("Faking issue during fetchSegments"))
.thenReturn(new int[]{})
.thenReturn(new int[]{0});
doThrow(new RuntimeException("Faking issue during initializeTokenSegments"))
// and on further calls
.doNothing()
.when(tokenStore).initializeTokenSegments(anyString(), anyInt());
testSubject.start();
Thread.sleep(2500);
assertEquals(1, testSubject.activeProcessorThreads());
}
@Test
public void testUpdateActiveSegmentsWhenBatchIsEmpty() throws Exception {
StreamableMessageSource<TrackedEventMessage<?>> stubSource = mock(StreamableMessageSource.class);
testSubject = TrackingEventProcessor.builder()
.name("test")
.eventHandlerInvoker(eventHandlerInvoker)
.messageSource(stubSource)
.tokenStore(tokenStore)
.transactionManager(NoTransactionManager.INSTANCE).build();
when(stubSource.openStream(any())).thenReturn(new StubTrackingEventStream(0, 1, 2, 5));
doReturn(true, false).when(eventHandlerInvoker).canHandle(any(), any());
List<TrackingToken> trackingTokens = new CopyOnWriteArrayList<>();
doAnswer(i -> {
trackingTokens.add(i.<TrackedEventMessage>getArgument(0).trackingToken());
return null;
}).when(eventHandlerInvoker).handle(any(), any());
testSubject.start();
// give it a bit of time to start
Thread.sleep(200);
EventTrackerStatus eventTrackerStatus = testSubject.processingStatus().get(0);
assertTrue(eventTrackerStatus.isCaughtUp());
GapAwareTrackingToken expectedToken = GapAwareTrackingToken.newInstance(5, asList(3L, 4L));
TrackingToken lastToken = eventTrackerStatus.getTrackingToken();
assertTrue(lastToken.covers(expectedToken));
}
@Test
public void testReleaseSegment() {
testSubject.start();
assertWithin(1, TimeUnit.SECONDS, () -> assertEquals(1, testSubject.activeProcessorThreads()));
testSubject.releaseSegment(0);
assertWithin(2, TimeUnit.SECONDS, () -> assertEquals(0, testSubject.activeProcessorThreads()));
assertWithin(15, TimeUnit.SECONDS, () -> assertEquals(1, testSubject.activeProcessorThreads()));
}
@Test
public void testHasAvailableSegments() {
assertEquals(1, testSubject.availableProcessorThreads());
testSubject.start();
assertWithin(1, TimeUnit.SECONDS, () -> assertEquals(0, testSubject.availableProcessorThreads()));
testSubject.releaseSegment(0);
assertWithin(2, TimeUnit.SECONDS, () -> assertEquals(1, testSubject.availableProcessorThreads()));
}
private static class StubTrackingEventStream implements TrackingEventStream {
private final Queue<TrackedEventMessage<?>> eventMessages;
public StubTrackingEventStream(long... tokens) {
GapAwareTrackingToken lastToken = GapAwareTrackingToken.newInstance(-1, emptySortedSet());
eventMessages = new LinkedList<>();
for (Long seq : tokens) {
lastToken = lastToken.advanceTo(seq, 1000, true);
eventMessages.add(new GenericTrackedEventMessage<>(lastToken, createEvent(seq)));
}
}
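        // Explanatory note (not part of the original test): advanceTo records any skipped sequence
        // numbers as gaps, so e.g. new StubTrackingEventStream(0, 1, 2, 5) ends with a
        // GapAwareTrackingToken at index 5 with gaps {3, 4}, which is what
        // testReplayFlagAvailableWhenReplayInDifferentOrder and testUpdateActiveSegmentsWhenBatchIsEmpty assert.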
@Override
public Optional<TrackedEventMessage<?>> peek() {
if (eventMessages.isEmpty()) {
return Optional.empty();
}
return Optional.of(eventMessages.peek());
}
@Override
public boolean hasNextAvailable(int timeout, TimeUnit unit) {
return !eventMessages.isEmpty();
}
@Override
public TrackedEventMessage<?> nextAvailable() {
return eventMessages.poll();
}
@Override
public void close() {
}
}
}
|
fixed faulty integration tests
|
integrationtests/src/test/java/org/axonframework/integrationtests/eventhandling/TrackingEventProcessorTest.java
|
fixed faulty integration tests
|
|
Java
|
apache-2.0
|
d40e236e3b694bfa3bd2537ba6f2fe21a5724232
| 0
|
aduprat/james,chibenwa/james,aduprat/james,chibenwa/james,rouazana/james,rouazana/james,chibenwa/james,rouazana/james,aduprat/james,aduprat/james,rouazana/james,chibenwa/james
|
/***********************************************************************
* Copyright (c) 1999-2004 The Apache Software Foundation. *
* All rights reserved. *
* ------------------------------------------------------------------- *
* Licensed under the Apache License, Version 2.0 (the "License"); you *
* may not use this file except in compliance with the License. You *
* may obtain a copy of the License at: *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, software *
* distributed under the License is distributed on an "AS IS" BASIS, *
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or *
* implied. See the License for the specific language governing *
* permissions and limitations under the License. *
***********************************************************************/
package org.apache.mailet;
import java.util.Locale;
import javax.mail.internet.InternetAddress;
import javax.mail.internet.ParseException;
/**
* A representation of an email address.
* <p>This class encapsulates functionalities to access to different
* parts of an email address without dealing with its parsing.</p>
*
* <p>A MailAddress is an address specified in the MAIL FROM and
* RCPT TO commands in SMTP sessions. These are either passed by
* an external server to the mailet-compliant SMTP server, or they
* are created programmatically by the mailet-compliant server to
* send to another (external) SMTP server. Mailets and matchers
* use the MailAddress for the purpose of evaluating the sender
* and recipient(s) of a message.</p>
*
* <p>MailAddress parses an email address as defined in RFC 821
* (SMTP) p. 30 and 31 where addresses are defined in BNF convention.
* As the mailet API does not support the aged "SMTP-relayed mail"
* addressing protocol, this leaves all addresses to be a <mailbox>,
* as per the spec. The MailAddress's "user" is the <local-part> of
* the <mailbox> and "host" is the <domain> of the mailbox.</p>
*
* <p>This class is a good way to validate email addresses as there are
* some valid addresses which would fail with a simpler approach
* to parsing address. It also removes parsing burden from
* mailets and matchers that might not realize the flexibility of an
* SMTP address. For instance, "serge@home"@lokitech.com is a valid
* SMTP address (the quoted text serge@home is the user and
* lokitech.com is the host). This means all current parsing to date
* is incorrect as we just find the first @ and use that to separate
* user from host.</p>
*
* <p>This parses an address as per the BNF specification for <mailbox>
* from RFC 821 on page 30 and 31, section 4.1.2. COMMAND SYNTAX.
* http://www.freesoft.org/CIE/RFC/821/15.htm</p>
*
* @version 1.0
*/
public class MailAddress implements java.io.Serializable {
//We hardcode the serialVersionUID so that from James 1.2 on,
// MailAddress will be deserializable (so your mail doesn't get lost)
public static final long serialVersionUID = 2779163542539434916L;
private final static char[] SPECIAL =
{'<', '>', '(', ')', '[', ']', '\\', '.', ',', ';', ':', '@', '\"'};
private String user = null;
private String host = null;
//Used for parsing
private int pos = 0;
/**
     * Strips source routing. According to RFC 2821 this is an allowed approach for handling
     * mails containing RFC 821 source-route information.
*/
private void stripSourceRoute(String address) throws ParseException {
if (pos < address.length()) {
if(address.charAt(pos)=='@') {
int i = address.indexOf(':');
if(i != -1) {
pos = i+1;
}
}
}
}
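    // Illustrative example (not part of the original source): for an RFC 821 source-routed address
    // such as "@relay1.example.com,@relay2.example.com:user@example.org" the address starts with '@',
    // so pos is advanced to just past the first ':' and <mailbox> parsing then begins at
    // "user@example.org"; the relay hops themselves are simply ignored.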
/**
* <p>Construct a MailAddress parsing the provided <code>String</code> object.</p>
*
* <p>The <code>personal</code> variable is left empty.</p>
*
* @param address the email address compliant to the RFC822 format
* @throws ParseException if the parse failed
*/
public MailAddress(String address) throws ParseException {
address = address.trim();
// Test if mail address has source routing information (RFC-821) and get rid of it!!
//must be called first!! (or at least prior to updating pos)
stripSourceRoute(address);
StringBuffer userSB = new StringBuffer();
StringBuffer hostSB = new StringBuffer();
//Begin parsing
//<mailbox> ::= <local-part> "@" <domain>
try {
//parse local-part
//<local-part> ::= <dot-string> | <quoted-string>
if (address.charAt(pos) == '\"') {
userSB.append(parseQuotedLocalPart(address));
} else {
userSB.append(parseUnquotedLocalPart(address));
}
if (userSB.toString().length() == 0) {
throw new ParseException("No local-part (user account) found at position " + (pos + 1));
}
//find @
if (pos >= address.length() || address.charAt(pos) != '@') {
throw new ParseException("Did not find @ between local-part and domain at position " + (pos + 1));
}
pos++;
//parse domain
//<domain> ::= <element> | <element> "." <domain>
//<element> ::= <name> | "#" <number> | "[" <dotnum> "]"
while (true) {
if (address.charAt(pos) == '#') {
hostSB.append(parseNumber(address));
} else if (address.charAt(pos) == '[') {
hostSB.append(parseDotNum(address));
} else {
hostSB.append(parseDomainName(address));
}
if (pos >= address.length()) {
break;
}
if (address.charAt(pos) == '.') {
hostSB.append('.');
pos++;
continue;
}
break;
}
if (hostSB.toString().length() == 0) {
throw new ParseException("No domain found at position " + (pos + 1));
}
} catch (IndexOutOfBoundsException ioobe) {
throw new ParseException("Out of data at position " + (pos + 1));
}
user = userSB.toString();
host = hostSB.toString();
}
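    // Illustrative usage (not part of the original source), following the example in the class Javadoc:
    //   MailAddress addr = new MailAddress("\"serge@home\"@lokitech.com");
    //   addr.getUser();  // returns "serge@home" including the surrounding quotes kept by parseQuotedLocalPart
    //   addr.getHost();  // returns lokitech.com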
/**
     * Construct a MailAddress from the provided user and host parts.
     *
     * @param newUser the username or account name on the mail server
     * @param newHost the server that should accept messages for this user
* @throws ParseException if the parse failed
*/
public MailAddress(String newUser, String newHost) throws ParseException {
/* NEEDS TO BE REWORKED TO VALIDATE EACH CHAR */
user = newUser;
host = newHost;
}
/**
* Constructs a MailAddress from a JavaMail InternetAddress, using only the
* email address portion, discarding the personal name.
*/
public MailAddress(InternetAddress address) throws ParseException {
this(address.getAddress());
}
/**
* Return the host part.
*
* @return a <code>String</code> object representing the host part
* of this email address. If the host is of the dotNum form
* (e.g. [yyy.yyy.yyy.yyy]) then strip the braces first.
*/
public String getHost() {
if (!(host.startsWith("[") && host.endsWith("]"))) {
return host;
} else {
return host.substring(1, host.length() -1);
}
}
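    // Illustrative example (not part of the original source): for an address literal such as
    //   new MailAddress("user@[127.0.0.1]").getHost()
    // the stored host is "[127.0.0.1]", so the brackets are stripped and "127.0.0.1" is returned;
    // for an ordinary domain the host is returned unchanged.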
/**
* Return the user part.
*
* @return a <code>String</code> object representing the user part
* of this email address.
*/
public String getUser() {
return user;
}
public String toString() {
StringBuffer addressBuffer =
new StringBuffer(128)
.append(user)
.append("@")
.append(host);
return addressBuffer.toString();
}
public InternetAddress toInternetAddress() {
try {
return new InternetAddress(toString());
} catch (javax.mail.internet.AddressException ae) {
//impossible really
return null;
}
}
public boolean equals(Object obj) {
if (obj == null) {
return false;
} else if (obj instanceof String) {
String theString = (String)obj;
return toString().equalsIgnoreCase(theString);
} else if (obj instanceof MailAddress) {
MailAddress addr = (MailAddress)obj;
return getUser().equalsIgnoreCase(addr.getUser()) && getHost().equalsIgnoreCase(addr.getHost());
}
return false;
}
/**
* Return a hashCode for this object which should be identical for addresses
* which are equivalent. This is implemented by obtaining the default
* hashcode of the String representation of the MailAddress. Without this
* explicit definition, the default hashCode will create different hashcodes
* for separate object instances.
*
* @return the hashcode.
*/
public int hashCode() {
return toString().toLowerCase(Locale.US).hashCode();
}
private String parseQuotedLocalPart(String address) throws ParseException {
StringBuffer resultSB = new StringBuffer();
resultSB.append('\"');
pos++;
//<quoted-string> ::= """ <qtext> """
//<qtext> ::= "\" <x> | "\" <x> <qtext> | <q> | <q> <qtext>
while (true) {
if (address.charAt(pos) == '\"') {
resultSB.append('\"');
//end of quoted string... move forward
pos++;
break;
}
if (address.charAt(pos) == '\\') {
resultSB.append('\\');
pos++;
//<x> ::= any one of the 128 ASCII characters (no exceptions)
char x = address.charAt(pos);
if (x < 0 || x > 127) {
throw new ParseException("Invalid \\ syntaxed character at position " + (pos + 1));
}
resultSB.append(x);
pos++;
} else {
//<q> ::= any one of the 128 ASCII characters except <CR>,
//<LF>, quote ("), or backslash (\)
char q = address.charAt(pos);
if (q <= 0 || q == '\n' || q == '\r' || q == '\"' || q == '\\') {
throw new ParseException("Unquoted local-part (user account) must be one of the 128 ASCI characters exception <CR>, <LF>, quote (\"), or backslash (\\) at position " + (pos + 1));
}
resultSB.append(q);
pos++;
}
}
return resultSB.toString();
}
private String parseUnquotedLocalPart(String address) throws ParseException {
StringBuffer resultSB = new StringBuffer();
//<dot-string> ::= <string> | <string> "." <dot-string>
boolean lastCharDot = false;
while (true) {
//<string> ::= <char> | <char> <string>
//<char> ::= <c> | "\" <x>
if (address.charAt(pos) == '\\') {
resultSB.append('\\');
pos++;
//<x> ::= any one of the 128 ASCII characters (no exceptions)
char x = address.charAt(pos);
if (x < 0 || x > 127) {
throw new ParseException("Invalid \\ syntaxed character at position " + (pos + 1));
}
resultSB.append(x);
pos++;
lastCharDot = false;
} else if (address.charAt(pos) == '.') {
resultSB.append('.');
pos++;
lastCharDot = true;
} else if (address.charAt(pos) == '@') {
//End of local-part
break;
} else {
//<c> ::= any one of the 128 ASCII characters, but not any
// <special> or <SP>
//<special> ::= "<" | ">" | "(" | ")" | "[" | "]" | "\" | "."
// | "," | ";" | ":" | "@" """ | the control
// characters (ASCII codes 0 through 31 inclusive and
// 127)
//<SP> ::= the space character (ASCII code 32)
char c = address.charAt(pos);
if (c <= 31 || c >= 127 || c == ' ') {
throw new ParseException("Invalid character in local-part (user account) at position " + (pos + 1));
}
for (int i = 0; i < SPECIAL.length; i++) {
if (c == SPECIAL[i]) {
throw new ParseException("Invalid character in local-part (user account) at position " + (pos + 1));
}
}
resultSB.append(c);
pos++;
lastCharDot = false;
}
}
if (lastCharDot) {
throw new ParseException("local-part (user account) ended with a \".\", which is invalid.");
}
return resultSB.toString();
}
private String parseNumber(String address) throws ParseException {
//<number> ::= <d> | <d> <number>
StringBuffer resultSB = new StringBuffer();
//We keep the position from the class level pos field
while (true) {
if (pos >= address.length()) {
break;
}
//<d> ::= any one of the ten digits 0 through 9
char d = address.charAt(pos);
if (d == '.') {
break;
}
if (d < '0' || d > '9') {
throw new ParseException("In domain, did not find a number in # address at position " + (pos + 1));
}
resultSB.append(d);
pos++;
}
return resultSB.toString();
}
private String parseDotNum(String address) throws ParseException {
//throw away all irrelevant '\' they're not necessary for escaping of '.' or digits, and are illegal as part of the domain-literal
while(address.indexOf("\\")>-1){
address= address.substring(0,address.indexOf("\\")) + address.substring(address.indexOf("\\")+1);
}
StringBuffer resultSB = new StringBuffer();
        //we were passed the string with pos pointing to the [ char.
// take the first char ([), put it in the result buffer and increment pos
resultSB.append(address.charAt(pos));
pos++;
//<dotnum> ::= <snum> "." <snum> "." <snum> "." <snum>
for (int octet = 0; octet < 4; octet++) {
//<snum> ::= one, two, or three digits representing a decimal
// integer value in the range 0 through 255
//<d> ::= any one of the ten digits 0 through 9
StringBuffer snumSB = new StringBuffer();
for (int digits = 0; digits < 3; digits++) {
char d = address.charAt(pos);
if (d == '.') {
break;
}
if (d == ']') {
break;
}
if (d < '0' || d > '9') {
throw new ParseException("Invalid number at position " + (pos + 1));
}
snumSB.append(d);
pos++;
}
if (snumSB.toString().length() == 0) {
throw new ParseException("Number not found at position " + (pos + 1));
}
try {
int snum = Integer.parseInt(snumSB.toString());
if (snum > 255) {
throw new ParseException("Invalid number at position " + (pos + 1));
}
} catch (NumberFormatException nfe) {
throw new ParseException("Invalid number at position " + (pos + 1));
}
resultSB.append(snumSB.toString());
if (address.charAt(pos) == ']') {
if (octet < 3) {
throw new ParseException("End of number reached too quickly at " + (pos + 1));
} else {
break;
}
}
if (address.charAt(pos) == '.') {
resultSB.append('.');
pos++;
}
}
if (address.charAt(pos) != ']') {
throw new ParseException("Did not find closing bracket \"]\" in domain at position " + (pos + 1));
}
resultSB.append(']');
pos++;
return resultSB.toString();
}
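    // Illustrative note (not part of the original source): parseDotNum accepts exactly four
    // dot-separated decimal octets inside brackets, e.g. "[192.168.0.1]"; octet values above 255
    // (e.g. "[300.1.1.1]") and literals with fewer or more than four octets are rejected with a
    // ParseException.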
private String parseDomainName(String address) throws ParseException {
StringBuffer resultSB = new StringBuffer();
//<name> ::= <a> <ldh-str> <let-dig>
//<ldh-str> ::= <let-dig-hyp> | <let-dig-hyp> <ldh-str>
//<let-dig> ::= <a> | <d>
//<let-dig-hyp> ::= <a> | <d> | "-"
//<a> ::= any one of the 52 alphabetic characters A through Z
// in upper case and a through z in lower case
//<d> ::= any one of the ten digits 0 through 9
// basically, this is a series of letters, digits, and hyphens,
        // but it can't start with a digit or hyphen
        // and can't end with a hyphen
        // in practice though, we should relax this as domain names can start
        // with digits as well as letters. So only check that it doesn't start
// or end with hyphen.
while (true) {
if (pos >= address.length()) {
break;
}
char ch = address.charAt(pos);
if ((ch >= '0' && ch <= '9') ||
(ch >= 'a' && ch <= 'z') ||
(ch >= 'A' && ch <= 'Z') ||
(ch == '-')) {
resultSB.append(ch);
pos++;
continue;
}
if (ch == '.') {
break;
}
throw new ParseException("Invalid character at " + pos);
}
String result = resultSB.toString();
if (result.startsWith("-") || result.endsWith("-")) {
throw new ParseException("Domain name cannot begin or end with a hyphen \"-\" at position " + (pos + 1));
}
return result;
}
}
|
src/java/org/apache/mailet/MailAddress.java
|
/***********************************************************************
* Copyright (c) 1999-2004 The Apache Software Foundation. *
* All rights reserved. *
* ------------------------------------------------------------------- *
* Licensed under the Apache License, Version 2.0 (the "License"); you *
* may not use this file except in compliance with the License. You *
* may obtain a copy of the License at: *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, software *
* distributed under the License is distributed on an "AS IS" BASIS, *
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or *
* implied. See the License for the specific language governing *
* permissions and limitations under the License. *
***********************************************************************/
package org.apache.mailet;
import java.util.Locale;
import javax.mail.internet.InternetAddress;
import javax.mail.internet.ParseException;
/**
* A representation of an email address.
* <p>This class encapsulates functionalities to access to different
* parts of an email address without dealing with its parsing.</p>
*
* <p>A MailAddress is an address specified in the MAIL FROM and
* RCPT TO commands in SMTP sessions. These are either passed by
* an external server to the mailet-compliant SMTP server, or they
* are created programmatically by the mailet-compliant server to
* send to another (external) SMTP server. Mailets and matchers
* use the MailAddress for the purpose of evaluating the sender
* and recipient(s) of a message.</p>
*
* <p>MailAddress parses an email address as defined in RFC 821
* (SMTP) p. 30 and 31 where addresses are defined in BNF convention.
* As the mailet API does not support the aged "SMTP-relayed mail"
* addressing protocol, this leaves all addresses to be a <mailbox>,
* as per the spec. The MailAddress's "user" is the <local-part> of
* the <mailbox> and "host" is the <domain> of the mailbox.</p>
*
* <p>This class is a good way to validate email addresses as there are
* some valid addresses which would fail with a simpler approach
* to parsing address. It also removes parsing burden from
* mailets and matchers that might not realize the flexibility of an
* SMTP address. For instance, "serge@home"@lokitech.com is a valid
* SMTP address (the quoted text serge@home is the user and
* lokitech.com is the host). This means all current parsing to date
* is incorrect as we just find the first @ and use that to separate
* user from host.</p>
*
* <p>This parses an address as per the BNF specification for <mailbox>
* from RFC 821 on page 30 and 31, section 4.1.2. COMMAND SYNTAX.
* http://www.freesoft.org/CIE/RFC/821/15.htm</p>
*
* @version 1.0
*/
public class MailAddress implements java.io.Serializable {
//We hardcode the serialVersionUID so that from James 1.2 on,
// MailAddress will be deserializable (so your mail doesn't get lost)
public static final long serialVersionUID = 2779163542539434916L;
private final static char[] SPECIAL =
{'<', '>', '(', ')', '[', ']', '\\', '.', ',', ';', ':', '@', '\"'};
private String user = null;
private String host = null;
//Used for parsing
private int pos = 0;
/**
* <p>Construct a MailAddress parsing the provided <code>String</code> object.</p>
*
* <p>The <code>personal</code> variable is left empty.</p>
*
* @param address the email address compliant to the RFC822 format
* @throws ParseException if the parse failed
*/
public MailAddress(String address) throws ParseException {
address = address.trim();
StringBuffer userSB = new StringBuffer();
StringBuffer hostSB = new StringBuffer();
//Begin parsing
//<mailbox> ::= <local-part> "@" <domain>
try {
//parse local-part
//<local-part> ::= <dot-string> | <quoted-string>
if (address.charAt(pos) == '\"') {
userSB.append(parseQuotedLocalPart(address));
} else {
userSB.append(parseUnquotedLocalPart(address));
}
if (userSB.toString().length() == 0) {
throw new ParseException("No local-part (user account) found at position " + (pos + 1));
}
//find @
if (pos >= address.length() || address.charAt(pos) != '@') {
throw new ParseException("Did not find @ between local-part and domain at position " + (pos + 1));
}
pos++;
//parse domain
//<domain> ::= <element> | <element> "." <domain>
//<element> ::= <name> | "#" <number> | "[" <dotnum> "]"
while (true) {
if (address.charAt(pos) == '#') {
hostSB.append(parseNumber(address));
} else if (address.charAt(pos) == '[') {
hostSB.append(parseDotNum(address));
} else {
hostSB.append(parseDomainName(address));
}
if (pos >= address.length()) {
break;
}
if (address.charAt(pos) == '.') {
hostSB.append('.');
pos++;
continue;
}
break;
}
if (hostSB.toString().length() == 0) {
throw new ParseException("No domain found at position " + (pos + 1));
}
} catch (IndexOutOfBoundsException ioobe) {
throw new ParseException("Out of data at position " + (pos + 1));
}
user = userSB.toString();
host = hostSB.toString();
}
/**
     * Construct a MailAddress from the provided user and host parts.
     *
     * @param newUser the username or account name on the mail server
     * @param newHost the server that should accept messages for this user
* @throws ParseException if the parse failed
*/
public MailAddress(String newUser, String newHost) throws ParseException {
/* NEEDS TO BE REWORKED TO VALIDATE EACH CHAR */
user = newUser;
host = newHost;
}
/**
* Constructs a MailAddress from a JavaMail InternetAddress, using only the
* email address portion, discarding the personal name.
*/
public MailAddress(InternetAddress address) throws ParseException {
this(address.getAddress());
}
/**
* Return the host part.
*
* @return a <code>String</code> object representing the host part
* of this email address. If the host is of the dotNum form
* (e.g. [yyy.yyy.yyy.yyy]) then strip the braces first.
*/
public String getHost() {
if (!(host.startsWith("[") && host.endsWith("]"))) {
return host;
} else {
return host.substring(1, host.length() -1);
}
}
/**
* Return the user part.
*
* @return a <code>String</code> object representing the user part
* of this email address.
*/
public String getUser() {
return user;
}
public String toString() {
StringBuffer addressBuffer =
new StringBuffer(128)
.append(user)
.append("@")
.append(host);
return addressBuffer.toString();
}
public InternetAddress toInternetAddress() {
try {
return new InternetAddress(toString());
} catch (javax.mail.internet.AddressException ae) {
//impossible really
return null;
}
}
public boolean equals(Object obj) {
if (obj == null) {
return false;
} else if (obj instanceof String) {
String theString = (String)obj;
return toString().equalsIgnoreCase(theString);
} else if (obj instanceof MailAddress) {
MailAddress addr = (MailAddress)obj;
return getUser().equalsIgnoreCase(addr.getUser()) && getHost().equalsIgnoreCase(addr.getHost());
}
return false;
}
/**
* Return a hashCode for this object which should be identical for addresses
* which are equivalent. This is implemented by obtaining the default
* hashcode of the String representation of the MailAddress. Without this
* explicit definition, the default hashCode will create different hashcodes
* for separate object instances.
*
* @return the hashcode.
*/
public int hashCode() {
return toString().toLowerCase(Locale.US).hashCode();
}
private String parseQuotedLocalPart(String address) throws ParseException {
StringBuffer resultSB = new StringBuffer();
resultSB.append('\"');
pos++;
//<quoted-string> ::= """ <qtext> """
//<qtext> ::= "\" <x> | "\" <x> <qtext> | <q> | <q> <qtext>
while (true) {
if (address.charAt(pos) == '\"') {
resultSB.append('\"');
//end of quoted string... move forward
pos++;
break;
}
if (address.charAt(pos) == '\\') {
resultSB.append('\\');
pos++;
//<x> ::= any one of the 128 ASCII characters (no exceptions)
char x = address.charAt(pos);
if (x < 0 || x > 127) {
throw new ParseException("Invalid \\ syntaxed character at position " + (pos + 1));
}
resultSB.append(x);
pos++;
} else {
//<q> ::= any one of the 128 ASCII characters except <CR>,
//<LF>, quote ("), or backslash (\)
char q = address.charAt(pos);
if (q <= 0 || q == '\n' || q == '\r' || q == '\"' || q == '\\') {
throw new ParseException("Unquoted local-part (user account) must be one of the 128 ASCI characters exception <CR>, <LF>, quote (\"), or backslash (\\) at position " + (pos + 1));
}
resultSB.append(q);
pos++;
}
}
return resultSB.toString();
}
private String parseUnquotedLocalPart(String address) throws ParseException {
StringBuffer resultSB = new StringBuffer();
//<dot-string> ::= <string> | <string> "." <dot-string>
boolean lastCharDot = false;
while (true) {
//<string> ::= <char> | <char> <string>
//<char> ::= <c> | "\" <x>
if (address.charAt(pos) == '\\') {
resultSB.append('\\');
pos++;
//<x> ::= any one of the 128 ASCII characters (no exceptions)
char x = address.charAt(pos);
if (x < 0 || x > 127) {
throw new ParseException("Invalid \\ syntaxed character at position " + (pos + 1));
}
resultSB.append(x);
pos++;
lastCharDot = false;
} else if (address.charAt(pos) == '.') {
resultSB.append('.');
pos++;
lastCharDot = true;
} else if (address.charAt(pos) == '@') {
//End of local-part
break;
} else {
//<c> ::= any one of the 128 ASCII characters, but not any
// <special> or <SP>
//<special> ::= "<" | ">" | "(" | ")" | "[" | "]" | "\" | "."
// | "," | ";" | ":" | "@" """ | the control
// characters (ASCII codes 0 through 31 inclusive and
// 127)
//<SP> ::= the space character (ASCII code 32)
char c = address.charAt(pos);
if (c <= 31 || c >= 127 || c == ' ') {
throw new ParseException("Invalid character in local-part (user account) at position " + (pos + 1));
}
for (int i = 0; i < SPECIAL.length; i++) {
if (c == SPECIAL[i]) {
throw new ParseException("Invalid character in local-part (user account) at position " + (pos + 1));
}
}
resultSB.append(c);
pos++;
lastCharDot = false;
}
}
if (lastCharDot) {
throw new ParseException("local-part (user account) ended with a \".\", which is invalid.");
}
return resultSB.toString();
}
private String parseNumber(String address) throws ParseException {
//<number> ::= <d> | <d> <number>
StringBuffer resultSB = new StringBuffer();
//We keep the position from the class level pos field
while (true) {
if (pos >= address.length()) {
break;
}
//<d> ::= any one of the ten digits 0 through 9
char d = address.charAt(pos);
if (d == '.') {
break;
}
if (d < '0' || d > '9') {
throw new ParseException("In domain, did not find a number in # address at position " + (pos + 1));
}
resultSB.append(d);
pos++;
}
return resultSB.toString();
}
private String parseDotNum(String address) throws ParseException {
//throw away all irrelevant '\' they're not necessary for escaping of '.' or digits, and are illegal as part of the domain-literal
while(address.indexOf("\\")>-1){
address= address.substring(0,address.indexOf("\\")) + address.substring(address.indexOf("\\")+1);
}
StringBuffer resultSB = new StringBuffer();
        //we were passed the string with pos pointing to the [ char.
// take the first char ([), put it in the result buffer and increment pos
resultSB.append(address.charAt(pos));
pos++;
//<dotnum> ::= <snum> "." <snum> "." <snum> "." <snum>
for (int octet = 0; octet < 4; octet++) {
//<snum> ::= one, two, or three digits representing a decimal
// integer value in the range 0 through 255
//<d> ::= any one of the ten digits 0 through 9
StringBuffer snumSB = new StringBuffer();
for (int digits = 0; digits < 3; digits++) {
char d = address.charAt(pos);
if (d == '.') {
break;
}
if (d == ']') {
break;
}
if (d < '0' || d > '9') {
throw new ParseException("Invalid number at position " + (pos + 1));
}
snumSB.append(d);
pos++;
}
if (snumSB.toString().length() == 0) {
throw new ParseException("Number not found at position " + (pos + 1));
}
try {
int snum = Integer.parseInt(snumSB.toString());
if (snum > 255) {
throw new ParseException("Invalid number at position " + (pos + 1));
}
} catch (NumberFormatException nfe) {
throw new ParseException("Invalid number at position " + (pos + 1));
}
resultSB.append(snumSB.toString());
if (address.charAt(pos) == ']') {
if (octet < 3) {
throw new ParseException("End of number reached too quickly at " + (pos + 1));
} else {
break;
}
}
if (address.charAt(pos) == '.') {
resultSB.append('.');
pos++;
}
}
if (address.charAt(pos) != ']') {
throw new ParseException("Did not find closing bracket \"]\" in domain at position " + (pos + 1));
}
resultSB.append(']');
pos++;
return resultSB.toString();
}
private String parseDomainName(String address) throws ParseException {
StringBuffer resultSB = new StringBuffer();
//<name> ::= <a> <ldh-str> <let-dig>
//<ldh-str> ::= <let-dig-hyp> | <let-dig-hyp> <ldh-str>
//<let-dig> ::= <a> | <d>
//<let-dig-hyp> ::= <a> | <d> | "-"
//<a> ::= any one of the 52 alphabetic characters A through Z
// in upper case and a through z in lower case
//<d> ::= any one of the ten digits 0 through 9
// basically, this is a series of letters, digits, and hyphens,
        // but it can't start with a digit or hyphen
        // and can't end with a hyphen
        // in practice though, we should relax this as domain names can start
        // with digits as well as letters. So only check that it doesn't start
// or end with hyphen.
while (true) {
if (pos >= address.length()) {
break;
}
char ch = address.charAt(pos);
if ((ch >= '0' && ch <= '9') ||
(ch >= 'a' && ch <= 'z') ||
(ch >= 'A' && ch <= 'Z') ||
(ch == '-')) {
resultSB.append(ch);
pos++;
continue;
}
if (ch == '.') {
break;
}
throw new ParseException("Invalid character at " + pos);
}
String result = resultSB.toString();
if (result.startsWith("-") || result.endsWith("-")) {
throw new ParseException("Domain name cannot begin or end with a hyphen \"-\" at position " + (pos + 1));
}
return result;
}
}
|
Strip RFC-821 source routing information. JAMES-296
git-svn-id: de9d04cf23151003780adc3e4ddb7078e3680318@239384 13f79535-47bb-0310-9956-ffa450edef68
|
src/java/org/apache/mailet/MailAddress.java
|
Strip RFC-821 source routing information. JAMES-296
|
|
Java
|
apache-2.0
|
c244aa657387009fb2cff27ad6aec90b569d9d9d
| 0
|
DMHP/carbon-identity,ChamaraPhilipsuom/carbon-identity,thilina27/carbon-identity,thusithathilina/carbon-identity,mefarazath/carbon-identity,nuwandi-is/carbon-identity,malithie/carbon-identity,Niranjan-K/carbon-identity,GayanM/carbon-identity,virajsenevirathne/carbon-identity,thanujalk/carbon-identity,ChamaraPhilipsuom/carbon-identity,thariyarox/carbon-identity,Niranjan-K/carbon-identity,darshanasbg/carbon-identity,madurangasiriwardena/carbon-identity,GayanM/carbon-identity,keerthu/carbon-identity,kesavany/carbon-identity,IsuraD/carbon-identity,thusithathilina/carbon-identity,virajsenevirathne/carbon-identity,DMHP/carbon-identity,wso2/carbon-identity,ChamaraPhilipsuom/carbon-identity,darshanasbg/carbon-identity,virajsenevirathne/carbon-identity,IsuraD/carbon-identity,thariyarox/carbon-identity,0xkasun/carbon-identity,thilina27/carbon-identity,IndunilRathnayake/carbon-identity,DMHP/carbon-identity,IndunilRathnayake/carbon-identity,malithie/carbon-identity,0xkasun/carbon-identity,pulasthi7/carbon-identity,pulasthi7/carbon-identity,IndunilRathnayake/carbon-identity,keerthu/carbon-identity,madurangasiriwardena/carbon-identity,nuwandi-is/carbon-identity,nuwand/carbon-identity,thanujalk/carbon-identity,keerthu/carbon-identity,madurangasiriwardena/carbon-identity,mefarazath/carbon-identity,nuwand/carbon-identity,thariyarox/carbon-identity,GayanM/carbon-identity,thilina27/carbon-identity,thanujalk/carbon-identity,pulasthi7/carbon-identity,malithie/carbon-identity,0xkasun/carbon-identity,mefarazath/carbon-identity,nuwand/carbon-identity,Niranjan-K/carbon-identity,darshanasbg/carbon-identity,wso2/carbon-identity,nuwandi-is/carbon-identity,kesavany/carbon-identity,thusithathilina/carbon-identity,wso2/carbon-identity,IsuraD/carbon-identity,kesavany/carbon-identity
|
/*
* Copyright (c) 2013, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.identity.oauth;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.context.CarbonContext;
import org.wso2.carbon.context.PrivilegedCarbonContext;
import org.wso2.carbon.core.AbstractAdmin;
import org.wso2.carbon.identity.application.common.model.User;
import org.wso2.carbon.identity.core.model.OAuthAppDO;
import org.wso2.carbon.identity.core.util.IdentityTenantUtil;
import org.wso2.carbon.identity.oauth.cache.AppInfoCache;
import org.wso2.carbon.identity.oauth.cache.OAuthCache;
import org.wso2.carbon.identity.oauth.cache.OAuthCacheKey;
import org.wso2.carbon.identity.oauth.common.OAuth2ErrorCodes;
import org.wso2.carbon.identity.oauth.common.OAuthConstants;
import org.wso2.carbon.identity.oauth.common.exception.InvalidOAuthClientException;
import org.wso2.carbon.identity.oauth.config.OAuthServerConfiguration;
import org.wso2.carbon.identity.oauth.dao.OAuthAppDAO;
import org.wso2.carbon.identity.oauth.dto.OAuthConsumerAppDTO;
import org.wso2.carbon.identity.oauth.dto.OAuthRevocationRequestDTO;
import org.wso2.carbon.identity.oauth.dto.OAuthRevocationResponseDTO;
import org.wso2.carbon.identity.oauth2.IdentityOAuth2Exception;
import org.wso2.carbon.identity.oauth2.dao.TokenMgtDAO;
import org.wso2.carbon.identity.oauth2.model.AccessTokenDO;
import org.wso2.carbon.identity.oauth2.util.OAuth2Util;
import org.wso2.carbon.user.api.UserStoreException;
import org.wso2.carbon.user.core.util.UserCoreUtil;
import org.wso2.carbon.utils.multitenancy.MultitenantUtils;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
public class OAuthAdminService extends AbstractAdmin {
public static final String IMPLICIT = "implicit";
public static final String AUTHORIZATION_CODE = "authorization_code";
private static List<String> allowedGrants = null;
protected Log log = LogFactory.getLog(OAuthAdminService.class);
private AppInfoCache appInfoCache = AppInfoCache.getInstance(OAuthServerConfiguration.getInstance().
getAppInfoCacheTimeout());
/**
     * Registers a consumer secret against the logged-in user. A given user can only have a single
     * consumer secret at a time. Calling this method repeatedly will update the existing
     * consumer secret key.
     *
     * @return An array containing the consumer key and the consumer secret, respectively.
     * @throws IdentityOAuthAdminException Error when persisting the data in the persistence store.
*/
public String[] registerOAuthConsumer() throws IdentityOAuthAdminException {
String loggedInUser = CarbonContext.getThreadLocalCarbonContext().getUsername();
if (log.isDebugEnabled()) {
log.debug("Adding a consumer secret for the logged in user " + loggedInUser);
}
String tenantUser = MultitenantUtils.getTenantAwareUsername(loggedInUser);
int tenantId = CarbonContext.getThreadLocalCarbonContext().getTenantId();
String userDomain = UserCoreUtil.extractDomainFromName(loggedInUser);
OAuthAppDAO dao = new OAuthAppDAO();
return dao.addOAuthConsumer(UserCoreUtil.removeDomainFromName(tenantUser), tenantId, userDomain);
}
/**
* Get all registered OAuth applications for the logged in user.
*
     * @return An array of <code>OAuthConsumerAppDTO</code> objects containing the application
     * information of the user
     * @throws IdentityOAuthAdminException Error when reading the data from the persistence store.
*/
public OAuthConsumerAppDTO[] getAllOAuthApplicationData() throws IdentityOAuthAdminException {
String userName = CarbonContext.getThreadLocalCarbonContext().getUsername();
OAuthConsumerAppDTO[] dtos = new OAuthConsumerAppDTO[0];
if (userName == null) {
            if (log.isErrorEnabled()) {
                log.error("User not logged in");
}
throw new IdentityOAuthAdminException("User not logged in");
}
String tenantUser = MultitenantUtils.getTenantAwareUsername(userName);
int tenantId = CarbonContext.getThreadLocalCarbonContext().getTenantId();
OAuthAppDAO dao = new OAuthAppDAO();
OAuthAppDO[] apps = dao.getOAuthConsumerAppsOfUser(tenantUser, tenantId);
if (apps != null && apps.length > 0) {
dtos = new OAuthConsumerAppDTO[apps.length];
OAuthConsumerAppDTO dto = null;
OAuthAppDO app = null;
for (int i = 0; i < apps.length; i++) {
app = apps[i];
dto = new OAuthConsumerAppDTO();
dto.setApplicationName(app.getApplicationName());
dto.setCallbackUrl(app.getCallbackUrl());
dto.setOauthConsumerKey(app.getOauthConsumerKey());
dto.setOauthConsumerSecret(app.getOauthConsumerSecret());
dto.setOAuthVersion(app.getOauthVersion());
dto.setGrantTypes(app.getGrantTypes());
dto.setUsername(app.getUserName());
dtos[i] = dto;
}
}
return dtos;
}
/**
* Get OAuth application data by the consumer key.
*
* @param consumerKey Consumer Key
* @return <code>OAuthConsumerAppDTO</code> with application information
     * @throws IdentityOAuthAdminException Error when reading application information from the persistence store.
*/
public OAuthConsumerAppDTO getOAuthApplicationData(String consumerKey) throws IdentityOAuthAdminException {
OAuthConsumerAppDTO dto = new OAuthConsumerAppDTO();
OAuthAppDAO dao = new OAuthAppDAO();
try {
OAuthAppDO app = dao.getAppInformation(consumerKey);
if (app != null) {
dto.setApplicationName(app.getApplicationName());
dto.setCallbackUrl(app.getCallbackUrl());
dto.setOauthConsumerKey(app.getOauthConsumerKey());
dto.setOauthConsumerSecret(app.getOauthConsumerSecret());
dto.setOAuthVersion(app.getOauthVersion());
dto.setGrantTypes(app.getGrantTypes());
}
return dto;
} catch (InvalidOAuthClientException | IdentityOAuth2Exception e) {
throw new IdentityOAuthAdminException("Error while retrieving the app information using consumer key", e);
}
}
/**
* Get OAuth application data by the application name.
*
* @param appName OAuth application name
* @return <code>OAuthConsumerAppDTO</code> with application information
     * @throws IdentityOAuthAdminException Error when reading application information from the persistence store.
*/
public OAuthConsumerAppDTO getOAuthApplicationDataByAppName(String appName) throws IdentityOAuthAdminException {
OAuthConsumerAppDTO dto = new OAuthConsumerAppDTO();
OAuthAppDAO dao = new OAuthAppDAO();
try {
OAuthAppDO app = dao.getAppInformationByAppName(appName);
if (app != null) {
dto.setApplicationName(app.getApplicationName());
dto.setCallbackUrl(app.getCallbackUrl());
dto.setOauthConsumerKey(app.getOauthConsumerKey());
dto.setOauthConsumerSecret(app.getOauthConsumerSecret());
dto.setOAuthVersion(app.getOauthVersion());
dto.setGrantTypes(app.getGrantTypes());
}
return dto;
        } catch (InvalidOAuthClientException | IdentityOAuth2Exception e) {
throw new IdentityOAuthAdminException("Error while retrieving the app information by app name", e);
}
}
/**
* Registers an OAuth consumer application.
*
* @param application <code>OAuthConsumerAppDTO</code> with application information
     * @throws IdentityOAuthAdminException Error when persisting the application information to the persistence store
*/
    public void registerOAuthApplicationData(OAuthConsumerAppDTO application) throws IdentityOAuthAdminException {
String userName = CarbonContext.getThreadLocalCarbonContext().getUsername();
if (userName != null) {
String tenantUser = MultitenantUtils.getTenantAwareUsername(userName);
int tenantId = CarbonContext.getThreadLocalCarbonContext().getTenantId();
OAuthAppDAO dao = new OAuthAppDAO();
OAuthAppDO app = new OAuthAppDO();
if (application != null) {
app.setApplicationName(application.getApplicationName());
if ((application.getGrantTypes().contains(AUTHORIZATION_CODE) || application.getGrantTypes()
.contains(IMPLICIT)) && StringUtils.isEmpty(application.getCallbackUrl())) {
throw new IdentityOAuthAdminException("Callback Url is required for Code or Implicit grant types");
}
app.setCallbackUrl(application.getCallbackUrl());
if (application.getOauthConsumerKey() == null) {
app.setOauthConsumerKey(OAuthUtil.getRandomNumber());
app.setOauthConsumerSecret(OAuthUtil.getRandomNumber());
} else {
app.setOauthConsumerKey(application.getOauthConsumerKey());
app.setOauthConsumerSecret(application.getOauthConsumerSecret());
}
String applicationUser = application.getUsername();
if (applicationUser != null && applicationUser.trim().length() > 0) {
try {
if (CarbonContext.getThreadLocalCarbonContext().getUserRealm().
getUserStoreManager().isExistingUser(application.getUsername())) {
tenantUser = applicationUser;
} else {
log.warn("OAuth application registrant user name " + applicationUser +
" does not exist in the user store. Using logged-in user name " + tenantUser +
" as registrant name");
}
} catch (UserStoreException e) {
throw new IdentityOAuthAdminException("Error while retrieving the user store manager", e);
}
}
app.setUserName(UserCoreUtil.removeDomainFromName(tenantUser));
app.setTenantId(tenantId);
app.setUserDomain(UserCoreUtil.extractDomainFromName(userName));
if (application.getOAuthVersion() != null) {
app.setOauthVersion(application.getOAuthVersion());
} else { // by default, assume OAuth 2.0, if it is not set.
app.setOauthVersion(OAuthConstants.OAuthVersions.VERSION_2);
}
if (OAuthConstants.OAuthVersions.VERSION_2.equals(application.getOAuthVersion())) {
List<String> allowedGrants = new ArrayList<>(Arrays.asList(getAllowedGrantTypes()));
String[] requestGrants = application.getGrantTypes().split("\\s");
for (String requestedGrant : requestGrants) {
if ("".equals(requestedGrant.trim())) {
continue;
}
if (!allowedGrants.contains(requestedGrant)) {
throw new IdentityOAuthAdminException(requestedGrant + " not allowed");
}
}
app.setGrantTypes(application.getGrantTypes());
}
dao.addOAuthApplication(app);
if (OAuthServerConfiguration.getInstance().isCacheEnabled()) {
appInfoCache.addToCache(app.getOauthConsumerKey(), app);
}
}
}
}
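    // Illustrative usage sketch (not part of the original source): grant types are passed as a single
    // whitespace-separated string and, for OAuth 2.0 apps, validated against the server's allowed
    // grant types. A hypothetical caller might do:
    //   OAuthConsumerAppDTO dto = new OAuthConsumerAppDTO();
    //   dto.setApplicationName("sampleApp");
    //   dto.setGrantTypes("authorization_code implicit"); // code/implicit require a callback URL (see check above)
    //   dto.setCallbackUrl("https://localhost/callback");
    //   dto.setOAuthVersion(OAuthConstants.OAuthVersions.VERSION_2);
    //   oAuthAdminService.registerOAuthApplicationData(dto);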
/**
* Update existing consumer application.
*
* @param consumerAppDTO <code>OAuthConsumerAppDTO</code> with updated application information
* @throws IdentityOAuthAdminException Error when updating the underlying identity persistence store.
*/
public void updateConsumerApplication(OAuthConsumerAppDTO consumerAppDTO) throws IdentityOAuthAdminException {
String userName = CarbonContext.getThreadLocalCarbonContext().getUsername();
String tenantAwareUsername = MultitenantUtils.getTenantAwareUsername(userName);
int tenantId = CarbonContext.getThreadLocalCarbonContext().getTenantId();
OAuthAppDAO dao = new OAuthAppDAO();
OAuthAppDO oauthappdo = new OAuthAppDO();
oauthappdo.setUserName(tenantAwareUsername);
oauthappdo.setTenantId(tenantId);
oauthappdo.setUserDomain(UserCoreUtil.extractDomainFromName(userName));
oauthappdo.setOauthConsumerKey(consumerAppDTO.getOauthConsumerKey());
oauthappdo.setOauthConsumerSecret(consumerAppDTO.getOauthConsumerSecret());
oauthappdo.setCallbackUrl(consumerAppDTO.getCallbackUrl());
oauthappdo.setApplicationName(consumerAppDTO.getApplicationName());
if (OAuthConstants.OAuthVersions.VERSION_2.equals(consumerAppDTO.getOAuthVersion())) {
List<String> allowedGrants = new ArrayList<>(Arrays.asList(getAllowedGrantTypes()));
String[] requestGrants = consumerAppDTO.getGrantTypes().split("\\s");
for (String requestedGrant : requestGrants) {
if ("".equals(requestedGrant.trim())) {
continue;
}
if (!allowedGrants.contains(requestedGrant)) {
throw new IdentityOAuthAdminException(requestedGrant + " not allowed");
}
}
oauthappdo.setGrantTypes(consumerAppDTO.getGrantTypes());
}
dao.updateConsumerApplication(oauthappdo);
if (OAuthServerConfiguration.getInstance().isCacheEnabled()) {
appInfoCache.addToCache(oauthappdo.getOauthConsumerKey(), oauthappdo);
}
}
/**
* Removes an OAuth consumer application.
*
* @param consumerKey Consumer Key
     * @throws IdentityOAuthAdminException Error when removing the consumer information from the database.
*/
public void removeOAuthApplicationData(String consumerKey) throws IdentityOAuthAdminException {
OAuthAppDAO dao = new OAuthAppDAO();
dao.removeConsumerApplication(consumerKey);
// remove client credentials from cache
if (OAuthServerConfiguration.getInstance().isCacheEnabled()) {
OAuthCache.getInstance(0).clearCacheEntry(new OAuthCacheKey(consumerKey));
appInfoCache.clearCacheEntry(consumerKey);
if (log.isDebugEnabled()) {
log.debug("Client credentials are removed from the cache.");
}
}
}
/**
* Get apps that are authorized by the given user
*
* @return OAuth applications authorized by the user that have tokens in ACTIVE or EXPIRED state
*/
public OAuthConsumerAppDTO[] getAppsAuthorizedByUser() throws IdentityOAuthAdminException {
TokenMgtDAO tokenMgtDAO = new TokenMgtDAO();
OAuthAppDAO appDAO = new OAuthAppDAO();
String tenantDomain = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantDomain();
String tenantAwareUserName = PrivilegedCarbonContext.getThreadLocalCarbonContext().getUsername();
String username = tenantAwareUserName + "@" + tenantDomain;
String userStoreDomain = null;
if (OAuth2Util.checkAccessTokenPartitioningEnabled() && OAuth2Util.checkUserNameAssertionEnabled()) {
try {
userStoreDomain = OAuth2Util.getUserStoreDomainFromUserId(username);
} catch (IdentityOAuth2Exception e) {
String errorMsg = "Error occurred while getting user store domain for User ID : " + username;
log.error(errorMsg, e);
throw new IdentityOAuthAdminException(errorMsg, e);
}
}
Set<String> clientIds = null;
try {
clientIds = tokenMgtDAO.getAllTimeAuthorizedClientIds(username);
} catch (IdentityOAuth2Exception e) {
String errorMsg = "Error occurred while retrieving apps authorized by User ID : " + username;
log.error(errorMsg, e);
throw new IdentityOAuthAdminException(errorMsg, e);
}
Set<OAuthConsumerAppDTO> appDTOs = new HashSet<OAuthConsumerAppDTO>();
for (String clientId : clientIds) {
Set<AccessTokenDO> accessTokenDOs = null;
try {
accessTokenDOs = tokenMgtDAO.retrieveAccessTokens(clientId, username, userStoreDomain, true);
} catch (IdentityOAuth2Exception e) {
String errorMsg = "Error occurred while retrieving access tokens issued for " +
"Client ID : " + clientId + ", User ID : " + username;
log.error(errorMsg, e);
throw new IdentityOAuthAdminException(errorMsg, e);
}
if (!accessTokenDOs.isEmpty()) {
Set<String> distinctClientUserScopeCombo = new HashSet<String>();
for (AccessTokenDO accessTokenDO : accessTokenDOs) {
AccessTokenDO scopedToken = null;
String scopeString = OAuth2Util.buildScopeString(accessTokenDO.getScope());
try {
scopedToken = tokenMgtDAO.retrieveLatestAccessToken(
clientId, username, userStoreDomain, scopeString, true);
if(scopedToken != null && !distinctClientUserScopeCombo.contains(clientId+":"+username)){
OAuthConsumerAppDTO appDTO = new OAuthConsumerAppDTO();
OAuthAppDO appDO;
try {
appDO = appDAO.getAppInformation(scopedToken.getConsumerKey());
appDTO.setOauthConsumerKey(scopedToken.getConsumerKey());
appDTO.setApplicationName(appDO.getApplicationName());
appDTO.setUsername(UserCoreUtil.addTenantDomainToEntry(appDO.getUserName(),
IdentityTenantUtil.getTenantDomain(appDO.getTenantId())));
appDTO.setGrantTypes(appDO.getGrantTypes());
appDTOs.add(appDTO);
} catch (InvalidOAuthClientException e) {
String errorMsg = "Invalid Client ID : " + scopedToken.getConsumerKey();
log.error(errorMsg, e);
throw new IdentityOAuthAdminException(errorMsg, e);
} catch (IdentityOAuth2Exception e) {
String errorMsg = "Error occurred while retrieving app information " +
"for Client ID : " + scopedToken.getConsumerKey();
log.error(errorMsg, e);
throw new IdentityOAuthAdminException(errorMsg, e);
}
                            distinctClientUserScopeCombo.add(clientId + ":" + username);
}
} catch (IdentityOAuth2Exception e) {
String errorMsg = "Error occurred while retrieving latest access token issued for Client ID :" +
" " + clientId + ", User ID : " + username + " and Scope : " + scopeString;
log.error(errorMsg, e);
throw new IdentityOAuthAdminException(errorMsg, e);
}
}
}
}
return appDTOs.toArray(new OAuthConsumerAppDTO[appDTOs.size()]);
}
/**
* Revoke authorization for OAuth apps by resource owners
*
* @param revokeRequestDTO DTO representing authorized user and apps[]
* @return revokeRespDTO DTO representing success or failure message
*/
public OAuthRevocationResponseDTO revokeAuthzForAppsByResoureOwner(
OAuthRevocationRequestDTO revokeRequestDTO) throws IdentityOAuthAdminException {
TokenMgtDAO tokenMgtDAO = new TokenMgtDAO();
if (revokeRequestDTO.getApps() != null && revokeRequestDTO.getApps().length > 0) {
String tenantDomain = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantDomain();
String tenantAwareUserName = PrivilegedCarbonContext.getThreadLocalCarbonContext().getUsername();
String userName = tenantAwareUserName + "@" + tenantDomain;
String userStoreDomain = null;
if (OAuth2Util.checkAccessTokenPartitioningEnabled() &&
OAuth2Util.checkUserNameAssertionEnabled()) {
try {
userStoreDomain = OAuth2Util.getUserStoreDomainFromUserId(userName);
} catch (IdentityOAuth2Exception e) {
throw new IdentityOAuthAdminException(
"Error occurred while getting user store domain from User ID : " + userName, e);
}
}
OAuthConsumerAppDTO[] appDTOs = getAppsAuthorizedByUser();
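            // Match each application name in the revocation request against the apps this user has authorized.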
for (String appName : revokeRequestDTO.getApps()) {
for (OAuthConsumerAppDTO appDTO : appDTOs) {
if (appDTO.getApplicationName().equals(appName)) {
Set<AccessTokenDO> accessTokenDOs = null;
try {
// retrieve all ACTIVE or EXPIRED access tokens for particular client authorized by this user
accessTokenDOs = tokenMgtDAO.retrieveAccessTokens(
appDTO.getOauthConsumerKey(), userName, userStoreDomain, true);
} catch (IdentityOAuth2Exception e) {
String errorMsg = "Error occurred while retrieving access tokens issued for " +
"Client ID : " + appDTO.getOauthConsumerKey() + ", User ID : " + userName;
log.error(errorMsg, e);
throw new IdentityOAuthAdminException(errorMsg, e);
}
User authzUser;
for (AccessTokenDO accessTokenDO : accessTokenDOs) {
//Clear cache with AccessTokenDO
authzUser = accessTokenDO.getAuthzUser();
OAuthUtil.clearOAuthCache(accessTokenDO.getConsumerKey(), authzUser,
OAuth2Util.buildScopeString(accessTokenDO.getScope()));
OAuthUtil.clearOAuthCache(accessTokenDO.getConsumerKey(), authzUser);
OAuthUtil.clearOAuthCache(accessTokenDO.getAccessToken());
AccessTokenDO scopedToken = null;
try {
// retrieve latest access token for particular client, user and scope combination if its ACTIVE or EXPIRED
scopedToken = tokenMgtDAO.retrieveLatestAccessToken(
appDTO.getOauthConsumerKey(), userName, userStoreDomain,
OAuth2Util.buildScopeString(accessTokenDO.getScope()), true);
} catch (IdentityOAuth2Exception e) {
String errorMsg = "Error occurred while retrieving latest " +
"access token issued for Client ID : " +
appDTO.getOauthConsumerKey() + ", User ID : " + userName +
" and Scope : " + OAuth2Util.buildScopeString(accessTokenDO.getScope());
log.error(errorMsg, e);
throw new IdentityOAuthAdminException(errorMsg, e);
}
if (scopedToken != null) {
//Revoking token from database
try {
tokenMgtDAO.revokeTokens(new String[]{scopedToken.getAccessToken()});
} catch (IdentityOAuth2Exception e) {
String errorMsg = "Error occurred while revoking " + "Access Token : " +
scopedToken.getAccessToken();
log.error(errorMsg, e);
throw new IdentityOAuthAdminException(errorMsg, e);
}
}
}
try {
tokenMgtDAO.revokeOAuthConsentByApplicationAndUser(userName, appName);
} catch (IdentityOAuth2Exception e) {
String errorMsg = "Error occurred while removing OAuth Consent of Application " + appName +
" of user " + userName;
log.error(errorMsg, e);
throw new IdentityOAuthAdminException(errorMsg, e);
}
}
}
}
} else {
OAuthRevocationResponseDTO revokeRespDTO = new OAuthRevocationResponseDTO();
revokeRespDTO.setError(true);
revokeRespDTO.setErrorCode(OAuth2ErrorCodes.INVALID_REQUEST);
revokeRespDTO.setErrorMsg("Invalid revocation request");
return revokeRespDTO;
}
return new OAuthRevocationResponseDTO();
}
public String[] getAllowedGrantTypes() {
if (allowedGrants == null) {
            allowedGrants = new ArrayList<>();
allowedGrants.addAll(OAuthServerConfiguration.getInstance().getSupportedGrantTypes().keySet());
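            // "implicit" is not configured as a grant type; expose it only when the "token" response type is supported.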
if (OAuthServerConfiguration.getInstance().getSupportedResponseTypes().containsKey("token")) {
allowedGrants.add(IMPLICIT);
}
}
return allowedGrants.toArray(new String[allowedGrants.size()]);
}
}
|
components/oauth/org.wso2.carbon.identity.oauth/src/main/java/org/wso2/carbon/identity/oauth/OAuthAdminService.java
|
/*
* Copyright (c) 2013, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.identity.oauth;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.context.CarbonContext;
import org.wso2.carbon.context.PrivilegedCarbonContext;
import org.wso2.carbon.core.AbstractAdmin;
import org.wso2.carbon.identity.application.common.model.User;
import org.wso2.carbon.identity.core.model.OAuthAppDO;
import org.wso2.carbon.identity.core.util.IdentityTenantUtil;
import org.wso2.carbon.identity.oauth.cache.AppInfoCache;
import org.wso2.carbon.identity.oauth.cache.OAuthCache;
import org.wso2.carbon.identity.oauth.cache.OAuthCacheKey;
import org.wso2.carbon.identity.oauth.common.OAuth2ErrorCodes;
import org.wso2.carbon.identity.oauth.common.OAuthConstants;
import org.wso2.carbon.identity.oauth.common.exception.InvalidOAuthClientException;
import org.wso2.carbon.identity.oauth.config.OAuthServerConfiguration;
import org.wso2.carbon.identity.oauth.dao.OAuthAppDAO;
import org.wso2.carbon.identity.oauth.dto.OAuthConsumerAppDTO;
import org.wso2.carbon.identity.oauth.dto.OAuthRevocationRequestDTO;
import org.wso2.carbon.identity.oauth.dto.OAuthRevocationResponseDTO;
import org.wso2.carbon.identity.oauth2.IdentityOAuth2Exception;
import org.wso2.carbon.identity.oauth2.dao.TokenMgtDAO;
import org.wso2.carbon.identity.oauth2.model.AccessTokenDO;
import org.wso2.carbon.identity.oauth2.util.OAuth2Util;
import org.wso2.carbon.user.api.UserStoreException;
import org.wso2.carbon.user.core.util.UserCoreUtil;
import org.wso2.carbon.utils.multitenancy.MultitenantUtils;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
public class OAuthAdminService extends AbstractAdmin {
public static final String IMPLICIT = "implicit";
public static final String AUTHORIZATION_CODE = "authorization_code";
private static List<String> allowedGrants = null;
protected Log log = LogFactory.getLog(OAuthAdminService.class);
private AppInfoCache appInfoCache = AppInfoCache.getInstance(OAuthServerConfiguration.getInstance().
getAppInfoCacheTimeout());
/**
     * Registers a consumer secret against the logged in user. A given user can only have a single
     * consumer secret at a time. Calling this method repeatedly will update the existing
     * consumer secret key.
     *
     * @return An array containing the consumer key and the consumer secret, respectively.
     * @throws IdentityOAuthAdminException Error when persisting the data in the persistence store.
*/
public String[] registerOAuthConsumer() throws IdentityOAuthAdminException {
String loggedInUser = CarbonContext.getThreadLocalCarbonContext().getUsername();
if (log.isDebugEnabled()) {
log.debug("Adding a consumer secret for the logged in user " + loggedInUser);
}
String tenantUser = MultitenantUtils.getTenantAwareUsername(loggedInUser);
int tenantId = CarbonContext.getThreadLocalCarbonContext().getTenantId();
String userDomain = UserCoreUtil.extractDomainFromName(loggedInUser);
OAuthAppDAO dao = new OAuthAppDAO();
return dao.addOAuthConsumer(tenantUser, tenantId, userDomain);
}
/**
* Get all registered OAuth applications for the logged in user.
*
     * @return An array of <code>OAuthConsumerAppDTO</code> objects containing the application
     *         information of the user
     * @throws IdentityOAuthAdminException Error when reading the data from the persistence store.
*/
public OAuthConsumerAppDTO[] getAllOAuthApplicationData() throws IdentityOAuthAdminException {
String userName = CarbonContext.getThreadLocalCarbonContext().getUsername();
OAuthConsumerAppDTO[] dtos = new OAuthConsumerAppDTO[0];
if (userName == null) {
if (log.isErrorEnabled()) {
log.debug("User not logged in");
}
throw new IdentityOAuthAdminException("User not logged in");
}
String tenantUser = MultitenantUtils.getTenantAwareUsername(userName);
int tenantId = CarbonContext.getThreadLocalCarbonContext().getTenantId();
OAuthAppDAO dao = new OAuthAppDAO();
OAuthAppDO[] apps = dao.getOAuthConsumerAppsOfUser(tenantUser, tenantId);
if (apps != null && apps.length > 0) {
dtos = new OAuthConsumerAppDTO[apps.length];
OAuthConsumerAppDTO dto = null;
OAuthAppDO app = null;
for (int i = 0; i < apps.length; i++) {
app = apps[i];
dto = new OAuthConsumerAppDTO();
dto.setApplicationName(app.getApplicationName());
dto.setCallbackUrl(app.getCallbackUrl());
dto.setOauthConsumerKey(app.getOauthConsumerKey());
dto.setOauthConsumerSecret(app.getOauthConsumerSecret());
dto.setOAuthVersion(app.getOauthVersion());
dto.setGrantTypes(app.getGrantTypes());
dto.setUsername(app.getUserName());
dtos[i] = dto;
}
}
return dtos;
}
/**
* Get OAuth application data by the consumer key.
*
* @param consumerKey Consumer Key
* @return <code>OAuthConsumerAppDTO</code> with application information
     * @throws IdentityOAuthAdminException Error when reading application information from the persistence store.
*/
public OAuthConsumerAppDTO getOAuthApplicationData(String consumerKey) throws IdentityOAuthAdminException {
OAuthConsumerAppDTO dto = new OAuthConsumerAppDTO();
OAuthAppDAO dao = new OAuthAppDAO();
try {
OAuthAppDO app = dao.getAppInformation(consumerKey);
if (app != null) {
dto.setApplicationName(app.getApplicationName());
dto.setCallbackUrl(app.getCallbackUrl());
dto.setOauthConsumerKey(app.getOauthConsumerKey());
dto.setOauthConsumerSecret(app.getOauthConsumerSecret());
dto.setOAuthVersion(app.getOauthVersion());
dto.setGrantTypes(app.getGrantTypes());
}
return dto;
} catch (InvalidOAuthClientException | IdentityOAuth2Exception e) {
throw new IdentityOAuthAdminException("Error while retrieving the app information using consumer key", e);
}
}
/**
* Get OAuth application data by the application name.
*
* @param appName OAuth application name
* @return <code>OAuthConsumerAppDTO</code> with application information
     * @throws IdentityOAuthAdminException Error when reading application information from the persistence store.
*/
public OAuthConsumerAppDTO getOAuthApplicationDataByAppName(String appName) throws IdentityOAuthAdminException {
OAuthConsumerAppDTO dto = new OAuthConsumerAppDTO();
OAuthAppDAO dao = new OAuthAppDAO();
try {
OAuthAppDO app = dao.getAppInformationByAppName(appName);
if (app != null) {
dto.setApplicationName(app.getApplicationName());
dto.setCallbackUrl(app.getCallbackUrl());
dto.setOauthConsumerKey(app.getOauthConsumerKey());
dto.setOauthConsumerSecret(app.getOauthConsumerSecret());
dto.setOAuthVersion(app.getOauthVersion());
dto.setGrantTypes(app.getGrantTypes());
}
return dto;
        } catch (InvalidOAuthClientException | IdentityOAuth2Exception e) {
throw new IdentityOAuthAdminException("Error while retrieving the app information by app name", e);
}
}
/**
* Registers an OAuth consumer application.
*
* @param application <code>OAuthConsumerAppDTO</code> with application information
     * @throws IdentityOAuthAdminException Error when persisting the application information to the persistence store
*/
    public void registerOAuthApplicationData(OAuthConsumerAppDTO application) throws IdentityOAuthAdminException {
String userName = CarbonContext.getThreadLocalCarbonContext().getUsername();
if (userName != null) {
String tenantUser = MultitenantUtils.getTenantAwareUsername(userName);
int tenantId = CarbonContext.getThreadLocalCarbonContext().getTenantId();
OAuthAppDAO dao = new OAuthAppDAO();
OAuthAppDO app = new OAuthAppDO();
if (application != null) {
app.setApplicationName(application.getApplicationName());
if ((application.getGrantTypes().contains(AUTHORIZATION_CODE) || application.getGrantTypes()
.contains(IMPLICIT)) && StringUtils.isEmpty(application.getCallbackUrl())) {
throw new IdentityOAuthAdminException("Callback Url is required for Code or Implicit grant types");
}
app.setCallbackUrl(application.getCallbackUrl());
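                // Generate a new consumer key/secret pair when none was supplied by the caller.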
if (application.getOauthConsumerKey() == null) {
app.setOauthConsumerKey(OAuthUtil.getRandomNumber());
app.setOauthConsumerSecret(OAuthUtil.getRandomNumber());
} else {
app.setOauthConsumerKey(application.getOauthConsumerKey());
app.setOauthConsumerSecret(application.getOauthConsumerSecret());
}
String applicationUser = application.getUsername();
if (applicationUser != null && applicationUser.trim().length() > 0) {
try {
if (CarbonContext.getThreadLocalCarbonContext().getUserRealm().
getUserStoreManager().isExistingUser(application.getUsername())) {
tenantUser = applicationUser;
} else {
log.warn("OAuth application registrant user name " + applicationUser +
" does not exist in the user store. Using logged-in user name " + tenantUser +
" as registrant name");
}
} catch (UserStoreException e) {
throw new IdentityOAuthAdminException("Error while retrieving the user store manager", e);
}
}
app.setUserName(tenantUser);
app.setTenantId(tenantId);
app.setUserDomain(UserCoreUtil.extractDomainFromName(userName));
if (application.getOAuthVersion() != null) {
app.setOauthVersion(application.getOAuthVersion());
} else { // by default, assume OAuth 2.0, if it is not set.
app.setOauthVersion(OAuthConstants.OAuthVersions.VERSION_2);
}
if (OAuthConstants.OAuthVersions.VERSION_2.equals(application.getOAuthVersion())) {
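                    // Validate that every requested grant type is in the server's allowed grant type list.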
List<String> allowedGrants = new ArrayList<>(Arrays.asList(getAllowedGrantTypes()));
String[] requestGrants = application.getGrantTypes().split("\\s");
for (String requestedGrant : requestGrants) {
if ("".equals(requestedGrant.trim())) {
continue;
}
if (!allowedGrants.contains(requestedGrant)) {
throw new IdentityOAuthAdminException(requestedGrant + " not allowed");
}
}
app.setGrantTypes(application.getGrantTypes());
}
dao.addOAuthApplication(app);
if (OAuthServerConfiguration.getInstance().isCacheEnabled()) {
appInfoCache.addToCache(app.getOauthConsumerKey(), app);
}
}
}
}
/**
* Update existing consumer application.
*
* @param consumerAppDTO <code>OAuthConsumerAppDTO</code> with updated application information
* @throws IdentityOAuthAdminException Error when updating the underlying identity persistence store.
*/
public void updateConsumerApplication(OAuthConsumerAppDTO consumerAppDTO) throws IdentityOAuthAdminException {
String userName = CarbonContext.getThreadLocalCarbonContext().getUsername();
String tenantAwareUsername = MultitenantUtils.getTenantAwareUsername(userName);
int tenantId = CarbonContext.getThreadLocalCarbonContext().getTenantId();
OAuthAppDAO dao = new OAuthAppDAO();
OAuthAppDO oauthappdo = new OAuthAppDO();
oauthappdo.setUserName(tenantAwareUsername);
oauthappdo.setTenantId(tenantId);
oauthappdo.setUserDomain(UserCoreUtil.extractDomainFromName(userName));
oauthappdo.setOauthConsumerKey(consumerAppDTO.getOauthConsumerKey());
oauthappdo.setOauthConsumerSecret(consumerAppDTO.getOauthConsumerSecret());
oauthappdo.setCallbackUrl(consumerAppDTO.getCallbackUrl());
oauthappdo.setApplicationName(consumerAppDTO.getApplicationName());
if (OAuthConstants.OAuthVersions.VERSION_2.equals(consumerAppDTO.getOAuthVersion())) {
List<String> allowedGrants = new ArrayList<>(Arrays.asList(getAllowedGrantTypes()));
String[] requestGrants = consumerAppDTO.getGrantTypes().split("\\s");
for (String requestedGrant : requestGrants) {
if ("".equals(requestedGrant.trim())) {
continue;
}
if (!allowedGrants.contains(requestedGrant)) {
throw new IdentityOAuthAdminException(requestedGrant + " not allowed");
}
}
oauthappdo.setGrantTypes(consumerAppDTO.getGrantTypes());
}
dao.updateConsumerApplication(oauthappdo);
if (OAuthServerConfiguration.getInstance().isCacheEnabled()) {
appInfoCache.addToCache(oauthappdo.getOauthConsumerKey(), oauthappdo);
}
}
/**
* Removes an OAuth consumer application.
*
* @param consumerKey Consumer Key
     * @throws IdentityOAuthAdminException Error when removing the consumer information from the database.
*/
public void removeOAuthApplicationData(String consumerKey) throws IdentityOAuthAdminException {
OAuthAppDAO dao = new OAuthAppDAO();
dao.removeConsumerApplication(consumerKey);
// remove client credentials from cache
if (OAuthServerConfiguration.getInstance().isCacheEnabled()) {
OAuthCache.getInstance(0).clearCacheEntry(new OAuthCacheKey(consumerKey));
appInfoCache.clearCacheEntry(consumerKey);
if (log.isDebugEnabled()) {
log.debug("Client credentials are removed from the cache.");
}
}
}
/**
* Get apps that are authorized by the given user
*
* @return OAuth applications authorized by the user that have tokens in ACTIVE or EXPIRED state
*/
public OAuthConsumerAppDTO[] getAppsAuthorizedByUser() throws IdentityOAuthAdminException {
TokenMgtDAO tokenMgtDAO = new TokenMgtDAO();
OAuthAppDAO appDAO = new OAuthAppDAO();
String tenantDomain = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantDomain();
String tenantAwareUserName = PrivilegedCarbonContext.getThreadLocalCarbonContext().getUsername();
String username = tenantAwareUserName + "@" + tenantDomain;
String userStoreDomain = null;
if (OAuth2Util.checkAccessTokenPartitioningEnabled() && OAuth2Util.checkUserNameAssertionEnabled()) {
try {
userStoreDomain = OAuth2Util.getUserStoreDomainFromUserId(username);
} catch (IdentityOAuth2Exception e) {
String errorMsg = "Error occurred while getting user store domain for User ID : " + username;
log.error(errorMsg, e);
throw new IdentityOAuthAdminException(errorMsg, e);
}
}
Set<String> clientIds = null;
try {
clientIds = tokenMgtDAO.getAllTimeAuthorizedClientIds(username);
} catch (IdentityOAuth2Exception e) {
String errorMsg = "Error occurred while retrieving apps authorized by User ID : " + username;
log.error(errorMsg, e);
throw new IdentityOAuthAdminException(errorMsg, e);
}
Set<OAuthConsumerAppDTO> appDTOs = new HashSet<OAuthConsumerAppDTO>();
for (String clientId : clientIds) {
Set<AccessTokenDO> accessTokenDOs = null;
try {
accessTokenDOs = tokenMgtDAO.retrieveAccessTokens(clientId, username, userStoreDomain, true);
} catch (IdentityOAuth2Exception e) {
String errorMsg = "Error occurred while retrieving access tokens issued for " +
"Client ID : " + clientId + ", User ID : " + username;
log.error(errorMsg, e);
throw new IdentityOAuthAdminException(errorMsg, e);
}
if (!accessTokenDOs.isEmpty()) {
Set<String> distinctClientUserScopeCombo = new HashSet<String>();
for (AccessTokenDO accessTokenDO : accessTokenDOs) {
AccessTokenDO scopedToken = null;
String scopeString = OAuth2Util.buildScopeString(accessTokenDO.getScope());
try {
scopedToken = tokenMgtDAO.retrieveLatestAccessToken(
clientId, username, userStoreDomain, scopeString, true);
                        if (scopedToken != null && !distinctClientUserScopeCombo.contains(clientId + ":" + username)) {
OAuthConsumerAppDTO appDTO = new OAuthConsumerAppDTO();
OAuthAppDO appDO;
try {
appDO = appDAO.getAppInformation(scopedToken.getConsumerKey());
appDTO.setOauthConsumerKey(scopedToken.getConsumerKey());
appDTO.setApplicationName(appDO.getApplicationName());
appDTO.setUsername(UserCoreUtil.addTenantDomainToEntry(appDO.getUserName(),
IdentityTenantUtil.getTenantDomain(appDO.getTenantId())));
appDTO.setGrantTypes(appDO.getGrantTypes());
appDTOs.add(appDTO);
} catch (InvalidOAuthClientException e) {
String errorMsg = "Invalid Client ID : " + scopedToken.getConsumerKey();
log.error(errorMsg, e);
throw new IdentityOAuthAdminException(errorMsg, e);
} catch (IdentityOAuth2Exception e) {
String errorMsg = "Error occurred while retrieving app information " +
"for Client ID : " + scopedToken.getConsumerKey();
log.error(errorMsg, e);
throw new IdentityOAuthAdminException(errorMsg, e);
}
                            distinctClientUserScopeCombo.add(clientId + ":" + username);
}
} catch (IdentityOAuth2Exception e) {
String errorMsg = "Error occurred while retrieving latest access token issued for Client ID :" +
" " + clientId + ", User ID : " + username + " and Scope : " + scopeString;
log.error(errorMsg, e);
throw new IdentityOAuthAdminException(errorMsg, e);
}
}
}
}
return appDTOs.toArray(new OAuthConsumerAppDTO[appDTOs.size()]);
}
/**
* Revoke authorization for OAuth apps by resource owners
*
* @param revokeRequestDTO DTO representing authorized user and apps[]
* @return revokeRespDTO DTO representing success or failure message
*/
public OAuthRevocationResponseDTO revokeAuthzForAppsByResoureOwner(
OAuthRevocationRequestDTO revokeRequestDTO) throws IdentityOAuthAdminException {
TokenMgtDAO tokenMgtDAO = new TokenMgtDAO();
if (revokeRequestDTO.getApps() != null && revokeRequestDTO.getApps().length > 0) {
String tenantDomain = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantDomain();
String tenantAwareUserName = PrivilegedCarbonContext.getThreadLocalCarbonContext().getUsername();
String userName = tenantAwareUserName + "@" + tenantDomain;
String userStoreDomain = null;
if (OAuth2Util.checkAccessTokenPartitioningEnabled() &&
OAuth2Util.checkUserNameAssertionEnabled()) {
try {
userStoreDomain = OAuth2Util.getUserStoreDomainFromUserId(userName);
} catch (IdentityOAuth2Exception e) {
throw new IdentityOAuthAdminException(
"Error occurred while getting user store domain from User ID : " + userName, e);
}
}
OAuthConsumerAppDTO[] appDTOs = getAppsAuthorizedByUser();
for (String appName : revokeRequestDTO.getApps()) {
for (OAuthConsumerAppDTO appDTO : appDTOs) {
if (appDTO.getApplicationName().equals(appName)) {
Set<AccessTokenDO> accessTokenDOs = null;
try {
// retrieve all ACTIVE or EXPIRED access tokens for particular client authorized by this user
accessTokenDOs = tokenMgtDAO.retrieveAccessTokens(
appDTO.getOauthConsumerKey(), userName, userStoreDomain, true);
} catch (IdentityOAuth2Exception e) {
String errorMsg = "Error occurred while retrieving access tokens issued for " +
"Client ID : " + appDTO.getOauthConsumerKey() + ", User ID : " + userName;
log.error(errorMsg, e);
throw new IdentityOAuthAdminException(errorMsg, e);
}
User authzUser;
for (AccessTokenDO accessTokenDO : accessTokenDOs) {
//Clear cache with AccessTokenDO
authzUser = accessTokenDO.getAuthzUser();
OAuthUtil.clearOAuthCache(accessTokenDO.getConsumerKey(), authzUser,
OAuth2Util.buildScopeString(accessTokenDO.getScope()));
OAuthUtil.clearOAuthCache(accessTokenDO.getConsumerKey(), authzUser);
OAuthUtil.clearOAuthCache(accessTokenDO.getAccessToken());
AccessTokenDO scopedToken = null;
try {
// retrieve latest access token for particular client, user and scope combination if its ACTIVE or EXPIRED
scopedToken = tokenMgtDAO.retrieveLatestAccessToken(
appDTO.getOauthConsumerKey(), userName, userStoreDomain,
OAuth2Util.buildScopeString(accessTokenDO.getScope()), true);
} catch (IdentityOAuth2Exception e) {
String errorMsg = "Error occurred while retrieving latest " +
"access token issued for Client ID : " +
appDTO.getOauthConsumerKey() + ", User ID : " + userName +
" and Scope : " + OAuth2Util.buildScopeString(accessTokenDO.getScope());
log.error(errorMsg, e);
throw new IdentityOAuthAdminException(errorMsg, e);
}
if (scopedToken != null) {
//Revoking token from database
try {
tokenMgtDAO.revokeTokens(new String[]{scopedToken.getAccessToken()});
} catch (IdentityOAuth2Exception e) {
String errorMsg = "Error occurred while revoking " + "Access Token : " +
scopedToken.getAccessToken();
log.error(errorMsg, e);
throw new IdentityOAuthAdminException(errorMsg, e);
}
}
}
try {
tokenMgtDAO.revokeOAuthConsentByApplicationAndUser(userName, appName);
} catch (IdentityOAuth2Exception e) {
String errorMsg = "Error occurred while removing OAuth Consent of Application " + appName +
" of user " + userName;
log.error(errorMsg, e);
throw new IdentityOAuthAdminException(errorMsg, e);
}
}
}
}
} else {
OAuthRevocationResponseDTO revokeRespDTO = new OAuthRevocationResponseDTO();
revokeRespDTO.setError(true);
revokeRespDTO.setErrorCode(OAuth2ErrorCodes.INVALID_REQUEST);
revokeRespDTO.setErrorMsg("Invalid revocation request");
return revokeRespDTO;
}
return new OAuthRevocationResponseDTO();
}
public String[] getAllowedGrantTypes() {
if (allowedGrants == null) {
            allowedGrants = new ArrayList<>();
allowedGrants.addAll(OAuthServerConfiguration.getInstance().getSupportedGrantTypes().keySet());
if (OAuthServerConfiguration.getInstance().getSupportedResponseTypes().containsKey("token")) {
allowedGrants.add(IMPLICIT);
}
}
return allowedGrants.toArray(new String[allowedGrants.size()]);
}
}
|
separate user domain from user name
|
components/oauth/org.wso2.carbon.identity.oauth/src/main/java/org/wso2/carbon/identity/oauth/OAuthAdminService.java
|
separate user domain from user name
|
|
Java
|
apache-2.0
|
13ffcc6b5667af31b93eb580d998e6702026f8b9
| 0
|
b2ihealthcare/snow-owl,b2ihealthcare/snow-owl,b2ihealthcare/snow-owl,b2ihealthcare/snow-owl
|
/*
* Copyright 2017-2020 B2i Healthcare Pte Ltd, http://b2i.sg
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.b2international.snowowl.snomed.datastore.request.rf2;
import static com.google.common.collect.Lists.newArrayList;
import static com.google.common.collect.Sets.newHashSet;
import static com.google.common.collect.Sets.newTreeSet;
import static java.util.Collections.singleton;
import static java.util.stream.Collectors.toSet;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.time.Instant;
import java.util.*;
import java.util.Map.Entry;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import javax.validation.constraints.NotNull;
import org.hibernate.validator.constraints.NotEmpty;
import com.b2international.commons.FileUtils;
import com.b2international.index.revision.RevisionIndex;
import com.b2international.snowowl.core.api.SnowowlRuntimeException;
import com.b2international.snowowl.core.attachments.Attachment;
import com.b2international.snowowl.core.attachments.AttachmentRegistry;
import com.b2international.snowowl.core.authorization.BranchAccessControl;
import com.b2international.snowowl.core.branch.Branch;
import com.b2international.snowowl.core.branch.Branches;
import com.b2international.snowowl.core.codesystem.CodeSystemEntry;
import com.b2international.snowowl.core.codesystem.CodeSystemRequests;
import com.b2international.snowowl.core.codesystem.CodeSystemVersionEntry;
import com.b2international.snowowl.core.date.DateFormats;
import com.b2international.snowowl.core.date.Dates;
import com.b2international.snowowl.core.date.EffectiveTimes;
import com.b2international.snowowl.core.domain.BranchContext;
import com.b2international.snowowl.core.domain.IComponent;
import com.b2international.snowowl.core.domain.RepositoryContext;
import com.b2international.snowowl.core.events.Request;
import com.b2international.snowowl.core.identity.Permission;
import com.b2international.snowowl.core.repository.RepositoryCodeSystemProvider;
import com.b2international.snowowl.core.repository.RepositoryRequests;
import com.b2international.snowowl.core.request.BranchRequest;
import com.b2international.snowowl.core.request.ResourceRequest;
import com.b2international.snowowl.core.request.RevisionIndexReadRequest;
import com.b2international.snowowl.core.request.SearchResourceRequest.SortField;
import com.b2international.snowowl.eventbus.IEventBus;
import com.b2international.snowowl.snomed.common.SnomedConstants.Concepts;
import com.b2international.snowowl.snomed.common.SnomedRf2Headers;
import com.b2international.snowowl.snomed.common.SnomedTerminologyComponentConstants;
import com.b2international.snowowl.snomed.core.domain.Rf2RefSetExportLayout;
import com.b2international.snowowl.snomed.core.domain.Rf2ReleaseType;
import com.b2international.snowowl.snomed.core.domain.SnomedConcept;
import com.b2international.snowowl.snomed.core.domain.SnomedConcepts;
import com.b2international.snowowl.snomed.core.domain.SnomedDescription;
import com.b2international.snowowl.snomed.core.domain.SnomedDescriptions;
import com.b2international.snowowl.snomed.core.domain.refset.SnomedRefSetType;
import com.b2international.snowowl.snomed.core.domain.refset.SnomedReferenceSetMember;
import com.b2international.snowowl.snomed.core.domain.refset.SnomedReferenceSets;
import com.b2international.snowowl.snomed.datastore.index.entry.SnomedDescriptionIndexEntry;
import com.b2international.snowowl.snomed.datastore.index.entry.SnomedRelationshipIndexEntry;
import com.b2international.snowowl.snomed.datastore.request.SnomedConceptSearchRequestBuilder;
import com.b2international.snowowl.snomed.datastore.request.SnomedRefSetMemberSearchRequestBuilder;
import com.b2international.snowowl.snomed.datastore.request.SnomedRequests;
import com.b2international.snowowl.snomed.datastore.request.rf2.exporter.Rf2ConceptExporter;
import com.b2international.snowowl.snomed.datastore.request.rf2.exporter.Rf2DescriptionExporter;
import com.b2international.snowowl.snomed.datastore.request.rf2.exporter.Rf2LanguageRefSetExporter;
import com.b2international.snowowl.snomed.datastore.request.rf2.exporter.Rf2RefSetDescriptorRefSetExporter;
import com.b2international.snowowl.snomed.datastore.request.rf2.exporter.Rf2RefSetExporter;
import com.b2international.snowowl.snomed.datastore.request.rf2.exporter.Rf2RelationshipExporter;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Strings;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableList.Builder;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;
import com.google.common.collect.Multimap;
import com.google.common.collect.Ordering;
/**
* @since 5.7
*/
final class SnomedRf2ExportRequest extends ResourceRequest<BranchContext, Attachment> implements BranchAccessControl {
private static final String DESCRIPTION_TYPES_EXCEPT_TEXT_DEFINITION = "<<" + Concepts.DESCRIPTION_TYPE_ROOT_CONCEPT + " MINUS " + Concepts.TEXT_DEFINITION;
private static final String NON_STATED_CHARACTERISTIC_TYPES = "<<" + Concepts.CHARACTERISTIC_TYPE + " MINUS " + Concepts.STATED_RELATIONSHIP;
private static final long serialVersionUID = 1L;
private static final Ordering<CodeSystemVersionEntry> EFFECTIVE_DATE_ORDERING = Ordering.natural()
.onResultOf(CodeSystemVersionEntry::getEffectiveDate);
@JsonProperty
@NotNull
private Rf2ReleaseType releaseType;
@JsonProperty
@NotNull
private Rf2RefSetExportLayout refSetExportLayout;
@JsonProperty
@NotEmpty
private String countryNamespaceElement;
@JsonProperty
private String namespaceFilter;
@JsonProperty
private Long startEffectiveTime;
@JsonProperty
private Long endEffectiveTime;
@JsonProperty
private boolean includePreReleaseContent;
@JsonProperty
private Collection<String> componentTypes;
@JsonProperty
private Collection<String> modules;
@JsonProperty
private Collection<String> refSets;
@JsonProperty
private String transientEffectiveTime;
@JsonProperty
private boolean extensionOnly;
SnomedRf2ExportRequest() {}
void setReleaseType(final Rf2ReleaseType releaseType) {
this.releaseType = releaseType;
}
void setRefSetExportLayout(final Rf2RefSetExportLayout refSetExportLayout) {
this.refSetExportLayout = refSetExportLayout;
}
void setCountryNamespaceElement(final String countryNamespaceElement) {
this.countryNamespaceElement = countryNamespaceElement;
}
void setNamespaceFilter(final String namespaceFilter) {
this.namespaceFilter = namespaceFilter;
}
void setStartEffectiveTime(final Long startEffectiveTime) {
this.startEffectiveTime = startEffectiveTime;
}
void setEndEffectiveTime(final Long endEffectiveTime) {
this.endEffectiveTime = endEffectiveTime;
}
void setIncludePreReleaseContent(final boolean includeUnpublished) {
this.includePreReleaseContent = includeUnpublished;
}
void setComponentTypes(final Collection<String> componentTypes) {
/*
* All component types should be exported if the input value is null; no
* component type should be exported if the input value is an empty collection.
*/
this.componentTypes = (componentTypes != null)
? ImmutableSet.copyOf(componentTypes)
: ImmutableSet.of(SnomedTerminologyComponentConstants.CONCEPT,
SnomedTerminologyComponentConstants.DESCRIPTION,
SnomedTerminologyComponentConstants.RELATIONSHIP,
SnomedTerminologyComponentConstants.REFSET_MEMBER);
}
void setModules(final Collection<String> modules) {
/*
* All modules should be exported if the input value is null; no module
* should be exported if the input value is an empty collection.
*/
this.modules = (modules != null) ? ImmutableSet.copyOf(modules) : null;
}
void setRefSets(final Collection<String> refSets) {
/*
* All reference sets should be exported if the input value is null; no component
* should be exported if the input value is an empty collection.
*/
this.refSets = (refSets != null) ? ImmutableSet.copyOf(refSets) : null;
}
void setTransientEffectiveTime(final String transientEffectiveTime) {
if (Strings.isNullOrEmpty(transientEffectiveTime)) {
// Effective time columns should be left blank
this.transientEffectiveTime = "";
} else if ("NOW".equals(transientEffectiveTime)) {
// Special flag indicating "today"
this.transientEffectiveTime = EffectiveTimes.format(Dates.todayGmt(), DateFormats.SHORT);
} else {
// Otherwise, it should be a valid short date
Dates.parse(transientEffectiveTime, DateFormats.SHORT);
this.transientEffectiveTime = transientEffectiveTime;
}
}
void setExtensionOnly(final boolean extensionOnly) {
this.extensionOnly = extensionOnly;
}
@Override
public Attachment execute(final BranchContext context) {
final String referenceBranch = context.path();
// register export start time for later use
final long exportStartTime = Instant.now().toEpochMilli();
// Step 1: check if the export reference branch is a working branch path descendant
final CodeSystemEntry referenceCodeSystem = context.service(RepositoryCodeSystemProvider.class).get(referenceBranch);
// Step 2: retrieve code system versions that are visible from the reference branch
final TreeSet<CodeSystemVersionEntry> versionsToExport = getAllExportableCodeSystemVersions(context, referenceCodeSystem);
// Step 3: compute branches to export
final List<String> branchesToExport = computeBranchesToExport(referenceBranch, versionsToExport);
// Step 4: compute possible language codes
Multimap<String, String> availableLanguageCodes = getLanguageCodes(context, branchesToExport);
Path exportDirectory = null;
try {
final UUID exportId = UUID.randomUUID();
// create temporary export directory
exportDirectory = createExportDirectory(exportId);
// get archive effective time based on latest version effective / transient effective time / current date
final Date archiveEffectiveDate = getArchiveEffectiveTime(context, versionsToExport);
final String archiveEffectiveDateShort = Dates.format(archiveEffectiveDate, TimeZone.getTimeZone("UTC"), DateFormats.SHORT);
// create main folder including release status and archive effective date
final Path releaseDirectory = createReleaseDirectory(exportDirectory, archiveEffectiveDate);
final Set<String> visitedComponentEffectiveTimes = newHashSet();
final long effectiveTimeStart = startEffectiveTime != null ? startEffectiveTime : 0;
final long effectiveTimeEnd = endEffectiveTime != null ? endEffectiveTime : Long.MAX_VALUE;
// export content from the pre-computed version branches
for (String branch : branchesToExport) {
exportBranch(releaseDirectory,
context,
branch,
archiveEffectiveDateShort,
effectiveTimeStart,
effectiveTimeEnd,
visitedComponentEffectiveTimes,
availableLanguageCodes.get(branch));
}
// export content from reference branch
if (includePreReleaseContent) {
final String referenceBranchToExport = String.format("%s%s%s", referenceBranch, RevisionIndex.AT_CHAR, exportStartTime);
exportBranch(releaseDirectory,
context,
referenceBranchToExport,
archiveEffectiveDateShort,
EffectiveTimes.UNSET_EFFECTIVE_TIME,
EffectiveTimes.UNSET_EFFECTIVE_TIME,
visitedComponentEffectiveTimes,
availableLanguageCodes.get(referenceBranch));
}
// Step 6: compress to archive and upload to the file registry
final AttachmentRegistry fileRegistry = context.service(AttachmentRegistry.class);
registerResult(fileRegistry, exportId, exportDirectory);
final String fileName = releaseDirectory.getFileName() + ".zip";
return new Attachment(exportId, fileName);
} catch (final Exception e) {
throw new SnowowlRuntimeException("Failed to export terminology content to RF2.", e);
} finally {
if (exportDirectory != null) {
FileUtils.deleteDirectory(exportDirectory.toFile());
}
}
}
private Multimap<String, String> getLanguageCodes(BranchContext context, List<String> branchesToExport) {
final String referenceBranch = context.path();
List<String> branchesOrRanges = newArrayList(branchesToExport);
if (includePreReleaseContent) {
branchesOrRanges.add(referenceBranch);
}
Multimap<String, String> branchToLanguageCodes = HashMultimap.create();
Set<String> filteredLanguageCodes = Stream.of(Locale.getISOLanguages())
.filter(code -> !Locale.ENGLISH.getLanguage().equals(code))
.collect(toSet());
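		// For each branch (or revision range), collect the language codes that actually have descriptions; English is checked separately with a count-only query.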
for (String branchOrRange : branchesOrRanges) {
String branch = getBranchOrRangeTarget(branchOrRange);
final Set<String> languageCodes = newHashSet();
// check if there are any english terms on the given branch / range
final Request<BranchContext, SnomedDescriptions> englishLanguageCodeRequest = SnomedRequests.prepareSearchDescription()
.setLimit(0)
.filterByLanguageCodes(singleton(Locale.ENGLISH.getLanguage()))
.build();
final SnomedDescriptions enDescriptions = execute(context, branch, englishLanguageCodeRequest);
if (enDescriptions.getTotal() > 0) {
languageCodes.add(Locale.ENGLISH.getLanguage());
}
// check if there are any terms other than english on the given branch / range
final Request<BranchContext, SnomedDescriptions> languageCodeRequest = SnomedRequests.prepareSearchDescription()
.all()
.filterByLanguageCodes(filteredLanguageCodes)
.setFields(SnomedRf2Headers.FIELD_ID, SnomedRf2Headers.FIELD_LANGUAGE_CODE)
.build();
final SnomedDescriptions descriptions = execute(context, branch, languageCodeRequest);
if (!descriptions.isEmpty()) {
languageCodes.addAll(descriptions.stream().map(SnomedDescription::getLanguageCode).collect(toSet()));
}
branchToLanguageCodes.putAll(branchOrRange, languageCodes);
}
return branchToLanguageCodes;
}
private List<String> computeBranchesToExport(final String referenceBranch, final TreeSet<CodeSystemVersionEntry> versionsToExport) {
final List<String> branchesToExport = newArrayList();
switch (releaseType) {
case FULL:
versionsToExport.stream()
.map(v -> v.getPath())
.filter(v -> !branchesToExport.contains(v))
.forEachOrdered(branchesToExport::add);
if (!branchesToExport.contains(referenceBranch)) {
branchesToExport.add(referenceBranch);
}
break;
case DELTA:
if (startEffectiveTime != null || endEffectiveTime != null || !includePreReleaseContent) {
versionsToExport.stream()
.map(v -> v.getPath())
.filter(v -> !branchesToExport.contains(v))
.forEachOrdered(branchesToExport::add);
if (!branchesToExport.contains(referenceBranch)) {
branchesToExport.add(referenceBranch);
}
}
break;
case SNAPSHOT:
branchesToExport.add(referenceBranch);
break;
}
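		// Convert the ordered list of version branches into consecutive revision ranges so each step exports only the content added since the previous version.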
Builder<String> branchRangesToExport = ImmutableList.builder();
for (int i = 0; i < branchesToExport.size(); i++) {
final String previousVersion = i == 0 ? null : branchesToExport.get(i - 1);
final String currentVersion = branchesToExport.get(i);
branchRangesToExport.add(previousVersion == null ? currentVersion : RevisionIndex.toRevisionRange(previousVersion, currentVersion));
}
return branchRangesToExport.build();
}
private Date getArchiveEffectiveTime(final RepositoryContext context, final TreeSet<CodeSystemVersionEntry> versionsToExport) {
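		// Derive the archive effective time: prefer the transient effective time for pre-release exports, then the latest module dependency effective time, then the last exported version, falling back to the current date.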
Optional<CodeSystemVersionEntry> lastVersionToExport;
if (endEffectiveTime != null) {
lastVersionToExport = Optional.ofNullable(getVersionBefore(versionsToExport, endEffectiveTime));
} else {
lastVersionToExport = !versionsToExport.isEmpty() ? Optional.ofNullable(versionsToExport.last()) : Optional.empty();
}
Optional<Date> latestModuleEffectiveTime = lastVersionToExport.flatMap(version -> getLatestModuleEffectiveTime(context, version));
if (includePreReleaseContent) {
if (!transientEffectiveTime.isEmpty()) {
return adjustCurrentHour(Dates.parse(transientEffectiveTime, DateFormats.SHORT));
} else if (latestModuleEffectiveTime.isPresent()) {
return adjustCurrentHour(getNextEffectiveDate(latestModuleEffectiveTime.get().getTime()));
} else if (lastVersionToExport.isPresent()) {
return adjustCurrentHour(getNextEffectiveDate(lastVersionToExport.get().getEffectiveDate()));
}
} else {
if (latestModuleEffectiveTime.isPresent()) {
return adjustCurrentHour(new Date(latestModuleEffectiveTime.get().getTime()));
} else if (lastVersionToExport.isPresent()) {
return adjustCurrentHour(new Date(lastVersionToExport.get().getEffectiveDate()));
}
}
return adjustCurrentHour(Dates.parse(Dates.format(new Date(), TimeZone.getTimeZone("UTC"), DateFormats.DEFAULT)));
}
private CodeSystemVersionEntry getVersionBefore(final TreeSet<CodeSystemVersionEntry> versionsToExport, final long timestamp) {
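		// Versions are ordered by effective date; return the last version whose effective date is at or before the given timestamp.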
CodeSystemVersionEntry versionBeforeEndEffectiveTime = null;
for (CodeSystemVersionEntry version : versionsToExport) {
if (version.getEffectiveDate() > timestamp) {
break;
}
versionBeforeEndEffectiveTime = version;
}
return versionBeforeEndEffectiveTime;
}
private Optional<Date> getLatestModuleEffectiveTime(final RepositoryContext context, final CodeSystemVersionEntry version) {
final Optional<Date> sourceEffectiveTime = getLatestModuleEffectiveTime(context, version, SnomedRf2Headers.FIELD_SOURCE_EFFECTIVE_TIME);
final Optional<Date> targetEffectiveTime = getLatestModuleEffectiveTime(context, version, SnomedRf2Headers.FIELD_TARGET_EFFECTIVE_TIME);
if (!sourceEffectiveTime.isPresent() && !targetEffectiveTime.isPresent()) {
return Optional.empty();
} else if(!sourceEffectiveTime.isPresent()) {
return targetEffectiveTime;
} else if (!targetEffectiveTime.isPresent()) {
return sourceEffectiveTime;
} else {
final Date sourceDate = sourceEffectiveTime.get();
final Date targetDate = targetEffectiveTime.get();
if (sourceDate.after(targetDate)) {
return sourceEffectiveTime;
}
if (targetDate.after(sourceDate)) {
return targetEffectiveTime;
}
// they are the same date
return sourceEffectiveTime;
}
}
private Optional<Date> getLatestModuleEffectiveTime(final RepositoryContext context, final CodeSystemVersionEntry version, String field) {
SnomedRefSetMemberSearchRequestBuilder requestBuilder = SnomedRequests.prepareSearchMember()
.filterByRefSet(Concepts.REFSET_MODULE_DEPENDENCY_TYPE)
.filterByActive(true)
.sortBy(SortField.descending(field))
.setLimit(1);
// See the comment in setModules; a value of "null" means that all modules should be exported
if (modules != null) {
requestBuilder.filterByModules(modules);
}
final Optional<SnomedReferenceSetMember> moduleDependencyMember = requestBuilder
.build(context.id(), version.getPath())
.execute(context.service(IEventBus.class))
.getSync(1, TimeUnit.MINUTES)
.first();
return moduleDependencyMember.map(m -> {
return (Date) m.getProperties().get(field);
});
}
private Date adjustCurrentHour(final Date effectiveDate) {
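		// Keep the supplied date but replace the time-of-day with the current UTC hour (minutes, seconds and milliseconds zeroed).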
final Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
int currentHour = calendar.get(Calendar.HOUR_OF_DAY);
calendar.setTimeInMillis(effectiveDate.getTime());
calendar.set(Calendar.HOUR_OF_DAY, currentHour);
calendar.set(Calendar.MINUTE, 0);
calendar.set(Calendar.SECOND, 0);
calendar.set(Calendar.MILLISECOND, 0);
return calendar.getTime();
}
private Date getNextEffectiveDate(final long time) {
final Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
calendar.setTimeInMillis(time);
calendar.roll(Calendar.DATE, true);
return calendar.getTime();
}
private TreeSet<CodeSystemVersionEntry> getAllExportableCodeSystemVersions(final BranchContext context, final CodeSystemEntry codeSystemEntry) {
final String referenceBranch = context.path();
final TreeSet<CodeSystemVersionEntry> visibleVersions = newTreeSet(EFFECTIVE_DATE_ORDERING);
collectExportableCodeSystemVersions(context, visibleVersions, codeSystemEntry, referenceBranch);
return visibleVersions;
}
private void collectExportableCodeSystemVersions(final RepositoryContext context, final Set<CodeSystemVersionEntry> versionsToExport, final CodeSystemEntry codeSystemEntry,
final String referenceBranch) {
final Collection<CodeSystemVersionEntry> candidateVersions = newArrayList(getCodeSystemVersions(context, codeSystemEntry.getShortName()));
if (candidateVersions.isEmpty()) {
return;
}
final String versionParentPath = candidateVersions.stream()
.map(CodeSystemVersionEntry::getParentBranchPath)
.findFirst()
.get();
final Set<String> versionNames = candidateVersions.stream()
.map(CodeSystemVersionEntry::getVersionId)
.collect(Collectors.toSet());
final Branches versionBranches = getBranches(context, versionParentPath, versionNames);
final Map<String, Branch> versionBranchesByName = Maps.uniqueIndex(versionBranches, Branch::name);
final Branch cutoffBranch = getBranch(context, referenceBranch);
final long cutoffBaseTimestamp = getCutoffBaseTimestamp(context, cutoffBranch, versionParentPath);
// Remove all code system versions which were created after the cut-off date, or don't have a corresponding branch
candidateVersions.removeIf(v -> false
|| !versionBranchesByName.containsKey(v.getVersionId())
|| versionBranchesByName.get(v.getVersionId()).baseTimestamp() > cutoffBaseTimestamp);
versionsToExport.addAll(candidateVersions);
// Exit early if only an extension code system should be exported, or we are already at the "base" code system
if (extensionOnly || Strings.isNullOrEmpty(codeSystemEntry.getExtensionOf())) {
return;
}
// Otherwise, collect applicable versions using this code system's working path
		final CodeSystemEntry extensionEntry = CodeSystemRequests.getCodeSystem(context, codeSystemEntry.getExtensionOf());
		collectExportableCodeSystemVersions(context, versionsToExport, extensionEntry, codeSystemEntry.getBranchPath());
}
private Path createExportDirectory(final UUID exportId) {
try {
return Files.createTempDirectory("export-" + exportId + "-");
} catch (final IOException e) {
throw new SnowowlRuntimeException("Failed to create working directory for export.", e);
}
}
private Path createReleaseDirectory(final Path exportDirectory, final Date archiveEffectiveTime) {
final String releaseStatus = includePreReleaseContent ? "BETA" : "PRODUCTION";
String effectiveDate = Dates.format(archiveEffectiveTime, TimeZone.getTimeZone("UTC"), DateFormats.ISO_8601_UTC);
final Path releaseDirectory = exportDirectory.resolve(String.format("SNOMEDCT_RF2_%s_%s", releaseStatus, effectiveDate));
try {
Files.createDirectories(releaseDirectory);
} catch (final IOException e) {
throw new SnowowlRuntimeException("Failed to create RF2 release directory for export.", e);
}
return releaseDirectory;
}
private void exportBranch(final Path releaseDirectory,
final RepositoryContext context,
final String branch,
final String archiveEffectiveTime,
final long effectiveTimeFilterStart,
final long effectiveTimeFilterEnd,
final Set<String> visitedComponentEffectiveTimes,
final Collection<String> languageCodes) throws IOException {
for (final String componentToExport : componentTypes) {
switch (componentToExport) {
case SnomedTerminologyComponentConstants.CONCEPT:
exportConcepts(releaseDirectory,
context,
branch,
archiveEffectiveTime,
effectiveTimeFilterStart,
effectiveTimeFilterEnd,
visitedComponentEffectiveTimes);
break;
case SnomedTerminologyComponentConstants.DESCRIPTION:
for (final String languageCode : languageCodes) {
exportDescriptions(releaseDirectory,
context,
branch,
archiveEffectiveTime,
effectiveTimeFilterStart,
effectiveTimeFilterEnd,
languageCode,
visitedComponentEffectiveTimes);
}
break;
case SnomedTerminologyComponentConstants.RELATIONSHIP:
exportRelationships(releaseDirectory,
context,
branch,
archiveEffectiveTime,
effectiveTimeFilterStart,
effectiveTimeFilterEnd,
visitedComponentEffectiveTimes);
break;
case SnomedTerminologyComponentConstants.REFSET_MEMBER:
if (Rf2RefSetExportLayout.COMBINED.equals(refSetExportLayout)) {
exportCombinedRefSets(releaseDirectory,
context,
branch,
archiveEffectiveTime,
effectiveTimeFilterStart,
effectiveTimeFilterEnd,
languageCodes,
visitedComponentEffectiveTimes);
} else {
exportIndividualRefSets(releaseDirectory,
context,
branch,
archiveEffectiveTime,
effectiveTimeFilterStart,
effectiveTimeFilterEnd,
languageCodes,
visitedComponentEffectiveTimes);
}
break;
default:
throw new IllegalStateException("Component type '" + componentToExport + "' can not be exported.");
}
}
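		// Optional preview feature: also export the reference set descriptor reference set when the system property is enabled.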
if (Boolean.valueOf(System.getProperty("so.snomed.refsetdescriptor_preview", "false"))) {
exportRefSetDescriptor(releaseDirectory,
context,
branch,
archiveEffectiveTime,
effectiveTimeFilterStart,
effectiveTimeFilterEnd,
languageCodes,
visitedComponentEffectiveTimes);
}
}
private void exportRefSetDescriptor(Path releaseDirectory,
RepositoryContext context,
String branch,
String archiveEffectiveTime,
long effectiveTimeFilterStart,
long effectiveTimeFilterEnd,
Collection<String> languageCodes,
Set<String> visitedComponentEffectiveTimes) throws IOException {
final Rf2RefSetDescriptorRefSetExporter exporter = new Rf2RefSetDescriptorRefSetExporter(releaseType,
countryNamespaceElement,
namespaceFilter,
transientEffectiveTime,
archiveEffectiveTime,
modules);
exporter.exportBranch(releaseDirectory, context, branch, effectiveTimeFilterStart, effectiveTimeFilterEnd, visitedComponentEffectiveTimes);
}
private void exportConcepts(final Path releaseDirectory,
final RepositoryContext context,
final String branch,
final String archiveEffectiveTime,
final long effectiveTimeFilterStart,
final long effectiveTimeFilterEnd,
final Set<String> visitedComponentEffectiveTimes) throws IOException {
final Rf2ConceptExporter conceptExporter = new Rf2ConceptExporter(releaseType,
countryNamespaceElement,
namespaceFilter,
transientEffectiveTime,
archiveEffectiveTime,
includePreReleaseContent,
modules);
conceptExporter.exportBranch(releaseDirectory, context, branch, effectiveTimeFilterStart, effectiveTimeFilterEnd, visitedComponentEffectiveTimes);
}
private void exportDescriptions(final Path releaseDirectory,
final RepositoryContext context,
final String branch,
final String archiveEffectiveTime,
final long effectiveTimeFilterStart,
final long effectiveTimeFilterEnd,
final String languageCode,
final Set<String> visitedComponentEffectiveTimes) throws IOException {
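		// Descriptions and text definitions are written to separate RF2 files, so resolve every description type concept except text definition first.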
final Set<String> descriptionTypes = execute(context, getBranchOrRangeTarget(branch), SnomedRequests.prepareSearchConcept()
.all()
.filterByEcl(DESCRIPTION_TYPES_EXCEPT_TEXT_DEFINITION)
.setFields(SnomedDescriptionIndexEntry.Fields.ID)
.build())
.stream()
.map(IComponent::getId)
.collect(Collectors.toSet());
final Rf2DescriptionExporter descriptionExporter = new Rf2DescriptionExporter(releaseType,
countryNamespaceElement,
namespaceFilter,
transientEffectiveTime,
archiveEffectiveTime,
modules,
descriptionTypes,
languageCode);
final Rf2DescriptionExporter textDefinitionExporter = new Rf2DescriptionExporter(releaseType,
countryNamespaceElement,
namespaceFilter,
transientEffectiveTime,
archiveEffectiveTime,
modules,
ImmutableSet.of(Concepts.TEXT_DEFINITION),
languageCode);
descriptionExporter.exportBranch(releaseDirectory, context, branch, effectiveTimeFilterStart, effectiveTimeFilterEnd, visitedComponentEffectiveTimes);
textDefinitionExporter.exportBranch(releaseDirectory, context, branch, effectiveTimeFilterStart, effectiveTimeFilterEnd, visitedComponentEffectiveTimes);
}
private String getBranchOrRangeTarget(final String branch) {
return RevisionIndex.isRevRangePath(branch) ? RevisionIndex.getRevisionRangePaths(branch)[1] : branch;
}
private <R> R execute(RepositoryContext context, String branch, Request<BranchContext, R> next) {
return new BranchRequest<>(branch, new RevisionIndexReadRequest<>(next)).execute(context);
}
private void exportRelationships(final Path releaseDirectory,
final RepositoryContext context,
final String branch,
final String archiveEffectiveTime,
final long effectiveTimeFilterStart,
final long effectiveTimeFilterEnd,
final Set<String> visitedComponentEffectiveTimes) throws IOException {
final Set<String> characteristicTypes = execute(context, getBranchOrRangeTarget(branch), SnomedRequests.prepareSearchConcept()
.all()
.filterByEcl(NON_STATED_CHARACTERISTIC_TYPES)
.setFields(SnomedRelationshipIndexEntry.Fields.ID)
.build())
.stream()
.map(IComponent::getId)
.collect(Collectors.toSet());
final Rf2RelationshipExporter statedRelationshipExporter = new Rf2RelationshipExporter(releaseType,
countryNamespaceElement,
namespaceFilter,
transientEffectiveTime,
archiveEffectiveTime,
modules,
ImmutableSet.of(Concepts.STATED_RELATIONSHIP));
final Rf2RelationshipExporter relationshipExporter = new Rf2RelationshipExporter(releaseType,
countryNamespaceElement,
namespaceFilter,
transientEffectiveTime,
archiveEffectiveTime,
modules,
characteristicTypes);
statedRelationshipExporter.exportBranch(releaseDirectory, context, branch, effectiveTimeFilterStart, effectiveTimeFilterEnd, visitedComponentEffectiveTimes);
relationshipExporter.exportBranch(releaseDirectory, context, branch, effectiveTimeFilterStart, effectiveTimeFilterEnd, visitedComponentEffectiveTimes);
}
private void exportCombinedRefSets(final Path releaseDirectory,
final RepositoryContext context,
final String branch,
final String archiveEffectiveTime,
final long effectiveTimeFilterStart,
final long effectiveTimeFilterEnd,
final Collection<String> languageCodes,
final Set<String> visitedComponentEffectiveTimes) throws IOException {
final Multimap<SnomedRefSetType, SnomedConcept> referenceSetsByType = FluentIterable.from(getIdentifierConcepts(context, getBranchOrRangeTarget(branch)))
.index(c -> c.getReferenceSet().getType());
// Create single exporter instance for each reference set type
for (final SnomedRefSetType refSetType : referenceSetsByType.keySet()) {
// We will handle language reference sets separately
if (SnomedRefSetType.LANGUAGE.equals(refSetType)) {
continue;
}
final Rf2RefSetExporter refSetExporter = new Rf2RefSetExporter(releaseType,
countryNamespaceElement,
namespaceFilter,
transientEffectiveTime,
archiveEffectiveTime,
modules,
refSetExportLayout,
refSetType,
referenceSetsByType.get(refSetType));
refSetExporter.exportBranch(releaseDirectory, context, branch, effectiveTimeFilterStart, effectiveTimeFilterEnd, visitedComponentEffectiveTimes);
}
exportLanguageRefSets(releaseDirectory,
context,
branch,
archiveEffectiveTime,
effectiveTimeFilterStart,
effectiveTimeFilterEnd,
languageCodes,
referenceSetsByType.get(SnomedRefSetType.LANGUAGE),
visitedComponentEffectiveTimes);
}
private void exportIndividualRefSets(final Path releaseDirectory,
final RepositoryContext context,
final String revisionRange,
final String archiveEffectiveTime,
final long effectiveTimeFilterStart,
final long effectiveTimeFilterEnd,
final Collection<String> languageCodes,
final Set<String> visitedComponentEffectiveTimes) throws IOException {
final Multimap<SnomedRefSetType, SnomedConcept> referenceSetsByType = FluentIterable.from(getIdentifierConcepts(context, getBranchOrRangeTarget(revisionRange)))
.index(c -> c.getReferenceSet().getType());
/*
* Create single exporter instance for each reference set type - reference set concept
* pair (so effectively one for each reference set)
*/
for (final Entry<SnomedRefSetType, SnomedConcept> entry : referenceSetsByType.entries()) {
// We will handle language reference sets separately
if (SnomedRefSetType.LANGUAGE.equals(entry.getKey())) {
continue;
}
final Rf2RefSetExporter refSetExporter = new Rf2RefSetExporter(releaseType,
countryNamespaceElement,
namespaceFilter,
transientEffectiveTime,
archiveEffectiveTime,
modules,
refSetExportLayout,
entry.getKey(),
ImmutableSet.of(entry.getValue()));
refSetExporter.exportBranch(releaseDirectory, context, revisionRange, effectiveTimeFilterStart, effectiveTimeFilterEnd, visitedComponentEffectiveTimes);
}
exportLanguageRefSets(releaseDirectory,
context,
revisionRange,
archiveEffectiveTime,
effectiveTimeFilterStart,
effectiveTimeFilterEnd,
languageCodes,
referenceSetsByType.get(SnomedRefSetType.LANGUAGE),
visitedComponentEffectiveTimes);
}
private void exportLanguageRefSets(final Path releaseDirectory,
final RepositoryContext context,
final String branch,
final String archiveEffectiveTime,
final long effectiveTimeFilterStart,
final long effectiveTimeFilterEnd,
final Collection<String> languageCodes,
final Collection<SnomedConcept> languageRefSets,
final Set<String> visitedComponentEffectiveTimes) throws IOException {
if (languageRefSets.isEmpty()) {
return;
}
for (final String languageCode : languageCodes) {
final Rf2LanguageRefSetExporter languageExporter = new Rf2LanguageRefSetExporter(releaseType,
countryNamespaceElement,
namespaceFilter,
transientEffectiveTime,
archiveEffectiveTime,
modules,
SnomedRefSetType.LANGUAGE,
languageRefSets,
languageCode);
languageExporter.exportBranch(releaseDirectory, context, branch, effectiveTimeFilterStart, effectiveTimeFilterEnd, visitedComponentEffectiveTimes);
}
}
private List<SnomedConcept> getIdentifierConcepts(final RepositoryContext context, final String currentVersion) {
final Collection<String> refSetsToLoad;
if (refSets == null) {
// Retrieve all reference sets if refSets is null
final Request<BranchContext, SnomedReferenceSets> refSetRequest = SnomedRequests.prepareSearchRefSet()
.all()
.build();
final SnomedReferenceSets allReferenceSets = execute(context, currentVersion, refSetRequest);
refSetsToLoad = allReferenceSets.stream()
.map(r -> r.getId())
.collect(Collectors.toSet());
} else {
refSetsToLoad = refSets;
}
final SnomedConceptSearchRequestBuilder refSetRequestBuilder = SnomedRequests.prepareSearchConcept()
.all()
.filterByIds(refSetsToLoad)
.setExpand("pt(),referenceSet()")
.setLocales(locales());
final Request<BranchContext, SnomedConcepts> request = refSetRequestBuilder.build();
final SnomedConcepts referenceSets = execute(context, currentVersion, request);
// Return only the identifier concepts which have an existing reference set on this branch
return referenceSets.stream()
.filter(c -> c.getReferenceSet() != null)
.collect(Collectors.toList());
}
private void registerResult(final AttachmentRegistry fileRegistry, final UUID exportId, final Path exportDirectory) {
File archiveFile = null;
try {
archiveFile = exportDirectory.resolveSibling(exportDirectory.getFileName() + ".zip").toFile();
FileUtils.createZipArchive(exportDirectory.toFile(), archiveFile);
fileRegistry.upload(exportId, new FileInputStream(archiveFile)); // lgtm[java/input-resource-leak]
} catch (final IOException e) {
throw new SnowowlRuntimeException("Failed to register archive file from export directory.", e);
} finally {
if (archiveFile != null) {
archiveFile.delete();
}
}
}
private static long getCutoffBaseTimestamp(final RepositoryContext context, final Branch cutoffBranch, final String versionParentPath) {
System.err.println("SnomedRf2ExportRequest.getCutoffBaseTimestamp(): branch[" + cutoffBranch.path() + "], branchParentPath: [" + cutoffBranch.parentPath() + "], versionParentPath: [" + versionParentPath + "]");
if (cutoffBranch.path().equals(versionParentPath)) {
// We are on the working branch of the code system, all versions are visible for export
return Long.MAX_VALUE;
} else if (cutoffBranch.parentPath().equals(versionParentPath)) {
// We are on a direct child of the working branch, versions should be limited according to the base timestamp
return cutoffBranch.baseTimestamp();
} else {
// Two or more levels down from a working branch, look "upwards"
return getCutoffBaseTimestamp(context, getBranch(context, cutoffBranch.parentPath()), versionParentPath);
}
}
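// Illustrative walk-through (hypothetical paths): exporting from MAIN/PROJECT/TASK while the versions live under
// MAIN recurses once to MAIN/PROJECT, whose parent is MAIN, and returns MAIN/PROJECT's base timestamp; only
// versions created before the project branch was forked off MAIN are therefore considered visible.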
private static Collection<CodeSystemVersionEntry> getCodeSystemVersions(final RepositoryContext context, final String shortName) {
return CodeSystemRequests.prepareSearchCodeSystemVersion()
.all()
.filterByCodeSystemShortName(shortName)
.build()
.execute(context)
.getItems();
}
private static Branch getBranch(final RepositoryContext context, final String path) {
System.err.println("SnomedRf2ExportRequest.getBranch(): " + path);
return RepositoryRequests.branching()
.prepareGet(path)
.build()
.execute(context);
}
private static Branches getBranches(final RepositoryContext context, final String parent, final Collection<String> paths) {
System.err.println("SnomedRf2ExportRequest.getBranches(): " + parent + ", paths: " + paths + "");
return RepositoryRequests.branching()
.prepareSearch()
.all()
.filterByParent(parent)
.filterByName(paths)
.build()
.execute(context);
}
@Override
public String getOperation() {
return Permission.EXPORT;
}
}
|
snomed/com.b2international.snowowl.snomed.datastore/src/com/b2international/snowowl/snomed/datastore/request/rf2/SnomedRf2ExportRequest.java
|
/*
* Copyright 2017-2020 B2i Healthcare Pte Ltd, http://b2i.sg
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.b2international.snowowl.snomed.datastore.request.rf2;
import static com.google.common.collect.Lists.newArrayList;
import static com.google.common.collect.Sets.newHashSet;
import static com.google.common.collect.Sets.newTreeSet;
import static java.util.Collections.singleton;
import static java.util.stream.Collectors.toSet;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.time.Instant;
import java.util.*;
import java.util.Map.Entry;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import javax.validation.constraints.NotNull;
import org.hibernate.validator.constraints.NotEmpty;
import com.b2international.commons.FileUtils;
import com.b2international.index.revision.RevisionIndex;
import com.b2international.snowowl.core.api.SnowowlRuntimeException;
import com.b2international.snowowl.core.attachments.Attachment;
import com.b2international.snowowl.core.attachments.AttachmentRegistry;
import com.b2international.snowowl.core.authorization.BranchAccessControl;
import com.b2international.snowowl.core.branch.Branch;
import com.b2international.snowowl.core.branch.Branches;
import com.b2international.snowowl.core.codesystem.CodeSystemEntry;
import com.b2international.snowowl.core.codesystem.CodeSystemRequests;
import com.b2international.snowowl.core.codesystem.CodeSystemVersionEntry;
import com.b2international.snowowl.core.date.DateFormats;
import com.b2international.snowowl.core.date.Dates;
import com.b2international.snowowl.core.date.EffectiveTimes;
import com.b2international.snowowl.core.domain.BranchContext;
import com.b2international.snowowl.core.domain.IComponent;
import com.b2international.snowowl.core.domain.RepositoryContext;
import com.b2international.snowowl.core.events.Request;
import com.b2international.snowowl.core.identity.Permission;
import com.b2international.snowowl.core.repository.RepositoryCodeSystemProvider;
import com.b2international.snowowl.core.repository.RepositoryRequests;
import com.b2international.snowowl.core.request.BranchRequest;
import com.b2international.snowowl.core.request.ResourceRequest;
import com.b2international.snowowl.core.request.RevisionIndexReadRequest;
import com.b2international.snowowl.core.request.SearchResourceRequest.SortField;
import com.b2international.snowowl.eventbus.IEventBus;
import com.b2international.snowowl.snomed.common.SnomedConstants.Concepts;
import com.b2international.snowowl.snomed.common.SnomedRf2Headers;
import com.b2international.snowowl.snomed.common.SnomedTerminologyComponentConstants;
import com.b2international.snowowl.snomed.core.domain.Rf2RefSetExportLayout;
import com.b2international.snowowl.snomed.core.domain.Rf2ReleaseType;
import com.b2international.snowowl.snomed.core.domain.SnomedConcept;
import com.b2international.snowowl.snomed.core.domain.SnomedConcepts;
import com.b2international.snowowl.snomed.core.domain.SnomedDescription;
import com.b2international.snowowl.snomed.core.domain.SnomedDescriptions;
import com.b2international.snowowl.snomed.core.domain.refset.SnomedRefSetType;
import com.b2international.snowowl.snomed.core.domain.refset.SnomedReferenceSetMember;
import com.b2international.snowowl.snomed.core.domain.refset.SnomedReferenceSets;
import com.b2international.snowowl.snomed.datastore.index.entry.SnomedDescriptionIndexEntry;
import com.b2international.snowowl.snomed.datastore.index.entry.SnomedRelationshipIndexEntry;
import com.b2international.snowowl.snomed.datastore.request.SnomedConceptSearchRequestBuilder;
import com.b2international.snowowl.snomed.datastore.request.SnomedRefSetMemberSearchRequestBuilder;
import com.b2international.snowowl.snomed.datastore.request.SnomedRequests;
import com.b2international.snowowl.snomed.datastore.request.rf2.exporter.Rf2ConceptExporter;
import com.b2international.snowowl.snomed.datastore.request.rf2.exporter.Rf2DescriptionExporter;
import com.b2international.snowowl.snomed.datastore.request.rf2.exporter.Rf2LanguageRefSetExporter;
import com.b2international.snowowl.snomed.datastore.request.rf2.exporter.Rf2RefSetDescriptorRefSetExporter;
import com.b2international.snowowl.snomed.datastore.request.rf2.exporter.Rf2RefSetExporter;
import com.b2international.snowowl.snomed.datastore.request.rf2.exporter.Rf2RelationshipExporter;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Strings;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableList.Builder;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;
import com.google.common.collect.Multimap;
import com.google.common.collect.Ordering;
/**
* @since 5.7
*/
final class SnomedRf2ExportRequest extends ResourceRequest<BranchContext, Attachment> implements BranchAccessControl {
private static final String DESCRIPTION_TYPES_EXCEPT_TEXT_DEFINITION = "<<" + Concepts.DESCRIPTION_TYPE_ROOT_CONCEPT + " MINUS " + Concepts.TEXT_DEFINITION;
private static final String NON_STATED_CHARACTERISTIC_TYPES = "<<" + Concepts.CHARACTERISTIC_TYPE + " MINUS " + Concepts.STATED_RELATIONSHIP;
private static final long serialVersionUID = 1L;
private static final Ordering<CodeSystemVersionEntry> EFFECTIVE_DATE_ORDERING = Ordering.natural()
.onResultOf(CodeSystemVersionEntry::getEffectiveDate);
@JsonProperty
@NotNull
private Rf2ReleaseType releaseType;
@JsonProperty
@NotNull
private Rf2RefSetExportLayout refSetExportLayout;
@JsonProperty
@NotEmpty
private String countryNamespaceElement;
@JsonProperty
private String namespaceFilter;
@JsonProperty
private Long startEffectiveTime;
@JsonProperty
private Long endEffectiveTime;
@JsonProperty
private boolean includePreReleaseContent;
@JsonProperty
private Collection<String> componentTypes;
@JsonProperty
private Collection<String> modules;
@JsonProperty
private Collection<String> refSets;
@JsonProperty
private String transientEffectiveTime;
@JsonProperty
private boolean extensionOnly;
SnomedRf2ExportRequest() {}
void setReleaseType(final Rf2ReleaseType releaseType) {
this.releaseType = releaseType;
}
void setRefSetExportLayout(final Rf2RefSetExportLayout refSetExportLayout) {
this.refSetExportLayout = refSetExportLayout;
}
void setCountryNamespaceElement(final String countryNamespaceElement) {
this.countryNamespaceElement = countryNamespaceElement;
}
void setNamespaceFilter(final String namespaceFilter) {
this.namespaceFilter = namespaceFilter;
}
void setStartEffectiveTime(final Long startEffectiveTime) {
this.startEffectiveTime = startEffectiveTime;
}
void setEndEffectiveTime(final Long endEffectiveTime) {
this.endEffectiveTime = endEffectiveTime;
}
void setIncludePreReleaseContent(final boolean includeUnpublished) {
this.includePreReleaseContent = includeUnpublished;
}
void setComponentTypes(final Collection<String> componentTypes) {
/*
* All component types should be exported if the input value is null; no
* component type should be exported if the input value is an empty collection.
*/
this.componentTypes = (componentTypes != null)
? ImmutableSet.copyOf(componentTypes)
: ImmutableSet.of(SnomedTerminologyComponentConstants.CONCEPT,
SnomedTerminologyComponentConstants.DESCRIPTION,
SnomedTerminologyComponentConstants.RELATIONSHIP,
SnomedTerminologyComponentConstants.REFSET_MEMBER);
}
void setModules(final Collection<String> modules) {
/*
* All modules should be exported if the input value is null; no module
* should be exported if the input value is an empty collection.
*/
this.modules = (modules != null) ? ImmutableSet.copyOf(modules) : null;
}
void setRefSets(final Collection<String> refSets) {
/*
* All reference sets should be exported if the input value is null; no component
* should be exported if the input value is an empty collection.
*/
this.refSets = (refSets != null) ? ImmutableSet.copyOf(refSets) : null;
}
void setTransientEffectiveTime(final String transientEffectiveTime) {
if (Strings.isNullOrEmpty(transientEffectiveTime)) {
// Effective time columns should be left blank
this.transientEffectiveTime = "";
} else if ("NOW".equals(transientEffectiveTime)) {
// Special flag indicating "today"
this.transientEffectiveTime = EffectiveTimes.format(Dates.todayGmt(), DateFormats.SHORT);
} else {
// Otherwise, it should be a valid short date
Dates.parse(transientEffectiveTime, DateFormats.SHORT);
this.transientEffectiveTime = transientEffectiveTime;
}
}
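// Illustrative behaviour: null or "" leaves the effectiveTime columns blank, "NOW" becomes today's date in the
// short (yyyyMMdd-style) format, and any other value such as "20210131" (hypothetical) is validated against the
// short date format before being stored.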
void setExtensionOnly(final boolean extensionOnly) {
this.extensionOnly = extensionOnly;
}
@Override
public Attachment execute(final BranchContext context) {
final String referenceBranch = context.path();
// register export start time for later use
final long exportStartTime = Instant.now().toEpochMilli();
// Step 1: check if the export reference branch is a working branch path descendant
final CodeSystemEntry referenceCodeSystem = context.service(RepositoryCodeSystemProvider.class).get(referenceBranch);
// Step 2: retrieve code system versions that are visible from the reference branch
final TreeSet<CodeSystemVersionEntry> versionsToExport = getAllExportableCodeSystemVersions(context, referenceCodeSystem);
// Step 3: compute branches to export
final List<String> branchesToExport = computeBranchesToExport(referenceBranch, versionsToExport);
// Step 4: compute possible language codes
Multimap<String, String> availableLanguageCodes = getLanguageCodes(context, branchesToExport);
Path exportDirectory = null;
try {
final UUID exportId = UUID.randomUUID();
// create temporary export directory
exportDirectory = createExportDirectory(exportId);
// get archive effective time based on latest version effective / transient effective time / current date
final Date archiveEffectiveDate = getArchiveEffectiveTime(context, versionsToExport);
final String archiveEffectiveDateShort = Dates.format(archiveEffectiveDate, TimeZone.getTimeZone("UTC"), DateFormats.SHORT);
// create main folder including release status and archive effective date
final Path releaseDirectory = createReleaseDirectory(exportDirectory, archiveEffectiveDate);
final Set<String> visitedComponentEffectiveTimes = newHashSet();
final long effectiveTimeStart = startEffectiveTime != null ? startEffectiveTime : 0;
final long effectiveTimeEnd = endEffectiveTime != null ? endEffectiveTime : Long.MAX_VALUE;
// export content from the pre-computed version branches
for (String branch : branchesToExport) {
exportBranch(releaseDirectory,
context,
branch,
archiveEffectiveDateShort,
effectiveTimeStart,
effectiveTimeEnd,
visitedComponentEffectiveTimes,
availableLanguageCodes.get(branch));
}
// export content from reference branch
if (includePreReleaseContent) {
final String referenceBranchToExport = String.format("%s%s%s", referenceBranch, RevisionIndex.AT_CHAR, exportStartTime);
exportBranch(releaseDirectory,
context,
referenceBranchToExport,
archiveEffectiveDateShort,
EffectiveTimes.UNSET_EFFECTIVE_TIME,
EffectiveTimes.UNSET_EFFECTIVE_TIME,
visitedComponentEffectiveTimes,
availableLanguageCodes.get(referenceBranch));
}
// Step 6: compress to archive and upload to the file registry
final AttachmentRegistry fileRegistry = context.service(AttachmentRegistry.class);
registerResult(fileRegistry, exportId, exportDirectory);
final String fileName = releaseDirectory.getFileName() + ".zip";
return new Attachment(exportId, fileName);
} catch (final Exception e) {
throw new SnowowlRuntimeException("Failed to export terminology content to RF2.", e);
} finally {
if (exportDirectory != null) {
FileUtils.deleteDirectory(exportDirectory.toFile());
}
}
}
private Multimap<String, String> getLanguageCodes(BranchContext context, List<String> branchesToExport) {
final String referenceBranch = context.path();
List<String> branchesOrRanges = newArrayList(branchesToExport);
if (includePreReleaseContent) {
branchesOrRanges.add(referenceBranch);
}
Multimap<String, String> branchToLanguageCodes = HashMultimap.create();
Set<String> filteredLanguageCodes = Stream.of(Locale.getISOLanguages())
.filter(code -> !Locale.ENGLISH.getLanguage().equals(code))
.collect(toSet());
for (String branchOrRange : branchesOrRanges) {
String branch = getBranchOrRangeTarget(branchOrRange);
final Set<String> languageCodes = newHashSet();
// check if there are any english terms on the given branch / range
final Request<BranchContext, SnomedDescriptions> englishLanguageCodeRequest = SnomedRequests.prepareSearchDescription()
.setLimit(0)
.filterByLanguageCodes(singleton(Locale.ENGLISH.getLanguage()))
.build();
final SnomedDescriptions enDescriptions = execute(context, branch, englishLanguageCodeRequest);
if (enDescriptions.getTotal() > 0) {
languageCodes.add(Locale.ENGLISH.getLanguage());
}
// check if there are any terms other than english on the given branch / range
final Request<BranchContext, SnomedDescriptions> languageCodeRequest = SnomedRequests.prepareSearchDescription()
.all()
.filterByLanguageCodes(filteredLanguageCodes)
.setFields(SnomedRf2Headers.FIELD_ID, SnomedRf2Headers.FIELD_LANGUAGE_CODE)
.build();
final SnomedDescriptions descriptions = execute(context, branch, languageCodeRequest);
if (!descriptions.isEmpty()) {
languageCodes.addAll(descriptions.stream().map(SnomedDescription::getLanguageCode).collect(toSet()));
}
branchToLanguageCodes.putAll(branchOrRange, languageCodes);
}
return branchToLanguageCodes;
}
private List<String> computeBranchesToExport(final String referenceBranch, final TreeSet<CodeSystemVersionEntry> versionsToExport) {
final List<String> branchesToExport = newArrayList();
switch (releaseType) {
case FULL:
versionsToExport.stream()
.map(v -> v.getPath())
.filter(v -> !branchesToExport.contains(v))
.forEachOrdered(branchesToExport::add);
if (!branchesToExport.contains(referenceBranch)) {
branchesToExport.add(referenceBranch);
}
break;
case DELTA:
if (startEffectiveTime != null || endEffectiveTime != null || !includePreReleaseContent) {
versionsToExport.stream()
.map(v -> v.getPath())
.filter(v -> !branchesToExport.contains(v))
.forEachOrdered(branchesToExport::add);
if (!branchesToExport.contains(referenceBranch)) {
branchesToExport.add(referenceBranch);
}
}
break;
case SNAPSHOT:
branchesToExport.add(referenceBranch);
break;
}
Builder<String> branchRangesToExport = ImmutableList.builder();
for (int i = 0; i < branchesToExport.size(); i++) {
final String previousVersion = i == 0 ? null : branchesToExport.get(i - 1);
final String currentVersion = branchesToExport.get(i);
branchRangesToExport.add(previousVersion == null ? currentVersion : RevisionIndex.toRevisionRange(previousVersion, currentVersion));
}
return branchRangesToExport.build();
}
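// Worked example (hypothetical paths, range notation approximate): with versionsToExport = [MAIN/2019-01-31,
// MAIN/2019-07-31] and referenceBranch = MAIN, a FULL export yields [MAIN/2019-01-31,
// MAIN/2019-01-31..MAIN/2019-07-31, MAIN/2019-07-31..MAIN], so each entry only contributes content added since
// the previous version.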
private Date getArchiveEffectiveTime(final RepositoryContext context, final TreeSet<CodeSystemVersionEntry> versionsToExport) {
Optional<CodeSystemVersionEntry> lastVersionToExport;
if (endEffectiveTime != null) {
lastVersionToExport = Optional.ofNullable(getVersionBefore(versionsToExport, endEffectiveTime));
} else {
lastVersionToExport = !versionsToExport.isEmpty() ? Optional.ofNullable(versionsToExport.last()) : Optional.empty();
}
Optional<Date> latestModuleEffectiveTime = lastVersionToExport.flatMap(version -> getLatestModuleEffectiveTime(context, version));
if (includePreReleaseContent) {
if (!transientEffectiveTime.isEmpty()) {
return adjustCurrentHour(Dates.parse(transientEffectiveTime, DateFormats.SHORT));
} else if (latestModuleEffectiveTime.isPresent()) {
return adjustCurrentHour(getNextEffectiveDate(latestModuleEffectiveTime.get().getTime()));
} else if (lastVersionToExport.isPresent()) {
return adjustCurrentHour(getNextEffectiveDate(lastVersionToExport.get().getEffectiveDate()));
}
} else {
if (latestModuleEffectiveTime.isPresent()) {
return adjustCurrentHour(new Date(latestModuleEffectiveTime.get().getTime()));
} else if (lastVersionToExport.isPresent()) {
return adjustCurrentHour(new Date(lastVersionToExport.get().getEffectiveDate()));
}
}
return adjustCurrentHour(Dates.parse(Dates.format(new Date(), TimeZone.getTimeZone("UTC"), DateFormats.DEFAULT)));
}
private CodeSystemVersionEntry getVersionBefore(final TreeSet<CodeSystemVersionEntry> versionsToExport, final long timestamp) {
CodeSystemVersionEntry versionBeforeEndEffectiveTime = null;
for (CodeSystemVersionEntry version : versionsToExport) {
if (version.getEffectiveDate() > timestamp) {
break;
}
versionBeforeEndEffectiveTime = version;
}
return versionBeforeEndEffectiveTime;
}
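// Example: with versions dated 20190131 and 20190731 and a timestamp falling in April 2019, the 20190131 entry
// is returned; null is returned when every version is later than the given timestamp.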
private Optional<Date> getLatestModuleEffectiveTime(final RepositoryContext context, final CodeSystemVersionEntry version) {
final Optional<Date> sourceEffectiveTime = getLatestModuleEffectiveTime(context, version, SnomedRf2Headers.FIELD_SOURCE_EFFECTIVE_TIME);
final Optional<Date> targetEffectiveTime = getLatestModuleEffectiveTime(context, version, SnomedRf2Headers.FIELD_TARGET_EFFECTIVE_TIME);
if (!sourceEffectiveTime.isPresent() && !targetEffectiveTime.isPresent()) {
return Optional.empty();
} else if (!sourceEffectiveTime.isPresent()) {
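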
return targetEffectiveTime;
} else if (!targetEffectiveTime.isPresent()) {
return sourceEffectiveTime;
} else {
final Date sourceDate = sourceEffectiveTime.get();
final Date targetDate = targetEffectiveTime.get();
if (sourceDate.after(targetDate)) {
return sourceEffectiveTime;
}
if (targetDate.after(sourceDate)) {
return targetEffectiveTime;
}
// they are the same date
return sourceEffectiveTime;
}
}
private Optional<Date> getLatestModuleEffectiveTime(final RepositoryContext context, final CodeSystemVersionEntry version, String field) {
SnomedRefSetMemberSearchRequestBuilder requestBuilder = SnomedRequests.prepareSearchMember()
.filterByRefSet(Concepts.REFSET_MODULE_DEPENDENCY_TYPE)
.filterByActive(true)
.sortBy(SortField.descending(field))
.setLimit(1);
// See the comment in setModules; a value of "null" means that all modules should be exported
if (modules != null) {
requestBuilder.filterByModules(modules);
}
final Optional<SnomedReferenceSetMember> moduleDependencyMember = requestBuilder
.build(context.id(), version.getPath())
.execute(context.service(IEventBus.class))
.getSync(1, TimeUnit.MINUTES)
.first();
return moduleDependencyMember.map(m -> {
return (Date) m.getProperties().get(field);
});
}
private Date adjustCurrentHour(final Date effectiveDate) {
final Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
int currentHour = calendar.get(Calendar.HOUR_OF_DAY);
calendar.setTimeInMillis(effectiveDate.getTime());
calendar.set(Calendar.HOUR_OF_DAY, currentHour);
calendar.set(Calendar.MINUTE, 0);
calendar.set(Calendar.SECOND, 0);
calendar.set(Calendar.MILLISECOND, 0);
return calendar.getTime();
}
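// Example: an effective date of 2019-07-31 00:00 UTC exported at 14:xx UTC becomes 2019-07-31 14:00:00.000 UTC.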
private Date getNextEffectiveDate(final long time) {
final Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
calendar.setTimeInMillis(time);
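// Note: Calendar.roll(DATE, true) wraps within the current month (e.g. Jan 31 rolls back to Jan 1 of the same
// month); Calendar.add(Calendar.DATE, 1) may be what is intended if the next calendar day should cross month
// boundaries.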
calendar.roll(Calendar.DATE, true);
return calendar.getTime();
}
private TreeSet<CodeSystemVersionEntry> getAllExportableCodeSystemVersions(final BranchContext context, final CodeSystemEntry codeSystemEntry) {
final String referenceBranch = context.path();
final TreeSet<CodeSystemVersionEntry> visibleVersions = newTreeSet(EFFECTIVE_DATE_ORDERING);
collectExportableCodeSystemVersions(context, visibleVersions, codeSystemEntry, referenceBranch);
return visibleVersions;
}
private void collectExportableCodeSystemVersions(final RepositoryContext context, final Set<CodeSystemVersionEntry> versionsToExport, final CodeSystemEntry codeSystemEntry,
final String referenceBranch) {
final Collection<CodeSystemVersionEntry> candidateVersions = newArrayList(getCodeSystemVersions(context, codeSystemEntry.getShortName()));
if (candidateVersions.isEmpty()) {
return;
}
final String versionParentPath = candidateVersions.stream()
.map(CodeSystemVersionEntry::getParentBranchPath)
.findFirst()
.get();
final Set<String> versionNames = candidateVersions.stream()
.map(CodeSystemVersionEntry::getVersionId)
.collect(Collectors.toSet());
final Branches versionBranches = getBranches(context, versionParentPath, versionNames);
final Map<String, Branch> versionBranchesByName = Maps.uniqueIndex(versionBranches, Branch::name);
final Branch cutoffBranch = getBranch(context, referenceBranch);
final long cutoffBaseTimestamp = getCutoffBaseTimestamp(context, cutoffBranch, versionParentPath);
// Remove all code system versions which were created after the cut-off date, or don't have a corresponding branch
candidateVersions.removeIf(v -> false
|| !versionBranchesByName.containsKey(v.getVersionId())
|| versionBranchesByName.get(v.getVersionId()).baseTimestamp() > cutoffBaseTimestamp);
versionsToExport.addAll(candidateVersions);
// Exit early if only an extension code system should be exported, or we are already at the "base" code system
if (extensionOnly || Strings.isNullOrEmpty(codeSystemEntry.getExtensionOf())) {
return;
}
// Otherwise, collect applicable versions using this code system's working path
final CodeSystemEntry extensionEntity = CodeSystemRequests.getCodeSystem(context, codeSystemEntry.getExtensionOf());
collectExportableCodeSystemVersions(context, versionsToExport, extensionEntity, codeSystemEntry.getBranchPath());
}
private Path createExportDirectory(final UUID exportId) {
try {
return Files.createTempDirectory("export-" + exportId + "-");
} catch (final IOException e) {
throw new SnowowlRuntimeException("Failed to create working directory for export.", e);
}
}
private Path createReleaseDirectory(final Path exportDirectory, final Date archiveEffectiveTime) {
final String releaseStatus = includePreReleaseContent ? "BETA" : "PRODUCTION";
String effectiveDate = Dates.format(archiveEffectiveTime, TimeZone.getTimeZone("UTC"), DateFormats.ISO_8601_UTC);
final Path releaseDirectory = exportDirectory.resolve(String.format("SNOMEDCT_RF2_%s_%s", releaseStatus, effectiveDate));
try {
Files.createDirectories(releaseDirectory);
} catch (final IOException e) {
throw new SnowowlRuntimeException("Failed to create RF2 release directory for export.", e);
}
return releaseDirectory;
}
private void exportBranch(final Path releaseDirectory,
final RepositoryContext context,
final String branch,
final String archiveEffectiveTime,
final long effectiveTimeFilterStart,
final long effectiveTimeFilterEnd,
final Set<String> visitedComponentEffectiveTimes,
final Collection<String> languageCodes) throws IOException {
for (final String componentToExport : componentTypes) {
switch (componentToExport) {
case SnomedTerminologyComponentConstants.CONCEPT:
exportConcepts(releaseDirectory,
context,
branch,
archiveEffectiveTime,
effectiveTimeFilterStart,
effectiveTimeFilterEnd,
visitedComponentEffectiveTimes);
break;
case SnomedTerminologyComponentConstants.DESCRIPTION:
for (final String languageCode : languageCodes) {
exportDescriptions(releaseDirectory,
context,
branch,
archiveEffectiveTime,
effectiveTimeFilterStart,
effectiveTimeFilterEnd,
languageCode,
visitedComponentEffectiveTimes);
}
break;
case SnomedTerminologyComponentConstants.RELATIONSHIP:
exportRelationships(releaseDirectory,
context,
branch,
archiveEffectiveTime,
effectiveTimeFilterStart,
effectiveTimeFilterEnd,
visitedComponentEffectiveTimes);
break;
case SnomedTerminologyComponentConstants.REFSET_MEMBER:
if (Rf2RefSetExportLayout.COMBINED.equals(refSetExportLayout)) {
exportCombinedRefSets(releaseDirectory,
context,
branch,
archiveEffectiveTime,
effectiveTimeFilterStart,
effectiveTimeFilterEnd,
languageCodes,
visitedComponentEffectiveTimes);
} else {
exportIndividualRefSets(releaseDirectory,
context,
branch,
archiveEffectiveTime,
effectiveTimeFilterStart,
effectiveTimeFilterEnd,
languageCodes,
visitedComponentEffectiveTimes);
}
break;
default:
throw new IllegalStateException("Component type '" + componentToExport + "' cannot be exported.");
}
}
if (Boolean.valueOf(System.getProperty("so.snomed.refsetdescriptor_preview", "false"))) {
exportRefSetDescriptor(releaseDirectory,
context,
branch,
archiveEffectiveTime,
effectiveTimeFilterStart,
effectiveTimeFilterEnd,
languageCodes,
visitedComponentEffectiveTimes);
}
}
private void exportRefSetDescriptor(Path releaseDirectory,
RepositoryContext context,
String branch,
String archiveEffectiveTime,
long effectiveTimeFilterStart,
long effectiveTimeFilterEnd,
Collection<String> languageCodes,
Set<String> visitedComponentEffectiveTimes) throws IOException {
final Rf2RefSetDescriptorRefSetExporter exporter = new Rf2RefSetDescriptorRefSetExporter(releaseType,
countryNamespaceElement,
namespaceFilter,
transientEffectiveTime,
archiveEffectiveTime,
modules);
exporter.exportBranch(releaseDirectory, context, branch, effectiveTimeFilterStart, effectiveTimeFilterEnd, visitedComponentEffectiveTimes);
}
private void exportConcepts(final Path releaseDirectory,
final RepositoryContext context,
final String branch,
final String archiveEffectiveTime,
final long effectiveTimeFilterStart,
final long effectiveTimeFilterEnd,
final Set<String> visitedComponentEffectiveTimes) throws IOException {
final Rf2ConceptExporter conceptExporter = new Rf2ConceptExporter(releaseType,
countryNamespaceElement,
namespaceFilter,
transientEffectiveTime,
archiveEffectiveTime,
includePreReleaseContent,
modules);
conceptExporter.exportBranch(releaseDirectory, context, branch, effectiveTimeFilterStart, effectiveTimeFilterEnd, visitedComponentEffectiveTimes);
}
private void exportDescriptions(final Path releaseDirectory,
final RepositoryContext context,
final String branch,
final String archiveEffectiveTime,
final long effectiveTimeFilterStart,
final long effectiveTimeFilterEnd,
final String languageCode,
final Set<String> visitedComponentEffectiveTimes) throws IOException {
final Set<String> descriptionTypes = execute(context, getBranchOrRangeTarget(branch), SnomedRequests.prepareSearchConcept()
.all()
.filterByEcl(DESCRIPTION_TYPES_EXCEPT_TEXT_DEFINITION)
.setFields(SnomedDescriptionIndexEntry.Fields.ID)
.build())
.stream()
.map(IComponent::getId)
.collect(Collectors.toSet());
final Rf2DescriptionExporter descriptionExporter = new Rf2DescriptionExporter(releaseType,
countryNamespaceElement,
namespaceFilter,
transientEffectiveTime,
archiveEffectiveTime,
modules,
descriptionTypes,
languageCode);
final Rf2DescriptionExporter textDefinitionExporter = new Rf2DescriptionExporter(releaseType,
countryNamespaceElement,
namespaceFilter,
transientEffectiveTime,
archiveEffectiveTime,
modules,
ImmutableSet.of(Concepts.TEXT_DEFINITION),
languageCode);
descriptionExporter.exportBranch(releaseDirectory, context, branch, effectiveTimeFilterStart, effectiveTimeFilterEnd, visitedComponentEffectiveTimes);
textDefinitionExporter.exportBranch(releaseDirectory, context, branch, effectiveTimeFilterStart, effectiveTimeFilterEnd, visitedComponentEffectiveTimes);
}
private String getBranchOrRangeTarget(final String branch) {
return RevisionIndex.isRevRangePath(branch) ? RevisionIndex.getRevisionRangePaths(branch)[1] : branch;
}
private <R> R execute(RepositoryContext context, String branch, Request<BranchContext, R> next) {
return new BranchRequest<>(branch, new RevisionIndexReadRequest<>(next)).execute(context);
}
private void exportRelationships(final Path releaseDirectory,
final RepositoryContext context,
final String branch,
final String archiveEffectiveTime,
final long effectiveTimeFilterStart,
final long effectiveTimeFilterEnd,
final Set<String> visitedComponentEffectiveTimes) throws IOException {
final Set<String> characteristicTypes = execute(context, getBranchOrRangeTarget(branch), SnomedRequests.prepareSearchConcept()
.all()
.filterByEcl(NON_STATED_CHARACTERISTIC_TYPES)
.setFields(SnomedRelationshipIndexEntry.Fields.ID)
.build())
.stream()
.map(IComponent::getId)
.collect(Collectors.toSet());
final Rf2RelationshipExporter statedRelationshipExporter = new Rf2RelationshipExporter(releaseType,
countryNamespaceElement,
namespaceFilter,
transientEffectiveTime,
archiveEffectiveTime,
modules,
ImmutableSet.of(Concepts.STATED_RELATIONSHIP));
final Rf2RelationshipExporter relationshipExporter = new Rf2RelationshipExporter(releaseType,
countryNamespaceElement,
namespaceFilter,
transientEffectiveTime,
archiveEffectiveTime,
modules,
characteristicTypes);
statedRelationshipExporter.exportBranch(releaseDirectory, context, branch, effectiveTimeFilterStart, effectiveTimeFilterEnd, visitedComponentEffectiveTimes);
relationshipExporter.exportBranch(releaseDirectory, context, branch, effectiveTimeFilterStart, effectiveTimeFilterEnd, visitedComponentEffectiveTimes);
}
private void exportCombinedRefSets(final Path releaseDirectory,
final RepositoryContext context,
final String branch,
final String archiveEffectiveTime,
final long effectiveTimeFilterStart,
final long effectiveTimeFilterEnd,
final Collection<String> languageCodes,
final Set<String> visitedComponentEffectiveTimes) throws IOException {
final Multimap<SnomedRefSetType, SnomedConcept> referenceSetsByType = FluentIterable.from(getIdentifierConcepts(context, getBranchOrRangeTarget(branch)))
.index(c -> c.getReferenceSet().getType());
// Create single exporter instance for each reference set type
for (final SnomedRefSetType refSetType : referenceSetsByType.keySet()) {
// We will handle language reference sets separately
if (SnomedRefSetType.LANGUAGE.equals(refSetType)) {
continue;
}
final Rf2RefSetExporter refSetExporter = new Rf2RefSetExporter(releaseType,
countryNamespaceElement,
namespaceFilter,
transientEffectiveTime,
archiveEffectiveTime,
modules,
refSetExportLayout,
refSetType,
referenceSetsByType.get(refSetType));
refSetExporter.exportBranch(releaseDirectory, context, branch, effectiveTimeFilterStart, effectiveTimeFilterEnd, visitedComponentEffectiveTimes);
}
exportLanguageRefSets(releaseDirectory,
context,
branch,
archiveEffectiveTime,
effectiveTimeFilterStart,
effectiveTimeFilterEnd,
languageCodes,
referenceSetsByType.get(SnomedRefSetType.LANGUAGE),
visitedComponentEffectiveTimes);
}
private void exportIndividualRefSets(final Path releaseDirectory,
final RepositoryContext context,
final String revisionRange,
final String archiveEffectiveTime,
final long effectiveTimeFilterStart,
final long effectiveTimeFilterEnd,
final Collection<String> languageCodes,
final Set<String> visitedComponentEffectiveTimes) throws IOException {
final Multimap<SnomedRefSetType, SnomedConcept> referenceSetsByType = FluentIterable.from(getIdentifierConcepts(context, getBranchOrRangeTarget(revisionRange)))
.index(c -> c.getReferenceSet().getType());
/*
* Create single exporter instance for each reference set type - reference set concept
* pair (so effectively one for each reference set)
*/
for (final Entry<SnomedRefSetType, SnomedConcept> entry : referenceSetsByType.entries()) {
// We will handle language reference sets separately
if (SnomedRefSetType.LANGUAGE.equals(entry.getKey())) {
continue;
}
final Rf2RefSetExporter refSetExporter = new Rf2RefSetExporter(releaseType,
countryNamespaceElement,
namespaceFilter,
transientEffectiveTime,
archiveEffectiveTime,
modules,
refSetExportLayout,
entry.getKey(),
ImmutableSet.of(entry.getValue()));
refSetExporter.exportBranch(releaseDirectory, context, revisionRange, effectiveTimeFilterStart, effectiveTimeFilterEnd, visitedComponentEffectiveTimes);
}
exportLanguageRefSets(releaseDirectory,
context,
revisionRange,
archiveEffectiveTime,
effectiveTimeFilterStart,
effectiveTimeFilterEnd,
languageCodes,
referenceSetsByType.get(SnomedRefSetType.LANGUAGE),
visitedComponentEffectiveTimes);
}
private void exportLanguageRefSets(final Path releaseDirectory,
final RepositoryContext context,
final String branch,
final String archiveEffectiveTime,
final long effectiveTimeFilterStart,
final long effectiveTimeFilterEnd,
final Collection<String> languageCodes,
final Collection<SnomedConcept> languageRefSets,
final Set<String> visitedComponentEffectiveTimes) throws IOException {
if (languageRefSets.isEmpty()) {
return;
}
for (final String languageCode : languageCodes) {
final Rf2LanguageRefSetExporter languageExporter = new Rf2LanguageRefSetExporter(releaseType,
countryNamespaceElement,
namespaceFilter,
transientEffectiveTime,
archiveEffectiveTime,
modules,
SnomedRefSetType.LANGUAGE,
languageRefSets,
languageCode);
languageExporter.exportBranch(releaseDirectory, context, branch, effectiveTimeFilterStart, effectiveTimeFilterEnd, visitedComponentEffectiveTimes);
}
}
private List<SnomedConcept> getIdentifierConcepts(final RepositoryContext context, final String currentVersion) {
final Collection<String> refSetsToLoad;
if (refSets == null) {
// Retrieve all reference sets if refSets is null
final Request<BranchContext, SnomedReferenceSets> refSetRequest = SnomedRequests.prepareSearchRefSet()
.all()
.build();
final SnomedReferenceSets allReferenceSets = execute(context, currentVersion, refSetRequest);
refSetsToLoad = allReferenceSets.stream()
.map(r -> r.getId())
.collect(Collectors.toSet());
} else {
refSetsToLoad = refSets;
}
final SnomedConceptSearchRequestBuilder refSetRequestBuilder = SnomedRequests.prepareSearchConcept()
.all()
.filterByIds(refSetsToLoad)
.setExpand("pt(),referenceSet()")
.setLocales(locales());
final Request<BranchContext, SnomedConcepts> request = refSetRequestBuilder.build();
final SnomedConcepts referenceSets = execute(context, currentVersion, request);
// Return only the identifier concepts which have an existing reference set on this branch
return referenceSets.stream()
.filter(c -> c.getReferenceSet() != null)
.collect(Collectors.toList());
}
private void registerResult(final AttachmentRegistry fileRegistry, final UUID exportId, final Path exportDirectory) {
File archiveFile = null;
try {
archiveFile = exportDirectory.resolveSibling(exportDirectory.getFileName() + ".zip").toFile();
FileUtils.createZipArchive(exportDirectory.toFile(), archiveFile);
fileRegistry.upload(exportId, new FileInputStream(archiveFile)); // lgtm[java/input-resource-leak]
} catch (final IOException e) {
throw new SnowowlRuntimeException("Failed to register archive file from export directory.", e);
} finally {
if (archiveFile != null) {
archiveFile.delete();
}
}
}
private static long getCutoffBaseTimestamp(final RepositoryContext context, final Branch cutoffBranch, final String versionParentPath) {
System.err.println("SnomedRf2ExportRequest.getCutoffBaseTimestamp(): branch[" + cutoffBranch.path() + "], branchParentPath: [" + cutoffBranch.parentPath() + "], versionParentPath: [" + versionParentPath + "]");
if (cutoffBranch.path().equals(versionParentPath)) {
// We are on the working branch of the code system, all versions are visible for export
return Long.MAX_VALUE;
} else if (cutoffBranch.parentPath().equals(versionParentPath)) {
// We are on a direct child of the working branch, versions should be limited according to the base timestamp
return cutoffBranch.baseTimestamp();
} else {
// Two or more levels down from a working branch, look "upwards"
return getCutoffBaseTimestamp(context, getBranch(context, cutoffBranch.parentPath()), versionParentPath);
}
}
private static Collection<CodeSystemVersionEntry> getCodeSystemVersions(final RepositoryContext context, final String shortName) {
return CodeSystemRequests.prepareSearchCodeSystemVersion()
.all()
.filterByCodeSystemShortName(shortName)
.build()
.execute(context)
.getItems();
}
private static Branch getBranch(final RepositoryContext context, final String path) {
System.err.println("SnomedRf2ExportRequest.getBranch(): " + path);
return RepositoryRequests.branching()
.prepareGet(path)
.build()
.execute(context);
}
private static Branches getBranches(final RepositoryContext context, final String parent, final Collection<String> paths) {
return RepositoryRequests.branching()
.prepareSearch()
.all()
.filterByParent(parent)
.filterByName(paths)
.build()
.execute(context);
}
@Override
public String getOperation() {
return Permission.EXPORT;
}
}
|
[export] trigger another build by adding more systrace
|
snomed/com.b2international.snowowl.snomed.datastore/src/com/b2international/snowowl/snomed/datastore/request/rf2/SnomedRf2ExportRequest.java
|
[export] trigger another build by adding more systrace
|
|
Java
|
apache-2.0
|
9d6a573a91407a1be9fe1c74d6b0d6c95ad22a0e
| 0
|
hyracoidea/hyrax-backend,hyracoidea/hyrax-backend
|
package com.hyrax.spring.boot.starter.authentication.rest.service.impl;
import com.hyrax.spring.boot.starter.authentication.model.SecuredAccountDetails;
import com.hyrax.spring.boot.starter.authentication.rest.client.SecuredAccountRESTClient;
import com.hyrax.spring.boot.starter.authentication.rest.service.SecuredAccountRESTService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.ws.rs.core.Response;
import java.util.Objects;
import java.util.Optional;
public class SecuredAccountRESTServiceImpl implements SecuredAccountRESTService {
private static final Logger LOGGER = LoggerFactory.getLogger(SecuredAccountRESTServiceImpl.class);
private final SecuredAccountRESTClient securedAccountRESTClient;
public SecuredAccountRESTServiceImpl(final SecuredAccountRESTClient securedAccountRESTClient) {
this.securedAccountRESTClient = securedAccountRESTClient;
}
@Override
public Optional<SecuredAccountDetails> retrieveSecuredAccount(final String username) {
SecuredAccountDetails result = null;
Response response = null;
try {
response = securedAccountRESTClient.callRetrieveSecuredAccountEndpoint(username);
result = processResponse(response);
} catch (final Exception e) {
LOGGER.error("Unexpected exception happened during the REST call", e);
} finally {
if (Objects.nonNull(response)) {
response.close();
}
}
return Optional.ofNullable(result);
}
private SecuredAccountDetails processResponse(final Response response) {
SecuredAccountDetails result = null;
switch (response.getStatusInfo().getFamily()) {
case SUCCESSFUL:
result = response.readEntity(SecuredAccountDetails.class);
break;
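// For the non-successful families below the entity is still read and discarded, presumably so the underlying
// connection can be released; result stays null and the caller receives Optional.empty().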
case CLIENT_ERROR:
response.readEntity(String.class);
break;
case SERVER_ERROR:
response.readEntity(String.class);
break;
default:
response.readEntity(String.class);
break;
}
return result;
}
}
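// Illustrative usage (hypothetical wiring):
//   SecuredAccountRESTService service = new SecuredAccountRESTServiceImpl(securedAccountRESTClient);
//   Optional<SecuredAccountDetails> details = service.retrieveSecuredAccount("alice");
//   details.ifPresent(account -> { /* authenticate against the returned credentials */ });
// Any transport or mapping failure is logged and surfaces as Optional.empty().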
|
spring-boot-starter/authentication/authentication-service/src/main/java/com/hyrax/spring/boot/starter/authentication/rest/service/impl/SecuredAccountRESTServiceImpl.java
|
package com.hyrax.spring.boot.starter.authentication.rest.service.impl;
import com.hyrax.spring.boot.starter.authentication.model.SecuredAccountDetails;
import com.hyrax.spring.boot.starter.authentication.rest.client.SecuredAccountRESTClient;
import com.hyrax.spring.boot.starter.authentication.rest.service.SecuredAccountRESTService;
import javax.ws.rs.core.Response;
import java.util.Objects;
import java.util.Optional;
public class SecuredAccountRESTServiceImpl implements SecuredAccountRESTService {
private final SecuredAccountRESTClient securedAccountRESTClient;
public SecuredAccountRESTServiceImpl(final SecuredAccountRESTClient securedAccountRESTClient) {
this.securedAccountRESTClient = securedAccountRESTClient;
}
@Override
public Optional<SecuredAccountDetails> retrieveSecuredAccount(final String username) {
SecuredAccountDetails result = null;
Response response = null;
try {
response = securedAccountRESTClient.callRetrieveSecuredAccountEndpoint(username);
result = processResponse(response);
} catch (Exception e) {
e.printStackTrace();
} finally {
if (Objects.nonNull(response)) {
response.close();
}
}
return Optional.ofNullable(result);
}
private SecuredAccountDetails processResponse(final Response response) {
SecuredAccountDetails result = null;
switch (response.getStatusInfo().getFamily()) {
case SUCCESSFUL:
result = response.readEntity(SecuredAccountDetails.class);
break;
case CLIENT_ERROR:
response.readEntity(String.class);
break;
case SERVER_ERROR:
response.readEntity(String.class);
break;
default:
response.readEntity(String.class);
break;
}
return result;
}
}
|
#36 - Added Logger to the SecuredAccountRESTServiceImpl
|
spring-boot-starter/authentication/authentication-service/src/main/java/com/hyrax/spring/boot/starter/authentication/rest/service/impl/SecuredAccountRESTServiceImpl.java
|
#36 - Added Logger to the SecuredAccountRESTServiceImpl
|
|
Java
|
apache-2.0
|
3b649327e7c62f9e718f52104aae36bc8672002f
| 0
|
leeyazhou/sharding-jdbc,leeyazhou/sharding-jdbc,apache/incubator-shardingsphere,apache/incubator-shardingsphere,apache/incubator-shardingsphere,apache/incubator-shardingsphere,leeyazhou/sharding-jdbc,leeyazhou/sharding-jdbc
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.shardingsphere.shardingjdbc.spring.namespace.parser;
import com.google.common.base.Splitter;
import com.google.common.base.Strings;
import org.apache.shardingsphere.api.config.encryptor.EncryptRuleConfiguration;
import org.apache.shardingsphere.api.config.encryptor.EncryptorRuleConfiguration;
import org.apache.shardingsphere.api.config.masterslave.MasterSlaveRuleConfiguration;
import org.apache.shardingsphere.api.config.sharding.ShardingRuleConfiguration;
import org.apache.shardingsphere.api.config.sharding.TableRuleConfiguration;
import org.apache.shardingsphere.shardingjdbc.spring.datasource.SpringShardingDataSource;
import org.apache.shardingsphere.shardingjdbc.spring.namespace.constants.EncryptDataSourceBeanDefinitionParserTag;
import org.apache.shardingsphere.shardingjdbc.spring.namespace.constants.EncryptorRuleBeanDefinitionParserTag;
import org.apache.shardingsphere.shardingjdbc.spring.namespace.constants.MasterSlaveDataSourceBeanDefinitionParserTag;
import org.apache.shardingsphere.shardingjdbc.spring.namespace.constants.ShardingDataSourceBeanDefinitionParserTag;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.beans.factory.config.RuntimeBeanReference;
import org.springframework.beans.factory.support.AbstractBeanDefinition;
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
import org.springframework.beans.factory.support.ManagedList;
import org.springframework.beans.factory.support.ManagedMap;
import org.springframework.beans.factory.xml.AbstractBeanDefinitionParser;
import org.springframework.beans.factory.xml.ParserContext;
import org.springframework.util.xml.DomUtils;
import org.w3c.dom.Element;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
/**
* Sharding data source parser for spring namespace.
*
* @author caohao
* @author panjuan
*/
public final class ShardingDataSourceBeanDefinitionParser extends AbstractBeanDefinitionParser {
@Override
protected AbstractBeanDefinition parseInternal(final Element element, final ParserContext parserContext) {
BeanDefinitionBuilder factory = BeanDefinitionBuilder.rootBeanDefinition(SpringShardingDataSource.class);
factory.addConstructorArgValue(parseDataSources(element));
factory.addConstructorArgValue(parseShardingRuleConfiguration(element));
factory.addConstructorArgValue(parseProperties(element, parserContext));
factory.setDestroyMethodName("close");
return factory.getBeanDefinition();
}
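/*
 * Approximate shape of the XML this parser handles (element and attribute names inferred from the
 * *BeanDefinitionParserTag constants; all values are hypothetical):
 *
 * <sharding:data-source id="shardingDataSource">
 *   <sharding:sharding-rule data-source-names="ds0,ds1">
 *     <sharding:table-rules>
 *       <sharding:table-rule logic-table="t_order" ... />
 *     </sharding:table-rules>
 *   </sharding:sharding-rule>
 * </sharding:data-source>
 */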
private Map<String, RuntimeBeanReference> parseDataSources(final Element element) {
Element shardingRuleElement = DomUtils.getChildElementByTagName(element, ShardingDataSourceBeanDefinitionParserTag.SHARDING_RULE_CONFIG_TAG);
List<String> dataSources = Splitter.on(",").trimResults().splitToList(shardingRuleElement.getAttribute(ShardingDataSourceBeanDefinitionParserTag.DATA_SOURCE_NAMES_TAG));
Map<String, RuntimeBeanReference> result = new ManagedMap<>(dataSources.size());
for (String each : dataSources) {
result.put(each, new RuntimeBeanReference(each));
}
return result;
}
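// Example: data-source-names="ds0, ds1" (hypothetical bean names) produces {"ds0" -> RuntimeBeanReference("ds0"),
// "ds1" -> RuntimeBeanReference("ds1")}; whitespace around the commas is trimmed.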
private BeanDefinition parseShardingRuleConfiguration(final Element element) {
Element shardingRuleElement = DomUtils.getChildElementByTagName(element, ShardingDataSourceBeanDefinitionParserTag.SHARDING_RULE_CONFIG_TAG);
BeanDefinitionBuilder factory = BeanDefinitionBuilder.rootBeanDefinition(ShardingRuleConfiguration.class);
parseDefaultDataSource(factory, shardingRuleElement);
parseDefaultDatabaseShardingStrategy(factory, shardingRuleElement);
parseDefaultTableShardingStrategy(factory, shardingRuleElement);
factory.addPropertyValue("tableRuleConfigs", parseTableRulesConfiguration(shardingRuleElement));
factory.addPropertyValue("masterSlaveRuleConfigs", parseMasterSlaveRulesConfiguration(shardingRuleElement));
factory.addPropertyValue("bindingTableGroups", parseBindingTablesConfiguration(shardingRuleElement));
factory.addPropertyValue("broadcastTables", parseBroadcastTables(shardingRuleElement));
parseDefaultKeyGenerator(factory, shardingRuleElement);
parseEncryptRuleConfiguration(factory, shardingRuleElement);
return factory.getBeanDefinition();
}
private void parseDefaultKeyGenerator(final BeanDefinitionBuilder factory, final Element element) {
String defaultKeyGeneratorConfig = element.getAttribute(ShardingDataSourceBeanDefinitionParserTag.DEFAULT_KEY_GENERATOR_REF_ATTRIBUTE);
if (!Strings.isNullOrEmpty(defaultKeyGeneratorConfig)) {
factory.addPropertyReference("defaultKeyGeneratorConfig", defaultKeyGeneratorConfig);
}
}
private void parseDefaultDataSource(final BeanDefinitionBuilder factory, final Element element) {
String defaultDataSource = element.getAttribute(ShardingDataSourceBeanDefinitionParserTag.DEFAULT_DATA_SOURCE_NAME_TAG);
if (!Strings.isNullOrEmpty(defaultDataSource)) {
factory.addPropertyValue("defaultDataSourceName", defaultDataSource);
}
}
private void parseDefaultDatabaseShardingStrategy(final BeanDefinitionBuilder factory, final Element element) {
String defaultDatabaseShardingStrategy = element.getAttribute(ShardingDataSourceBeanDefinitionParserTag.DEFAULT_DATABASE_STRATEGY_REF_ATTRIBUTE);
if (!Strings.isNullOrEmpty(defaultDatabaseShardingStrategy)) {
factory.addPropertyReference("defaultDatabaseShardingStrategyConfig", defaultDatabaseShardingStrategy);
}
}
private void parseDefaultTableShardingStrategy(final BeanDefinitionBuilder factory, final Element element) {
String defaultTableShardingStrategy = element.getAttribute(ShardingDataSourceBeanDefinitionParserTag.DEFAULT_TABLE_STRATEGY_REF_ATTRIBUTE);
if (!Strings.isNullOrEmpty(defaultTableShardingStrategy)) {
factory.addPropertyReference("defaultTableShardingStrategyConfig", defaultTableShardingStrategy);
}
}
private List<BeanDefinition> parseMasterSlaveRulesConfiguration(final Element element) {
Element masterSlaveRulesElement = DomUtils.getChildElementByTagName(element, ShardingDataSourceBeanDefinitionParserTag.MASTER_SLAVE_RULES_TAG);
if (null == masterSlaveRulesElement) {
return new LinkedList<>();
}
List<Element> masterSlaveRuleElements = DomUtils.getChildElementsByTagName(masterSlaveRulesElement, ShardingDataSourceBeanDefinitionParserTag.MASTER_SLAVE_RULE_TAG);
List<BeanDefinition> result = new ManagedList<>(masterSlaveRuleElements.size());
for (Element each : masterSlaveRuleElements) {
result.add(parseMasterSlaveRuleConfiguration(each));
}
return result;
}
private BeanDefinition parseMasterSlaveRuleConfiguration(final Element masterSlaveElement) {
BeanDefinitionBuilder factory = BeanDefinitionBuilder.rootBeanDefinition(MasterSlaveRuleConfiguration.class);
factory.addConstructorArgValue(masterSlaveElement.getAttribute(ID_ATTRIBUTE));
factory.addConstructorArgValue(masterSlaveElement.getAttribute(MasterSlaveDataSourceBeanDefinitionParserTag.MASTER_DATA_SOURCE_NAME_ATTRIBUTE));
factory.addConstructorArgValue(parseSlaveDataSourcesRef(masterSlaveElement));
parseMasterSlaveRuleLoadBalanceAlgorithm(masterSlaveElement, factory);
return factory.getBeanDefinition();
}
private void parseMasterSlaveRuleLoadBalanceAlgorithm(final Element masterSlaveElement, final BeanDefinitionBuilder factory) {
// TODO process LOAD_BALANCE_ALGORITHM_REF_ATTRIBUTE
// String loadBalanceAlgorithmRef = masterSlaveElement.getAttribute(MasterSlaveDataSourceBeanDefinitionParserTag.LOAD_BALANCE_ALGORITHM_REF_ATTRIBUTE);
// if (!Strings.isNullOrEmpty(loadBalanceAlgorithmRef)) {
// factory.addConstructorArgReference(loadBalanceAlgorithmRef);
// } else {
// factory.addConstructorArgValue(new MasterSlaveLoadBalanceAlgorithmServiceLoader().newService());
// }
}
private Collection<String> parseSlaveDataSourcesRef(final Element element) {
List<String> slaveDataSources = Splitter.on(",").trimResults().splitToList(element.getAttribute(MasterSlaveDataSourceBeanDefinitionParserTag.SLAVE_DATA_SOURCE_NAMES_ATTRIBUTE));
Collection<String> result = new ManagedList<>(slaveDataSources.size());
result.addAll(slaveDataSources);
return result;
}
private void parseEncryptRuleConfiguration(final BeanDefinitionBuilder factory, final Element element) {
Element encryptRuleElement = DomUtils.getChildElementByTagName(element, EncryptDataSourceBeanDefinitionParserTag.ENCRYPT_RULE_CONFIG_TAG);
if (null != encryptRuleElement) {
factory.addPropertyValue("encryptRuleConfig", parseEncryptRuleConfiguration(encryptRuleElement));
}
}
private BeanDefinition parseEncryptRuleConfiguration(final Element element) {
BeanDefinitionBuilder factory = BeanDefinitionBuilder.rootBeanDefinition(EncryptRuleConfiguration.class);
factory.addConstructorArgValue(parseEncryptorRulesConfiguration(element));
return factory.getBeanDefinition();
}
private Map<String, BeanDefinition> parseEncryptorRulesConfiguration(final Element element) {
List<Element> encryptorRuleElements = DomUtils.getChildElementsByTagName(element, EncryptDataSourceBeanDefinitionParserTag.ENCRYPTOR_RULE_CONFIG_TAG);
Map<String, BeanDefinition> result = new ManagedMap<>(encryptorRuleElements.size());
for (Element each : encryptorRuleElements) {
result.put(each.getAttribute(ID_ATTRIBUTE), parseEncryptorRuleConfiguration(each));
}
return result;
}
private AbstractBeanDefinition parseEncryptorRuleConfiguration(final Element element) {
BeanDefinitionBuilder factory = BeanDefinitionBuilder.rootBeanDefinition(EncryptorRuleConfiguration.class);
factory.addConstructorArgValue(element.getAttribute(EncryptorRuleBeanDefinitionParserTag.ENCRYPTOR_TYPE_ATTRIBUTE));
factory.addConstructorArgValue(element.getAttribute(EncryptorRuleBeanDefinitionParserTag.ENCRYPTOR_QUALIFIED_COLUMNS_ATTRIBUTE));
parseAssistedQueryColumns(element, factory);
parseProperties(element, factory);
return factory.getBeanDefinition();
}
private void parseAssistedQueryColumns(final Element element, final BeanDefinitionBuilder factory) {
String assistedQueryColumns = element.getAttribute(EncryptorRuleBeanDefinitionParserTag.ENCRYPTOR_ASSISTED_QUERY_COLUMNS_ATTRIBUTE);
if (!Strings.isNullOrEmpty(assistedQueryColumns)) {
factory.addConstructorArgValue(assistedQueryColumns);
}
}
private void parseProperties(final Element element, final BeanDefinitionBuilder factory) {
String properties = element.getAttribute(EncryptorRuleBeanDefinitionParserTag.ENCRYPTOR_PROPERTY_REF_ATTRIBUTE);
if (!Strings.isNullOrEmpty(properties)) {
factory.addConstructorArgReference(properties);
} else {
factory.addConstructorArgValue(new Properties());
}
}
private List<BeanDefinition> parseTableRulesConfiguration(final Element element) {
Element tableRulesElement = DomUtils.getChildElementByTagName(element, ShardingDataSourceBeanDefinitionParserTag.TABLE_RULES_TAG);
List<Element> tableRuleElements = DomUtils.getChildElementsByTagName(tableRulesElement, ShardingDataSourceBeanDefinitionParserTag.TABLE_RULE_TAG);
List<BeanDefinition> result = new ManagedList<>(tableRuleElements.size());
for (Element each : tableRuleElements) {
result.add(parseTableRuleConfiguration(each));
}
return result;
}
private BeanDefinition parseTableRuleConfiguration(final Element tableElement) {
BeanDefinitionBuilder factory = BeanDefinitionBuilder.rootBeanDefinition(TableRuleConfiguration.class);
factory.addConstructorArgValue(tableElement.getAttribute(ShardingDataSourceBeanDefinitionParserTag.LOGIC_TABLE_ATTRIBUTE));
parseActualDataNodes(tableElement, factory);
parseDatabaseShardingStrategyConfiguration(tableElement, factory);
parseTableShardingStrategyConfiguration(tableElement, factory);
parseKeyGeneratorConfiguration(tableElement, factory);
parseLogicIndex(tableElement, factory);
return factory.getBeanDefinition();
}
private void parseActualDataNodes(final Element tableElement, final BeanDefinitionBuilder factory) {
String actualDataNodes = tableElement.getAttribute(ShardingDataSourceBeanDefinitionParserTag.ACTUAL_DATA_NODES_ATTRIBUTE);
if (!Strings.isNullOrEmpty(actualDataNodes)) {
factory.addConstructorArgValue(actualDataNodes);
}
}
private void parseDatabaseShardingStrategyConfiguration(final Element tableElement, final BeanDefinitionBuilder factory) {
String databaseStrategy = tableElement.getAttribute(ShardingDataSourceBeanDefinitionParserTag.DATABASE_STRATEGY_REF_ATTRIBUTE);
if (!Strings.isNullOrEmpty(databaseStrategy)) {
factory.addPropertyReference("databaseShardingStrategyConfig", databaseStrategy);
}
}
private void parseTableShardingStrategyConfiguration(final Element tableElement, final BeanDefinitionBuilder factory) {
String tableStrategy = tableElement.getAttribute(ShardingDataSourceBeanDefinitionParserTag.TABLE_STRATEGY_REF_ATTRIBUTE);
if (!Strings.isNullOrEmpty(tableStrategy)) {
factory.addPropertyReference("tableShardingStrategyConfig", tableStrategy);
}
}
private void parseKeyGeneratorConfiguration(final Element tableElement, final BeanDefinitionBuilder factory) {
String keyGenerator = tableElement.getAttribute(ShardingDataSourceBeanDefinitionParserTag.KEY_GENERATOR_REF_ATTRIBUTE);
if (!Strings.isNullOrEmpty(keyGenerator)) {
factory.addPropertyReference("keyGeneratorConfig", keyGenerator);
}
}
private void parseLogicIndex(final Element tableElement, final BeanDefinitionBuilder factory) {
String logicIndex = tableElement.getAttribute(ShardingDataSourceBeanDefinitionParserTag.LOGIC_INDEX);
if (!Strings.isNullOrEmpty(logicIndex)) {
factory.addPropertyValue("logicIndex", logicIndex);
}
}
private List<String> parseBindingTablesConfiguration(final Element element) {
Element bindingTableRulesElement = DomUtils.getChildElementByTagName(element, ShardingDataSourceBeanDefinitionParserTag.BINDING_TABLE_RULES_TAG);
if (null == bindingTableRulesElement) {
return Collections.emptyList();
}
List<Element> bindingTableRuleElements = DomUtils.getChildElementsByTagName(bindingTableRulesElement, ShardingDataSourceBeanDefinitionParserTag.BINDING_TABLE_RULE_TAG);
List<String> result = new LinkedList<>();
for (Element each : bindingTableRuleElements) {
result.add(each.getAttribute(ShardingDataSourceBeanDefinitionParserTag.LOGIC_TABLES_ATTRIBUTE));
}
return result;
}
private List<String> parseBroadcastTables(final Element element) {
Element broadcastTableRulesElement = DomUtils.getChildElementByTagName(element, ShardingDataSourceBeanDefinitionParserTag.BROADCAST_TABLE_RULES_TAG);
if (null == broadcastTableRulesElement) {
return Collections.emptyList();
}
List<Element> broadcastTableRuleElements = DomUtils.getChildElementsByTagName(broadcastTableRulesElement, ShardingDataSourceBeanDefinitionParserTag.BROADCAST_TABLE_RULE_TAG);
List<String> result = new LinkedList<>();
for (Element each : broadcastTableRuleElements) {
result.add(each.getAttribute(ShardingDataSourceBeanDefinitionParserTag.TABLE_ATTRIBUTE));
}
return result;
}
private Properties parseProperties(final Element element, final ParserContext parserContext) {
Element propsElement = DomUtils.getChildElementByTagName(element, ShardingDataSourceBeanDefinitionParserTag.PROPS_TAG);
return null == propsElement ? new Properties() : parserContext.getDelegate().parsePropsElement(propsElement);
}
}
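The tail of the parser above maps each table-rule element and its strategy/key-generator references onto a TableRuleConfiguration bean, and the surrounding sharding-rule element onto a ShardingRuleConfiguration through the property names passed to addPropertyValue. The snippet below is a rough programmatic sketch of the object graph those bean definitions produce; it assumes ShardingRuleConfiguration exposes setters matching those property names (setTableRuleConfigs, setBindingTableGroups, setBroadcastTables) and that TableRuleConfiguration accepts (logicTable, actualDataNodes) constructor arguments, as the addConstructorArgValue calls suggest. The table names and data-node expression are made-up examples, not values taken from this document.
import java.util.Collections;
import org.apache.shardingsphere.api.config.sharding.ShardingRuleConfiguration;
import org.apache.shardingsphere.api.config.sharding.TableRuleConfiguration;
final class ShardingRuleConfigurationSketch {
    // Builds in code roughly what the XML parser assembles from the sharding-rule element.
    static ShardingRuleConfiguration buildExampleRuleConfiguration() {
        // Hypothetical logic table and actual-data-nodes expression.
        TableRuleConfiguration orderTableRule = new TableRuleConfiguration("t_order", "ds_${0..1}.t_order_${0..1}");
        ShardingRuleConfiguration result = new ShardingRuleConfiguration();
        result.setTableRuleConfigs(Collections.singletonList(orderTableRule));
        result.setBindingTableGroups(Collections.singletonList("t_order, t_order_item"));
        result.setBroadcastTables(Collections.singletonList("t_config"));
        return result;
    }
}
When the same rules are declared in XML, parseInternal wires the parsed data-source map, this rule configuration and the parsed Properties into a SpringShardingDataSource and registers close() as the destroy method.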
|
sharding-spring/sharding-jdbc-spring/sharding-jdbc-spring-namespace/src/main/java/org/apache/shardingsphere/shardingjdbc/spring/namespace/parser/ShardingDataSourceBeanDefinitionParser.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.shardingsphere.shardingjdbc.spring.namespace.parser;
import com.google.common.base.Splitter;
import com.google.common.base.Strings;
import org.apache.shardingsphere.api.config.encryptor.EncryptRuleConfiguration;
import org.apache.shardingsphere.api.config.encryptor.EncryptorRuleConfiguration;
import org.apache.shardingsphere.api.config.masterslave.MasterSlaveRuleConfiguration;
import org.apache.shardingsphere.api.config.sharding.ShardingRuleConfiguration;
import org.apache.shardingsphere.api.config.sharding.TableRuleConfiguration;
import org.apache.shardingsphere.shardingjdbc.spring.datasource.SpringShardingDataSource;
import org.apache.shardingsphere.shardingjdbc.spring.namespace.constants.EncryptDataSourceBeanDefinitionParserTag;
import org.apache.shardingsphere.shardingjdbc.spring.namespace.constants.EncryptorRuleBeanDefinitionParserTag;
import org.apache.shardingsphere.shardingjdbc.spring.namespace.constants.MasterSlaveDataSourceBeanDefinitionParserTag;
import org.apache.shardingsphere.shardingjdbc.spring.namespace.constants.ShardingDataSourceBeanDefinitionParserTag;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.beans.factory.config.RuntimeBeanReference;
import org.springframework.beans.factory.support.AbstractBeanDefinition;
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
import org.springframework.beans.factory.support.ManagedList;
import org.springframework.beans.factory.support.ManagedMap;
import org.springframework.beans.factory.xml.AbstractBeanDefinitionParser;
import org.springframework.beans.factory.xml.ParserContext;
import org.springframework.util.xml.DomUtils;
import org.w3c.dom.Element;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
/**
* Sharding data source parser for spring namespace.
*
* @author caohao
* @author panjuan
*/
public final class ShardingDataSourceBeanDefinitionParser extends AbstractBeanDefinitionParser {
@Override
protected AbstractBeanDefinition parseInternal(final Element element, final ParserContext parserContext) {
BeanDefinitionBuilder factory = BeanDefinitionBuilder.rootBeanDefinition(SpringShardingDataSource.class);
factory.addConstructorArgValue(parseDataSources(element));
factory.addConstructorArgValue(parseShardingRuleConfiguration(element));
factory.addConstructorArgValue(parseProperties(element, parserContext));
factory.setDestroyMethodName("close");
return factory.getBeanDefinition();
}
private Map<String, RuntimeBeanReference> parseDataSources(final Element element) {
Element shardingRuleElement = DomUtils.getChildElementByTagName(element, ShardingDataSourceBeanDefinitionParserTag.SHARDING_RULE_CONFIG_TAG);
List<String> dataSources = Splitter.on(",").trimResults().splitToList(shardingRuleElement.getAttribute(ShardingDataSourceBeanDefinitionParserTag.DATA_SOURCE_NAMES_TAG));
Map<String, RuntimeBeanReference> result = new ManagedMap<>(dataSources.size());
for (String each : dataSources) {
result.put(each, new RuntimeBeanReference(each));
}
return result;
}
private BeanDefinition parseShardingRuleConfiguration(final Element element) {
Element shardingRuleElement = DomUtils.getChildElementByTagName(element, ShardingDataSourceBeanDefinitionParserTag.SHARDING_RULE_CONFIG_TAG);
BeanDefinitionBuilder factory = BeanDefinitionBuilder.rootBeanDefinition(ShardingRuleConfiguration.class);
parseDefaultDataSource(factory, shardingRuleElement);
parseDefaultDatabaseShardingStrategy(factory, shardingRuleElement);
parseDefaultTableShardingStrategy(factory, shardingRuleElement);
factory.addPropertyValue("tableRuleConfigs", parseTableRulesConfiguration(shardingRuleElement));
factory.addPropertyValue("masterSlaveRuleConfigs", parseMasterSlaveRulesConfiguration(shardingRuleElement));
factory.addPropertyValue("bindingTableGroups", parseBindingTablesConfiguration(shardingRuleElement));
factory.addPropertyValue("broadcastTables", parseBroadcastTables(shardingRuleElement));
parseDefaultKeyGenerator(factory, shardingRuleElement);
parseEncryptRuleConfiguration(factory, shardingRuleElement);
return factory.getBeanDefinition();
}
private void parseDefaultKeyGenerator(final BeanDefinitionBuilder factory, final Element element) {
String defaultKeyGeneratorConfig = element.getAttribute(ShardingDataSourceBeanDefinitionParserTag.DEFAULT_KEY_GENERATOR_REF_ATTRIBUTE);
if (!Strings.isNullOrEmpty(defaultKeyGeneratorConfig)) {
factory.addPropertyReference("defaultKeyGeneratorConfig", defaultKeyGeneratorConfig);
}
}
private void parseDefaultDataSource(final BeanDefinitionBuilder factory, final Element element) {
String defaultDataSource = element.getAttribute(ShardingDataSourceBeanDefinitionParserTag.DEFAULT_DATA_SOURCE_NAME_TAG);
if (!Strings.isNullOrEmpty(defaultDataSource)) {
factory.addPropertyValue("defaultDataSourceName", defaultDataSource);
}
}
private void parseDefaultDatabaseShardingStrategy(final BeanDefinitionBuilder factory, final Element element) {
String defaultDatabaseShardingStrategy = element.getAttribute(ShardingDataSourceBeanDefinitionParserTag.DEFAULT_DATABASE_STRATEGY_REF_ATTRIBUTE);
if (!Strings.isNullOrEmpty(defaultDatabaseShardingStrategy)) {
factory.addPropertyReference("defaultDatabaseShardingStrategyConfig", defaultDatabaseShardingStrategy);
}
}
private void parseDefaultTableShardingStrategy(final BeanDefinitionBuilder factory, final Element element) {
String defaultTableShardingStrategy = element.getAttribute(ShardingDataSourceBeanDefinitionParserTag.DEFAULT_TABLE_STRATEGY_REF_ATTRIBUTE);
if (!Strings.isNullOrEmpty(defaultTableShardingStrategy)) {
factory.addPropertyReference("defaultTableShardingStrategyConfig", defaultTableShardingStrategy);
}
}
private List<BeanDefinition> parseMasterSlaveRulesConfiguration(final Element element) {
Element masterSlaveRulesElement = DomUtils.getChildElementByTagName(element, ShardingDataSourceBeanDefinitionParserTag.MASTER_SLAVE_RULES_TAG);
if (null == masterSlaveRulesElement) {
return new LinkedList<>();
}
List<Element> masterSlaveRuleElements = DomUtils.getChildElementsByTagName(masterSlaveRulesElement, ShardingDataSourceBeanDefinitionParserTag.MASTER_SLAVE_RULE_TAG);
List<BeanDefinition> result = new ManagedList<>(masterSlaveRuleElements.size());
for (Element each : masterSlaveRuleElements) {
result.add(parseMasterSlaveRuleConfiguration(each));
}
return result;
}
private BeanDefinition parseMasterSlaveRuleConfiguration(final Element masterSlaveElement) {
BeanDefinitionBuilder factory = BeanDefinitionBuilder.rootBeanDefinition(MasterSlaveRuleConfiguration.class);
factory.addConstructorArgValue(masterSlaveElement.getAttribute(ID_ATTRIBUTE));
factory.addConstructorArgValue(masterSlaveElement.getAttribute(MasterSlaveDataSourceBeanDefinitionParserTag.MASTER_DATA_SOURCE_NAME_ATTRIBUTE));
factory.addConstructorArgValue(parseSlaveDataSourcesRef(masterSlaveElement));
parseMasterSlaveRuleLoadBalanceAlgorithm(masterSlaveElement, factory);
return factory.getBeanDefinition();
}
private void parseMasterSlaveRuleLoadBalanceAlgorithm(final Element masterSlaveElement, final BeanDefinitionBuilder factory) {
// TODO process LOAD_BALANCE_ALGORITHM_REF_ATTRIBUTE
// String loadBalanceAlgorithmRef = masterSlaveElement.getAttribute(MasterSlaveDataSourceBeanDefinitionParserTag.LOAD_BALANCE_ALGORITHM_REF_ATTRIBUTE);
// if (!Strings.isNullOrEmpty(loadBalanceAlgorithmRef)) {
// factory.addConstructorArgReference(loadBalanceAlgorithmRef);
// } else {
// factory.addConstructorArgValue(new MasterSlaveLoadBalanceAlgorithmServiceLoader().newService());
// }
}
private Collection<String> parseSlaveDataSourcesRef(final Element element) {
List<String> slaveDataSources = Splitter.on(",").trimResults().splitToList(element.getAttribute(MasterSlaveDataSourceBeanDefinitionParserTag.SLAVE_DATA_SOURCE_NAMES_ATTRIBUTE));
Collection<String> result = new ManagedList<>(slaveDataSources.size());
result.addAll(slaveDataSources);
return result;
}
private void parseEncryptRuleConfiguration(final BeanDefinitionBuilder factory, final Element element) {
Element encryptRuleElement = DomUtils.getChildElementByTagName(element, EncryptDataSourceBeanDefinitionParserTag.ENCRYPT_RULE_CONFIG_TAG);
if (null != encryptRuleElement) {
factory.addPropertyValue("encryptRuleConfig", parseEncryptRuleConfiguration(encryptRuleElement));
}
}
private BeanDefinition parseEncryptRuleConfiguration(final Element element) {
Element encryptRuleElement = DomUtils.getChildElementByTagName(element, EncryptDataSourceBeanDefinitionParserTag.ENCRYPT_RULE_CONFIG_TAG);
BeanDefinitionBuilder factory = BeanDefinitionBuilder.rootBeanDefinition(EncryptRuleConfiguration.class);
factory.addConstructorArgValue(parseEncryptorRulesConfiguration(encryptRuleElement));
return factory.getBeanDefinition();
}
private Map<String, BeanDefinition> parseEncryptorRulesConfiguration(final Element element) {
List<Element> encryptorRuleElements = DomUtils.getChildElementsByTagName(element, EncryptDataSourceBeanDefinitionParserTag.ENCRYPTOR_RULE_CONFIG_TAG);
Map<String, BeanDefinition> result = new ManagedMap<>(encryptorRuleElements.size());
for (Element each : encryptorRuleElements) {
result.put(each.getAttribute(ID_ATTRIBUTE), parseEncryptorRuleConfiguration(each));
}
return result;
}
private AbstractBeanDefinition parseEncryptorRuleConfiguration(final Element element) {
BeanDefinitionBuilder factory = BeanDefinitionBuilder.rootBeanDefinition(EncryptorRuleConfiguration.class);
factory.addConstructorArgValue(element.getAttribute(EncryptorRuleBeanDefinitionParserTag.ENCRYPTOR_TYPE_ATTRIBUTE));
factory.addConstructorArgValue(element.getAttribute(EncryptorRuleBeanDefinitionParserTag.ENCRYPTOR_QUALIFIED_COLUMNS_ATTRIBUTE));
parseAssistedQueryColumns(element, factory);
parseProperties(element, factory);
return factory.getBeanDefinition();
}
private void parseAssistedQueryColumns(final Element element, final BeanDefinitionBuilder factory) {
String assistedQueryColumns = element.getAttribute(EncryptorRuleBeanDefinitionParserTag.ENCRYPTOR_ASSISTED_QUERY_COLUMNS_ATTRIBUTE);
if (!Strings.isNullOrEmpty(assistedQueryColumns)) {
factory.addConstructorArgValue(assistedQueryColumns);
}
}
private void parseProperties(final Element element, final BeanDefinitionBuilder factory) {
String properties = element.getAttribute(EncryptorRuleBeanDefinitionParserTag.ENCRYPTOR_PROPERTY_REF_ATTRIBUTE);
if (!Strings.isNullOrEmpty(properties)) {
factory.addConstructorArgReference(properties);
} else {
factory.addConstructorArgValue(new Properties());
}
}
private List<BeanDefinition> parseTableRulesConfiguration(final Element element) {
Element tableRulesElement = DomUtils.getChildElementByTagName(element, ShardingDataSourceBeanDefinitionParserTag.TABLE_RULES_TAG);
List<Element> tableRuleElements = DomUtils.getChildElementsByTagName(tableRulesElement, ShardingDataSourceBeanDefinitionParserTag.TABLE_RULE_TAG);
List<BeanDefinition> result = new ManagedList<>(tableRuleElements.size());
for (Element each : tableRuleElements) {
result.add(parseTableRuleConfiguration(each));
}
return result;
}
private BeanDefinition parseTableRuleConfiguration(final Element tableElement) {
BeanDefinitionBuilder factory = BeanDefinitionBuilder.rootBeanDefinition(TableRuleConfiguration.class);
factory.addConstructorArgValue(tableElement.getAttribute(ShardingDataSourceBeanDefinitionParserTag.LOGIC_TABLE_ATTRIBUTE));
parseActualDataNodes(tableElement, factory);
parseDatabaseShardingStrategyConfiguration(tableElement, factory);
parseTableShardingStrategyConfiguration(tableElement, factory);
parseKeyGeneratorConfiguration(tableElement, factory);
parseLogicIndex(tableElement, factory);
return factory.getBeanDefinition();
}
private void parseActualDataNodes(final Element tableElement, final BeanDefinitionBuilder factory) {
String actualDataNodes = tableElement.getAttribute(ShardingDataSourceBeanDefinitionParserTag.ACTUAL_DATA_NODES_ATTRIBUTE);
if (!Strings.isNullOrEmpty(actualDataNodes)) {
factory.addConstructorArgValue(actualDataNodes);
}
}
private void parseDatabaseShardingStrategyConfiguration(final Element tableElement, final BeanDefinitionBuilder factory) {
String databaseStrategy = tableElement.getAttribute(ShardingDataSourceBeanDefinitionParserTag.DATABASE_STRATEGY_REF_ATTRIBUTE);
if (!Strings.isNullOrEmpty(databaseStrategy)) {
factory.addPropertyReference("databaseShardingStrategyConfig", databaseStrategy);
}
}
private void parseTableShardingStrategyConfiguration(final Element tableElement, final BeanDefinitionBuilder factory) {
String tableStrategy = tableElement.getAttribute(ShardingDataSourceBeanDefinitionParserTag.TABLE_STRATEGY_REF_ATTRIBUTE);
if (!Strings.isNullOrEmpty(tableStrategy)) {
factory.addPropertyReference("tableShardingStrategyConfig", tableStrategy);
}
}
private void parseKeyGeneratorConfiguration(final Element tableElement, final BeanDefinitionBuilder factory) {
String keyGenerator = tableElement.getAttribute(ShardingDataSourceBeanDefinitionParserTag.KEY_GENERATOR_REF_ATTRIBUTE);
if (!Strings.isNullOrEmpty(keyGenerator)) {
factory.addPropertyReference("keyGeneratorConfig", keyGenerator);
}
}
private void parseLogicIndex(final Element tableElement, final BeanDefinitionBuilder factory) {
String logicIndex = tableElement.getAttribute(ShardingDataSourceBeanDefinitionParserTag.LOGIC_INDEX);
if (!Strings.isNullOrEmpty(logicIndex)) {
factory.addPropertyValue("logicIndex", logicIndex);
}
}
private List<String> parseBindingTablesConfiguration(final Element element) {
Element bindingTableRulesElement = DomUtils.getChildElementByTagName(element, ShardingDataSourceBeanDefinitionParserTag.BINDING_TABLE_RULES_TAG);
if (null == bindingTableRulesElement) {
return Collections.emptyList();
}
List<Element> bindingTableRuleElements = DomUtils.getChildElementsByTagName(bindingTableRulesElement, ShardingDataSourceBeanDefinitionParserTag.BINDING_TABLE_RULE_TAG);
List<String> result = new LinkedList<>();
for (Element each : bindingTableRuleElements) {
result.add(each.getAttribute(ShardingDataSourceBeanDefinitionParserTag.LOGIC_TABLES_ATTRIBUTE));
}
return result;
}
private List<String> parseBroadcastTables(final Element element) {
Element broadcastTableRulesElement = DomUtils.getChildElementByTagName(element, ShardingDataSourceBeanDefinitionParserTag.BROADCAST_TABLE_RULES_TAG);
if (null == broadcastTableRulesElement) {
return Collections.emptyList();
}
List<Element> broadcastTableRuleElements = DomUtils.getChildElementsByTagName(broadcastTableRulesElement, ShardingDataSourceBeanDefinitionParserTag.BROADCAST_TABLE_RULE_TAG);
List<String> result = new LinkedList<>();
for (Element each : broadcastTableRuleElements) {
result.add(each.getAttribute(ShardingDataSourceBeanDefinitionParserTag.TABLE_ATTRIBUTE));
}
return result;
}
private Properties parseProperties(final Element element, final ParserContext parserContext) {
Element propsElement = DomUtils.getChildElementByTagName(element, ShardingDataSourceBeanDefinitionParserTag.PROPS_TAG);
return null == propsElement ? new Properties() : parserContext.getDelegate().parsePropsElement(propsElement);
}
}
|
parseEncryptRuleConfiguration()
|
sharding-spring/sharding-jdbc-spring/sharding-jdbc-spring-namespace/src/main/java/org/apache/shardingsphere/shardingjdbc/spring/namespace/parser/ShardingDataSourceBeanDefinitionParser.java
|
parseEncryptRuleConfiguration()
|
|
Java
|
apache-2.0
|
ec6eba7254e09250e7cf941441a58d4a7138f34b
| 0
|
material-foundation/material-remixer-android
|
/*
* Copyright 2016 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.libraries.remixer;
import java.lang.ref.WeakReference;
/**
* An item that can be displayed on Remixer's interfaces.
*
 * <p>These are either {@link Variable}s or {@link Trigger}s.
*/
public abstract class RemixerItem {
/**
* The name to display in the UI for this remixer item.
*/
private final String title;
/**
* The key to use to identify this item across storage and all the interfaces.
*/
private final String key;
/**
* The layout to inflate to display this remixer item. If set to 0, the default layout associated
* with the remixer item type will be used.
*/
private final int layoutId;
/**
* A weak reference to the object that created this RemixerItem.
*
* <p>It should be a reference to an activity, but it isn't since remixer_core cannot depend on
* Android classes. It is a weak reference in order not to leak the activity accidentally.
*/
@SuppressWarnings("unchecked")
private final WeakReference parentObject;
/**
* A copy of the parent object's class object. This will be necessary to know whether an object
* is of the same class as the parent object, even after the parent object has been reclaimed by
* the Garbage Collector.
*/
@SuppressWarnings("unchecked")
private final Class parentObjectClass;
/**
   * Constructs a new RemixerItem with the given title, key, parent object and layout id.
*/
@SuppressWarnings("unchecked")
protected RemixerItem(String title, String key, Object parentObject, int layoutId) {
this.title = title;
this.key = key;
this.parentObject = new WeakReference(parentObject);
this.parentObjectClass = parentObject.getClass();
this.layoutId = layoutId;
}
/**
* Checks whether the parent object is the same as the parameter.
*/
public boolean isParentObject(Object object) {
Object localParentObject = parentObject.get();
if (object == null) {
return false;
}
return localParentObject == object;
}
/**
* Checks whether the parameter is of the same class as the (possibly already reclaimed) parent
* object.
*/
public boolean isSameClassAsParentObject(Object object) {
return parentObjectClass.equals(object.getClass());
}
public String getTitle() {
return title;
}
public String getKey() {
return key;
}
/**
* Returns the layout id to inflate when displaying this Remixer item.
*/
public int getLayoutId() {
return layoutId;
}
abstract void clearCallback();
}
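Because RemixerItem holds only a WeakReference to the object that created it plus a copy of that object's Class, isParentObject performs an identity check against a possibly reclaimed referent, while isSameClassAsParentObject keeps working after the referent is gone. The sketch below is a hypothetical minimal subclass, for illustration only; it has to live in the same package because clearCallback() is package-private, and the layoutId of 0 requests the default layout as documented above. The title, key and parent object are made-up values.
package com.google.android.libraries.remixer;
class NoOpRemixerItem extends RemixerItem {
  NoOpRemixerItem(String title, String key, Object parentObject) {
    super(title, key, parentObject, 0); // 0 selects the default layout for this item type.
  }
  @Override
  void clearCallback() {
    // Nothing to clear in this illustration.
  }
  static void demo() {
    Object activity = new Object();
    RemixerItem item = new NoOpRemixerItem("Font size", "font_size", activity);
    boolean sameInstance = item.isParentObject(activity);             // true: same instance as the parent.
    boolean sameClass = item.isSameClassAsParentObject(new Object()); // true: class matches even after reclamation.
  }
}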
|
remixer_core/src/main/java/com/google/android/libraries/remixer/RemixerItem.java
|
/*
* Copyright 2016 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.libraries.remixer;
import java.lang.ref.WeakReference;
/**
* An item that can be displayed on Remixer's interfaces.
*
 * <p>These are either {@link Variable}s or {@link Trigger}s.
*/
public abstract class RemixerItem {
/**
* The name to display in the UI for this remixer item.
*/
private final String title;
/**
* The key to use to identify this item across storage and all the interfaces.
*/
private final String key;
/**
* The layout to inflate to display this remixer item. If set to 0, the default layout associated
* with the remixer item type will be used.
*/
private final int layoutId;
/**
* A weak reference to the object that created this RemixerItem.
*
* <p>It should be a reference to an activity, but it isn't since remixer_core cannot depend on
* Android classes. It is a weak reference in order not to leak the activity accidentally.
*/
@SuppressWarnings("unchecked")
private final WeakReference parentObject;
/**
* A copy of the parent object's class object. This will be necessary to know whether an object
* is of the same class as the parent object, even after the parent object has been reclaimed by
* the Garbage Collector.
*/
@SuppressWarnings("unchecked")
private final Class parentObjectClass;
/**
   * Constructs a new RemixerItem with the given title, key, parent object and layout id.
*/
@SuppressWarnings("unchecked")
protected RemixerItem(String title, String key, Object parentObject, int layoutId) {
this.title = title;
this.key = key;
this.parentObject = new WeakReference(parentObject);
this.parentObjectClass = parentObject.getClass();
this.layoutId = layoutId;
}
/**
* Checks whether the parent object is the same as the parameter.
*/
public boolean isParentObject(Object object) {
Object localParentObject = parentObject.get();
if (localParentObject == null) {
return false;
}
return localParentObject.equals(parentObject);
}
/**
* Checks whether the parameter is of the same class as the (possibly already reclaimed) parent
* object.
*/
public boolean isSameClassAsParentObject(Object object) {
return parentObjectClass.equals(object.getClass());
}
public String getTitle() {
return title;
}
public String getKey() {
return key;
}
/**
* Returns the layout id to inflate when displaying this Remixer item.
*/
public int getLayoutId() {
return layoutId;
}
abstract void clearCallback();
}
|
Fix logic for checking reference equality.
|
remixer_core/src/main/java/com/google/android/libraries/remixer/RemixerItem.java
|
Fix logic for checking reference equality.
|
|
Java
|
apache-2.0
|
51c2a61e890aa93bb5f00154a6ee6f2f358f2a39
| 0
|
francescomari/jackrabbit-oak,FlakyTestDetection/jackrabbit-oak,code-distillery/jackrabbit-oak,chetanmeh/jackrabbit-oak,yesil/jackrabbit-oak,chetanmeh/jackrabbit-oak,catholicon/jackrabbit-oak,catholicon/jackrabbit-oak,alexparvulescu/jackrabbit-oak,alexkli/jackrabbit-oak,anchela/jackrabbit-oak,code-distillery/jackrabbit-oak,yesil/jackrabbit-oak,alexkli/jackrabbit-oak,anchela/jackrabbit-oak,alexparvulescu/jackrabbit-oak,alexparvulescu/jackrabbit-oak,stillalex/jackrabbit-oak,alexkli/jackrabbit-oak,yesil/jackrabbit-oak,FlakyTestDetection/jackrabbit-oak,chetanmeh/jackrabbit-oak,alexkli/jackrabbit-oak,francescomari/jackrabbit-oak,code-distillery/jackrabbit-oak,FlakyTestDetection/jackrabbit-oak,code-distillery/jackrabbit-oak,chetanmeh/jackrabbit-oak,chetanmeh/jackrabbit-oak,stillalex/jackrabbit-oak,alexkli/jackrabbit-oak,francescomari/jackrabbit-oak,FlakyTestDetection/jackrabbit-oak,stillalex/jackrabbit-oak,code-distillery/jackrabbit-oak,francescomari/jackrabbit-oak,anchela/jackrabbit-oak,catholicon/jackrabbit-oak,yesil/jackrabbit-oak,FlakyTestDetection/jackrabbit-oak,stillalex/jackrabbit-oak,catholicon/jackrabbit-oak,anchela/jackrabbit-oak,alexparvulescu/jackrabbit-oak,catholicon/jackrabbit-oak,stillalex/jackrabbit-oak,alexparvulescu/jackrabbit-oak,francescomari/jackrabbit-oak,anchela/jackrabbit-oak
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.segment.tool;
import static com.google.common.base.Preconditions.checkNotNull;
import static org.apache.jackrabbit.oak.segment.compaction.SegmentGCOptions.defaultGCOptions;
import static org.apache.jackrabbit.oak.segment.file.FileStoreBuilder.fileStoreBuilder;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import org.apache.jackrabbit.oak.segment.file.FileStore;
import org.apache.jackrabbit.oak.segment.file.InvalidFileStoreVersionException;
import org.apache.jackrabbit.oak.segment.file.JournalReader;
/**
* Perform an offline compaction of an existing segment store.
*/
public class Compact implements Runnable {
/**
* Create a builder for the {@link Compact} command.
*
* @return an instance of {@link Builder}.
*/
public static Builder builder() {
return new Builder();
}
/**
* Collect options for the {@link Compact} command.
*/
public static class Builder {
private File path;
private boolean force;
private Builder() {
// Prevent external instantiation.
}
/**
* The path to an existing segment store. This parameter is required.
*
* @param path the path to an existing segment store.
* @return this builder.
*/
public Builder withPath(File path) {
this.path = checkNotNull(path);
return this;
}
/**
* Set whether or not to force compact concurrent commits on top of
* already compacted commits after the maximum number of retries has
* been reached. Force committing tries to exclusively write lock the
* node store.
*
* @param force {@code true} to force an exclusive commit of the
* compacted state, {@code false} otherwise.
* @return this builder.
*/
public Builder withForce(boolean force) {
this.force = force;
return this;
}
/**
* Create an executable version of the {@link Compact} command.
*
* @return an instance of {@link Runnable}.
*/
public Runnable build() {
checkNotNull(path);
return new Compact(this);
}
}
private final File path;
private Compact(Builder builder) {
this.path = builder.path;
}
@Override
public void run() {
try {
compact();
} catch (Exception e) {
e.printStackTrace();
}
}
private void compact() throws IOException, InvalidFileStoreVersionException {
try (FileStore store = newFileStore()) {
store.compact();
}
System.out.println(" -> cleaning up");
try (FileStore store = newFileStore()) {
store.cleanup();
File journal = new File(path, "journal.log");
String head;
try (JournalReader journalReader = new JournalReader(journal)) {
head = journalReader.next() + " root " + System.currentTimeMillis() + "\n";
}
try (RandomAccessFile journalFile = new RandomAccessFile(journal, "rw")) {
System.out.println(" -> writing new " + journal.getName() + ": " + head);
journalFile.setLength(0);
journalFile.writeBytes(head);
journalFile.getChannel().force(false);
}
}
}
private FileStore newFileStore() throws IOException, InvalidFileStoreVersionException {
return fileStoreBuilder(path.getAbsoluteFile()).withGCOptions(defaultGCOptions().setOffline()).build();
}
}
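Compact is meant to be driven through its builder: withPath is mandatory (checked in build()), withForce is collected, and run() compacts the store, cleans up, and rewrites journal.log with a single head entry. The following usage sketch uses a made-up store path and assumes the caller can import Compact from org.apache.jackrabbit.oak.segment.tool.
import java.io.File;
import org.apache.jackrabbit.oak.segment.tool.Compact;
public final class CompactDemo {
    public static void main(String[] args) {
        // Hypothetical path to an existing segment store.
        Runnable compact = Compact.builder()
                .withPath(new File("/path/to/segmentstore"))
                .withForce(true)
                .build();
        compact.run(); // Compacts, then cleans up and rewrites journal.log.
    }
}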
|
oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/tool/Compact.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.segment.tool;
import static com.google.common.base.Preconditions.checkNotNull;
import static org.apache.jackrabbit.oak.segment.compaction.SegmentGCOptions.defaultGCOptions;
import static org.apache.jackrabbit.oak.segment.file.FileStoreBuilder.fileStoreBuilder;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import org.apache.jackrabbit.oak.segment.file.FileStore;
import org.apache.jackrabbit.oak.segment.file.InvalidFileStoreVersionException;
import org.apache.jackrabbit.oak.segment.file.JournalReader;
/**
* Perform an offline compaction of an existing segment store.
*/
public class Compact implements Runnable {
/**
* Create a builder for the {@link Compact} command.
*
* @return an instance of {@link Builder}.
*/
public static Builder builder() {
return new Builder();
}
/**
* Collect options for the {@link Compact} command.
*/
public static class Builder {
private File path;
private boolean force;
private Builder() {
// Prevent external instantiation.
}
/**
* The path to an existing segment store. This parameter is required.
*
* @param path the path to an existing segment store.
* @return this builder.
*/
public Builder withPath(File path) {
this.path = checkNotNull(path);
return this;
}
/**
* Set whether or not to force compact concurrent commits on top of
* already compacted commits after the maximum number of retries has
* been reached. Force committing tries to exclusively write lock the
* node store.
*
* @param force {@code true} to force an exclusive commit of the
* compacted state, {@code false} otherwise.
* @return this builder.
*/
public Builder withForce(boolean force) {
this.force = force;
return this;
}
/**
* Create an executable version of the {@link Compact} command.
*
* @return an instance of {@link Runnable}.
*/
public Runnable build() {
checkNotNull(path);
return new Compact(this);
}
}
private final File path;
private final boolean force;
private Compact(Builder builder) {
this.path = builder.path;
this.force = builder.force;
}
@Override
public void run() {
try {
compact();
} catch (Exception e) {
e.printStackTrace();
}
}
private void compact() throws IOException, InvalidFileStoreVersionException {
try (FileStore store = newFileStore()) {
store.compact();
}
System.out.println(" -> cleaning up");
try (FileStore store = newFileStore()) {
store.cleanup();
File journal = new File(path, "journal.log");
String head;
try (JournalReader journalReader = new JournalReader(journal)) {
head = journalReader.next() + " root " + System.currentTimeMillis() + "\n";
}
try (RandomAccessFile journalFile = new RandomAccessFile(journal, "rw")) {
System.out.println(" -> writing new " + journal.getName() + ": " + head);
journalFile.setLength(0);
journalFile.writeBytes(head);
journalFile.getChannel().force(false);
}
}
}
private FileStore newFileStore() throws IOException, InvalidFileStoreVersionException {
return fileStoreBuilder(path.getAbsoluteFile()).withGCOptions(defaultGCOptions().setOffline()).build();
}
}
|
OAK-5293: Static code analysis and code cleanup
Remove unused fields
git-svn-id: 67138be12999c61558c3dd34328380c8e4523e73@1774211 13f79535-47bb-0310-9956-ffa450edef68
|
oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/tool/Compact.java
|
OAK-5293: Static code analysis and code cleanup Remove unused fields
|
|
Java
|
apache-2.0
|
781d4e7812d0eaf789c287ba69374d625a13e32c
| 0
|
apache/ignite,andrey-kuznetsov/ignite,SomeFire/ignite,BiryukovVA/ignite,andrey-kuznetsov/ignite,NSAmelchev/ignite,BiryukovVA/ignite,shroman/ignite,daradurvs/ignite,xtern/ignite,nizhikov/ignite,NSAmelchev/ignite,apache/ignite,samaitra/ignite,daradurvs/ignite,ascherbakoff/ignite,BiryukovVA/ignite,NSAmelchev/ignite,shroman/ignite,SomeFire/ignite,daradurvs/ignite,ptupitsyn/ignite,daradurvs/ignite,nizhikov/ignite,samaitra/ignite,ptupitsyn/ignite,shroman/ignite,shroman/ignite,shroman/ignite,SomeFire/ignite,ascherbakoff/ignite,nizhikov/ignite,NSAmelchev/ignite,ascherbakoff/ignite,chandresh-pancholi/ignite,chandresh-pancholi/ignite,apache/ignite,BiryukovVA/ignite,nizhikov/ignite,chandresh-pancholi/ignite,NSAmelchev/ignite,chandresh-pancholi/ignite,andrey-kuznetsov/ignite,BiryukovVA/ignite,ptupitsyn/ignite,chandresh-pancholi/ignite,ilantukh/ignite,NSAmelchev/ignite,andrey-kuznetsov/ignite,ilantukh/ignite,ilantukh/ignite,chandresh-pancholi/ignite,xtern/ignite,ptupitsyn/ignite,shroman/ignite,andrey-kuznetsov/ignite,BiryukovVA/ignite,ptupitsyn/ignite,nizhikov/ignite,apache/ignite,xtern/ignite,ptupitsyn/ignite,apache/ignite,ascherbakoff/ignite,shroman/ignite,andrey-kuznetsov/ignite,chandresh-pancholi/ignite,ptupitsyn/ignite,ilantukh/ignite,andrey-kuznetsov/ignite,SomeFire/ignite,samaitra/ignite,SomeFire/ignite,nizhikov/ignite,NSAmelchev/ignite,SomeFire/ignite,apache/ignite,chandresh-pancholi/ignite,ascherbakoff/ignite,nizhikov/ignite,SomeFire/ignite,ilantukh/ignite,BiryukovVA/ignite,ilantukh/ignite,samaitra/ignite,apache/ignite,shroman/ignite,ptupitsyn/ignite,BiryukovVA/ignite,samaitra/ignite,ilantukh/ignite,samaitra/ignite,ilantukh/ignite,samaitra/ignite,ptupitsyn/ignite,xtern/ignite,ptupitsyn/ignite,xtern/ignite,andrey-kuznetsov/ignite,ilantukh/ignite,nizhikov/ignite,xtern/ignite,shroman/ignite,ascherbakoff/ignite,NSAmelchev/ignite,samaitra/ignite,shroman/ignite,daradurvs/ignite,xtern/ignite,apache/ignite,xtern/ignite,andrey-kuznetsov/ignite,SomeFire/ignite,nizhikov/ignite,xtern/ignite,ilantukh/ignite,samaitra/ignite,BiryukovVA/ignite,SomeFire/ignite,apache/ignite,samaitra/ignite,ascherbakoff/ignite,ascherbakoff/ignite,andrey-kuznetsov/ignite,ascherbakoff/ignite,daradurvs/ignite,daradurvs/ignite,chandresh-pancholi/ignite,daradurvs/ignite,NSAmelchev/ignite,BiryukovVA/ignite,SomeFire/ignite,daradurvs/ignite,daradurvs/ignite
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.util;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.File;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.math.BigInteger;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.URL;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Random;
import java.util.UUID;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.cluster.ClusterGroup;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.compute.ComputeJob;
import org.apache.ignite.compute.ComputeJobAdapter;
import org.apache.ignite.internal.processors.igfs.IgfsUtils;
import org.apache.ignite.internal.util.lang.GridPeerDeployAware;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.X;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteBiTuple;
import org.apache.ignite.lang.IgnitePredicate;
import org.apache.ignite.lang.IgniteProductVersion;
import org.apache.ignite.spi.discovery.tcp.internal.TcpDiscoveryNode;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.http.GridEmbeddedHttpServer;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.apache.ignite.testframework.junits.common.GridCommonTest;
import org.jetbrains.annotations.Nullable;
import static org.junit.Assert.assertArrayEquals;
/**
* Grid utils tests.
*/
@GridCommonTest(group = "Utils")
public class IgniteUtilsSelfTest extends GridCommonAbstractTest {
/** */
public static final int[] EMPTY = new int[0];
/**
* @return 120 character length string.
*/
private String text120() {
char[] chs = new char[120];
Arrays.fill(chs, 'x');
return new String(chs);
}
/**
*
*/
public void testIsPow2() {
assertTrue(U.isPow2(1));
assertTrue(U.isPow2(2));
assertTrue(U.isPow2(4));
assertTrue(U.isPow2(8));
assertTrue(U.isPow2(16));
assertTrue(U.isPow2(16 * 16));
assertTrue(U.isPow2(32 * 32));
assertFalse(U.isPow2(-4));
assertFalse(U.isPow2(-3));
assertFalse(U.isPow2(-2));
assertFalse(U.isPow2(-1));
assertFalse(U.isPow2(0));
assertFalse(U.isPow2(3));
assertFalse(U.isPow2(5));
assertFalse(U.isPow2(6));
assertFalse(U.isPow2(7));
assertFalse(U.isPow2(9));
}
/**
* @throws Exception If failed.
*/
public void testAllLocalIps() throws Exception {
Collection<String> ips = U.allLocalIps();
System.out.println("All local IPs: " + ips);
}
/**
* @throws Exception If failed.
*/
public void testAllLocalMACs() throws Exception {
Collection<String> macs = U.allLocalMACs();
System.out.println("All local MACs: " + macs);
}
/**
     * On Linux, NetworkInterface.getHardwareAddress() returns null from time to time.
*
* @throws Exception If failed.
*/
public void testAllLocalMACsMultiThreaded() throws Exception {
GridTestUtils.runMultiThreaded(new Runnable() {
@Override public void run() {
for (int i = 0; i < 30; i++) {
Collection<String> macs = U.allLocalMACs();
assertTrue("Mac address are not defined.", !macs.isEmpty());
}
}
}, 32, "thread");
}
/**
* @throws Exception If failed.
*/
public void testByteArray2String() throws Exception {
assertEquals("{0x0A,0x14,0x1E,0x28,0x32,0x3C,0x46,0x50,0x5A}",
U.byteArray2String(new byte[]{10, 20, 30, 40, 50, 60, 70, 80, 90}, "0x%02X", ",0x%02X"));
}
/**
* @throws Exception If failed.
*/
public void testFormatMins() throws Exception {
printFormatMins(0);
printFormatMins(1);
printFormatMins(2);
printFormatMins(59);
printFormatMins(60);
printFormatMins(61);
printFormatMins(60 * 24 - 1);
printFormatMins(60 * 24);
printFormatMins(60 * 24 + 1);
printFormatMins(5 * 60 * 24 - 1);
printFormatMins(5 * 60 * 24);
printFormatMins(5 * 60 * 24 + 1);
}
/**
* Helper method for {@link #testFormatMins()}
*
* @param mins Minutes to test.
*/
private void printFormatMins(long mins) {
System.out.println("For " + mins + " minutes: " + X.formatMins(mins));
}
/**
* @throws Exception If failed.
*/
public void testDownloadUrlFromHttp() throws Exception {
GridEmbeddedHttpServer srv = null;
try {
String urlPath = "/testDownloadUrl/";
srv = GridEmbeddedHttpServer.startHttpServer().withFileDownloadingHandler(urlPath,
GridTestUtils.resolveIgnitePath("/modules/core/src/test/config/tests.properties"));
File file = new File(System.getProperty("java.io.tmpdir") + File.separator + "url-http.file");
file = U.downloadUrl(new URL(srv.getBaseUrl() + urlPath), file);
assert file.exists();
assert file.delete();
}
finally {
if (srv != null)
srv.stop(1);
}
}
/**
* @throws Exception If failed.
*/
public void testDownloadUrlFromHttps() throws Exception {
GridEmbeddedHttpServer srv = null;
try {
String urlPath = "/testDownloadUrl/";
srv = GridEmbeddedHttpServer.startHttpsServer().withFileDownloadingHandler(urlPath,
GridTestUtils.resolveIgnitePath("modules/core/src/test/config/tests.properties"));
File file = new File(System.getProperty("java.io.tmpdir") + File.separator + "url-http.file");
file = U.downloadUrl(new URL(srv.getBaseUrl() + urlPath), file);
assert file.exists();
assert file.delete();
}
finally {
if (srv != null)
srv.stop(1);
}
}
/**
* @throws Exception If failed.
*/
public void testDownloadUrlFromLocalFile() throws Exception {
File file = new File(System.getProperty("java.io.tmpdir") + File.separator + "url-http.file");
file = U.downloadUrl(
GridTestUtils.resolveIgnitePath("modules/core/src/test/config/tests.properties").toURI().toURL(), file);
assert file.exists();
assert file.delete();
}
/**
* @throws Exception If failed.
*/
public void testOs() throws Exception {
System.out.println("OS string: " + U.osString());
System.out.println("JDK string: " + U.jdkString());
System.out.println("OS/JDK string: " + U.osJdkString());
System.out.println("Is Windows: " + U.isWindows());
System.out.println("Is Windows 95: " + U.isWindows95());
System.out.println("Is Windows 98: " + U.isWindows98());
System.out.println("Is Windows NT: " + U.isWindowsNt());
System.out.println("Is Windows 2000: " + U.isWindows2k());
System.out.println("Is Windows 2003: " + U.isWindows2003());
System.out.println("Is Windows XP: " + U.isWindowsXp());
System.out.println("Is Windows Vista: " + U.isWindowsVista());
System.out.println("Is Linux: " + U.isLinux());
System.out.println("Is Mac OS: " + U.isMacOs());
System.out.println("Is Netware: " + U.isNetWare());
System.out.println("Is Solaris: " + U.isSolaris());
System.out.println("Is Solaris SPARC: " + U.isSolarisSparc());
System.out.println("Is Solaris x86: " + U.isSolarisX86());
System.out.println("Is Windows7: " + U.isWindows7());
System.out.println("Is Sufficiently Tested OS: " + U.isSufficientlyTestedOs());
}
/**
* @throws Exception If failed.
*/
public void testJavaSerialization() throws Exception {
ByteArrayOutputStream byteOut = new ByteArrayOutputStream();
ObjectOutputStream objOut = new ObjectOutputStream(byteOut);
objOut.writeObject(new byte[] {1, 2, 3, 4, 5, 5});
objOut.flush();
byte[] sBytes = byteOut.toByteArray();
ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(sBytes));
in.readObject();
}
/**
*
*/
public void testHidePassword() {
Collection<String> uriList = new ArrayList<>();
uriList.add("ftp://anonymous:111111;freq=5000@unknown.host:21/pub/gg-test");
uriList.add("ftp://anonymous:111111;freq=5000@localhost:21/pub/gg-test");
uriList.add("http://freq=5000@localhost/tasks");
uriList.add("http://freq=5000@unknownhost.host/tasks");
for (String uri : uriList)
X.println(uri + " -> " + U.hidePassword(uri));
}
/**
* Test job to test possible indefinite recursion in detecting peer deploy aware.
*/
@SuppressWarnings({"UnusedDeclaration"})
private class SelfReferencedJob extends ComputeJobAdapter implements GridPeerDeployAware {
/** */
private SelfReferencedJob ref;
/** */
private SelfReferencedJob[] arr;
/** */
private Collection<SelfReferencedJob> col;
/** */
private ClusterNode node;
/** */
private ClusterGroup subGrid;
/**
* @param ignite Grid.
*/
private SelfReferencedJob(Ignite ignite) throws IgniteCheckedException {
node = ignite.cluster().localNode();
ref = this;
arr = new SelfReferencedJob[] {this, this};
col = Arrays.asList(this, this, this);
newContext();
subGrid = ignite.cluster().forNodes(Collections.singleton(node));
}
/** {@inheritDoc} */
@Override public Object execute() {
return null;
}
/** {@inheritDoc} */
@Override public Class<?> deployClass() {
return getClass();
}
/** {@inheritDoc} */
@Override public ClassLoader classLoader() {
return getClass().getClassLoader();
}
}
/**
* @throws Exception If test fails.
*/
public void testDetectPeerDeployAwareInfiniteRecursion() throws Exception {
Ignite g = startGrid(1);
try {
final SelfReferencedJob job = new SelfReferencedJob(g);
GridPeerDeployAware d = U.detectPeerDeployAware(U.peerDeployAware(job));
assert d != null;
assert SelfReferencedJob.class == d.deployClass();
assert d.classLoader() == SelfReferencedJob.class.getClassLoader();
}
finally {
stopGrid(1);
}
}
/**
* @param r Runnable.
* @return Job created for given runnable.
*/
private static ComputeJob job(final Runnable r) {
return new ComputeJobAdapter() {
@Nullable @Override public Object execute() {
r.run();
return null;
}
};
}
/**
*
* @throws Exception If failed.
*/
public void testParseIsoDate() throws Exception {
Calendar cal = U.parseIsoDate("2009-12-08T13:30:44.000Z");
assert cal.get(Calendar.YEAR) == 2009;
assert cal.get(Calendar.MONTH) == 11;
assert cal.get(Calendar.DAY_OF_MONTH) == 8;
assert cal.get(Calendar.HOUR_OF_DAY) == 13;
assert cal.get(Calendar.MINUTE) == 30;
assert cal.get(Calendar.SECOND) == 44;
assert cal.get(Calendar.MILLISECOND) == 0;
assert cal.get(Calendar.ZONE_OFFSET) == 0 :
"Unexpected value: " + cal.get(Calendar.ZONE_OFFSET);
cal = U.parseIsoDate("2009-12-08T13:30:44.000+03:00");
assert cal.get(Calendar.YEAR) == 2009;
assert cal.get(Calendar.MONTH) == 11;
assert cal.get(Calendar.DAY_OF_MONTH) == 8;
assert cal.get(Calendar.HOUR_OF_DAY) == 13;
assert cal.get(Calendar.MINUTE) == 30;
assert cal.get(Calendar.SECOND) == 44;
assert cal.get(Calendar.MILLISECOND) == 0;
assert cal.get(Calendar.ZONE_OFFSET) == 3 * 60 * 60 * 1000 :
"Unexpected value: " + cal.get(Calendar.ZONE_OFFSET);
cal = U.parseIsoDate("2009-12-08T13:30:44.000+0300");
assert cal.get(Calendar.YEAR) == 2009;
assert cal.get(Calendar.MONTH) == 11;
assert cal.get(Calendar.DAY_OF_MONTH) == 8;
assert cal.get(Calendar.HOUR_OF_DAY) == 13;
assert cal.get(Calendar.MINUTE) == 30;
assert cal.get(Calendar.SECOND) == 44;
assert cal.get(Calendar.MILLISECOND) == 0;
assert cal.get(Calendar.ZONE_OFFSET) == 3 * 60 * 60 * 1000 :
"Unexpected value: " + cal.get(Calendar.ZONE_OFFSET);
}
/**
* @throws Exception If test failed.
*/
public void testPeerDeployAware0() throws Exception {
Collection<Object> col = new ArrayList<>();
col.add(null);
col.add(null);
col.add(null);
GridPeerDeployAware pda = U.peerDeployAware0(col);
assert pda != null;
col.clear();
col.add(null);
pda = U.peerDeployAware0(col);
assert pda != null;
col.clear();
pda = U.peerDeployAware0(col);
assert pda != null;
col.clear();
col.add(null);
col.add("Test");
col.add(null);
pda = U.peerDeployAware0(col);
assert pda != null;
col.clear();
col.add("Test");
pda = U.peerDeployAware0(col);
assert pda != null;
col.clear();
col.add("Test");
col.add(this);
pda = U.peerDeployAware0(col);
assert pda != null;
col.clear();
col.add(null);
col.add("Test");
col.add(null);
col.add(this);
col.add(null);
pda = U.peerDeployAware0(col);
assert pda != null;
}
/**
* Test UUID to bytes array conversion.
*/
public void testsGetBytes() {
for (int i = 0; i < 100; i++) {
UUID id = UUID.randomUUID();
byte[] bytes = U.uuidToBytes(id);
BigInteger n = new BigInteger(bytes);
assert n.shiftRight(Long.SIZE).longValue() == id.getMostSignificantBits();
assert n.longValue() == id.getLeastSignificantBits();
}
}
/**
*
*/
@SuppressWarnings("ZeroLengthArrayAllocation")
public void testReadByteArray() {
assertTrue(Arrays.equals(new byte[0], U.readByteArray(ByteBuffer.allocate(0))));
assertTrue(Arrays.equals(new byte[0], U.readByteArray(ByteBuffer.allocate(0), ByteBuffer.allocate(0))));
Random rnd = new Random();
byte[] bytes = new byte[13];
rnd.nextBytes(bytes);
assertTrue(Arrays.equals(bytes, U.readByteArray(ByteBuffer.wrap(bytes))));
assertTrue(Arrays.equals(bytes, U.readByteArray(ByteBuffer.wrap(bytes), ByteBuffer.allocate(0))));
assertTrue(Arrays.equals(bytes, U.readByteArray(ByteBuffer.allocate(0), ByteBuffer.wrap(bytes))));
for (int i = 0; i < 1000; i++) {
int n = rnd.nextInt(100);
bytes = new byte[n];
rnd.nextBytes(bytes);
ByteBuffer[] bufs = new ByteBuffer[1 + rnd.nextInt(10)];
int x = 0;
for (int j = 0; j < bufs.length - 1; j++) {
int size = x == n ? 0 : rnd.nextInt(n - x);
bufs[j] = (ByteBuffer)ByteBuffer.wrap(bytes).position(x).limit(x += size);
}
bufs[bufs.length - 1] = (ByteBuffer)ByteBuffer.wrap(bytes).position(x).limit(n);
assertTrue(Arrays.equals(bytes, U.readByteArray(bufs)));
}
}
/**
*
*/
@SuppressWarnings("ZeroLengthArrayAllocation")
public void testHashCodeFromBuffers() {
assertEquals(Arrays.hashCode(new byte[0]), U.hashCode(ByteBuffer.allocate(0)));
assertEquals(Arrays.hashCode(new byte[0]), U.hashCode(ByteBuffer.allocate(0), ByteBuffer.allocate(0)));
Random rnd = new Random();
for (int i = 0; i < 1000; i++) {
ByteBuffer[] bufs = new ByteBuffer[1 + rnd.nextInt(15)];
for (int j = 0; j < bufs.length; j++) {
byte[] bytes = new byte[rnd.nextInt(25)];
rnd.nextBytes(bytes);
bufs[j] = ByteBuffer.wrap(bytes);
}
assertEquals(U.hashCode(bufs), Arrays.hashCode(U.readByteArray(bufs)));
}
}
/**
* Test annotation look up.
*/
public void testGetAnnotations() {
assert U.getAnnotation(A1.class, Ann1.class) != null;
assert U.getAnnotation(A2.class, Ann1.class) != null;
assert U.getAnnotation(A1.class, Ann2.class) != null;
assert U.getAnnotation(A2.class, Ann2.class) != null;
assert U.getAnnotation(A3.class, Ann1.class) == null;
assert U.getAnnotation(A3.class, Ann2.class) != null;
}
/**
*
*/
public void testUnique() {
int[][][] arrays = new int[][][]{
new int[][]{EMPTY, EMPTY, EMPTY},
new int[][]{new int[]{1, 2, 3}, EMPTY, new int[]{1, 2, 3}},
new int[][]{new int[]{1, 2, 3}, new int[]{1, 2, 3}, new int[]{1, 2, 3}},
new int[][]{new int[]{1, 2, 3}, new int[]{1, 3}, new int[]{1, 2, 3}},
new int[][]{new int[]{1, 2, 30, 40, 50}, new int[]{2, 40}, new int[]{1, 2, 30, 40, 50}},
new int[][]{new int[]{-100, -13, 1, 2, 5, 30, 40, 50}, new int[]{1, 2, 6, 100, 113},
new int[]{-100, -13, 1, 2, 5, 6, 30, 40, 50, 100, 113}}
};
for (int[][] a : arrays) {
assertArrayEquals(a[2], U.unique(a[0], a[0].length, a[1], a[1].length));
assertArrayEquals(a[2], U.unique(a[1], a[1].length, a[0], a[0].length));
}
assertArrayEquals(new int[]{1, 2, 3, 4}, U.unique(new int[]{1, 2, 3, 8}, 3, new int[]{2, 4, 5}, 2));
assertArrayEquals(new int[]{2, 4}, U.unique(new int[]{1, 2, 3, 8}, 0, new int[]{2, 4, 5}, 2));
assertArrayEquals(new int[]{1, 2, 4, 5}, U.unique(new int[]{1, 2, 3, 8}, 2, new int[]{2, 4, 5, 6}, 3));
assertArrayEquals(new int[]{1, 2}, U.unique(new int[]{1, 2, 3, 8}, 2, new int[]{2, 4, 5, 6}, 0));
}
/**
*
*/
public void testDifference() {
int[][][] arrays = new int[][][]{
new int[][]{EMPTY, EMPTY, EMPTY},
new int[][]{new int[]{1, 2, 3}, EMPTY, new int[]{1, 2, 3}},
new int[][]{EMPTY, new int[]{1, 2, 3}, EMPTY},
new int[][]{new int[]{1, 2, 3}, new int[]{1, 2, 3}, EMPTY},
new int[][]{new int[]{-100, -50, 1, 2, 3}, new int[]{-50, -1, 1, 3}, new int[]{-100, 2}},
new int[][]{new int[]{-100, 1, 2, 30, 40, 50}, new int[]{2, 40}, new int[]{-100, 1, 30, 50}},
new int[][]{new int[]{-1, 1, 2, 30, 40, 50}, new int[]{1, 2, 100, 113}, new int[]{-1, 30, 40, 50}}
};
for (int[][] a : arrays)
assertArrayEquals(a[2], U.difference(a[0], a[0].length, a[1], a[1].length));
assertArrayEquals(new int[]{1, 2}, U.difference(new int[]{1, 2, 30, 40, 50}, 3, new int[]{30, 40}, 2));
assertArrayEquals(EMPTY, U.difference(new int[]{1, 2, 30, 40, 50}, 0, new int[]{30, 40}, 2));
assertArrayEquals(new int[]{1, 2, 40}, U.difference(new int[]{1, 2, 30, 40, 50}, 4, new int[]{30, 40}, 1));
assertArrayEquals(new int[]{1, 2, 30, 40}, U.difference(new int[]{1, 2, 30, 40, 50}, 4, new int[]{30, 40}, 0));
}
/**
*
*/
public void testCopyIfExceeded() {
int[][] arrays = new int[][]{new int[]{13, 14, 17, 11}, new int[]{13}, EMPTY};
for (int[] a : arrays) {
int[] b = Arrays.copyOf(a, a.length);
assertEquals(a, U.copyIfExceeded(a, a.length));
assertArrayEquals(b, U.copyIfExceeded(a, a.length));
for (int j = 0; j < a.length - 1; j++)
assertArrayEquals(Arrays.copyOf(b, j), U.copyIfExceeded(a, j));
}
}
/**
*
*/
public void testIsIncreasingArray() {
assertTrue(U.isIncreasingArray(EMPTY, 0));
assertTrue(U.isIncreasingArray(new int[]{Integer.MIN_VALUE, -10, 1, 13, Integer.MAX_VALUE}, 5));
assertTrue(U.isIncreasingArray(new int[]{1, 2, 3, -1, 5}, 0));
assertTrue(U.isIncreasingArray(new int[]{1, 2, 3, -1, 5}, 3));
assertFalse(U.isIncreasingArray(new int[]{1, 2, 3, -1, 5}, 4));
assertFalse(U.isIncreasingArray(new int[]{1, 2, 3, -1, 5}, 5));
assertFalse(U.isIncreasingArray(new int[]{1, 2, 3, 3, 5}, 4));
assertTrue(U.isIncreasingArray(new int[]{1, -1}, 1));
assertFalse(U.isIncreasingArray(new int[]{1, -1}, 2));
assertTrue(U.isIncreasingArray(new int[]{13, 13, 13}, 1));
assertFalse(U.isIncreasingArray(new int[]{13, 13, 13}, 2));
assertFalse(U.isIncreasingArray(new int[]{13, 13, 13}, 3));
}
/**
*
*/
public void testIsNonDecreasingArray() {
assertTrue(U.isNonDecreasingArray(EMPTY, 0));
assertTrue(U.isNonDecreasingArray(new int[]{Integer.MIN_VALUE, -10, 1, 13, Integer.MAX_VALUE}, 5));
assertTrue(U.isNonDecreasingArray(new int[]{1, 2, 3, -1, 5}, 0));
assertTrue(U.isNonDecreasingArray(new int[]{1, 2, 3, -1, 5}, 3));
assertFalse(U.isNonDecreasingArray(new int[]{1, 2, 3, -1, 5}, 4));
assertFalse(U.isNonDecreasingArray(new int[]{1, 2, 3, -1, 5}, 5));
assertTrue(U.isNonDecreasingArray(new int[]{1, 2, 3, 3, 5}, 4));
assertTrue(U.isNonDecreasingArray(new int[]{1, -1}, 1));
assertFalse(U.isNonDecreasingArray(new int[]{1, -1}, 2));
assertTrue(U.isNonDecreasingArray(new int[]{13, 13, 13}, 1));
assertTrue(U.isNonDecreasingArray(new int[]{13, 13, 13}, 2));
assertTrue(U.isNonDecreasingArray(new int[]{13, 13, 13}, 3));
}
/**
* Test InetAddress Comparator.
*/
public void testInetAddressesComparator() {
List<InetSocketAddress> ips = new ArrayList<InetSocketAddress>() {
{
add(new InetSocketAddress("127.0.0.1", 1));
add(new InetSocketAddress("10.0.0.1", 1));
add(new InetSocketAddress("172.16.0.1", 1));
add(new InetSocketAddress("192.168.0.1", 1));
add(new InetSocketAddress("100.0.0.1", 1));
add(new InetSocketAddress("XXX", 1));
}
};
Collections.sort(ips, U.inetAddressesComparator(true));
assertTrue(ips.get(0).getAddress().isLoopbackAddress());
assertTrue(ips.get(ips.size() - 1).isUnresolved());
Collections.sort(ips, U.inetAddressesComparator(false));
assertTrue(ips.get(ips.size() - 2).getAddress().isLoopbackAddress());
assertTrue(ips.get(ips.size() - 1).isUnresolved());
}
public void testMD5Calculation() throws Exception {
String md5 = U.calculateMD5(new ByteArrayInputStream("Corrupted information.".getBytes()));
assertEquals("d7dbe555be2eee7fa658299850169fa1", md5);
}
/**
* @throws Exception If failed.
*/
public void testResolveLocalAddresses() throws Exception {
InetAddress inetAddress = InetAddress.getByName("0.0.0.0");
IgniteBiTuple<Collection<String>, Collection<String>> addrs = U.resolveLocalAddresses(inetAddress);
Collection<String> hostNames = addrs.get2();
assertFalse(hostNames.contains(null));
assertFalse(hostNames.contains(""));
assertFalse(hostNames.contains("127.0.0.1"));
assertFalse(F.exist(hostNames, new IgnitePredicate<String>() {
@Override public boolean apply(String hostName) {
return hostName.contains("localhost") || hostName.contains("0:0:0:0:0:0:0:1");
}
}));
}
/**
*
*/
public void testToSocketAddressesNoDuplicates() {
Collection<String> addrs = new ArrayList<>();
addrs.add("127.0.0.1");
addrs.add("localhost");
Collection<String> hostNames = new ArrayList<>();
int port = 1234;
assertEquals(1, U.toSocketAddresses(addrs, hostNames, port).size());
}
/**
* Composes a test String of the given length.
*
* @param len The length.
* @return The String.
*/
private static String composeString(int len) {
StringBuilder sb = new StringBuilder();
for (int i=0; i<len; i++)
sb.append((char)i);
String x = sb.toString();
assertEquals(len, x.length());
return x;
}
/**
* Writes the given String to a DataOutput, reads from DataInput, then checks if they are the same.
*
* @param s0 The String to check serialization for.
* @throws Exception On error.
*/
private static void checkString(String s0) throws Exception {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
DataOutput dout = new DataOutputStream(baos);
IgfsUtils.writeUTF(dout, s0);
DataInput din = new DataInputStream(new ByteArrayInputStream(baos.toByteArray()));
String s1 = IgfsUtils.readUTF(din);
assertEquals(s0, s1);
}
/**
* Tests long String serialization/deserialization.
*
* @throws Exception If failed.
*/
public void testLongStringWriteUTF() throws Exception {
checkString(null);
checkString("");
checkString("a");
checkString("Quick brown fox jumps over the lazy dog.");
String x = composeString(0xFFFF / 4 - 1);
checkString(x);
x = composeString(0xFFFF / 4);
checkString(x);
x = composeString(0xFFFF / 4 + 1);
checkString(x);
}
/**
*
*/
public void testCeilPow2() throws Exception {
assertEquals(2, U.ceilPow2(2));
assertEquals(4, U.ceilPow2(3));
assertEquals(4, U.ceilPow2(4));
assertEquals(8, U.ceilPow2(5));
assertEquals(8, U.ceilPow2(6));
assertEquals(8, U.ceilPow2(7));
assertEquals(8, U.ceilPow2(8));
assertEquals(16, U.ceilPow2(9));
assertEquals(1 << 15, U.ceilPow2((1 << 15) - 1));
assertEquals(1 << 15, U.ceilPow2(1 << 15));
assertEquals(1 << 16, U.ceilPow2((1 << 15) + 1));
assertEquals(1 << 26, U.ceilPow2((1 << 26) - 100));
assertEquals(1 << 26, U.ceilPow2(1 << 26));
assertEquals(1 << 27, U.ceilPow2((1 << 26) + 100));
for (int i = (int)Math.pow(2, 30); i < Integer.MAX_VALUE; i++)
assertEquals((int)Math.pow(2, 30), U.ceilPow2(i));
for (int i = Integer.MIN_VALUE; i < 0; i++)
assertEquals(0, U.ceilPow2(i));
}
/**
*
*/
public void testIsOldestNodeVersionAtLeast() {
IgniteProductVersion v240 = IgniteProductVersion.fromString("2.4.0");
IgniteProductVersion v241 = IgniteProductVersion.fromString("2.4.1");
IgniteProductVersion v250 = IgniteProductVersion.fromString("2.5.0");
IgniteProductVersion v250ts = IgniteProductVersion.fromString("2.5.0-b1-3");
TcpDiscoveryNode node240 = new TcpDiscoveryNode();
node240.version(v240);
TcpDiscoveryNode node241 = new TcpDiscoveryNode();
node241.version(v241);
TcpDiscoveryNode node250 = new TcpDiscoveryNode();
node250.version(v250);
TcpDiscoveryNode node250ts = new TcpDiscoveryNode();
node250ts.version(v250ts);
assertTrue(U.isOldestNodeVersionAtLeast(v240, Arrays.asList(node240, node241, node250, node250ts)));
assertFalse(U.isOldestNodeVersionAtLeast(v241, Arrays.asList(node240, node241, node250, node250ts)));
assertTrue(U.isOldestNodeVersionAtLeast(v250, Arrays.asList(node250, node250ts)));
assertTrue(U.isOldestNodeVersionAtLeast(v250ts, Arrays.asList(node250, node250ts)));
}
/**
*
*/
public void testDoInParallel() throws Throwable {
CyclicBarrier barrier = new CyclicBarrier(3);
IgniteUtils.doInParallel(3,
Executors.newFixedThreadPool(3),
Arrays.asList(1, 2, 3),
i -> {
try {
barrier.await(1, TimeUnit.SECONDS);
}
catch (Exception e) {
throw new IgniteCheckedException(e);
}
}
);
}
/**
*
*/
public void testDoInParallelBatch() {
CyclicBarrier barrier = new CyclicBarrier(3);
try {
IgniteUtils.doInParallel(2,
Executors.newFixedThreadPool(3),
Arrays.asList(1, 2, 3),
i -> {
try {
barrier.await(400, TimeUnit.MILLISECONDS);
}
catch (Exception e) {
throw new IgniteCheckedException(e);
}
}
);
fail("Should throw timeout exception");
}
catch (Exception e) {
assertTrue(e.toString(), X.hasCause(e, TimeoutException.class));
}
}
/**
*
*/
public void testDoInParallelException() {
String expectedException = "ExpectedException";
try {
IgniteUtils.doInParallel(3,
Executors.newFixedThreadPool(1),
Arrays.asList(1, 2, 3),
i -> {
if (i == 1)
throw new IgniteCheckedException(expectedException);
}
);
fail("Should throw ParallelExecutionException");
}
catch (IgniteCheckedException e) {
assertEquals(expectedException, e.getMessage());
}
}
/**
* Test enum.
*/
private enum TestEnum {
E1,
E2,
E3
}
@Documented @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.TYPE)
private @interface Ann1 {}
@Documented @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.TYPE)
private @interface Ann2 {}
private static class A1 implements I3, I5 {}
private static class A2 extends A1 {}
private static class A3 implements I5 {}
@Ann1 private interface I1 {}
private interface I2 extends I1 {}
private interface I3 extends I2 {}
@Ann2 private interface I4 {}
private interface I5 extends I4 {}
}
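A side note on the change this record captures: per the commit message below, the flaky assertion in testDoInParallelBatch now checks that a TimeoutException appears anywhere in the exception's cause chain (X.hasCause) instead of only as the immediate cause. Purely as an illustration, and not taken from the Ignite sources, a hand-rolled check of that kind could look like this:

// Sketch only: walks the cause chain the way a hasCause-style helper would.
static boolean hasCauseOf(Throwable t, Class<? extends Throwable> cls) {
    for (Throwable cur = t; cur != null; cur = cur.getCause())
        if (cls.isInstance(cur))
            return true;
    return false;
}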
|
modules/core/src/test/java/org/apache/ignite/internal/util/IgniteUtilsSelfTest.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.util;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.File;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.math.BigInteger;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.URL;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Random;
import java.util.UUID;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.cluster.ClusterGroup;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.compute.ComputeJob;
import org.apache.ignite.compute.ComputeJobAdapter;
import org.apache.ignite.internal.processors.igfs.IgfsUtils;
import org.apache.ignite.internal.util.lang.GridPeerDeployAware;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.X;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteBiTuple;
import org.apache.ignite.lang.IgnitePredicate;
import org.apache.ignite.lang.IgniteProductVersion;
import org.apache.ignite.spi.discovery.tcp.internal.TcpDiscoveryNode;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.http.GridEmbeddedHttpServer;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.apache.ignite.testframework.junits.common.GridCommonTest;
import org.jetbrains.annotations.Nullable;
import static org.junit.Assert.assertArrayEquals;
/**
* Grid utils tests.
*/
@GridCommonTest(group = "Utils")
public class IgniteUtilsSelfTest extends GridCommonAbstractTest {
/** */
public static final int[] EMPTY = new int[0];
/**
* @return 120 character length string.
*/
private String text120() {
char[] chs = new char[120];
Arrays.fill(chs, 'x');
return new String(chs);
}
/**
*
*/
public void testIsPow2() {
assertTrue(U.isPow2(1));
assertTrue(U.isPow2(2));
assertTrue(U.isPow2(4));
assertTrue(U.isPow2(8));
assertTrue(U.isPow2(16));
assertTrue(U.isPow2(16 * 16));
assertTrue(U.isPow2(32 * 32));
assertFalse(U.isPow2(-4));
assertFalse(U.isPow2(-3));
assertFalse(U.isPow2(-2));
assertFalse(U.isPow2(-1));
assertFalse(U.isPow2(0));
assertFalse(U.isPow2(3));
assertFalse(U.isPow2(5));
assertFalse(U.isPow2(6));
assertFalse(U.isPow2(7));
assertFalse(U.isPow2(9));
}
/**
* @throws Exception If failed.
*/
public void testAllLocalIps() throws Exception {
Collection<String> ips = U.allLocalIps();
System.out.println("All local IPs: " + ips);
}
/**
* @throws Exception If failed.
*/
public void testAllLocalMACs() throws Exception {
Collection<String> macs = U.allLocalMACs();
System.out.println("All local MACs: " + macs);
}
/**
* On Linux NetworkInterface.getHardwareAddress() returns null from time to time.
*
* @throws Exception If failed.
*/
public void testAllLocalMACsMultiThreaded() throws Exception {
GridTestUtils.runMultiThreaded(new Runnable() {
@Override public void run() {
for (int i = 0; i < 30; i++) {
Collection<String> macs = U.allLocalMACs();
assertTrue("Mac address are not defined.", !macs.isEmpty());
}
}
}, 32, "thread");
}
/**
* @throws Exception If failed.
*/
public void testByteArray2String() throws Exception {
assertEquals("{0x0A,0x14,0x1E,0x28,0x32,0x3C,0x46,0x50,0x5A}",
U.byteArray2String(new byte[]{10, 20, 30, 40, 50, 60, 70, 80, 90}, "0x%02X", ",0x%02X"));
}
/**
* @throws Exception If failed.
*/
public void testFormatMins() throws Exception {
printFormatMins(0);
printFormatMins(1);
printFormatMins(2);
printFormatMins(59);
printFormatMins(60);
printFormatMins(61);
printFormatMins(60 * 24 - 1);
printFormatMins(60 * 24);
printFormatMins(60 * 24 + 1);
printFormatMins(5 * 60 * 24 - 1);
printFormatMins(5 * 60 * 24);
printFormatMins(5 * 60 * 24 + 1);
}
/**
* Helper method for {@link #testFormatMins()}
*
* @param mins Minutes to test.
*/
private void printFormatMins(long mins) {
System.out.println("For " + mins + " minutes: " + X.formatMins(mins));
}
/**
* @throws Exception If failed.
*/
public void testDownloadUrlFromHttp() throws Exception {
GridEmbeddedHttpServer srv = null;
try {
String urlPath = "/testDownloadUrl/";
srv = GridEmbeddedHttpServer.startHttpServer().withFileDownloadingHandler(urlPath,
GridTestUtils.resolveIgnitePath("/modules/core/src/test/config/tests.properties"));
File file = new File(System.getProperty("java.io.tmpdir") + File.separator + "url-http.file");
file = U.downloadUrl(new URL(srv.getBaseUrl() + urlPath), file);
assert file.exists();
assert file.delete();
}
finally {
if (srv != null)
srv.stop(1);
}
}
/**
* @throws Exception If failed.
*/
public void testDownloadUrlFromHttps() throws Exception {
GridEmbeddedHttpServer srv = null;
try {
String urlPath = "/testDownloadUrl/";
srv = GridEmbeddedHttpServer.startHttpsServer().withFileDownloadingHandler(urlPath,
GridTestUtils.resolveIgnitePath("modules/core/src/test/config/tests.properties"));
File file = new File(System.getProperty("java.io.tmpdir") + File.separator + "url-http.file");
file = U.downloadUrl(new URL(srv.getBaseUrl() + urlPath), file);
assert file.exists();
assert file.delete();
}
finally {
if (srv != null)
srv.stop(1);
}
}
/**
* @throws Exception If failed.
*/
public void testDownloadUrlFromLocalFile() throws Exception {
File file = new File(System.getProperty("java.io.tmpdir") + File.separator + "url-http.file");
file = U.downloadUrl(
GridTestUtils.resolveIgnitePath("modules/core/src/test/config/tests.properties").toURI().toURL(), file);
assert file.exists();
assert file.delete();
}
/**
* @throws Exception If failed.
*/
public void testOs() throws Exception {
System.out.println("OS string: " + U.osString());
System.out.println("JDK string: " + U.jdkString());
System.out.println("OS/JDK string: " + U.osJdkString());
System.out.println("Is Windows: " + U.isWindows());
System.out.println("Is Windows 95: " + U.isWindows95());
System.out.println("Is Windows 98: " + U.isWindows98());
System.out.println("Is Windows NT: " + U.isWindowsNt());
System.out.println("Is Windows 2000: " + U.isWindows2k());
System.out.println("Is Windows 2003: " + U.isWindows2003());
System.out.println("Is Windows XP: " + U.isWindowsXp());
System.out.println("Is Windows Vista: " + U.isWindowsVista());
System.out.println("Is Linux: " + U.isLinux());
System.out.println("Is Mac OS: " + U.isMacOs());
System.out.println("Is Netware: " + U.isNetWare());
System.out.println("Is Solaris: " + U.isSolaris());
System.out.println("Is Solaris SPARC: " + U.isSolarisSparc());
System.out.println("Is Solaris x86: " + U.isSolarisX86());
System.out.println("Is Windows7: " + U.isWindows7());
System.out.println("Is Sufficiently Tested OS: " + U.isSufficientlyTestedOs());
}
/**
* @throws Exception If failed.
*/
public void testJavaSerialization() throws Exception {
ByteArrayOutputStream byteOut = new ByteArrayOutputStream();
ObjectOutputStream objOut = new ObjectOutputStream(byteOut);
objOut.writeObject(new byte[] {1, 2, 3, 4, 5, 5});
objOut.flush();
byte[] sBytes = byteOut.toByteArray();
ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(sBytes));
in.readObject();
}
/**
*
*/
public void testHidePassword() {
Collection<String> uriList = new ArrayList<>();
uriList.add("ftp://anonymous:111111;freq=5000@unknown.host:21/pub/gg-test");
uriList.add("ftp://anonymous:111111;freq=5000@localhost:21/pub/gg-test");
uriList.add("http://freq=5000@localhost/tasks");
uriList.add("http://freq=5000@unknownhost.host/tasks");
for (String uri : uriList)
X.println(uri + " -> " + U.hidePassword(uri));
}
/**
* Test job to test possible indefinite recursion in detecting peer deploy aware.
*/
@SuppressWarnings({"UnusedDeclaration"})
private class SelfReferencedJob extends ComputeJobAdapter implements GridPeerDeployAware {
/** */
private SelfReferencedJob ref;
/** */
private SelfReferencedJob[] arr;
/** */
private Collection<SelfReferencedJob> col;
/** */
private ClusterNode node;
/** */
private ClusterGroup subGrid;
/**
* @param ignite Grid.
*/
private SelfReferencedJob(Ignite ignite) throws IgniteCheckedException {
node = ignite.cluster().localNode();
ref = this;
arr = new SelfReferencedJob[] {this, this};
col = Arrays.asList(this, this, this);
newContext();
subGrid = ignite.cluster().forNodes(Collections.singleton(node));
}
/** {@inheritDoc} */
@Override public Object execute() {
return null;
}
/** {@inheritDoc} */
@Override public Class<?> deployClass() {
return getClass();
}
/** {@inheritDoc} */
@Override public ClassLoader classLoader() {
return getClass().getClassLoader();
}
}
/**
* @throws Exception If test fails.
*/
public void testDetectPeerDeployAwareInfiniteRecursion() throws Exception {
Ignite g = startGrid(1);
try {
final SelfReferencedJob job = new SelfReferencedJob(g);
GridPeerDeployAware d = U.detectPeerDeployAware(U.peerDeployAware(job));
assert d != null;
assert SelfReferencedJob.class == d.deployClass();
assert d.classLoader() == SelfReferencedJob.class.getClassLoader();
}
finally {
stopGrid(1);
}
}
/**
* @param r Runnable.
* @return Job created for given runnable.
*/
private static ComputeJob job(final Runnable r) {
return new ComputeJobAdapter() {
@Nullable @Override public Object execute() {
r.run();
return null;
}
};
}
/**
*
* @throws Exception If failed.
*/
public void testParseIsoDate() throws Exception {
Calendar cal = U.parseIsoDate("2009-12-08T13:30:44.000Z");
assert cal.get(Calendar.YEAR) == 2009;
assert cal.get(Calendar.MONTH) == 11;
assert cal.get(Calendar.DAY_OF_MONTH) == 8;
assert cal.get(Calendar.HOUR_OF_DAY) == 13;
assert cal.get(Calendar.MINUTE) == 30;
assert cal.get(Calendar.SECOND) == 44;
assert cal.get(Calendar.MILLISECOND) == 0;
assert cal.get(Calendar.ZONE_OFFSET) == 0 :
"Unexpected value: " + cal.get(Calendar.ZONE_OFFSET);
cal = U.parseIsoDate("2009-12-08T13:30:44.000+03:00");
assert cal.get(Calendar.YEAR) == 2009;
assert cal.get(Calendar.MONTH) == 11;
assert cal.get(Calendar.DAY_OF_MONTH) == 8;
assert cal.get(Calendar.HOUR_OF_DAY) == 13;
assert cal.get(Calendar.MINUTE) == 30;
assert cal.get(Calendar.SECOND) == 44;
assert cal.get(Calendar.MILLISECOND) == 0;
assert cal.get(Calendar.ZONE_OFFSET) == 3 * 60 * 60 * 1000 :
"Unexpected value: " + cal.get(Calendar.ZONE_OFFSET);
cal = U.parseIsoDate("2009-12-08T13:30:44.000+0300");
assert cal.get(Calendar.YEAR) == 2009;
assert cal.get(Calendar.MONTH) == 11;
assert cal.get(Calendar.DAY_OF_MONTH) == 8;
assert cal.get(Calendar.HOUR_OF_DAY) == 13;
assert cal.get(Calendar.MINUTE) == 30;
assert cal.get(Calendar.SECOND) == 44;
assert cal.get(Calendar.MILLISECOND) == 0;
assert cal.get(Calendar.ZONE_OFFSET) == 3 * 60 * 60 * 1000 :
"Unexpected value: " + cal.get(Calendar.ZONE_OFFSET);
}
/**
* @throws Exception If test failed.
*/
public void testPeerDeployAware0() throws Exception {
Collection<Object> col = new ArrayList<>();
col.add(null);
col.add(null);
col.add(null);
GridPeerDeployAware pda = U.peerDeployAware0(col);
assert pda != null;
col.clear();
col.add(null);
pda = U.peerDeployAware0(col);
assert pda != null;
col.clear();
pda = U.peerDeployAware0(col);
assert pda != null;
col.clear();
col.add(null);
col.add("Test");
col.add(null);
pda = U.peerDeployAware0(col);
assert pda != null;
col.clear();
col.add("Test");
pda = U.peerDeployAware0(col);
assert pda != null;
col.clear();
col.add("Test");
col.add(this);
pda = U.peerDeployAware0(col);
assert pda != null;
col.clear();
col.add(null);
col.add("Test");
col.add(null);
col.add(this);
col.add(null);
pda = U.peerDeployAware0(col);
assert pda != null;
}
/**
* Test UUID to bytes array conversion.
*/
public void testsGetBytes() {
for (int i = 0; i < 100; i++) {
UUID id = UUID.randomUUID();
byte[] bytes = U.uuidToBytes(id);
BigInteger n = new BigInteger(bytes);
assert n.shiftRight(Long.SIZE).longValue() == id.getMostSignificantBits();
assert n.longValue() == id.getLeastSignificantBits();
}
}
/**
*
*/
@SuppressWarnings("ZeroLengthArrayAllocation")
public void testReadByteArray() {
assertTrue(Arrays.equals(new byte[0], U.readByteArray(ByteBuffer.allocate(0))));
assertTrue(Arrays.equals(new byte[0], U.readByteArray(ByteBuffer.allocate(0), ByteBuffer.allocate(0))));
Random rnd = new Random();
byte[] bytes = new byte[13];
rnd.nextBytes(bytes);
assertTrue(Arrays.equals(bytes, U.readByteArray(ByteBuffer.wrap(bytes))));
assertTrue(Arrays.equals(bytes, U.readByteArray(ByteBuffer.wrap(bytes), ByteBuffer.allocate(0))));
assertTrue(Arrays.equals(bytes, U.readByteArray(ByteBuffer.allocate(0), ByteBuffer.wrap(bytes))));
for (int i = 0; i < 1000; i++) {
int n = rnd.nextInt(100);
bytes = new byte[n];
rnd.nextBytes(bytes);
ByteBuffer[] bufs = new ByteBuffer[1 + rnd.nextInt(10)];
int x = 0;
for (int j = 0; j < bufs.length - 1; j++) {
int size = x == n ? 0 : rnd.nextInt(n - x);
bufs[j] = (ByteBuffer)ByteBuffer.wrap(bytes).position(x).limit(x += size);
}
bufs[bufs.length - 1] = (ByteBuffer)ByteBuffer.wrap(bytes).position(x).limit(n);
assertTrue(Arrays.equals(bytes, U.readByteArray(bufs)));
}
}
/**
*
*/
@SuppressWarnings("ZeroLengthArrayAllocation")
public void testHashCodeFromBuffers() {
assertEquals(Arrays.hashCode(new byte[0]), U.hashCode(ByteBuffer.allocate(0)));
assertEquals(Arrays.hashCode(new byte[0]), U.hashCode(ByteBuffer.allocate(0), ByteBuffer.allocate(0)));
Random rnd = new Random();
for (int i = 0; i < 1000; i++) {
ByteBuffer[] bufs = new ByteBuffer[1 + rnd.nextInt(15)];
for (int j = 0; j < bufs.length; j++) {
byte[] bytes = new byte[rnd.nextInt(25)];
rnd.nextBytes(bytes);
bufs[j] = ByteBuffer.wrap(bytes);
}
assertEquals(U.hashCode(bufs), Arrays.hashCode(U.readByteArray(bufs)));
}
}
/**
* Test annotation look up.
*/
public void testGetAnnotations() {
assert U.getAnnotation(A1.class, Ann1.class) != null;
assert U.getAnnotation(A2.class, Ann1.class) != null;
assert U.getAnnotation(A1.class, Ann2.class) != null;
assert U.getAnnotation(A2.class, Ann2.class) != null;
assert U.getAnnotation(A3.class, Ann1.class) == null;
assert U.getAnnotation(A3.class, Ann2.class) != null;
}
/**
*
*/
public void testUnique() {
int[][][] arrays = new int[][][]{
new int[][]{EMPTY, EMPTY, EMPTY},
new int[][]{new int[]{1, 2, 3}, EMPTY, new int[]{1, 2, 3}},
new int[][]{new int[]{1, 2, 3}, new int[]{1, 2, 3}, new int[]{1, 2, 3}},
new int[][]{new int[]{1, 2, 3}, new int[]{1, 3}, new int[]{1, 2, 3}},
new int[][]{new int[]{1, 2, 30, 40, 50}, new int[]{2, 40}, new int[]{1, 2, 30, 40, 50}},
new int[][]{new int[]{-100, -13, 1, 2, 5, 30, 40, 50}, new int[]{1, 2, 6, 100, 113},
new int[]{-100, -13, 1, 2, 5, 6, 30, 40, 50, 100, 113}}
};
for (int[][] a : arrays) {
assertArrayEquals(a[2], U.unique(a[0], a[0].length, a[1], a[1].length));
assertArrayEquals(a[2], U.unique(a[1], a[1].length, a[0], a[0].length));
}
assertArrayEquals(new int[]{1, 2, 3, 4}, U.unique(new int[]{1, 2, 3, 8}, 3, new int[]{2, 4, 5}, 2));
assertArrayEquals(new int[]{2, 4}, U.unique(new int[]{1, 2, 3, 8}, 0, new int[]{2, 4, 5}, 2));
assertArrayEquals(new int[]{1, 2, 4, 5}, U.unique(new int[]{1, 2, 3, 8}, 2, new int[]{2, 4, 5, 6}, 3));
assertArrayEquals(new int[]{1, 2}, U.unique(new int[]{1, 2, 3, 8}, 2, new int[]{2, 4, 5, 6}, 0));
}
/**
*
*/
public void testDifference() {
int[][][] arrays = new int[][][]{
new int[][]{EMPTY, EMPTY, EMPTY},
new int[][]{new int[]{1, 2, 3}, EMPTY, new int[]{1, 2, 3}},
new int[][]{EMPTY, new int[]{1, 2, 3}, EMPTY},
new int[][]{new int[]{1, 2, 3}, new int[]{1, 2, 3}, EMPTY},
new int[][]{new int[]{-100, -50, 1, 2, 3}, new int[]{-50, -1, 1, 3}, new int[]{-100, 2}},
new int[][]{new int[]{-100, 1, 2, 30, 40, 50}, new int[]{2, 40}, new int[]{-100, 1, 30, 50}},
new int[][]{new int[]{-1, 1, 2, 30, 40, 50}, new int[]{1, 2, 100, 113}, new int[]{-1, 30, 40, 50}}
};
for (int[][] a : arrays)
assertArrayEquals(a[2], U.difference(a[0], a[0].length, a[1], a[1].length));
assertArrayEquals(new int[]{1, 2}, U.difference(new int[]{1, 2, 30, 40, 50}, 3, new int[]{30, 40}, 2));
assertArrayEquals(EMPTY, U.difference(new int[]{1, 2, 30, 40, 50}, 0, new int[]{30, 40}, 2));
assertArrayEquals(new int[]{1, 2, 40}, U.difference(new int[]{1, 2, 30, 40, 50}, 4, new int[]{30, 40}, 1));
assertArrayEquals(new int[]{1, 2, 30, 40}, U.difference(new int[]{1, 2, 30, 40, 50}, 4, new int[]{30, 40}, 0));
}
/**
*
*/
public void testCopyIfExceeded() {
int[][] arrays = new int[][]{new int[]{13, 14, 17, 11}, new int[]{13}, EMPTY};
for (int[] a : arrays) {
int[] b = Arrays.copyOf(a, a.length);
assertEquals(a, U.copyIfExceeded(a, a.length));
assertArrayEquals(b, U.copyIfExceeded(a, a.length));
for (int j = 0; j < a.length - 1; j++)
assertArrayEquals(Arrays.copyOf(b, j), U.copyIfExceeded(a, j));
}
}
/**
*
*/
public void testIsIncreasingArray() {
assertTrue(U.isIncreasingArray(EMPTY, 0));
assertTrue(U.isIncreasingArray(new int[]{Integer.MIN_VALUE, -10, 1, 13, Integer.MAX_VALUE}, 5));
assertTrue(U.isIncreasingArray(new int[]{1, 2, 3, -1, 5}, 0));
assertTrue(U.isIncreasingArray(new int[]{1, 2, 3, -1, 5}, 3));
assertFalse(U.isIncreasingArray(new int[]{1, 2, 3, -1, 5}, 4));
assertFalse(U.isIncreasingArray(new int[]{1, 2, 3, -1, 5}, 5));
assertFalse(U.isIncreasingArray(new int[]{1, 2, 3, 3, 5}, 4));
assertTrue(U.isIncreasingArray(new int[]{1, -1}, 1));
assertFalse(U.isIncreasingArray(new int[]{1, -1}, 2));
assertTrue(U.isIncreasingArray(new int[]{13, 13, 13}, 1));
assertFalse(U.isIncreasingArray(new int[]{13, 13, 13}, 2));
assertFalse(U.isIncreasingArray(new int[]{13, 13, 13}, 3));
}
/**
*
*/
public void testIsNonDecreasingArray() {
assertTrue(U.isNonDecreasingArray(EMPTY, 0));
assertTrue(U.isNonDecreasingArray(new int[]{Integer.MIN_VALUE, -10, 1, 13, Integer.MAX_VALUE}, 5));
assertTrue(U.isNonDecreasingArray(new int[]{1, 2, 3, -1, 5}, 0));
assertTrue(U.isNonDecreasingArray(new int[]{1, 2, 3, -1, 5}, 3));
assertFalse(U.isNonDecreasingArray(new int[]{1, 2, 3, -1, 5}, 4));
assertFalse(U.isNonDecreasingArray(new int[]{1, 2, 3, -1, 5}, 5));
assertTrue(U.isNonDecreasingArray(new int[]{1, 2, 3, 3, 5}, 4));
assertTrue(U.isNonDecreasingArray(new int[]{1, -1}, 1));
assertFalse(U.isNonDecreasingArray(new int[]{1, -1}, 2));
assertTrue(U.isNonDecreasingArray(new int[]{13, 13, 13}, 1));
assertTrue(U.isNonDecreasingArray(new int[]{13, 13, 13}, 2));
assertTrue(U.isNonDecreasingArray(new int[]{13, 13, 13}, 3));
}
/**
* Test InetAddress Comparator.
*/
public void testInetAddressesComparator() {
List<InetSocketAddress> ips = new ArrayList<InetSocketAddress>() {
{
add(new InetSocketAddress("127.0.0.1", 1));
add(new InetSocketAddress("10.0.0.1", 1));
add(new InetSocketAddress("172.16.0.1", 1));
add(new InetSocketAddress("192.168.0.1", 1));
add(new InetSocketAddress("100.0.0.1", 1));
add(new InetSocketAddress("XXX", 1));
}
};
Collections.sort(ips, U.inetAddressesComparator(true));
assertTrue(ips.get(0).getAddress().isLoopbackAddress());
assertTrue(ips.get(ips.size() - 1).isUnresolved());
Collections.sort(ips, U.inetAddressesComparator(false));
assertTrue(ips.get(ips.size() - 2).getAddress().isLoopbackAddress());
assertTrue(ips.get(ips.size() - 1).isUnresolved());
}
public void testMD5Calculation() throws Exception {
String md5 = U.calculateMD5(new ByteArrayInputStream("Corrupted information.".getBytes()));
assertEquals("d7dbe555be2eee7fa658299850169fa1", md5);
}
/**
* @throws Exception If failed.
*/
public void testResolveLocalAddresses() throws Exception {
InetAddress inetAddress = InetAddress.getByName("0.0.0.0");
IgniteBiTuple<Collection<String>, Collection<String>> addrs = U.resolveLocalAddresses(inetAddress);
Collection<String> hostNames = addrs.get2();
assertFalse(hostNames.contains(null));
assertFalse(hostNames.contains(""));
assertFalse(hostNames.contains("127.0.0.1"));
assertFalse(F.exist(hostNames, new IgnitePredicate<String>() {
@Override public boolean apply(String hostName) {
return hostName.contains("localhost") || hostName.contains("0:0:0:0:0:0:0:1");
}
}));
}
/**
*
*/
public void testToSocketAddressesNoDuplicates() {
Collection<String> addrs = new ArrayList<>();
addrs.add("127.0.0.1");
addrs.add("localhost");
Collection<String> hostNames = new ArrayList<>();
int port = 1234;
assertEquals(1, U.toSocketAddresses(addrs, hostNames, port).size());
}
/**
* Composes a test String of the given length.
*
* @param len The length.
* @return The String.
*/
private static String composeString(int len) {
StringBuilder sb = new StringBuilder();
for (int i=0; i<len; i++)
sb.append((char)i);
String x = sb.toString();
assertEquals(len, x.length());
return x;
}
/**
* Writes the given String to a DataOutput, reads from DataInput, then checks if they are the same.
*
* @param s0 The String to check serialization for.
* @throws Exception On error.
*/
private static void checkString(String s0) throws Exception {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
DataOutput dout = new DataOutputStream(baos);
IgfsUtils.writeUTF(dout, s0);
DataInput din = new DataInputStream(new ByteArrayInputStream(baos.toByteArray()));
String s1 = IgfsUtils.readUTF(din);
assertEquals(s0, s1);
}
/**
* Tests long String serialization/deserialization.
*
* @throws Exception If failed.
*/
public void testLongStringWriteUTF() throws Exception {
checkString(null);
checkString("");
checkString("a");
checkString("Quick brown fox jumps over the lazy dog.");
String x = composeString(0xFFFF / 4 - 1);
checkString(x);
x = composeString(0xFFFF / 4);
checkString(x);
x = composeString(0xFFFF / 4 + 1);
checkString(x);
}
/**
*
*/
public void testCeilPow2() throws Exception {
assertEquals(2, U.ceilPow2(2));
assertEquals(4, U.ceilPow2(3));
assertEquals(4, U.ceilPow2(4));
assertEquals(8, U.ceilPow2(5));
assertEquals(8, U.ceilPow2(6));
assertEquals(8, U.ceilPow2(7));
assertEquals(8, U.ceilPow2(8));
assertEquals(16, U.ceilPow2(9));
assertEquals(1 << 15, U.ceilPow2((1 << 15) - 1));
assertEquals(1 << 15, U.ceilPow2(1 << 15));
assertEquals(1 << 16, U.ceilPow2((1 << 15) + 1));
assertEquals(1 << 26, U.ceilPow2((1 << 26) - 100));
assertEquals(1 << 26, U.ceilPow2(1 << 26));
assertEquals(1 << 27, U.ceilPow2((1 << 26) + 100));
for (int i = (int)Math.pow(2, 30); i < Integer.MAX_VALUE; i++)
assertEquals((int)Math.pow(2, 30), U.ceilPow2(i));
for (int i = Integer.MIN_VALUE; i < 0; i++)
assertEquals(0, U.ceilPow2(i));
}
/**
*
*/
public void testIsOldestNodeVersionAtLeast() {
IgniteProductVersion v240 = IgniteProductVersion.fromString("2.4.0");
IgniteProductVersion v241 = IgniteProductVersion.fromString("2.4.1");
IgniteProductVersion v250 = IgniteProductVersion.fromString("2.5.0");
IgniteProductVersion v250ts = IgniteProductVersion.fromString("2.5.0-b1-3");
TcpDiscoveryNode node240 = new TcpDiscoveryNode();
node240.version(v240);
TcpDiscoveryNode node241 = new TcpDiscoveryNode();
node241.version(v241);
TcpDiscoveryNode node250 = new TcpDiscoveryNode();
node250.version(v250);
TcpDiscoveryNode node250ts = new TcpDiscoveryNode();
node250ts.version(v250ts);
assertTrue(U.isOldestNodeVersionAtLeast(v240, Arrays.asList(node240, node241, node250, node250ts)));
assertFalse(U.isOldestNodeVersionAtLeast(v241, Arrays.asList(node240, node241, node250, node250ts)));
assertTrue(U.isOldestNodeVersionAtLeast(v250, Arrays.asList(node250, node250ts)));
assertTrue(U.isOldestNodeVersionAtLeast(v250ts, Arrays.asList(node250, node250ts)));
}
/**
*
*/
public void testDoInParallel() throws Throwable {
CyclicBarrier barrier = new CyclicBarrier(3);
IgniteUtils.doInParallel(3,
Executors.newFixedThreadPool(3),
Arrays.asList(1, 2, 3),
i -> {
try {
barrier.await(1, TimeUnit.SECONDS);
}
catch (Exception e) {
throw new IgniteCheckedException(e);
}
}
);
}
/**
*
*/
public void testDoInParallelBatch() {
CyclicBarrier barrier = new CyclicBarrier(3);
try {
IgniteUtils.doInParallel(2,
Executors.newFixedThreadPool(3),
Arrays.asList(1, 2, 3),
i -> {
try {
barrier.await(400, TimeUnit.MILLISECONDS);
}
catch (Exception e) {
throw new IgniteCheckedException(e);
}
}
);
fail("Should throw timeout exception");
}
catch (Exception e) {
assertTrue(e.getCause() instanceof TimeoutException);
}
}
/**
*
*/
public void testDoInParallelException() {
String expectedException = "ExpectedException";
try {
IgniteUtils.doInParallel(3,
Executors.newFixedThreadPool(1),
Arrays.asList(1, 2, 3),
i -> {
if (i == 1)
throw new IgniteCheckedException(expectedException);
}
);
fail("Should throw ParallelExecutionException");
}
catch (IgniteCheckedException e) {
assertEquals(expectedException, e.getMessage());
}
}
/**
* Test enum.
*/
private enum TestEnum {
E1,
E2,
E3
}
@Documented @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.TYPE)
private @interface Ann1 {}
@Documented @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.TYPE)
private @interface Ann2 {}
private static class A1 implements I3, I5 {}
private static class A2 extends A1 {}
private static class A3 implements I5 {}
@Ann1 private interface I1 {}
private interface I2 extends I1 {}
private interface I3 extends I2 {}
@Ann2 private interface I4 {}
private interface I5 extends I4 {}
}
|
IGNITE-8006 Fixed flaky test
|
modules/core/src/test/java/org/apache/ignite/internal/util/IgniteUtilsSelfTest.java
|
IGNITE-8006 Fixed flaky test
|
|
Java
|
apache-2.0
|
d72788959af684e77db588f8aff768bfc8bdac45
| 0
|
EronWright/Canova,tripiana/Canova,ZenDevelopmentSystems/Canova,EronWright/Canova,ZenDevelopmentSystems/Canova,tripiana/Canova
|
package org.canova.api.formats.output.impl;
import org.canova.api.conf.Configuration;
import org.canova.api.exceptions.CanovaException;
import org.canova.api.formats.output.OutputFormat;
import org.canova.api.records.writer.RecordWriter;
import org.canova.api.records.writer.impl.SVMLightRecordWriter;
import java.io.File;
import java.io.FileNotFoundException;
/**
* Created by agibsonccc on 1/11/15.
*/
public class SVMLightOutputFormat implements OutputFormat {
@Override
public RecordWriter createWriter(Configuration conf) throws CanovaException {
String outputPath = conf.get(OutputFormat.OUTPUT_PATH,".");
try {
//return new LineRecordWriter(new File(outputPath));
return new SVMLightRecordWriter(new File(outputPath));
} catch (Exception e) {
throw new CanovaException(e);
}
}
}
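For context on the fix noted in the commit message below: createWriter(Configuration) reads OutputFormat.OUTPUT_PATH from the configuration and now wraps that file in an SVMLightRecordWriter rather than the LineRecordWriter used previously. A hedged usage sketch follows (hypothetical driver code, assuming the Hadoop-style Configuration.set(String, String) setter this Configuration class mirrors; the path is invented):

// Hypothetical caller, not part of the record above; createWriter throws CanovaException on failure.
Configuration conf = new Configuration();
conf.set(OutputFormat.OUTPUT_PATH, "/tmp/data.svmlight"); // assumed writable output path
RecordWriter writer = new SVMLightOutputFormat().createWriter(conf); // SVMLightRecordWriter under the hood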
|
canova-api/src/main/java/org/canova/api/formats/output/impl/SVMLightOutputFormat.java
|
package org.canova.api.formats.output.impl;
import org.canova.api.conf.Configuration;
import org.canova.api.exceptions.CanovaException;
import org.canova.api.formats.output.OutputFormat;
import org.canova.api.records.writer.RecordWriter;
import org.canova.api.records.writer.impl.LineRecordWriter;
import java.io.File;
import java.io.FileNotFoundException;
/**
* Created by agibsonccc on 1/11/15.
*/
public class SVMLightOutputFormat implements OutputFormat {
@Override
public RecordWriter createWriter(Configuration conf) throws CanovaException {
String outputPath = conf.get(OutputFormat.OUTPUT_PATH,".");
try {
return new LineRecordWriter(new File(outputPath));
} catch (Exception e) {
throw new CanovaException(e);
}
}
}
|
updated the returned output record writer to be the correct one - SVMLightRecordWriter
|
canova-api/src/main/java/org/canova/api/formats/output/impl/SVMLightOutputFormat.java
|
updated the returned output record writer to be the correct one - SVMLightRecordWriter
|
|
Java
|
apache-2.0
|
a1393714e48b528f160e59169647ebafd6697fb9
| 0
|
dmvolod/camel,nikhilvibhav/camel,oalles/camel,nicolaferraro/camel,scranton/camel,DariusX/camel,snurmine/camel,RohanHart/camel,bhaveshdt/camel,tadayosi/camel,driseley/camel,yuruki/camel,pkletsko/camel,isavin/camel,scranton/camel,apache/camel,salikjan/camel,pkletsko/camel,pmoerenhout/camel,rmarting/camel,sverkera/camel,Fabryprog/camel,ullgren/camel,alvinkwekel/camel,w4tson/camel,dmvolod/camel,nboukhed/camel,FingolfinTEK/camel,nboukhed/camel,jamesnetherton/camel,curso007/camel,tdiesler/camel,w4tson/camel,tdiesler/camel,lburgazzoli/apache-camel,oalles/camel,jkorab/camel,tdiesler/camel,edigrid/camel,jonmcewen/camel,chirino/camel,pkletsko/camel,tlehoux/camel,w4tson/camel,pmoerenhout/camel,akhettar/camel,JYBESSON/camel,CodeSmell/camel,christophd/camel,zregvart/camel,veithen/camel,cunningt/camel,pax95/camel,chirino/camel,anoordover/camel,tlehoux/camel,FingolfinTEK/camel,sverkera/camel,snurmine/camel,anton-k11/camel,isavin/camel,bhaveshdt/camel,adessaigne/camel,gilfernandes/camel,chirino/camel,akhettar/camel,anton-k11/camel,curso007/camel,tkopczynski/camel,tlehoux/camel,jonmcewen/camel,yuruki/camel,apache/camel,nikvaessen/camel,NickCis/camel,objectiser/camel,veithen/camel,Thopap/camel,scranton/camel,FingolfinTEK/camel,jlpedrosa/camel,apache/camel,gautric/camel,arnaud-deprez/camel,allancth/camel,curso007/camel,hqstevenson/camel,drsquidop/camel,ullgren/camel,Fabryprog/camel,RohanHart/camel,jamesnetherton/camel,nikvaessen/camel,mgyongyosi/camel,snurmine/camel,rmarting/camel,nikhilvibhav/camel,jkorab/camel,acartapanis/camel,anoordover/camel,chirino/camel,snurmine/camel,arnaud-deprez/camel,gilfernandes/camel,christophd/camel,edigrid/camel,borcsokj/camel,kevinearls/camel,gnodet/camel,tdiesler/camel,anoordover/camel,mcollovati/camel,Thopap/camel,jonmcewen/camel,neoramon/camel,lburgazzoli/apache-camel,drsquidop/camel,cunningt/camel,YoshikiHigo/camel,arnaud-deprez/camel,Thopap/camel,dmvolod/camel,borcsokj/camel,chirino/camel,jlpedrosa/camel,acartapanis/camel,isavin/camel,acartapanis/camel,CodeSmell/camel,tkopczynski/camel,bgaudaen/camel,gautric/camel,pkletsko/camel,jmandawg/camel,sverkera/camel,jmandawg/camel,zregvart/camel,sabre1041/camel,sirlatrom/camel,mgyongyosi/camel,pkletsko/camel,mcollovati/camel,scranton/camel,NickCis/camel,ssharma/camel,driseley/camel,gilfernandes/camel,christophd/camel,onders86/camel,NickCis/camel,anton-k11/camel,mgyongyosi/camel,jamesnetherton/camel,yuruki/camel,lburgazzoli/camel,edigrid/camel,anoordover/camel,ssharma/camel,mgyongyosi/camel,tkopczynski/camel,sabre1041/camel,tadayosi/camel,prashant2402/camel,pax95/camel,bhaveshdt/camel,JYBESSON/camel,onders86/camel,lburgazzoli/apache-camel,acartapanis/camel,edigrid/camel,CodeSmell/camel,curso007/camel,pax95/camel,sabre1041/camel,jamesnetherton/camel,jmandawg/camel,gnodet/camel,cunningt/camel,nikvaessen/camel,jarst/camel,akhettar/camel,jmandawg/camel,apache/camel,zregvart/camel,sabre1041/camel,borcsokj/camel,alvinkwekel/camel,jlpedrosa/camel,sabre1041/camel,lburgazzoli/camel,pmoerenhout/camel,yuruki/camel,onders86/camel,nikhilvibhav/camel,hqstevenson/camel,jkorab/camel,hqstevenson/camel,gnodet/camel,neoramon/camel,sabre1041/camel,tadayosi/camel,pmoerenhout/camel,gautric/camel,pax95/camel,tlehoux/camel,adessaigne/camel,drsquidop/camel,lburgazzoli/camel,punkhorn/camel-upstream,sirlatrom/camel,kevinearls/camel,tkopczynski/camel,edigrid/camel,mgyongyosi/camel,anton-k11/camel,YoshikiHigo/camel,chirino/camel,bhaveshdt/camel,davidkarlsen/camel,pax95/camel,hqstevenson/camel,lburgazzoli/camel,jarst/camel,arnaud-deprez/camel,jlpedrosa/camel,c
unningt/camel,nicolaferraro/camel,RohanHart/camel,apache/camel,kevinearls/camel,DariusX/camel,YoshikiHigo/camel,mcollovati/camel,tlehoux/camel,apache/camel,drsquidop/camel,gilfernandes/camel,neoramon/camel,objectiser/camel,bgaudaen/camel,snurmine/camel,prashant2402/camel,ssharma/camel,gnodet/camel,dmvolod/camel,gilfernandes/camel,anton-k11/camel,anoordover/camel,nikhilvibhav/camel,anton-k11/camel,jlpedrosa/camel,CodeSmell/camel,onders86/camel,tadayosi/camel,onders86/camel,oalles/camel,w4tson/camel,jmandawg/camel,jamesnetherton/camel,gautric/camel,pax95/camel,objectiser/camel,adessaigne/camel,snurmine/camel,bgaudaen/camel,yuruki/camel,ssharma/camel,RohanHart/camel,nicolaferraro/camel,lburgazzoli/apache-camel,pmoerenhout/camel,w4tson/camel,tlehoux/camel,oalles/camel,isavin/camel,tdiesler/camel,veithen/camel,FingolfinTEK/camel,sverkera/camel,mcollovati/camel,nboukhed/camel,prashant2402/camel,rmarting/camel,gautric/camel,scranton/camel,zregvart/camel,pmoerenhout/camel,sirlatrom/camel,hqstevenson/camel,edigrid/camel,ullgren/camel,akhettar/camel,neoramon/camel,curso007/camel,lburgazzoli/camel,jamesnetherton/camel,NickCis/camel,bgaudaen/camel,allancth/camel,allancth/camel,davidkarlsen/camel,jlpedrosa/camel,tkopczynski/camel,arnaud-deprez/camel,DariusX/camel,sverkera/camel,RohanHart/camel,alvinkwekel/camel,punkhorn/camel-upstream,jkorab/camel,driseley/camel,ssharma/camel,mgyongyosi/camel,allancth/camel,NickCis/camel,neoramon/camel,tkopczynski/camel,allancth/camel,gnodet/camel,prashant2402/camel,punkhorn/camel-upstream,hqstevenson/camel,JYBESSON/camel,RohanHart/camel,borcsokj/camel,jarst/camel,kevinearls/camel,cunningt/camel,jarst/camel,FingolfinTEK/camel,davidkarlsen/camel,ullgren/camel,curso007/camel,arnaud-deprez/camel,borcsokj/camel,driseley/camel,sirlatrom/camel,adessaigne/camel,DariusX/camel,scranton/camel,anoordover/camel,jarst/camel,dmvolod/camel,jmandawg/camel,jonmcewen/camel,JYBESSON/camel,christophd/camel,nikvaessen/camel,nicolaferraro/camel,nikvaessen/camel,Thopap/camel,gautric/camel,lburgazzoli/apache-camel,pkletsko/camel,nboukhed/camel,YoshikiHigo/camel,veithen/camel,sirlatrom/camel,bgaudaen/camel,nikvaessen/camel,tdiesler/camel,alvinkwekel/camel,rmarting/camel,adessaigne/camel,jkorab/camel,objectiser/camel,kevinearls/camel,lburgazzoli/camel,akhettar/camel,cunningt/camel,driseley/camel,isavin/camel,bgaudaen/camel,FingolfinTEK/camel,tadayosi/camel,NickCis/camel,prashant2402/camel,Fabryprog/camel,drsquidop/camel,acartapanis/camel,acartapanis/camel,oalles/camel,JYBESSON/camel,oalles/camel,adessaigne/camel,driseley/camel,davidkarlsen/camel,jkorab/camel,tadayosi/camel,JYBESSON/camel,gilfernandes/camel,veithen/camel,akhettar/camel,jarst/camel,dmvolod/camel,rmarting/camel,christophd/camel,onders86/camel,salikjan/camel,lburgazzoli/apache-camel,neoramon/camel,bhaveshdt/camel,YoshikiHigo/camel,Fabryprog/camel,ssharma/camel,Thopap/camel,borcsokj/camel,punkhorn/camel-upstream,nboukhed/camel,allancth/camel,Thopap/camel,sirlatrom/camel,prashant2402/camel,sverkera/camel,nboukhed/camel,YoshikiHigo/camel,christophd/camel,w4tson/camel,jonmcewen/camel,drsquidop/camel,veithen/camel,yuruki/camel,bhaveshdt/camel,kevinearls/camel,isavin/camel,jonmcewen/camel,rmarting/camel
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.dataformat.csv;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.camel.Exchange;
import org.apache.camel.NoTypeConversionAvailableException;
import org.apache.camel.util.ExchangeHelper;
import org.apache.camel.util.IOHelper;
import org.apache.camel.util.ObjectHelper;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter;
/**
* This class marshals data into a CSV format.
*/
abstract class CsvMarshaller {
private final CSVFormat format;
private CsvMarshaller(CSVFormat format) {
this.format = format;
}
/**
* Creates a new instance.
*
* @param format CSV format
* @param dataFormat Camel CSV data format
* @return New instance
*/
public static CsvMarshaller create(CSVFormat format, CsvDataFormat dataFormat) {
// If we don't want the header record, clear it
if (format.getSkipHeaderRecord()) {
format = format.withHeader((String[]) null);
}
String[] fixedColumns = dataFormat.getHeader();
if (fixedColumns != null && fixedColumns.length > 0) {
return new FixedColumnsMarshaller(format, fixedColumns);
}
return new DynamicColumnsMarshaller(format);
}
/**
* Marshals the given object into the given stream.
*
* @param exchange Exchange (used for access to type conversion)
* @param object Body to marshal
* @param outputStream Output stream of the CSV
* @throws NoTypeConversionAvailableException if the body cannot be converted
* @throws IOException if we cannot write into the given stream
*/
public void marshal(Exchange exchange, Object object, OutputStream outputStream) throws NoTypeConversionAvailableException, IOException {
CSVPrinter printer = new CSVPrinter(new OutputStreamWriter(outputStream, IOHelper.getCharsetName(exchange)), format);
try {
Iterator it = ObjectHelper.createIterator(object);
while (it.hasNext()) {
Object child = it.next();
printer.printRecord(getRecordValues(exchange, child));
}
} finally {
IOHelper.close(printer);
}
}
private Iterable<?> getRecordValues(Exchange exchange, Object data) throws NoTypeConversionAvailableException {
// each row must be a map or list based
Map<?, ?> map = exchange.getContext().getTypeConverter().tryConvertTo(Map.class, exchange, data);
if (map != null) {
return getMapRecordValues(map);
}
return ExchangeHelper.convertToMandatoryType(exchange, List.class, data);
}
/**
* Gets the CSV record values of the given map.
*
* @param map Input map
* @return CSV record values of the given map
*/
protected abstract Iterable<?> getMapRecordValues(Map<?, ?> map);
//region Implementations
/**
* This marshaller has fixed columns
*/
private static final class FixedColumnsMarshaller extends CsvMarshaller {
private final String[] fixedColumns;
private FixedColumnsMarshaller(CSVFormat format, String[] fixedColumns) {
super(format);
this.fixedColumns = Arrays.copyOf(fixedColumns, fixedColumns.length);
}
@Override
protected Iterable<?> getMapRecordValues(Map<?, ?> map) {
List<Object> result = new ArrayList<Object>(fixedColumns.length);
for (String key : fixedColumns) {
result.add(map.get(key));
}
return result;
}
}
/**
* This marshaller adapts the columns but always keeps them in the same order
*/
private static final class DynamicColumnsMarshaller extends CsvMarshaller {
private DynamicColumnsMarshaller(CSVFormat format) {
super(format);
}
@Override
protected Iterable<?> getMapRecordValues(Map<?, ?> map) {
List<Object> result = new ArrayList<Object>(map.size());
for (Object key : map.keySet()) {
result.add(map.get(key));
}
return result;
}
}
//endregion
}
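To make the marshalling path above concrete: marshal() iterates the body, turns each element into either a Map or a List of values, and hands those values to a commons-csv CSVPrinter. Below is a minimal, self-contained sketch of what the fixed-columns case boils down to for a single row, assuming only commons-csv on the classpath (the column names and values are invented for illustration):

import java.io.StringWriter;
import java.util.Arrays;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter;

public class CsvMarshalSketch {
    public static void main(String[] args) throws Exception {
        // Fixed columns decide the output order, much like FixedColumnsMarshaller above.
        CSVFormat format = CSVFormat.DEFAULT.withHeader("id", "name");
        StringWriter out = new StringWriter();
        try (CSVPrinter printer = new CSVPrinter(out, format)) {
            printer.printRecord(Arrays.asList(1, "test")); // one row, values in column order
        }
        System.out.print(out); // header plus one record: id,name then 1,test
    }
}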
|
components/camel-csv/src/main/java/org/apache/camel/dataformat/csv/CsvMarshaller.java
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.dataformat.csv;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.camel.Exchange;
import org.apache.camel.NoTypeConversionAvailableException;
import org.apache.camel.util.ExchangeHelper;
import org.apache.camel.util.IOHelper;
import org.apache.camel.util.MessageHelper;
import org.apache.camel.util.ObjectHelper;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter;
/**
* This class marshals data into a CSV format.
*/
abstract class CsvMarshaller {
private final CSVFormat format;
private CsvMarshaller(CSVFormat format) {
this.format = format;
}
/**
* Creates a new instance.
*
* @param format CSV format
* @param dataFormat Camel CSV data format
* @return New instance
*/
public static CsvMarshaller create(CSVFormat format, CsvDataFormat dataFormat) {
// If we don't want the header record, clear it
if (format.getSkipHeaderRecord()) {
format = format.withHeader((String[]) null);
}
String[] fixedColumns = dataFormat.getHeader();
if (fixedColumns != null && fixedColumns.length > 0) {
return new FixedColumnsMarshaller(format, fixedColumns);
}
return new DynamicColumnsMarshaller(format);
}
/**
* Marshals the given object into the given stream.
*
* @param exchange Exchange (used for access to type conversion)
* @param object Body to marshal
* @param outputStream Output stream of the CSV
* @throws NoTypeConversionAvailableException if the body cannot be converted
* @throws IOException if we cannot write into the given stream
*/
public void marshal(Exchange exchange, Object object, OutputStream outputStream) throws NoTypeConversionAvailableException, IOException {
CSVPrinter printer = new CSVPrinter(new OutputStreamWriter(outputStream, IOHelper.getCharsetName(exchange)), format);
try {
Iterator it = ObjectHelper.createIterator(object);
while (it.hasNext()) {
Object child = it.next();
printer.printRecord(getRecordValues(exchange, child));
}
} finally {
IOHelper.close(printer);
}
}
private Iterable<?> getRecordValues(Exchange exchange, Object data) throws NoTypeConversionAvailableException {
// each row must be a map or list based
Map<?, ?> map = exchange.getContext().getTypeConverter().tryConvertTo(Map.class, exchange, data);
if (map != null) {
return getMapRecordValues(map);
}
return ExchangeHelper.convertToMandatoryType(exchange, List.class, data);
}
/**
* Gets the CSV record values of the given map.
*
* @param map Input map
* @return CSV record values of the given map
*/
protected abstract Iterable<?> getMapRecordValues(Map<?, ?> map);
//region Implementations
/**
* This marshaller has fixed columns
*/
private static final class FixedColumnsMarshaller extends CsvMarshaller {
private final String[] fixedColumns;
private FixedColumnsMarshaller(CSVFormat format, String[] fixedColumns) {
super(format);
this.fixedColumns = Arrays.copyOf(fixedColumns, fixedColumns.length);
}
@Override
protected Iterable<?> getMapRecordValues(Map<?, ?> map) {
List<Object> result = new ArrayList<Object>(fixedColumns.length);
for (String key : fixedColumns) {
result.add(map.get(key));
}
return result;
}
}
/**
* This marshaller adapts the columns but always keeps them in the same order
*/
private static final class DynamicColumnsMarshaller extends CsvMarshaller {
private DynamicColumnsMarshaller(CSVFormat format) {
super(format);
}
@Override
protected Iterable<?> getMapRecordValues(Map<?, ?> map) {
List<Object> result = new ArrayList<Object>(map.size());
for (Object key : map.keySet()) {
result.add(map.get(key));
}
return result;
}
}
//endregion
}
|
remove unused import
|
components/camel-csv/src/main/java/org/apache/camel/dataformat/csv/CsvMarshaller.java
|
remove unused import
|
|
Java
|
apache-2.0
|
61f2aa3078d7d7594530ffe3a7bd4ffc890b754c
| 0
|
apache/solr,apache/solr,apache/solr,apache/solr,apache/solr
|
package org.apache.lucene.benchmark.byTask.feeds;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.Locale;
import java.util.zip.GZIPInputStream;
import org.apache.lucene.benchmark.byTask.utils.Config;
/**
* A DocMaker using the (compressed) Trec collection for its input.
* <p>
* Config properties:<ul>
* <li>work.dir=<path to the root of docs and indexes dirs| Default: work></li>
* <li>docs.dir=<path to the docs dir| Default: trec></li>
* </ul>
*/
public class TrecDocMaker extends BasicDocMaker {
private static final String newline = System.getProperty("line.separator");
protected ThreadLocal dateFormat = new ThreadLocal();
protected File dataDir = null;
protected ArrayList inputFiles = new ArrayList();
protected int nextFile = 0;
protected int iteration=0;
protected BufferedReader reader;
private GZIPInputStream zis;
private static final String DATE_FORMATS [] = {
"EEE, dd MMM yyyy kk:mm:ss z", //Tue, 09 Dec 2003 22:39:08 GMT
"EEE MMM dd kk:mm:ss yyyy z", //Tue Dec 09 16:45:08 2003 EST
"EEE, dd-MMM-':'y kk:mm:ss z", //Tue, 09 Dec 2003 22:39:08 GMT
"EEE, dd-MMM-yyy kk:mm:ss z", //Tue, 09 Dec 2003 22:39:08 GMT
};
/* (non-Javadoc)
* @see SimpleDocMaker#setConfig(java.util.Properties)
*/
public void setConfig(Config config) {
super.setConfig(config);
File workDir = new File(config.get("work.dir","work"));
String d = config.get("docs.dir","trec");
dataDir = new File(d);
if (!dataDir.isAbsolute()) {
dataDir = new File(workDir, d);
}
collectFiles(dataDir,inputFiles);
if (inputFiles.size()==0) {
throw new RuntimeException("No txt files in dataDir: "+dataDir.getAbsolutePath());
}
}
protected void openNextFile() throws NoMoreDataException, Exception {
closeInputs();
int retries = 0;
while (true) {
File f = null;
synchronized (this) {
if (nextFile >= inputFiles.size()) {
// exhausted files, start a new round, unless forever set to false.
if (!forever) {
throw new NoMoreDataException();
}
nextFile = 0;
iteration++;
}
f = (File) inputFiles.get(nextFile++);
}
System.out.println("opening: "+f+" length: "+f.length());
try {
zis = new GZIPInputStream(new BufferedInputStream(new FileInputStream(f)));
reader = new BufferedReader(new InputStreamReader(zis));
return;
} catch (Exception e) {
retries++;
if (retries<20) {
System.out.println("Skipping 'bad' file "+f.getAbsolutePath()+" #retries="+retries);
continue;
} else {
throw new NoMoreDataException();
}
}
}
}
protected void closeInputs() {
if (zis!=null) {
try {
zis.close();
} catch (IOException e) {
System.out.println("closeInputs(): Ingnoring error: "+e);
e.printStackTrace();
}
zis = null;
}
if (reader!=null) {
try {
reader.close();
} catch (IOException e) {
System.out.println("closeInputs(): Ingnoring error: "+e);
e.printStackTrace();
}
reader = null;
}
}
// read until finding a line that starts with the specified prefix
protected StringBuffer read (String prefix, StringBuffer sb, boolean collectMatchLine, boolean collectAll) throws Exception {
sb = (sb==null ? new StringBuffer() : sb);
String sep = "";
while (true) {
String line = reader.readLine();
if (line==null) {
openNextFile();
continue;
}
if (line.startsWith(prefix)) {
if (collectMatchLine) {
sb.append(sep+line);
sep = newline;
}
break;
}
if (collectAll) {
sb.append(sep+line);
sep = newline;
}
}
//System.out.println("read: "+sb);
return sb;
}
protected synchronized DocData getNextDocData() throws NoMoreDataException, Exception {
if (reader==null) {
openNextFile();
}
// 1. skip until doc start
read("<DOC>",null,false,false);
// 2. name
StringBuffer sb = read("<DOCNO>",null,true,false);
String name = sb.substring("<DOCNO>".length());
name = name.substring(0,name.indexOf("</DOCNO>"))+"_"+iteration;
// 3. skip until doc header
read("<DOCHDR>",null,false,false);
// 4. date
sb = read("Date: ",null,true,false);
String dateStr = sb.substring("Date: ".length());
// 5. skip until end of doc header
read("</DOCHDR>",null,false,false);
// 6. collect until end of doc
sb = read("</DOC>",null,false,true);
// this is the next document, so parse it
Date date = parseDate(dateStr);
HTMLParser p = getHtmlParser();
DocData docData = p.parse(name, date, sb, getDateFormat(0));
addBytes(sb.length()); // count char length of parsed html text (larger than the plain doc body text).
return docData;
}
protected DateFormat getDateFormat(int n) {
DateFormat df[] = (DateFormat[]) dateFormat.get();
if (df == null) {
df = new SimpleDateFormat[DATE_FORMATS.length];
for (int i = 0; i < df.length; i++) {
df[i] = new SimpleDateFormat(DATE_FORMATS[i],Locale.US);
df[i].setLenient(true);
}
dateFormat.set(df);
}
return df[n];
}
protected Date parseDate(String dateStr) {
Date date = null;
for (int i=0; i<DATE_FORMATS.length; i++) {
try {
date = getDateFormat(i).parse(dateStr.trim());
return date;
} catch (ParseException e) {
}
}
// do not fail test just because a date could not be parsed
System.out.println("ignoring date parse exception (assigning 'now') for: "+dateStr);
date = new Date(); // now
return date;
}
/*
* (non-Javadoc)
* @see DocMaker#resetInputs()
*/
public synchronized void resetInputs() {
super.resetInputs();
closeInputs();
nextFile = 0;
iteration = 0;
}
/*
* (non-Javadoc)
* @see DocMaker#numUniqueTexts()
*/
public int numUniqueTexts() {
return inputFiles.size();
}
}
|
contrib/benchmark/src/java/org/apache/lucene/benchmark/byTask/feeds/TrecDocMaker.java
|
package org.apache.lucene.benchmark.byTask.feeds;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.Locale;
import java.util.zip.GZIPInputStream;
import org.apache.lucene.benchmark.byTask.utils.Config;
/**
* A DocMaker using the (compressed) Trec collection for its input.
* <p>
* Config properties:<ul>
* <li>work.dir=<path to the root of docs and indexes dirs| Default: work></li>
* <li>docs.dir=<path to the docs dir| Default: trec></li>
* </ul>
*/
public class TrecDocMaker extends BasicDocMaker {
private static final String newline = System.getProperty("line.separator");
private ThreadLocal dateFormat = new ThreadLocal();
private File dataDir = null;
private ArrayList inputFiles = new ArrayList();
private int nextFile = 0;
private int iteration=0;
private BufferedReader reader;
private GZIPInputStream zis;
private static final String DATE_FORMATS [] = {
"EEE, dd MMM yyyy kk:mm:ss z", //Tue, 09 Dec 2003 22:39:08 GMT
"EEE MMM dd kk:mm:ss yyyy z", //Tue Dec 09 16:45:08 2003 EST
"EEE, dd-MMM-':'y kk:mm:ss z", //Tue, 09 Dec 2003 22:39:08 GMT
"EEE, dd-MMM-yyy kk:mm:ss z", //Tue, 09 Dec 2003 22:39:08 GMT
};
/* (non-Javadoc)
* @see SimpleDocMaker#setConfig(java.util.Properties)
*/
public void setConfig(Config config) {
super.setConfig(config);
File workDir = new File(config.get("work.dir","work"));
String d = config.get("docs.dir","trec");
dataDir = new File(d);
if (!dataDir.isAbsolute()) {
dataDir = new File(workDir, d);
}
collectFiles(dataDir,inputFiles);
if (inputFiles.size()==0) {
throw new RuntimeException("No txt files in dataDir: "+dataDir.getAbsolutePath());
}
}
private void openNextFile() throws NoMoreDataException, Exception {
closeInputs();
int retries = 0;
while (true) {
File f = null;
synchronized (this) {
if (nextFile >= inputFiles.size()) {
// exhausted files, start a new round, unless forever set to false.
if (!forever) {
throw new NoMoreDataException();
}
nextFile = 0;
iteration++;
}
f = (File) inputFiles.get(nextFile++);
}
System.out.println("opening: "+f+" length: "+f.length());
try {
zis = new GZIPInputStream(new BufferedInputStream(new FileInputStream(f)));
reader = new BufferedReader(new InputStreamReader(zis));
return;
} catch (Exception e) {
retries++;
if (retries<20) {
System.out.println("Skipping 'bad' file "+f.getAbsolutePath()+" #retries="+retries);
continue;
} else {
throw new NoMoreDataException();
}
}
}
}
private void closeInputs() {
if (zis!=null) {
try {
zis.close();
} catch (IOException e) {
System.out.println("closeInputs(): Ingnoring error: "+e);
e.printStackTrace();
}
zis = null;
}
if (reader!=null) {
try {
reader.close();
} catch (IOException e) {
System.out.println("closeInputs(): Ingnoring error: "+e);
e.printStackTrace();
}
reader = null;
}
}
// read until finding a line that starts with the specified prefix
private StringBuffer read (String prefix, StringBuffer sb, boolean collectMatchLine, boolean collectAll) throws Exception {
sb = (sb==null ? new StringBuffer() : sb);
String sep = "";
while (true) {
String line = reader.readLine();
if (line==null) {
openNextFile();
continue;
}
if (line.startsWith(prefix)) {
if (collectMatchLine) {
sb.append(sep+line);
sep = newline;
}
break;
}
if (collectAll) {
sb.append(sep+line);
sep = newline;
}
}
//System.out.println("read: "+sb);
return sb;
}
protected synchronized DocData getNextDocData() throws NoMoreDataException, Exception {
if (reader==null) {
openNextFile();
}
// 1. skip until doc start
read("<DOC>",null,false,false);
// 2. name
StringBuffer sb = read("<DOCNO>",null,true,false);
String name = sb.substring("<DOCNO>".length());
name = name.substring(0,name.indexOf("</DOCNO>"))+"_"+iteration;
// 3. skip until doc header
read("<DOCHDR>",null,false,false);
// 4. date
sb = read("Date: ",null,true,false);
String dateStr = sb.substring("Date: ".length());
// 5. skip until end of doc header
read("</DOCHDR>",null,false,false);
// 6. collect until end of doc
sb = read("</DOC>",null,false,true);
// this is the next document, so parse it
Date date = parseDate(dateStr);
HTMLParser p = getHtmlParser();
DocData docData = p.parse(name, date, sb, getDateFormat(0));
addBytes(sb.length()); // count char length of parsed html text (larger than the plain doc body text).
return docData;
}
private DateFormat getDateFormat(int n) {
DateFormat df[] = (DateFormat[]) dateFormat.get();
if (df == null) {
df = new SimpleDateFormat[DATE_FORMATS.length];
for (int i = 0; i < df.length; i++) {
df[i] = new SimpleDateFormat(DATE_FORMATS[i],Locale.US);
df[i].setLenient(true);
}
dateFormat.set(df);
}
return df[n];
}
private Date parseDate(String dateStr) {
Date date = null;
for (int i=0; i<DATE_FORMATS.length; i++) {
try {
date = getDateFormat(i).parse(dateStr.trim());
return date;
} catch (ParseException e) {
}
}
// do not fail test just because a date could not be parsed
System.out.println("ignoring date parse exception (assigning 'now') for: "+dateStr);
date = new Date(); // now
return date;
}
/*
* (non-Javadoc)
* @see DocMaker#resetInputs()
*/
public synchronized void resetInputs() {
super.resetInputs();
closeInputs();
nextFile = 0;
iteration = 0;
}
/*
* (non-Javadoc)
* @see DocMaker#numUniqueTexts()
*/
public int numUniqueTexts() {
return inputFiles.size();
}
}
|
allow TrecDocMaker to be more easily extended by making values protected
git-svn-id: 4c5078813df38efa56971a28e09a55254294f104@616882 13f79535-47bb-0310-9956-ffa450edef68
|
contrib/benchmark/src/java/org/apache/lucene/benchmark/byTask/feeds/TrecDocMaker.java
|
allow TrecDocMaker to be more easily extended by making values protected
|
|
Java
|
apache-2.0
|
61b072b9342fa7bef215f14ab9e41e640e144788
| 0
|
dslomov/intellij-community,ryano144/intellij-community,kdwink/intellij-community,asedunov/intellij-community,dslomov/intellij-community,ahb0327/intellij-community,tmpgit/intellij-community,clumsy/intellij-community,ryano144/intellij-community,michaelgallacher/intellij-community,petteyg/intellij-community,dslomov/intellij-community,amith01994/intellij-community,retomerz/intellij-community,apixandru/intellij-community,apixandru/intellij-community,petteyg/intellij-community,clumsy/intellij-community,ThiagoGarciaAlves/intellij-community,slisson/intellij-community,SerCeMan/intellij-community,muntasirsyed/intellij-community,xfournet/intellij-community,orekyuu/intellij-community,gnuhub/intellij-community,fnouama/intellij-community,lucafavatella/intellij-community,kdwink/intellij-community,kool79/intellij-community,MER-GROUP/intellij-community,MichaelNedzelsky/intellij-community,diorcety/intellij-community,SerCeMan/intellij-community,TangHao1987/intellij-community,MichaelNedzelsky/intellij-community,pwoodworth/intellij-community,ftomassetti/intellij-community,ivan-fedorov/intellij-community,semonte/intellij-community,youdonghai/intellij-community,asedunov/intellij-community,ol-loginov/intellij-community,suncycheng/intellij-community,MER-GROUP/intellij-community,Distrotech/intellij-community,izonder/intellij-community,ThiagoGarciaAlves/intellij-community,signed/intellij-community,signed/intellij-community,muntasirsyed/intellij-community,ol-loginov/intellij-community,TangHao1987/intellij-community,slisson/intellij-community,FHannes/intellij-community,orekyuu/intellij-community,asedunov/intellij-community,ryano144/intellij-community,caot/intellij-community,ThiagoGarciaAlves/intellij-community,izonder/intellij-community,samthor/intellij-community,petteyg/intellij-community,supersven/intellij-community,clumsy/intellij-community,akosyakov/intellij-community,robovm/robovm-studio,asedunov/intellij-community,MichaelNedzelsky/intellij-community,kdwink/intellij-community,adedayo/intellij-community,kool79/intellij-community,slisson/intellij-community,ahb0327/intellij-community,alphafoobar/intellij-community,apixandru/intellij-community,michaelgallacher/intellij-community,alphafoobar/intellij-community,ftomassetti/intellij-community,jagguli/intellij-community,idea4bsd/idea4bsd,ahb0327/intellij-community,gnuhub/intellij-community,hurricup/intellij-community,jagguli/intellij-community,muntasirsyed/intellij-community,jagguli/intellij-community,samthor/intellij-community,signed/intellij-community,orekyuu/intellij-community,tmpgit/intellij-community,supersven/intellij-community,izonder/intellij-community,salguarnieri/intellij-community,TangHao1987/intellij-community,allotria/intellij-community,SerCeMan/intellij-community,MichaelNedzelsky/intellij-community,da1z/intellij-community,caot/intellij-community,pwoodworth/intellij-community,samthor/intellij-community,jagguli/intellij-community,signed/intellij-community,apixandru/intellij-community,signed/intellij-community,salguarnieri/intellij-community,signed/intellij-community,retomerz/intellij-community,clumsy/intellij-community,ThiagoGarciaAlves/intellij-community,holmes/intellij-community,TangHao1987/intellij-community,kool79/intellij-community,xfournet/intellij-community,idea4bsd/idea4bsd,MichaelNedzelsky/intellij-community,robovm/robovm-studio,ahb0327/intellij-community,caot/intellij-community,holmes/intellij-community,fitermay/intellij-community,suncycheng/intellij-community,FHannes/intellij-community,alphafoobar/intellij-community,wreckJ/intellij-community,allotri
a/intellij-community,xfournet/intellij-community,alphafoobar/intellij-community,orekyuu/intellij-community,salguarnieri/intellij-community,diorcety/intellij-community,allotria/intellij-community,ibinti/intellij-community,apixandru/intellij-community,izonder/intellij-community,blademainer/intellij-community,Distrotech/intellij-community,nicolargo/intellij-community,lucafavatella/intellij-community,MER-GROUP/intellij-community,muntasirsyed/intellij-community,blademainer/intellij-community,samthor/intellij-community,salguarnieri/intellij-community,signed/intellij-community,fitermay/intellij-community,suncycheng/intellij-community,vvv1559/intellij-community,ibinti/intellij-community,SerCeMan/intellij-community,blademainer/intellij-community,semonte/intellij-community,kdwink/intellij-community,ahb0327/intellij-community,alphafoobar/intellij-community,gnuhub/intellij-community,muntasirsyed/intellij-community,Distrotech/intellij-community,diorcety/intellij-community,michaelgallacher/intellij-community,MichaelNedzelsky/intellij-community,ftomassetti/intellij-community,tmpgit/intellij-community,FHannes/intellij-community,MER-GROUP/intellij-community,vvv1559/intellij-community,jagguli/intellij-community,ivan-fedorov/intellij-community,da1z/intellij-community,fitermay/intellij-community,fengbaicanhe/intellij-community,ryano144/intellij-community,ThiagoGarciaAlves/intellij-community,asedunov/intellij-community,signed/intellij-community,suncycheng/intellij-community,Distrotech/intellij-community,MER-GROUP/intellij-community,retomerz/intellij-community,FHannes/intellij-community,kool79/intellij-community,ahb0327/intellij-community,MER-GROUP/intellij-community,wreckJ/intellij-community,petteyg/intellij-community,petteyg/intellij-community,Lekanich/intellij-community,blademainer/intellij-community,vvv1559/intellij-community,xfournet/intellij-community,MichaelNedzelsky/intellij-community,pwoodworth/intellij-community,ftomassetti/intellij-community,supersven/intellij-community,ivan-fedorov/intellij-community,hurricup/intellij-community,apixandru/intellij-community,wreckJ/intellij-community,ibinti/intellij-community,semonte/intellij-community,youdonghai/intellij-community,allotria/intellij-community,fengbaicanhe/intellij-community,tmpgit/intellij-community,orekyuu/intellij-community,vladmm/intellij-community,fengbaicanhe/intellij-community,xfournet/intellij-community,fnouama/intellij-community,asedunov/intellij-community,samthor/intellij-community,hurricup/intellij-community,ryano144/intellij-community,amith01994/intellij-community,clumsy/intellij-community,ol-loginov/intellij-community,xfournet/intellij-community,ivan-fedorov/intellij-community,alphafoobar/intellij-community,pwoodworth/intellij-community,kdwink/intellij-community,MichaelNedzelsky/intellij-community,ibinti/intellij-community,retomerz/intellij-community,allotria/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,da1z/intellij-community,mglukhikh/intellij-community,hurricup/intellij-community,michaelgallacher/intellij-community,robovm/robovm-studio,blademainer/intellij-community,Lekanich/intellij-community,pwoodworth/intellij-community,kool79/intellij-community,da1z/intellij-community,retomerz/intellij-community,retomerz/intellij-community,slisson/intellij-community,jagguli/intellij-community,allotria/intellij-community,pwoodworth/intellij-community,adedayo/intellij-community,tmpgit/intellij-community,amith01994/intellij-community,vladmm/intellij-community,apixandru/intellij-community,asedunov/intellij-community,kool79/
intellij-community,kool79/intellij-community,holmes/intellij-community,salguarnieri/intellij-community,mglukhikh/intellij-community,akosyakov/intellij-community,dslomov/intellij-community,suncycheng/intellij-community,MER-GROUP/intellij-community,alphafoobar/intellij-community,holmes/intellij-community,salguarnieri/intellij-community,ibinti/intellij-community,amith01994/intellij-community,ivan-fedorov/intellij-community,allotria/intellij-community,asedunov/intellij-community,signed/intellij-community,alphafoobar/intellij-community,ivan-fedorov/intellij-community,da1z/intellij-community,caot/intellij-community,youdonghai/intellij-community,ryano144/intellij-community,xfournet/intellij-community,asedunov/intellij-community,vvv1559/intellij-community,tmpgit/intellij-community,petteyg/intellij-community,adedayo/intellij-community,nicolargo/intellij-community,apixandru/intellij-community,blademainer/intellij-community,fengbaicanhe/intellij-community,FHannes/intellij-community,idea4bsd/idea4bsd,ThiagoGarciaAlves/intellij-community,izonder/intellij-community,supersven/intellij-community,asedunov/intellij-community,ahb0327/intellij-community,FHannes/intellij-community,ol-loginov/intellij-community,amith01994/intellij-community,Lekanich/intellij-community,adedayo/intellij-community,alphafoobar/intellij-community,da1z/intellij-community,mglukhikh/intellij-community,Lekanich/intellij-community,michaelgallacher/intellij-community,samthor/intellij-community,fnouama/intellij-community,akosyakov/intellij-community,ol-loginov/intellij-community,mglukhikh/intellij-community,samthor/intellij-community,fnouama/intellij-community,xfournet/intellij-community,michaelgallacher/intellij-community,adedayo/intellij-community,akosyakov/intellij-community,blademainer/intellij-community,samthor/intellij-community,slisson/intellij-community,retomerz/intellij-community,tmpgit/intellij-community,youdonghai/intellij-community,retomerz/intellij-community,vvv1559/intellij-community,holmes/intellij-community,michaelgallacher/intellij-community,nicolargo/intellij-community,semonte/intellij-community,vladmm/intellij-community,holmes/intellij-community,TangHao1987/intellij-community,kdwink/intellij-community,ftomassetti/intellij-community,signed/intellij-community,fengbaicanhe/intellij-community,idea4bsd/idea4bsd,diorcety/intellij-community,mglukhikh/intellij-community,semonte/intellij-community,michaelgallacher/intellij-community,youdonghai/intellij-community,supersven/intellij-community,blademainer/intellij-community,fitermay/intellij-community,semonte/intellij-community,robovm/robovm-studio,supersven/intellij-community,caot/intellij-community,caot/intellij-community,ftomassetti/intellij-community,xfournet/intellij-community,fnouama/intellij-community,xfournet/intellij-community,Lekanich/intellij-community,vvv1559/intellij-community,mglukhikh/intellij-community,muntasirsyed/intellij-community,xfournet/intellij-community,asedunov/intellij-community,slisson/intellij-community,gnuhub/intellij-community,Distrotech/intellij-community,vvv1559/intellij-community,michaelgallacher/intellij-community,FHannes/intellij-community,ibinti/intellij-community,ivan-fedorov/intellij-community,kool79/intellij-community,suncycheng/intellij-community,pwoodworth/intellij-community,Lekanich/intellij-community,semonte/intellij-community,fengbaicanhe/intellij-community,petteyg/intellij-community,Lekanich/intellij-community,da1z/intellij-community,idea4bsd/idea4bsd,holmes/intellij-community,hurricup/intellij-community,fnouama/intellij-community,gnuhub/i
ntellij-community,vladmm/intellij-community,ftomassetti/intellij-community,da1z/intellij-community,dslomov/intellij-community,nicolargo/intellij-community,ivan-fedorov/intellij-community,gnuhub/intellij-community,amith01994/intellij-community,muntasirsyed/intellij-community,clumsy/intellij-community,caot/intellij-community,petteyg/intellij-community,akosyakov/intellij-community,fengbaicanhe/intellij-community,apixandru/intellij-community,ibinti/intellij-community,vladmm/intellij-community,samthor/intellij-community,idea4bsd/idea4bsd,akosyakov/intellij-community,alphafoobar/intellij-community,izonder/intellij-community,mglukhikh/intellij-community,youdonghai/intellij-community,vvv1559/intellij-community,vvv1559/intellij-community,SerCeMan/intellij-community,gnuhub/intellij-community,diorcety/intellij-community,fnouama/intellij-community,pwoodworth/intellij-community,Distrotech/intellij-community,semonte/intellij-community,clumsy/intellij-community,fengbaicanhe/intellij-community,pwoodworth/intellij-community,adedayo/intellij-community,SerCeMan/intellij-community,orekyuu/intellij-community,wreckJ/intellij-community,clumsy/intellij-community,ryano144/intellij-community,slisson/intellij-community,lucafavatella/intellij-community,SerCeMan/intellij-community,ol-loginov/intellij-community,suncycheng/intellij-community,pwoodworth/intellij-community,ahb0327/intellij-community,diorcety/intellij-community,clumsy/intellij-community,robovm/robovm-studio,ahb0327/intellij-community,holmes/intellij-community,nicolargo/intellij-community,retomerz/intellij-community,orekyuu/intellij-community,petteyg/intellij-community,MER-GROUP/intellij-community,ryano144/intellij-community,dslomov/intellij-community,youdonghai/intellij-community,idea4bsd/idea4bsd,ol-loginov/intellij-community,fitermay/intellij-community,semonte/intellij-community,allotria/intellij-community,clumsy/intellij-community,robovm/robovm-studio,nicolargo/intellij-community,petteyg/intellij-community,dslomov/intellij-community,retomerz/intellij-community,ftomassetti/intellij-community,blademainer/intellij-community,salguarnieri/intellij-community,da1z/intellij-community,akosyakov/intellij-community,supersven/intellij-community,kdwink/intellij-community,diorcety/intellij-community,caot/intellij-community,akosyakov/intellij-community,youdonghai/intellij-community,blademainer/intellij-community,lucafavatella/intellij-community,Distrotech/intellij-community,ibinti/intellij-community,allotria/intellij-community,Lekanich/intellij-community,wreckJ/intellij-community,amith01994/intellij-community,fitermay/intellij-community,retomerz/intellij-community,da1z/intellij-community,ibinti/intellij-community,idea4bsd/idea4bsd,youdonghai/intellij-community,jagguli/intellij-community,youdonghai/intellij-community,fitermay/intellij-community,ol-loginov/intellij-community,hurricup/intellij-community,mglukhikh/intellij-community,TangHao1987/intellij-community,hurricup/intellij-community,TangHao1987/intellij-community,tmpgit/intellij-community,ThiagoGarciaAlves/intellij-community,fengbaicanhe/intellij-community,youdonghai/intellij-community,wreckJ/intellij-community,FHannes/intellij-community,wreckJ/intellij-community,da1z/intellij-community,gnuhub/intellij-community,signed/intellij-community,apixandru/intellij-community,suncycheng/intellij-community,salguarnieri/intellij-community,suncycheng/intellij-community,orekyuu/intellij-community,kdwink/intellij-community,muntasirsyed/intellij-community,ivan-fedorov/intellij-community,fitermay/intellij-community,akosyakov/intell
ij-community,lucafavatella/intellij-community,MER-GROUP/intellij-community,caot/intellij-community,SerCeMan/intellij-community,supersven/intellij-community,ahb0327/intellij-community,nicolargo/intellij-community,apixandru/intellij-community,signed/intellij-community,slisson/intellij-community,vvv1559/intellij-community,hurricup/intellij-community,retomerz/intellij-community,ibinti/intellij-community,ftomassetti/intellij-community,izonder/intellij-community,kdwink/intellij-community,fengbaicanhe/intellij-community,kool79/intellij-community,apixandru/intellij-community,SerCeMan/intellij-community,kdwink/intellij-community,Lekanich/intellij-community,ftomassetti/intellij-community,FHannes/intellij-community,michaelgallacher/intellij-community,orekyuu/intellij-community,nicolargo/intellij-community,kool79/intellij-community,suncycheng/intellij-community,slisson/intellij-community,vladmm/intellij-community,mglukhikh/intellij-community,jagguli/intellij-community,da1z/intellij-community,ahb0327/intellij-community,allotria/intellij-community,wreckJ/intellij-community,MichaelNedzelsky/intellij-community,Distrotech/intellij-community,tmpgit/intellij-community,kdwink/intellij-community,supersven/intellij-community,nicolargo/intellij-community,vladmm/intellij-community,salguarnieri/intellij-community,jagguli/intellij-community,diorcety/intellij-community,caot/intellij-community,adedayo/intellij-community,vladmm/intellij-community,MER-GROUP/intellij-community,fitermay/intellij-community,TangHao1987/intellij-community,nicolargo/intellij-community,lucafavatella/intellij-community,amith01994/intellij-community,diorcety/intellij-community,Distrotech/intellij-community,vladmm/intellij-community,izonder/intellij-community,orekyuu/intellij-community,orekyuu/intellij-community,clumsy/intellij-community,TangHao1987/intellij-community,michaelgallacher/intellij-community,nicolargo/intellij-community,slisson/intellij-community,fitermay/intellij-community,holmes/intellij-community,MER-GROUP/intellij-community,adedayo/intellij-community,izonder/intellij-community,alphafoobar/intellij-community,robovm/robovm-studio,diorcety/intellij-community,caot/intellij-community,semonte/intellij-community,wreckJ/intellij-community,TangHao1987/intellij-community,vladmm/intellij-community,orekyuu/intellij-community,diorcety/intellij-community,FHannes/intellij-community,TangHao1987/intellij-community,supersven/intellij-community,allotria/intellij-community,idea4bsd/idea4bsd,supersven/intellij-community,blademainer/intellij-community,dslomov/intellij-community,ol-loginov/intellij-community,xfournet/intellij-community,slisson/intellij-community,SerCeMan/intellij-community,fitermay/intellij-community,petteyg/intellij-community,ftomassetti/intellij-community,fnouama/intellij-community,signed/intellij-community,blademainer/intellij-community,mglukhikh/intellij-community,fitermay/intellij-community,akosyakov/intellij-community,clumsy/intellij-community,wreckJ/intellij-community,robovm/robovm-studio,gnuhub/intellij-community,fnouama/intellij-community,kool79/intellij-community,jagguli/intellij-community,wreckJ/intellij-community,fitermay/intellij-community,salguarnieri/intellij-community,FHannes/intellij-community,alphafoobar/intellij-community,TangHao1987/intellij-community,Distrotech/intellij-community,amith01994/intellij-community,ftomassetti/intellij-community,idea4bsd/idea4bsd,xfournet/intellij-community,adedayo/intellij-community,adedayo/intellij-community,izonder/intellij-community,dslomov/intellij-community,ol-loginov/intellij-commu
nity,ThiagoGarciaAlves/intellij-community,salguarnieri/intellij-community,SerCeMan/intellij-community,MER-GROUP/intellij-community,vvv1559/intellij-community,vvv1559/intellij-community,hurricup/intellij-community,gnuhub/intellij-community,Distrotech/intellij-community,akosyakov/intellij-community,izonder/intellij-community,ol-loginov/intellij-community,muntasirsyed/intellij-community,lucafavatella/intellij-community,youdonghai/intellij-community,amith01994/intellij-community,pwoodworth/intellij-community,vladmm/intellij-community,apixandru/intellij-community,idea4bsd/idea4bsd,hurricup/intellij-community,da1z/intellij-community,semonte/intellij-community,apixandru/intellij-community,muntasirsyed/intellij-community,petteyg/intellij-community,fengbaicanhe/intellij-community,amith01994/intellij-community,ThiagoGarciaAlves/intellij-community,MichaelNedzelsky/intellij-community,mglukhikh/intellij-community,ol-loginov/intellij-community,ibinti/intellij-community,Distrotech/intellij-community,vladmm/intellij-community,SerCeMan/intellij-community,robovm/robovm-studio,suncycheng/intellij-community,ThiagoGarciaAlves/intellij-community,amith01994/intellij-community,izonder/intellij-community,tmpgit/intellij-community,robovm/robovm-studio,ThiagoGarciaAlves/intellij-community,semonte/intellij-community,FHannes/intellij-community,ryano144/intellij-community,hurricup/intellij-community,kdwink/intellij-community,samthor/intellij-community,fnouama/intellij-community,akosyakov/intellij-community,allotria/intellij-community,holmes/intellij-community,asedunov/intellij-community,caot/intellij-community,ivan-fedorov/intellij-community,MichaelNedzelsky/intellij-community,jagguli/intellij-community,lucafavatella/intellij-community,muntasirsyed/intellij-community,pwoodworth/intellij-community,ThiagoGarciaAlves/intellij-community,lucafavatella/intellij-community,gnuhub/intellij-community,adedayo/intellij-community,slisson/intellij-community,fengbaicanhe/intellij-community,dslomov/intellij-community,MichaelNedzelsky/intellij-community,robovm/robovm-studio,muntasirsyed/intellij-community,ibinti/intellij-community,adedayo/intellij-community,lucafavatella/intellij-community,holmes/intellij-community,mglukhikh/intellij-community,tmpgit/intellij-community,fnouama/intellij-community,robovm/robovm-studio,fnouama/intellij-community,asedunov/intellij-community,gnuhub/intellij-community,ivan-fedorov/intellij-community,lucafavatella/intellij-community,ibinti/intellij-community,suncycheng/intellij-community,Lekanich/intellij-community,idea4bsd/idea4bsd,lucafavatella/intellij-community,Lekanich/intellij-community,dslomov/intellij-community,jagguli/intellij-community,semonte/intellij-community,holmes/intellij-community,salguarnieri/intellij-community,samthor/intellij-community,hurricup/intellij-community,ryano144/intellij-community,michaelgallacher/intellij-community,diorcety/intellij-community,ivan-fedorov/intellij-community,vvv1559/intellij-community,youdonghai/intellij-community,ryano144/intellij-community,FHannes/intellij-community,tmpgit/intellij-community,lucafavatella/intellij-community,ryano144/intellij-community,retomerz/intellij-community,supersven/intellij-community,ahb0327/intellij-community,Lekanich/intellij-community,nicolargo/intellij-community,kool79/intellij-community,hurricup/intellij-community,idea4bsd/idea4bsd,samthor/intellij-community,wreckJ/intellij-community,dslomov/intellij-community
|
/*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.jps.builders.impl.storage;
import com.intellij.openapi.util.AtomicNotNullLazyValue;
import com.intellij.openapi.util.NotNullLazyValue;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.jps.builders.BuildTarget;
import org.jetbrains.jps.builders.storage.BuildDataPaths;
import org.jetbrains.jps.builders.storage.StorageProvider;
import org.jetbrains.jps.incremental.storage.CompositeStorageOwner;
import org.jetbrains.jps.incremental.storage.StorageOwner;
import java.io.IOException;
import java.util.Iterator;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
/**
* @author nik
*/
public class BuildTargetStorages extends CompositeStorageOwner {
private final BuildTarget<?> myTarget;
private final BuildDataPaths myPaths;
private final ConcurrentMap<StorageProvider<?>, AtomicNotNullLazyValue<? extends StorageOwner>> myStorages
= new ConcurrentHashMap<StorageProvider<?>, AtomicNotNullLazyValue<? extends StorageOwner>>(16, 0.75f, 1);
public BuildTargetStorages(BuildTarget<?> target, BuildDataPaths paths) {
myTarget = target;
myPaths = paths;
}
@NotNull
public <S extends StorageOwner> S getOrCreateStorage(@NotNull final StorageProvider<S> provider) throws IOException {
NotNullLazyValue<? extends StorageOwner> lazyValue = myStorages.get(provider);
if (lazyValue == null) {
AtomicNotNullLazyValue<S> newValue = new AtomicNotNullLazyValue<S>() {
@NotNull
@Override
protected S compute() {
try {
return provider.createStorage(myPaths.getTargetDataRoot(myTarget));
}
catch (IOException e) {
throw new RuntimeException(e);
}
}
};
lazyValue = myStorages.putIfAbsent(provider, newValue);
if (lazyValue == null) {
lazyValue = newValue; // just initialized
}
}
//noinspection unchecked
try {
return (S)lazyValue.getValue();
}
catch (RuntimeException e) {
final Throwable cause = e.getCause();
if (cause instanceof IOException) {
throw (IOException)cause;
}
throw e;
}
}
@Override
protected Iterable<? extends StorageOwner> getChildStorages() {
return new Iterable<StorageOwner>() {
@Override
public Iterator<StorageOwner> iterator() {
final Iterator<AtomicNotNullLazyValue<? extends StorageOwner>> iterator = myStorages.values().iterator();
return new Iterator<StorageOwner>() {
@Override
public boolean hasNext() {
return iterator.hasNext();
}
@Override
public StorageOwner next() {
return iterator.next().getValue();
}
@Override
public void remove() {
iterator.remove();
}
};
}
};
}
}
|
jps/jps-builders/src/org/jetbrains/jps/builders/impl/storage/BuildTargetStorages.java
|
/*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.jps.builders.impl.storage;
import com.intellij.openapi.util.NotNullLazyValue;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.jps.builders.BuildTarget;
import org.jetbrains.jps.builders.storage.BuildDataPaths;
import org.jetbrains.jps.builders.storage.StorageProvider;
import org.jetbrains.jps.incremental.storage.CompositeStorageOwner;
import org.jetbrains.jps.incremental.storage.StorageOwner;
import java.io.IOException;
import java.util.Iterator;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
/**
* @author nik
*/
public class BuildTargetStorages extends CompositeStorageOwner {
private final BuildTarget<?> myTarget;
private final BuildDataPaths myPaths;
private final ConcurrentMap<StorageProvider<?>, NotNullLazyValue<? extends StorageOwner>> myStorages
= new ConcurrentHashMap<StorageProvider<?>, NotNullLazyValue<? extends StorageOwner>>(16, 0.75f, 1);
public BuildTargetStorages(BuildTarget<?> target, BuildDataPaths paths) {
myTarget = target;
myPaths = paths;
}
@NotNull
public <S extends StorageOwner> S getOrCreateStorage(@NotNull final StorageProvider<S> provider) throws IOException {
NotNullLazyValue<? extends StorageOwner> lazyValue = myStorages.get(provider);
if (lazyValue == null) {
NotNullLazyValue<S> newValue = new NotNullLazyValue<S>() {
@NotNull
@Override
protected S compute() {
try {
return provider.createStorage(myPaths.getTargetDataRoot(myTarget));
}
catch (IOException e) {
throw new RuntimeException(e);
}
}
};
lazyValue = myStorages.putIfAbsent(provider, newValue);
if (lazyValue == null) {
lazyValue = newValue; // just initialized
}
}
//noinspection unchecked
try {
return (S)lazyValue.getValue();
}
catch (RuntimeException e) {
final Throwable cause = e.getCause();
if (cause instanceof IOException) {
throw (IOException)cause;
}
throw e;
}
}
@Override
protected Iterable<? extends StorageOwner> getChildStorages() {
return new Iterable<StorageOwner>() {
@Override
public Iterator<StorageOwner> iterator() {
final Iterator<NotNullLazyValue<? extends StorageOwner>> iterator = myStorages.values().iterator();
return new Iterator<StorageOwner>() {
@Override
public boolean hasNext() {
return iterator.hasNext();
}
@Override
public StorageOwner next() {
return iterator.next().getValue();
}
@Override
public void remove() {
iterator.remove();
}
};
}
};
}
}
|
thread safe lazy value
|
jps/jps-builders/src/org/jetbrains/jps/builders/impl/storage/BuildTargetStorages.java
|
thread safe lazy value
|
|
Java
|
artistic-2.0
|
925089348f6c4e1681d73e9f6223da602b7a3f0d
| 0
|
SourceUtils/hl2-hud-editor
|
package com.timepath.hl2.hudeditor;
import com.timepath.plaf.OS;
import com.timepath.plaf.linux.DesktopLauncher;
import com.timepath.plaf.linux.WindowToolkit;
import com.timepath.plaf.mac.OSXProps;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.net.*;
import java.util.Collections;
import java.util.ResourceBundle;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.prefs.BackingStoreException;
import java.util.prefs.Preferences;
import javax.swing.SwingUtilities;
/**
*
* @author TimePath
*/
public class Main {
public static final ResourceBundle strings = ResourceBundle.getBundle(
"com/timepath/hl2/hudeditor/res/lang");
public static final String appName = "TF2 HUD Editor";
/**
* Used for storing preferences. Do not localize
* The window class on Linux systems
* The app name on Mac systems
*/
public static final String projectName = "tf2-hud-editor"; // in xfce, window grouping shows this, unfortunately
public static final Preferences prefs = Preferences.userRoot().node(projectName);
public static final long myVer = getVer();
private static final Logger LOG = Logger.getLogger(Main.class.getName());
//<editor-fold defaultstate="collapsed" desc="OS tweaks">
static {
if(OS.isWindows()) {
} else if(OS.isLinux()) {
WindowToolkit.setWindowClass(projectName); // Main.class.getName().replace(".", "-");
DesktopLauncher.create(projectName, "/com/timepath/hl2/hudeditor/res/",
new String[] {"Icon.png", "Icon.svg"},
new String[] {projectName, projectName});
} else if(OS.isMac()) {
OSXProps.metal(false);
OSXProps.quartz(true);
OSXProps.growBox(true);
OSXProps.globalMenu(true);
OSXProps.smallTabs(true);
OSXProps.fileDialogPackages(true);
OSXProps.name(appName);
OSXProps.growBoxIntrudes(false);
OSXProps.liveResize(true);
}
}
//</editor-fold>
/**
*
* 'return Main.strings.containsKey(key) ? Main.strings.getString(key) : key' is
* unavailable prior to 1.6
*
* @param key
* @param fallback
*
* @return
*/
public static String getString(String key, String fallback) {
return Collections.list(Main.strings.getKeys()).contains(key) ? Main.strings.getString(key)
: fallback;
}
public static String getString(String key) {
return getString(key, key);
}
public static void main(String[] args) {
boolean daemon = false;
if(daemon) {
LOG.log(Level.INFO, "Current version = {0}", myVer);
int port = prefs.getInt("port", -1);
if(port == -1) { // Was removed on shutdown
port = 0;
} else {
LOG.info("Communicating with daemon...");
}
for(;;) {
if(startClient(port, args)) {
break;
}
LOG.info("Daemon not running, starting...");
if(startServer(port)) {
start(args);
break;
}
LOG.info("Daemon already running, conecting...");
}
} else {
start(args);
}
}
private static long getVer() {
String impl = Main.class.getPackage().getImplementationVersion();
if(impl == null) {
return 0;
}
return Long.parseLong(impl);
}
/**
* Attempts to listen on the specified port
*
* @param port the port to listen on
*
* @return true if a server was started
*/
private static boolean startServer(int port) {
try {
final ServerSocket sock = new ServerSocket(port, 0, InetAddress.getByName(null)); // cannot use java7 InetAddress.getLoopbackAddress(). On windows, this prevents firewall warnings. It's also good for security in general
int truePort = sock.getLocalPort();
prefs.putInt("port", truePort);
prefs.flush();
LOG.log(Level.INFO, "Daemon listening on port {0}", truePort);
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
LOG.info("Daemon shutting down...");
prefs.remove("port");
try {
prefs.flush();
} catch(BackingStoreException ex) {
Logger.getLogger(Main.class.getName()).log(Level.WARNING, null, ex);
}
}
});
Thread server = new Thread(new ServerRunnable(sock), "Process Listener");
// server.setDaemon(!OS.isMac()); // non-daemon threads work in the background. Stick around if on a mac until manually terminated
// server.setDaemon(false); // hang around
server.setDaemon(true); // die immediately
server.start();
} catch(BindException ex) {
return false;
} catch(Exception ex) {
LOG.log(Level.SEVERE, null, ex);
return false;
}
return true;
}
/**
*
* @param port
* @param args
*
* @return true if connected
*/
private static boolean startClient(int port, String... args) {
try {
Socket client = new Socket(InetAddress.getByName(null), port);
BufferedReader in = new BufferedReader(new InputStreamReader(client.getInputStream()));
PrintWriter out = new PrintWriter(client.getOutputStream(), true);
out.println(myVer);
StringBuilder sb = new StringBuilder();
for(String arg : args) {
sb.append(arg).append(" ");
}
out.println(sb.toString());
long sVer = Long.parseLong(in.readLine());
return myVer <= sVer && myVer != 0;
} catch(SocketException ex) {
} catch(IOException ex) {
LOG.log(Level.SEVERE, null, ex);
}
return false;
}
private static void start(String... args) {
SwingUtilities.invokeLater(new Runnable() {
public void run() {
new HUDEditor().setVisible(true);
}
});
}
private static class ServerRunnable implements Runnable {
private final ServerSocket sock;
ServerRunnable(ServerSocket sock) {
this.sock = sock;
}
public void run() {
while(!sock.isClosed()) {
try {
Socket client = sock.accept();
BufferedReader in = new BufferedReader(new InputStreamReader(
client.getInputStream()));
PrintWriter out = new PrintWriter(client.getOutputStream(), true);
long cVer = Long.parseLong(in.readLine());
LOG.log(Level.INFO, "client {0} vs host {1}", new Object[] {cVer, myVer});
String request = in.readLine();
LOG.log(Level.INFO, "Request: {0}", request);
out.println(myVer);
if(cVer > myVer || cVer == 0) {
LOG.info("Daemon surrendering control to other process");
out.flush();
sock.close();
} else {
start(request.split(" "));
}
} catch(Exception ex) {
LOG.log(Level.SEVERE, null, ex);
}
}
LOG.info("Exiting...");
}
}
}
|
src/com/timepath/hl2/hudeditor/Main.java
|
package com.timepath.hl2.hudeditor;
import com.timepath.plaf.OS;
import com.timepath.plaf.linux.DesktopLauncher;
import com.timepath.plaf.linux.WindowToolkit;
import com.timepath.plaf.mac.OSXProps;
import com.timepath.plaf.x.filechooser.XFileDialogFileChooser;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.net.*;
import java.util.Collections;
import java.util.ResourceBundle;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.prefs.BackingStoreException;
import java.util.prefs.Preferences;
import javax.swing.SwingUtilities;
/**
*
* @author TimePath
*/
public class Main {
public static final ResourceBundle strings = ResourceBundle.getBundle(
"com/timepath/hl2/hudeditor/res/lang");
public static final String appName = "TF2 HUD Editor";
/**
* Used for storing preferences. Do not localize
* The window class on Linux systems
* The app name on Mac systems
*/
public static final String projectName = "tf2-hud-editor"; // in xfce, window grouping shows this, unfortunately
public static final Preferences prefs = Preferences.userRoot().node(projectName);
public static final long myVer = getVer();
private static final Logger LOG = Logger.getLogger(Main.class.getName());
//<editor-fold defaultstate="collapsed" desc="OS tweaks">
static {
if(OS.isWindows()) {
XFileDialogFileChooser.setTraceLevel(0);
} else if(OS.isLinux()) {
WindowToolkit.setWindowClass(projectName); // Main.class.getName().replace(".", "-");
DesktopLauncher.create(projectName, "/com/timepath/hl2/hudeditor/res/",
new String[] {"Icon.png", "Icon.svg"},
new String[] {projectName, projectName});
} else if(OS.isMac()) {
OSXProps.metal(false);
OSXProps.quartz(true);
OSXProps.growBox(true);
OSXProps.globalMenu(true);
OSXProps.smallTabs(true);
OSXProps.fileDialogPackages(true);
OSXProps.name(appName);
OSXProps.growBoxIntrudes(false);
OSXProps.liveResize(true);
}
}
//</editor-fold>
/**
*
* 'return Main.strings.containsKey(key) ? Main.strings.getString(key) : key' is
* unavailable prior to 1.6
*
* @param key
* @param fallback
*
* @return
*/
public static String getString(String key, String fallback) {
return Collections.list(Main.strings.getKeys()).contains(key) ? Main.strings.getString(key) : fallback;
}
public static String getString(String key) {
return getString(key, key);
}
public static void main(String[] args) {
boolean daemon = false;
if(daemon) {
LOG.log(Level.INFO, "Current version = {0}", myVer);
int port = prefs.getInt("port", -1);
if(port == -1) { // Was removed on shutdown
port = 0;
} else {
LOG.info("Communicating with daemon...");
}
for(;;) {
if(startClient(port, args)) {
break;
}
LOG.info("Daemon not running, starting...");
if(startServer(port)) {
start(args);
break;
}
LOG.info("Daemon already running, conecting...");
}
} else {
start(args);
}
}
private static long getVer() {
String impl = Main.class.getPackage().getImplementationVersion();
if(impl == null) {
return 0;
}
return Long.parseLong(impl);
}
/**
* Attempts to listen on the specified port
*
* @param port the port to listen on
*
* @return true if a server was started
*/
private static boolean startServer(int port) {
try {
final ServerSocket sock = new ServerSocket(port, 0, InetAddress.getByName(null)); // cannot use java7 InetAddress.getLoopbackAddress(). On windows, this prevents firewall warnings. It's also good for security in general
int truePort = sock.getLocalPort();
prefs.putInt("port", truePort);
prefs.flush();
LOG.log(Level.INFO, "Daemon listening on port {0}", truePort);
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
LOG.info("Daemon shutting down...");
prefs.remove("port");
try {
prefs.flush();
} catch(BackingStoreException ex) {
Logger.getLogger(Main.class.getName()).log(Level.WARNING, null, ex);
}
}
});
Thread server = new Thread(new ServerRunnable(sock), "Process Listener");
// server.setDaemon(!OS.isMac()); // non-daemon threads work in the background. Stick around if on a mac until manually terminated
// server.setDaemon(false); // hang around
server.setDaemon(true); // die immediately
server.start();
} catch(BindException ex) {
return false;
} catch(Exception ex) {
LOG.log(Level.SEVERE, null, ex);
return false;
}
return true;
}
/**
*
* @param port
* @param args
*
* @return true if connected
*/
private static boolean startClient(int port, String... args) {
try {
Socket client = new Socket(InetAddress.getByName(null), port);
BufferedReader in = new BufferedReader(new InputStreamReader(client.getInputStream()));
PrintWriter out = new PrintWriter(client.getOutputStream(), true);
out.println(myVer);
StringBuilder sb = new StringBuilder();
for(String arg : args) {
sb.append(arg).append(" ");
}
out.println(sb.toString());
long sVer = Long.parseLong(in.readLine());
return myVer <= sVer && myVer != 0;
}catch(SocketException ex) {
}catch(IOException ex) {
LOG.log(Level.SEVERE, null, ex);
}
return false;
}
private static void start(String... args) {
SwingUtilities.invokeLater(new Runnable() {
public void run() {
new HUDEditor().setVisible(true);
}
});
}
private static class ServerRunnable implements Runnable {
private final ServerSocket sock;
ServerRunnable(ServerSocket sock) {
this.sock = sock;
}
public void run() {
while(!sock.isClosed()) {
try {
Socket client = sock.accept();
BufferedReader in = new BufferedReader(new InputStreamReader(
client.getInputStream()));
PrintWriter out = new PrintWriter(client.getOutputStream(), true);
long cVer = Long.parseLong(in.readLine());
LOG.log(Level.INFO, "client {0} vs host {1}", new Object[] {cVer, myVer});
String request = in.readLine();
LOG.log(Level.INFO, "Request: {0}", request);
out.println(myVer);
if(cVer > myVer || cVer == 0) {
LOG.info("Daemon surrendering control to other process");
out.flush();
sock.close();
} else {
start(request.split(" "));
}
} catch(Exception ex) {
LOG.log(Level.SEVERE, null, ex);
}
}
LOG.info("Exiting...");
}
}
}
|
Dropped XFileDialog
|
src/com/timepath/hl2/hudeditor/Main.java
|
Dropped XFileDialog
|
|
Java
|
bsd-2-clause
|
91840a803c2d50b006475d8a777a67fd552d7009
| 0
|
scifio/scifio
|
//
// OmeroReader.java
//
/*
OME database I/O package for communicating with OME and OMERO servers.
Copyright (C) 2005-@year@ Melissa Linkert, Curtis Rueden and Philip Huettl.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package loci.ome.io;
import Glacier2.CannotCreateSessionException;
import Glacier2.PermissionDeniedException;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.io.IOException;
import loci.formats.FormatException;
import loci.formats.FormatReader;
import loci.formats.FormatTools;
import loci.formats.MetadataTools;
import loci.formats.meta.MetadataStore;
import loci.formats.tools.ImageInfo;
import omero.ServerError;
import omero.api.GatewayPrx;
import omero.api.RawPixelsStorePrx;
import omero.api.ServiceFactoryPrx;
import omero.model.Image;
import omero.model.Pixels;
/**
* Implementation of {@link loci.formats.IFormatReader}
* for use in export from an OMERO Beta 4.2.x database.
*
* <dl><dt><b>Source code:</b></dt>
* <dd><a href="http://trac.openmicroscopy.org.uk/ome/browser/bioformats.git/components/ome-io/src/loci/ome/io/OmeroReader.java">Trac</a>,
* <a href="http://git.openmicroscopy.org/?p=bioformats.git;a=blob;f=components/ome-io/src/loci/ome/io/OmeroReader.java;hb=HEAD">Gitweb</a></dd></dl>
*/
public class OmeroReader extends FormatReader {
// -- Constants --
public static final int DEFAULT_PORT = 4064;
// -- Fields --
private String server;
private String username;
private String password;
private int thePort = DEFAULT_PORT;
private RawPixelsStorePrx store;
private Pixels pix;
// -- Constructors --
public OmeroReader() {
super("OMERO", "*");
}
// -- OmeroReader methods --
public void setServer(String server) {
this.server = server;
}
public void setPort(int port) {
thePort = port;
}
public void setUsername(String username) {
this.username = username;
}
public void setPassword(String password) {
this.password = password;
}
// -- IFormatReader methods --
@Override
public boolean isThisType(String name, boolean open) {
return name.startsWith("omero:");
}
@Override
public byte[] openBytes(int no, byte[] buf, int x1, int y1, int w1, int h1)
throws FormatException, IOException
{
FormatTools.assertId(currentId, true, 1);
FormatTools.checkPlaneNumber(this, no);
FormatTools.checkBufferSize(this, buf.length);
final int[] zct = FormatTools.getZCTCoords(this, no);
final byte[] plane;
try {
plane = store.getPlane(zct[0], zct[1], zct[2]);
}
catch (ServerError e) {
throw new FormatException(e);
}
final int len = getSizeX() * getSizeY() *
FormatTools.getBytesPerPixel(getPixelType());
System.arraycopy(plane, 0, buf, 0, len);
return buf;
}
@Override
public void close() throws IOException {
super.close();
// TODO close last session
}
@Override
protected void initFile(String id) throws FormatException, IOException {
LOGGER.debug("OmeroReader.initFile({})", id);
super.initFile(id);
if (!id.startsWith("omero:")) {
throw new IllegalArgumentException("Not an OMERO id: " + id);
}
// parse credentials from id string
LOGGER.info("Parsing credentials");
String address = server, user = username, pass = password;
int port = thePort;
long pid = -1;
final String[] tokens = id.substring(6).split("\n");
for (String token : tokens) {
final int equals = token.indexOf("=");
if (equals < 0) continue;
final String key = token.substring(0, equals);
final String val = token.substring(equals + 1);
if (key.equals("server")) address = val;
else if (key.equals("user")) user = val;
else if (key.equals("pass")) pass = val;
else if (key.equals("port")) {
try {
port = Integer.parseInt(val);
}
catch (NumberFormatException exc) { }
}
else if (key.equals("pid")) {
try {
pid = Long.parseLong(val);
}
catch (NumberFormatException exc) { }
}
}
if (address == null) {
throw new FormatException("Invalid server address");
}
if (user == null) {
throw new FormatException("Invalid username");
}
if (pass == null) {
throw new FormatException("Invalid password");
}
if (pid < 0) {
throw new FormatException("Invalid pixels ID");
}
try {
// authenticate with OMERO server
LOGGER.info("Logging in");
omero.client client = new omero.client(server, port);
final ServiceFactoryPrx serviceFactory = client.createSession(user, pass);
// get raw pixels store and pixels
store = serviceFactory.createRawPixelsStore();
store.setPixelsId(pid, false);
final GatewayPrx gateway = serviceFactory.createGateway();
pix = gateway.getPixels(pid);
final int sizeX = pix.getSizeX().getValue();
final int sizeY = pix.getSizeY().getValue();
final int sizeZ = pix.getSizeZ().getValue();
final int sizeC = pix.getSizeC().getValue();
final int sizeT = pix.getSizeT().getValue();
final String pixelType = pix.getPixelsType().getValue().getValue();
// populate metadata
LOGGER.info("Populating metadata");
core[0].sizeX = sizeX;
core[0].sizeY = sizeY;
core[0].sizeZ = sizeZ;
core[0].sizeC = sizeC;
core[0].sizeT = sizeT;
core[0].rgb = false;
core[0].littleEndian = false;
core[0].dimensionOrder = "XYZCT";
core[0].imageCount = sizeZ * sizeC * sizeT;
core[0].pixelType = FormatTools.pixelTypeFromString(pixelType);
// CTR TODO this is wrong
double px = pix.getSizeX().getValue();
double py = pix.getSizeY().getValue();
double pz = pix.getSizeZ().getValue();
Image image = pix.getImage();
String name = image.getName().getValue();
String description = image.getDescription().getValue();
MetadataStore store = getMetadataStore();
store.setImageName(name, 0);
store.setImageDescription(description, 0);
MetadataTools.populatePixels(store, this);
store.setPixelsPhysicalSizeX(new Double(px), 0);
store.setPixelsPhysicalSizeY(new Double(py), 0);
store.setPixelsPhysicalSizeZ(new Double(pz), 0);
}
catch (CannotCreateSessionException e) {
throw new FormatException(e);
}
catch (PermissionDeniedException e) {
throw new FormatException(e);
}
catch (ServerError e) {
throw new FormatException(e);
}
}
/** A simple command line tool for downloading images from OMERO. */
public static void main(String[] args) throws Exception {
// parse OMERO credentials
BufferedReader con = new BufferedReader(new InputStreamReader(System.in));
System.out.print("Server? ");
final String server = con.readLine();
System.out.printf("Port [%d]? ", DEFAULT_PORT);
final String portString = con.readLine();
final int port = portString.equals("") ? DEFAULT_PORT :
Integer.parseInt(portString);
System.out.print("Username? ");
final String user = con.readLine();
System.out.print("Password? ");
final String pass = new String(con.readLine());
System.out.print("Pixels ID? ");
final int pixelsId = Integer.parseInt(con.readLine());
System.out.print("\n\n");
// construct the OMERO reader
final OmeroReader omeroReader = new OmeroReader();
omeroReader.setUsername(user);
omeroReader.setPassword(pass);
omeroReader.setServer(server);
omeroReader.setPort(port);
final String id = "omero:pid=" + pixelsId;
omeroReader.setId(id);
omeroReader.close();
// delegate the heavy lifting to Bio-Formats ImageInfo utility
final ImageInfo imageInfo = new ImageInfo();
imageInfo.setReader(omeroReader); // override default image reader
if (!imageInfo.testRead(args)) System.exit(1);
}
}
|
components/ome-io/src/loci/ome/io/OmeroReader.java
|
//
// OmeroReader.java
//
/*
OME database I/O package for communicating with OME and OMERO servers.
Copyright (C) 2005-@year@ Melissa Linkert, Curtis Rueden and Philip Huettl.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package loci.ome.io;
import Glacier2.CannotCreateSessionException;
import Glacier2.PermissionDeniedException;
import java.io.Console;
import java.io.IOException;
import loci.formats.FormatException;
import loci.formats.FormatReader;
import loci.formats.FormatTools;
import loci.formats.MetadataTools;
import loci.formats.meta.MetadataStore;
import loci.formats.tools.ImageInfo;
import omero.ServerError;
import omero.api.GatewayPrx;
import omero.api.RawPixelsStorePrx;
import omero.api.ServiceFactoryPrx;
import omero.model.Image;
import omero.model.Pixels;
/**
* Implementation of {@link loci.formats.IFormatReader}
* for use in export from an OMERO Beta 4.2.x database.
*
* <dl><dt><b>Source code:</b></dt>
* <dd><a href="http://trac.openmicroscopy.org.uk/ome/browser/bioformats.git/components/ome-io/src/loci/ome/io/OmeroReader.java">Trac</a>,
* <a href="http://git.openmicroscopy.org/?p=bioformats.git;a=blob;f=components/ome-io/src/loci/ome/io/OmeroReader.java;hb=HEAD">Gitweb</a></dd></dl>
*/
public class OmeroReader extends FormatReader {
// -- Constants --
public static final int DEFAULT_PORT = 4064;
// -- Fields --
private String server;
private String username;
private String password;
private int thePort = DEFAULT_PORT;
private RawPixelsStorePrx store;
private Pixels pix;
// -- Constructors --
public OmeroReader() {
super("OMERO", "*");
}
// -- OmeroReader methods --
public void setServer(String server) {
this.server = server;
}
public void setPort(int port) {
thePort = port;
}
public void setUsername(String username) {
this.username = username;
}
public void setPassword(String password) {
this.password = password;
}
// -- IFormatReader methods --
@Override
public boolean isThisType(String name, boolean open) {
return name.startsWith("omero:");
}
@Override
public byte[] openBytes(int no, byte[] buf, int x1, int y1, int w1, int h1)
throws FormatException, IOException
{
FormatTools.assertId(currentId, true, 1);
FormatTools.checkPlaneNumber(this, no);
FormatTools.checkBufferSize(this, buf.length);
final int[] zct = FormatTools.getZCTCoords(this, no);
final byte[] plane;
try {
plane = store.getPlane(zct[0], zct[1], zct[2]);
}
catch (ServerError e) {
throw new FormatException(e);
}
final int len = getSizeX() * getSizeY() *
FormatTools.getBytesPerPixel(getPixelType());
System.arraycopy(plane, 0, buf, 0, len);
return buf;
}
@Override
public void close() throws IOException {
super.close();
// TODO close last session
}
@Override
protected void initFile(String id) throws FormatException, IOException {
LOGGER.debug("OmeroReader.initFile({})", id);
super.initFile(id);
if (!id.startsWith("omero:")) {
throw new IllegalArgumentException("Not an OMERO id: " + id);
}
// parse credentials from id string
LOGGER.info("Parsing credentials");
String address = server, user = username, pass = password;
int port = thePort;
long pid = -1;
final String[] tokens = id.substring(6).split("\n");
for (String token : tokens) {
final int equals = token.indexOf("=");
if (equals < 0) continue;
final String key = token.substring(0, equals);
final String val = token.substring(equals + 1);
if (key.equals("server")) address = val;
else if (key.equals("user")) user = val;
else if (key.equals("pass")) pass = val;
else if (key.equals("port")) {
try {
port = Integer.parseInt(val);
}
catch (NumberFormatException exc) { }
}
else if (key.equals("pid")) {
try {
pid = Long.parseLong(val);
}
catch (NumberFormatException exc) { }
}
}
if (address == null) {
throw new FormatException("Invalid server address");
}
if (user == null) {
throw new FormatException("Invalid username");
}
if (pass == null) {
throw new FormatException("Invalid password");
}
if (pid < 0) {
throw new FormatException("Invalid pixels ID");
}
try {
// authenticate with OMERO server
LOGGER.info("Logging in");
omero.client client = new omero.client(server, port);
final ServiceFactoryPrx serviceFactory = client.createSession(user, pass);
// get raw pixels store and pixels
store = serviceFactory.createRawPixelsStore();
store.setPixelsId(pid, false);
final GatewayPrx gateway = serviceFactory.createGateway();
pix = gateway.getPixels(pid);
final int sizeX = pix.getSizeX().getValue();
final int sizeY = pix.getSizeY().getValue();
final int sizeZ = pix.getSizeZ().getValue();
final int sizeC = pix.getSizeC().getValue();
final int sizeT = pix.getSizeT().getValue();
final String pixelType = pix.getPixelsType().getValue().getValue();
// populate metadata
LOGGER.info("Populating metadata");
core[0].sizeX = sizeX;
core[0].sizeY = sizeY;
core[0].sizeZ = sizeZ;
core[0].sizeC = sizeC;
core[0].sizeT = sizeT;
core[0].rgb = false;
core[0].littleEndian = false;
core[0].dimensionOrder = "XYZCT";
core[0].imageCount = sizeZ * sizeC * sizeT;
core[0].pixelType = FormatTools.pixelTypeFromString(pixelType);
// CTR TODO this is wrong
double px = pix.getSizeX().getValue();
double py = pix.getSizeY().getValue();
double pz = pix.getSizeZ().getValue();
Image image = pix.getImage();
String name = image.getName().getValue();
String description = image.getDescription().getValue();
MetadataStore store = getMetadataStore();
store.setImageName(name, 0);
store.setImageDescription(description, 0);
MetadataTools.populatePixels(store, this);
store.setPixelsPhysicalSizeX(new Double(px), 0);
store.setPixelsPhysicalSizeY(new Double(py), 0);
store.setPixelsPhysicalSizeZ(new Double(pz), 0);
}
catch (CannotCreateSessionException e) {
throw new FormatException(e);
}
catch (PermissionDeniedException e) {
throw new FormatException(e);
}
catch (ServerError e) {
throw new FormatException(e);
}
}
/** A simple command line tool for downloading images from OMERO. */
public static void main(String[] args) throws Exception {
// parse OMERO credentials
final Console con = System.console();
final String server = con.readLine("Server? ");
final String portString = con.readLine("Port [%d]? ", DEFAULT_PORT);
final int port = portString.equals("") ? DEFAULT_PORT :
Integer.parseInt(portString);
final String user = con.readLine("Username? ");
final String pass = new String(con.readPassword("Password? "));
final int pixelsId = Integer.parseInt(con.readLine("Pixels ID? "));
con.writer().write("\n\n");
// construct the OMERO reader
final OmeroReader omeroReader = new OmeroReader();
omeroReader.setUsername(user);
omeroReader.setPassword(pass);
omeroReader.setServer(server);
omeroReader.setPort(port);
final String id = "omero:pid=" + pixelsId;
omeroReader.setId(id);
omeroReader.close();
// delegate the heavy lifting to Bio-Formats ImageInfo utility
final ImageInfo imageInfo = new ImageInfo();
imageInfo.setReader(omeroReader); // override default image reader
if (!imageInfo.testRead(args)) System.exit(1);
}
}
|
Eliminate usage of 1.6-specific java.io.Console.
This prevents compile errors under 1.5.
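As an aside, a minimal standalone sketch (not part of this commit; the class name PromptExample is invented for illustration) of the Java 5-compatible prompt pattern the message refers to. Note that, unlike Console.readPassword(), reading from System.in this way echoes the password to the terminal.
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
public class PromptExample {
public static void main(String[] args) throws IOException {
// A BufferedReader over System.in works on Java 1.5, where java.io.Console does not exist
BufferedReader con = new BufferedReader(new InputStreamReader(System.in));
System.out.print("Username? ");
String user = con.readLine();
System.out.print("Password? "); // echoed, since there is no Console.readPassword() equivalent here
String pass = con.readLine();
System.out.println("Read credentials for " + user + " (" + pass.length() + " characters)");
}
}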
|
components/ome-io/src/loci/ome/io/OmeroReader.java
|
Eliminate usage of 1.6-specific java.io.Console.
|
|
Java
|
mit
|
08fcb46374c3d3d86440accf5a94e4a317a484cc
| 0
|
Dimensions/Solar
|
package net.dimensions.solar.block;
public interface Block{
public Material getType();
public void setType(Material m);
public String getUnlocalizedName(); //I don't think we need this because it's already in the Material
public float getHardness();
public int getLightValue();
public boolean isReplaceable();
public boolean isBurning();
public int getFlameTick();
public boolean isFlammable();
public int getFireSpreadSpeed();
public boolean isTileEntity();
}
|
src/net/dimensions/solar/block/Block.java
|
package net.dimensions.solar.block;
public interface Block
{
String getUnlocalizedName();
float getHardness();
int getLightValue();
boolean isReplaceable();
boolean isBurning();
int getFlammability();
boolean isFlammable();
int getFireSpreadSpeed();
boolean hasTileEntity();
}
|
Add Material & change methods
|
src/net/dimensions/solar/block/Block.java
|
Add Material & change methods
|
|
Java
|
mit
|
cbcaf1c4ecf7b94c8f62d8476e0b433f7c44c4fd
| 0
|
BrujosDeJava/jrpg-2017a-dominio
|
package tests_dominio;
import org.junit.Assert;
import org.junit.Test;
import dominio.Asesino;
import dominio.Guerrero;
import dominio.Hechicero;
import dominio.Humano;
import dominio.Orco;
public class TestOrco {
@Test
public void testGolpeDefensivo() {
Humano h = new Humano("Nicolas", new Guerrero(), 1);
Orco o = new Orco("Hernan", new Guerrero(), 1);
Assert.assertTrue(h.getSalud() == 105);
if (o.habilidadRaza1(h))
Assert.assertTrue(h.getSalud() == 95);
else
Assert.assertTrue(o.getSalud() == 105);
}
@Test
public void testGolpeDefensivoFallido() {
Humano h = new Humano("Nicolas", new Guerrero(), 1);
Orco o = new Orco("Hernan", new Guerrero(), 1);
o.setEnergia(0);
Assert.assertFalse(o.habilidadRaza1(h));
}
@Test
public void testMordiscoDeVida() {
Humano h = new Humano("Nico", 100, 100, 55, 20, 30, new Hechicero(0.2, 0.3, 1.5), 0, 1, 1);
Orco o = new Orco("Nico", 100, 100, 80, 20, 30, new Asesino(0.2, 0.3, 1.5), 0, 1, 1);
Assert.assertTrue(h.getSalud() == 100);
o.setSalud(100);
if (o.habilidadRaza2(h)) {
Assert.assertEquals(40, h.getSalud());
Assert.assertTrue(o.getSalud() == 100);
} else {
Assert.assertTrue(o.getSalud() == 100);
Assert.assertTrue(h.getSalud() == 100);
}
}
@Test
public void testMordiscoDeVidaFallido() {
Humano h = new Humano("Nico", 100, 100, 55, 20, 30, new Hechicero(0.2, 0.3, 1.5), 0, 1, 1);
Orco o = new Orco("Juan", 100, 100, 80, 20, 30, new Asesino(0.2, 0.3, 1.5), 0, 1, 1);
o.setEnergia(0);
Assert.assertFalse(o.habilidadRaza2(h));
}
}
|
src/test/java/tests_dominio/TestOrco.java
|
package tests_dominio;
import org.junit.Assert;
import org.junit.Test;
import dominio.Asesino;
import dominio.Guerrero;
import dominio.Hechicero;
import dominio.Humano;
import dominio.Orco;
public class TestOrco {
@Test
public void testGolpeDefensivo() {
Humano h = new Humano("Nicolas", new Guerrero(), 1);
Orco o = new Orco("Hernan", new Guerrero(), 1);
Assert.assertTrue(h.getSalud() == 105);
if (o.habilidadRaza1(h))
Assert.assertTrue(h.getSalud() == 95);
else
Assert.assertTrue(o.getSalud() == 105);
}
@Test
public void testGolpeDefensivoFallido() {
Humano h = new Humano("Nicolas", new Guerrero(), 1);
Orco o = new Orco("Hernan", new Guerrero(), 1);
o.setEnergia(0);
Assert.assertFalse(o.habilidadRaza1(h));
}
@Test
public void testMordiscoDeVida() {
Humano h = new Humano("Nico", 100, 100, 55, 20, 30, new Hechicero(0.2, 0.3, 1.5), 0, 1, 1);
Orco o = new Orco("Nico", 100, 100, 80, 20, 30, new Asesino(0.2, 0.3, 1.5), 0, 1, 1);
Assert.assertTrue(h.getSalud() == 100);
o.setSalud(100);
if (o.habilidadRaza2(h)) {
Assert.assertEquals(40, h.getSalud());
Assert.assertTrue(o.getSalud() == 100);
} else {
Assert.assertTrue(o.getSalud() == 100);
Assert.assertTrue(h.getSalud() == 100);
}
}
@Test
public void testMordiscoDeVidaFallido() {
Humano h = new Humano("Nico", 100, 100, 55, 20, 30, new Hechicero(0.2, 0.3, 1.5), 0, 1, 1);
Orco o = new Orco("Nico", 100, 100, 80, 20, 30, new Asesino(0.2, 0.3, 1.5), 0, 1, 1);
o.setEnergia(0);
Assert.assertFalse(o.habilidadRaza2(h));
}
}
|
Small changes to the Orco class tests
|
src/test/java/tests_dominio/TestOrco.java
|
Small changes to the Orco class tests
|
|
Java
|
mit
|
863995cb50013b6c6a7ef5c1e13f69ae964e60fe
| 0
|
recena/github-api,kohsuke/github-api
|
package org.kohsuke.github;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import java.io.IOException;
import java.net.URL;
/**
* @see GitHub#getMyInvitations()
* @see GHRepository#listInvitations()
*/
@SuppressFBWarnings(value = {"UWF_UNWRITTEN_PUBLIC_OR_PROTECTED_FIELD", "UWF_UNWRITTEN_FIELD",
"NP_UNWRITTEN_FIELD"}, justification = "JSON API")
public class GHInvitation extends GHObject {
/*package almost final*/ GitHub root;
private int id;
private GHRepository repository;
private GHUser invitee, inviter;
private String permissions;
private String html_url;
/*package*/ GHInvitation wrapUp(GitHub root) {
this.root = root;
return this;
}
/**
* Accept a repository invitation.
*/
public void accept() throws IOException {
root.retrieve().method("PATCH").to("/user/repository_invitations/" + id);
}
/**
* Decline a repository invitation.
*/
public void decline() throws IOException {
root.retrieve().method("DELETE").to("/user/repository_invitations/" + id);
}
@Override
public URL getHtmlUrl() {
return GitHub.parseURL(html_url);
}
}
|
src/main/java/org/kohsuke/github/GHInvitation.java
|
package org.kohsuke.github;
import java.io.IOException;
import java.net.URL;
/**
* @see GitHub#getMyInvitations()
* @see GHRepository#listInvitations()
*/
public class GHInvitation extends GHObject {
/*package almost final*/ GitHub root;
private int id;
private GHRepository repository;
private GHUser invitee, inviter;
private String permissions;
private String html_url;
/*package*/ GHInvitation wrapUp(GitHub root) {
this.root = root;
return this;
}
/**
* Accept a repository invitation.
*/
public void accept() throws IOException {
root.retrieve().method("PATCH").to("/user/repository_invitations/" + id);
}
/**
* Decline a repository invitation.
*/
public void decline() throws IOException {
root.retrieve().method("DELETE").to("/user/repository_invitations/" + id);
}
@Override
public URL getHtmlUrl() {
return GitHub.parseURL(html_url);
}
}
|
findbugs warning fix
|
src/main/java/org/kohsuke/github/GHInvitation.java
|
findbugs warning fix
|
|
Java
|
mit
|
aad023eb57e323800282486d60538638437efa2e
| 0
|
stevenuray/XChange,npomfret/XChange,Muffon/XChange,sutra/XChange,anwfr/XChange,douggie/XChange,andre77/XChange,jheusser/XChange,yarKH/XChange,timmolter/XChange,gaborkolozsy/XChange,kzbikowski/XChange,joansmith/XChange,TSavo/XChange,cinjoff/XChange-1,nivertech/XChange,dozd/XChange,chrisrico/XChange,stachon/XChange,nopy/XChange,ww3456/XChange,coingecko/XChange,LeonidShamis/XChange,Panchen/XChange,okazia/XChange,mmithril/XChange,codeck/XChange,jennieolsson/XChange,evdubs/XChange
|
package com.xeiam.xchange.bitfinex.v1.service.marketdata;
import static org.fest.assertions.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import java.io.IOException;
import java.io.InputStream;
import java.math.BigDecimal;
import org.junit.Test;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.xeiam.xchange.bitfinex.v1.BitfinexAdapters;
import com.xeiam.xchange.bitfinex.v1.dto.marketdata.BitfinexDepth;
import com.xeiam.xchange.bitfinex.v1.dto.marketdata.BitfinexLendDepth;
import com.xeiam.xchange.currency.CurrencyPair;
import com.xeiam.xchange.dto.Order.OrderType;
public class BitfinexMarketDataJSONTest {
@Test
public void testLendbookMarketData() throws IOException {
InputStream resourceAsStream = BitfinexMarketDataJSONTest.class.getResourceAsStream("/v1/marketdata/example-marketdepth-lendbook-data.json");
BitfinexLendDepth lendDepth = new ObjectMapper().readValue(resourceAsStream, BitfinexLendDepth.class);
assertEquals(lendDepth.getAsks().length, 50);
assertEquals(lendDepth.getBids().length, 50);
}
@Test
public void testMarketDepth() throws Exception {
InputStream resourceAsStream = BitfinexMarketDataJSONTest.class.getResourceAsStream("/v1/marketdata/example-marketdepth-data.json");
BitfinexDepth depthRaw = new ObjectMapper().readValue(resourceAsStream, BitfinexDepth.class);
BitfinexAdapters.OrdersContainer asksOrdersContainer = BitfinexAdapters.adaptOrders(depthRaw.getAsks(), CurrencyPair.BTC_EUR, OrderType.ASK);
BitfinexAdapters.OrdersContainer bidsOrdersContainer = BitfinexAdapters.adaptOrders(depthRaw.getBids(), CurrencyPair.BTC_EUR, OrderType.BID);
assertEquals(new BigDecimal("851.87"), asksOrdersContainer.getLimitOrders().get(0).getLimitPrice());
assertEquals(new BigDecimal("849.59"), bidsOrdersContainer.getLimitOrders().get(0).getLimitPrice());
assertThat(asksOrdersContainer.getTimestamp()).isEqualTo(1387060950000L);
assertThat(bidsOrdersContainer.getTimestamp()).isEqualTo(1387060435000L);
}
}
|
xchange-bitfinex/src/test/java/com/xeiam/xchange/bitfinex/v1/service/marketdata/BitfinexMarketDataJSONTest.java
|
package com.xeiam.xchange.bitfinex.v1.service.marketdata;
import static org.fest.assertions.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import java.io.IOException;
import java.io.InputStream;
import java.math.BigDecimal;
import org.junit.Test;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.xeiam.xchange.bitfinex.v1.BitfinexAdapters;
import com.xeiam.xchange.bitfinex.v1.dto.marketdata.BitfinexDepth;
import com.xeiam.xchange.bitfinex.v1.dto.marketdata.BitfinexLendDepth;
import com.xeiam.xchange.currency.CurrencyPair;
import com.xeiam.xchange.dto.Order.OrderType;
public class BitfinexMarketDataJSONTest {
@Test
public void testLendbookMarketData() throws IOException {
InputStream resourceAsStream = BitfinexMarketDataJSONTest.class.getResourceAsStream("/v1/marketdata/example-marketdepth-lendbook-data.json");
BitfinexLendDepth lendDepth = new ObjectMapper().readValue(resourceAsStream, BitfinexLendDepth.class);
assertEquals(lendDepth.getAsks().length, 50);
assertEquals(lendDepth.getBids().length, 50);
}
@Test
public void testMarketDepth() throws Exception {
InputStream resourceAsStream = BitfinexMarketDataJSONTest.class.getResourceAsStream("/v1/marketdata/example-marketdepth-data.json");
BitfinexDepth depthRaw = new ObjectMapper().readValue(resourceAsStream, BitfinexDepth.class);
BitfinexAdapters.OrdersContainer asksOrdersContainer = BitfinexAdapters.adaptOrders(depthRaw.getAsks(), CurrencyPair.BTC_EUR, OrderType.ASK);
BitfinexAdapters.OrdersContainer bidsOrdersContainer = BitfinexAdapters.adaptOrders(depthRaw.getBids(), CurrencyPair.BTC_EUR, OrderType.BID);
assertEquals(new BigDecimal("851.87"), asksOrdersContainer.getLimitOrders().get(0).getLimitPrice());
assertEquals(new BigDecimal("849.59"), bidsOrdersContainer.getLimitOrders().get(0).getLimitPrice());
assertThat(asksOrdersContainer.getTimestamp()).isEqualTo(1387060992000L);
assertThat(bidsOrdersContainer.getTimestamp()).isEqualTo(1387060480000L);
}
}
|
Corrected this unit test. After the change of the timestamp datatype from float to BigDecimal in several DTO classes, the timestamp can now be read out exactly.
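A minimal standalone sketch (not from the repository; the class name TimestampPrecision is invented) of the rounding this message describes, assuming the raw timestamp arrives as Unix seconds: a float carries only about 7 significant decimal digits, which is consistent with the old expected values in this test.
public class TimestampPrecision {
public static void main(String[] args) {
long seconds = 1387060950L; // the timestamp asserted in this test, in seconds
float asFloat = seconds; // float rounds this to 1387060992
System.out.println(((long) asFloat) * 1000L); // prints 1387060992000 - the value the old test had to expect
java.math.BigDecimal asBig = new java.math.BigDecimal(seconds);
System.out.println(asBig.movePointRight(3).longValueExact()); // prints 1387060950000 - exact
}
}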
|
xchange-bitfinex/src/test/java/com/xeiam/xchange/bitfinex/v1/service/marketdata/BitfinexMarketDataJSONTest.java
|
Corrected this unit test. After the change of the timestamp datatype from float to BigDecimal in several DTO classes, the timestamp can now be read out exactly.
|
|
Java
|
mit
|
a72c4c88809a69f8fd86d91bed4bd01936bd42f9
| 0
|
jotatoledo/Programmieren-WS16-17
|
package test.java.board_game;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.*;
import org.junit.Test;
import main.java.board_game.LineType;
import main.java.board_game.Tile;
public class TileTest {
/**
* <pre>
* Empty tile
* ------
* </pre>
*/
private static final Tile EMPTY_TILE = new Tile();
/**
* <pre>
* Example in picture 6
* RRG-G-
* </pre>
*/
private static final Tile K0 = new Tile(new LineType[] {
LineType.RED, LineType.RED, LineType.GREEN,
LineType.NONE, LineType.GREEN, LineType.NONE });
/**
* <pre>
* Example in picture 6
* R-GGR-
* </pre>
*/
private static final Tile K1 = new Tile(new LineType[] {
LineType.RED, LineType.NONE, LineType.GREEN,
LineType.GREEN, LineType.RED, LineType.NONE });
/**
* <pre>
* Example in picture 4
* RYRYGG
* </pre>
*/
private static final Tile TEST_TRIO = new Tile(new LineType[] {
LineType.RED, LineType.YELLOW, LineType.RED,
LineType.YELLOW, LineType.GREEN, LineType.GREEN });
public TileTest() {
}
@Test
public void testTile() {
Tile firstTest = new Tile();
assertTrue(firstTest != null);
assertTrue(firstTest.isEmpty() == true);
assertTrue(EMPTY_TILE.isEmpty() == true);
}
@Test
public void testGetLineTypeAtIndex() {
assertTrue(EMPTY_TILE.getLineTypeAtIndex(0) == LineType.NONE);
assertTrue(TEST_TRIO.getLineTypeAtIndex(0) == LineType.RED);
assertTrue(TEST_TRIO.getLineTypeAtIndex(1) == LineType.YELLOW);
assertTrue(TEST_TRIO.getLineTypeAtIndex(5) == LineType.GREEN);
}
@Test
public void testGetNumberOfColors() {
//Examples in picture 4
Tile testSingular = new Tile(new LineType[] {
LineType.RED, LineType.RED, LineType.NONE,
LineType.NONE, LineType.NONE, LineType.NONE });
Tile testDuo = new Tile(new LineType[] {
LineType.NONE, LineType.YELLOW, LineType.RED,
LineType.NONE, LineType.RED, LineType.YELLOW });
assertTrue(testDuo.getNumberOfColors() == 2);
assertTrue(TEST_TRIO.getNumberOfColors() == 3);
assertTrue(testSingular.getNumberOfColors() == 1);
assertTrue(EMPTY_TILE.getNumberOfColors() == 0);
assertTrue(K1.getNumberOfColors() == 2);
assertTrue(K0.getNumberOfColors() == 2);
}
@Test
public void testIsExactlyEqualTo() {
assertTrue(EMPTY_TILE.isExactlyEqualTo(EMPTY_TILE) == true);
assertTrue(K1.isExactlyEqualTo(K1) == true);
assertTrue(K0.isExactlyEqualTo(K0) == true);
assertTrue(EMPTY_TILE.isExactlyEqualTo(K1) == false);
assertTrue(K1.isExactlyEqualTo(K0) == false);
assertTrue(K0.isExactlyEqualTo(EMPTY_TILE) == false);
}
@Test
public void testCopy() {
Tile copyEmpty = EMPTY_TILE.copy();
Tile copyK1 = K1.copy();
Tile copyK0 = K0.copy();
assertTrue(copyEmpty != EMPTY_TILE);
assertTrue(copyEmpty.isExactlyEqualTo(EMPTY_TILE) == true);
assertTrue(copyK1 != K1);
assertTrue(copyK1.isExactlyEqualTo(K1) == true);
assertTrue(copyK0 != K0);
assertTrue(copyK0.isExactlyEqualTo(K0) == true);
}
@Test
public void testRotateClockwise() {
//RYRYGG
Tile copyTrio = TEST_TRIO.copy();
copyTrio.rotateClockwise();
assertThat(copyTrio.toString(), is("GRYRYG"));
copyTrio.rotateClockwise();
copyTrio.rotateClockwise();
assertThat(copyTrio.toString(), is("YGGRYR"));
copyTrio.rotateClockwise();
copyTrio.rotateClockwise();
copyTrio.rotateClockwise();
assertThat(copyTrio.toString(), is(TEST_TRIO.toString()));
}
@Test
public void testRotateCounterClockwise() {
//RYRYGG
Tile copyTrio = TEST_TRIO.copy();
copyTrio.rotateCounterClockwise();
assertThat(copyTrio.toString(), is("YRYGGR"));
copyTrio.rotateCounterClockwise();
copyTrio.rotateCounterClockwise();
assertThat(copyTrio.toString(), is("YGGRYR"));
copyTrio.rotateCounterClockwise();
copyTrio.rotateCounterClockwise();
copyTrio.rotateCounterClockwise();
assertThat(copyTrio.toString(), is(TEST_TRIO.toString()));
}
@Test
public void testIsEmpty() {
assertTrue(TEST_TRIO.isEmpty() == false);
assertTrue(K0.isEmpty() == false);
assertTrue(K1.isEmpty() == false);
assertTrue(EMPTY_TILE.isEmpty() == true);
}
@Test
public void testIsRotationEqualTo() {
Tile copyTrio = TEST_TRIO.copy();
copyTrio.rotateClockwise();
copyTrio.rotateClockwise();
assertTrue(copyTrio.isRotationEqualTo(TEST_TRIO) == true);
assertTrue(EMPTY_TILE.isRotationEqualTo(EMPTY_TILE) == true);
assertTrue(TEST_TRIO.isRotationEqualTo(TEST_TRIO) == true);
assertTrue(TEST_TRIO.isRotationEqualTo(EMPTY_TILE) == false);
assertTrue(EMPTY_TILE.isRotationEqualTo(TEST_TRIO) == false);
}
@Test
public void testCanBeRecoloredTo() {
//Praktomat example
Tile test = new Tile(new LineType[] {
LineType.RED, LineType.NONE, LineType.GREEN,
LineType.GREEN, LineType.RED, LineType.NONE });
Tile objective = new Tile(new LineType[] {
LineType.YELLOW, LineType.NONE, LineType.RED,
LineType.RED, LineType.YELLOW, LineType.NONE });
assertTrue(test.canBeRecoloredTo(objective) == true);
test.rotateClockwise();
assertTrue(test.canBeRecoloredTo(objective) == false);
//Every tile can be recolored to itself
assertTrue(EMPTY_TILE.canBeRecoloredTo(EMPTY_TILE) == true);
assertTrue(K0.canBeRecoloredTo(K0) == true);
assertTrue(K1.canBeRecoloredTo(K1) == true);
assertTrue(TEST_TRIO.canBeRecoloredTo(TEST_TRIO) == true);
//Recoloring requires adding one or more connection lines
assertTrue(TEST_TRIO.canBeRecoloredTo(EMPTY_TILE) == false);
assertTrue(TEST_TRIO.canBeRecoloredTo(K0) == false);
assertTrue(TEST_TRIO.canBeRecoloredTo(K1) == false);
}
@Test
public void testDominates() {
//The tiles do not dominate themselves
assertTrue(EMPTY_TILE.dominates(EMPTY_TILE) == false);
assertTrue(K0.dominates(K0) == false);
assertTrue(K1.dominates(K1) == false);
assertTrue(TEST_TRIO.dominates(TEST_TRIO) == false);
//Every tile except the empty one dominates the empty one
assertTrue(K0.dominates(EMPTY_TILE) == true);
assertTrue(K1.dominates(EMPTY_TILE) == true);
assertTrue(TEST_TRIO.dominates(EMPTY_TILE) == true);
}
@Test
public void testHasSameColorsAs() {
assertTrue(EMPTY_TILE.hasSameColorsAs(EMPTY_TILE) == true);
assertTrue(K0.hasSameColorsAs(K0) == true);
assertTrue(K1.hasSameColorsAs(K1) == true);
assertTrue(TEST_TRIO.hasSameColorsAs(TEST_TRIO) == true);
assertTrue(EMPTY_TILE.hasSameColorsAs(K0) == false);
assertTrue(K0.hasSameColorsAs(EMPTY_TILE) == false);
assertTrue(K1.hasSameColorsAs(K0) == true);
assertTrue(K0.hasSameColorsAs(K0) == true);
assertTrue(TEST_TRIO.hasSameColorsAs(EMPTY_TILE) == false);
assertTrue(TEST_TRIO.hasSameColorsAs(K0) == false);
assertTrue(TEST_TRIO.hasSameColorsAs(K1) == false);
}
@Test
public void testToString() {
assertThat(EMPTY_TILE.toString(), is("------"));
assertThat(K0.toString(), is("RRG-G-"));
assertThat(K1.toString(), is("R-GGR-"));
}
@Test
public void testFitsTo() {
assertTrue(K0.fitsTo(K1, 0) == false);
assertTrue(K0.fitsTo(K1, 1) == true);
assertTrue(K0.fitsTo(K1, 2) == true);
assertTrue(K0.fitsTo(K1, 3) == true);
assertTrue(K0.fitsTo(K1, 4) == true);
assertTrue(K0.fitsTo(K1, 5) == true);
}
}
|
src/test/java/board_game/TileTest.java
|
package test.java.board_game;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.*;
import org.junit.Test;
import main.java.board_game.LineType;
import main.java.board_game.Tile;
public class TileTest {
/**
* <pre>
* Empty tile
* ------
* </pre>
*/
private static final Tile EMPTY_TILE = new Tile();
/**
* <pre>
* Example in picture 6
* RRG-G-
* </pre>
*/
private static final Tile K0 = new Tile(new LineType[] {
LineType.RED, LineType.RED, LineType.GREEN,
LineType.NONE, LineType.GREEN, LineType.NONE });
/**
* <pre>
* Example in picture 6
* R-GGR-
* </pre>
*/
private static final Tile K1 = new Tile(new LineType[] {
LineType.RED, LineType.NONE, LineType.GREEN,
LineType.GREEN, LineType.RED, LineType.NONE });
/**
* <pre>
* Example in picture 4
* RYRYGG
* </pre>
*/
private static final Tile TEST_TRIO = new Tile(new LineType[] {
LineType.RED, LineType.YELLOW, LineType.RED,
LineType.YELLOW, LineType.GREEN, LineType.GREEN });
public TileTest() {
}
@Test
public void testTile() {
Tile firstTest = new Tile();
assertTrue(firstTest != null);
assertTrue(firstTest.isEmpty() == true);
assertTrue(EMPTY_TILE.isEmpty() == true);
}
@Test
public void testGetLineTypeAtIndex() {
assertTrue(EMPTY_TILE.getLineTypeAtIndex(0) == LineType.NONE);
assertTrue(TEST_TRIO.getLineTypeAtIndex(0) == LineType.RED);
assertTrue(TEST_TRIO.getLineTypeAtIndex(1) == LineType.YELLOW);
assertTrue(TEST_TRIO.getLineTypeAtIndex(5) == LineType.GREEN);
}
@Test
public void testGetNumberOfColors() {
//Examples in picture 4
Tile testSingular = new Tile(new LineType[] {
LineType.RED, LineType.RED, LineType.NONE,
LineType.NONE, LineType.NONE, LineType.NONE });
Tile testDuo = new Tile(new LineType[] {
LineType.NONE, LineType.YELLOW, LineType.RED,
LineType.NONE, LineType.RED, LineType.YELLOW });
assertTrue(testDuo.getNumberOfColors() == 2);
assertTrue(TEST_TRIO.getNumberOfColors() == 3);
assertTrue(testSingular.getNumberOfColors() == 1);
assertTrue(EMPTY_TILE.getNumberOfColors() == 0);
assertTrue(K1.getNumberOfColors() == 2);
assertTrue(K0.getNumberOfColors() == 2);
}
@Test
public void testIsExactlyEqualTo() {
assertTrue(EMPTY_TILE.isExactlyEqualTo(EMPTY_TILE) == true);
assertTrue(K1.isExactlyEqualTo(K1) == true);
assertTrue(K0.isExactlyEqualTo(K0) == true);
assertTrue(EMPTY_TILE.isExactlyEqualTo(K1) == false);
assertTrue(K1.isExactlyEqualTo(K0) == false);
assertTrue(K0.isExactlyEqualTo(EMPTY_TILE) == false);
}
@Test
public void testCopy() {
Tile copyEmpty = EMPTY_TILE.copy();
Tile copyK1 = K1.copy();
Tile copyK0 = K0.copy();
assertTrue(copyEmpty != EMPTY_TILE);
assertTrue(copyEmpty.isExactlyEqualTo(EMPTY_TILE) == true);
assertTrue(copyK1 != K1);
assertTrue(copyK1.isExactlyEqualTo(K1) == true);
assertTrue(copyK0 != K0);
assertTrue(copyK0.isExactlyEqualTo(K0) == true);
}
@Test
public void testRotateClockwise() {
//RYRYGG
Tile copyTrio = TEST_TRIO.copy();
copyTrio.rotateClockwise();
assertThat(copyTrio.toString(), is("GRYRYG"));
copyTrio.rotateClockwise();
copyTrio.rotateClockwise();
assertThat(copyTrio.toString(), is("YGGRYR"));
copyTrio.rotateClockwise();
copyTrio.rotateClockwise();
copyTrio.rotateClockwise();
assertThat(copyTrio.toString(), is(TEST_TRIO.toString()));
}
@Test
public void testRotateCounterClockwise() {
//RYRYGG
Tile copyTrio = TEST_TRIO.copy();
copyTrio.rotateCounterClockwise();
assertThat(copyTrio.toString(), is("YRYGGR"));
copyTrio.rotateCounterClockwise();
copyTrio.rotateCounterClockwise();
assertThat(copyTrio.toString(), is("YGGRYR"));
copyTrio.rotateCounterClockwise();
copyTrio.rotateCounterClockwise();
copyTrio.rotateCounterClockwise();
assertThat(copyTrio.toString(), is(TEST_TRIO.toString()));
}
@Test
public void testIsEmpty() {
assertTrue(TEST_TRIO.isEmpty() == false);
assertTrue(K0.isEmpty() == false);
assertTrue(K1.isEmpty() == false);
assertTrue(EMPTY_TILE.isEmpty() == true);
}
@Test
public void testIsRotationEqualTo() {
Tile copyTrio = TEST_TRIO.copy();
copyTrio.rotateClockwise();
copyTrio.rotateClockwise();
assertTrue(copyTrio.isRotationEqualTo(TEST_TRIO) == true);
assertTrue(EMPTY_TILE.isRotationEqualTo(EMPTY_TILE) == true);
assertTrue(TEST_TRIO.isRotationEqualTo(TEST_TRIO) == true);
assertTrue(TEST_TRIO.isRotationEqualTo(EMPTY_TILE) == false);
assertTrue(EMPTY_TILE.isRotationEqualTo(TEST_TRIO) == false);
}
@Test
public void testCanBeRecoloredTo() {
//Praktomat example
Tile test = new Tile(new LineType[] {
LineType.RED, LineType.NONE, LineType.GREEN,
LineType.GREEN, LineType.RED, LineType.NONE });
Tile objective = new Tile(new LineType[] {
LineType.YELLOW, LineType.NONE, LineType.RED,
LineType.RED, LineType.YELLOW, LineType.NONE });
assertTrue(test.canBeRecoloredTo(objective) == true);
test.rotateClockwise();
assertTrue(test.canBeRecoloredTo(objective) == false);
assertTrue(EMPTY_TILE.canBeRecoloredTo(EMPTY_TILE) == true);
assertTrue(K0.canBeRecoloredTo(K0) == true);
assertTrue(K1.canBeRecoloredTo(K1) == true);
assertTrue(TEST_TRIO.canBeRecoloredTo(TEST_TRIO) == true);
assertTrue(TEST_TRIO.canBeRecoloredTo(EMPTY_TILE) == false);
assertTrue(TEST_TRIO.canBeRecoloredTo(K0) == false);
assertTrue(TEST_TRIO.canBeRecoloredTo(K1) == false);
}
@Test
public void testDominates() {
//The tiles do not dominate themselves
assertTrue(EMPTY_TILE.dominates(EMPTY_TILE) == false);
assertTrue(K0.dominates(K0) == false);
assertTrue(K1.dominates(K1) == false);
assertTrue(TEST_TRIO.dominates(TEST_TRIO) == false);
//Every tile except the empty one dominates the empty one
assertTrue(K0.dominates(EMPTY_TILE) == true);
assertTrue(K1.dominates(EMPTY_TILE) == true);
assertTrue(TEST_TRIO.dominates(EMPTY_TILE) == true);
}
@Test
public void testHasSameColorsAs() {
assertTrue(EMPTY_TILE.hasSameColorsAs(EMPTY_TILE) == true);
assertTrue(K0.hasSameColorsAs(K0) == true);
assertTrue(K1.hasSameColorsAs(K1) == true);
assertTrue(TEST_TRIO.hasSameColorsAs(TEST_TRIO) == true);
assertTrue(EMPTY_TILE.hasSameColorsAs(K0) == false);
assertTrue(K0.hasSameColorsAs(EMPTY_TILE) == false);
assertTrue(K1.hasSameColorsAs(K0) == true);
assertTrue(K0.hasSameColorsAs(K0) == true);
assertTrue(TEST_TRIO.hasSameColorsAs(EMPTY_TILE) == false);
assertTrue(TEST_TRIO.hasSameColorsAs(K0) == false);
assertTrue(TEST_TRIO.hasSameColorsAs(K1) == false);
}
@Test
public void testToString() {
assertThat(EMPTY_TILE.toString(), is("------"));
assertThat(K0.toString(), is("RRG-G-"));
assertThat(K1.toString(), is("R-GGR-"));
}
@Test
public void testFitsTo() {
assertTrue(K0.fitsTo(K1, 0) == false);
assertTrue(K0.fitsTo(K1, 1) == true);
assertTrue(K0.fitsTo(K1, 2) == true);
assertTrue(K0.fitsTo(K1, 3) == true);
assertTrue(K0.fitsTo(K1, 4) == true);
assertTrue(K0.fitsTo(K1, 5) == true);
}
}
|
Added some comments
|
src/test/java/board_game/TileTest.java
|
Added some comments
|
|
Java
|
mit
|
3955fafca5ae436fe1a593efdfea6871c1937ef9
| 0
|
zabawaba99/reflector
|
package com.zabawaba.reflector;
import java.lang.reflect.Constructor;
import java.util.HashSet;
/**
* Provides a set of methods to manipulate {@link Constructor}
*
* @author Steven Berlanga
* @since 0.2.1
*/
public class Constructors {
private static Filter<Constructor<?>> ALL_CONSTRUCTORS = new Filter<Constructor<?>>() {
public boolean apply(Constructor<?> constructor) {
return true;
}
};
private Object obj;
private Constructors(Object obj) {
this.obj = obj;
}
/**
* Get a constructor that has the given name
*
* @param constructorName
* The name of the constructor to look for
* @return The constructor that has the given name
*
* @throws NoSuchMethodException
* If no constructor exists with the provided name
*/
public ReflectorConstructor get(String constructorName) throws NoSuchMethodException {
ReflectorConstructor constructor = null;
for (ReflectorConstructor c : list()) {
String className = c.getConstructor().getDeclaringClass().getSimpleName();
if (className.equals(constructorName)) {
constructor = c;
}
}
if (constructor == null) {
throw new NoSuchMethodException(constructorName);
}
return constructor;
}
/**
* Gets all Constructors for the given class and all of its superclasses
*
* @return A {@link HashSet} containing all of the constructors of object
* the Constructors instance was instantiated with
*/
public HashSet<ReflectorConstructor> list() {
return list(ALL_CONSTRUCTORS);
}
/**
* Gets all Constructors for the given class and all of its superclasses
* where {@link Filter#apply(Object)} returns true
*
* @param filter
* The filter that determines whether or not a constructor is added to
* the result. If {@code null} all constructors will be returned.
* @return A {@link HashSet} containing all of the constructors of object
* the Constructors instance was instantiated with and that meet the
* filtering criteria
*/
public HashSet<ReflectorConstructor> list(Filter<Constructor<?>> filter) {
HashSet<ReflectorConstructor> constructors = new HashSet<ReflectorConstructor>();
if (filter == null) {
filter = ALL_CONSTRUCTORS;
}
Class<?> currentClass = obj.getClass();
while (currentClass != null) {
for (Constructor<?> c : currentClass.getDeclaredConstructors()) {
c.setAccessible(true);
if (filter.apply(c)) {
constructors.add(new ReflectorConstructor(obj, c));
}
}
currentClass = currentClass.getSuperclass();
}
return constructors;
}
/**
* Builds a new Constructors object with the context of the object given
*
* @param obj
* The object whose constructors you want to reflect over
* @return A newly created Constructors object
*/
public static Constructors forObj(Object obj) {
return new Constructors(obj);
}
}
|
src/main/java/com/zabawaba/reflector/Constructors.java
|
package com.zabawaba.reflector;
import java.lang.reflect.Constructor;
import java.util.HashSet;
/**
* Provides a set of methods to manipulate {@link Constructor}
*
* @author Steven Berlanga
* @since 0.2.1
*/
public class Constructors {
private static Filter<Constructor<?>> ALL_CONSTRUCTORS = new Filter<Constructor<?>>() {
public boolean apply(Constructor<?> constructor) {
return true;
}
};
private Object obj;
public Constructors(Object obj) {
this.obj = obj;
}
/**
* Get a constructor that has the given name
*
* @param constructorName
* The name of the constructor to look for
* @return The constructor that has the given name
*
* @throws NoSuchMethodException
* If no constructor exists with the provided name
*/
public ReflectorConstructor get(String constructorName) throws NoSuchMethodException {
ReflectorConstructor constructor = null;
for (ReflectorConstructor c : list()) {
String className = c.getConstructor().getDeclaringClass().getSimpleName();
if (className.equals(constructorName)) {
constructor = c;
}
}
if (constructor == null) {
throw new NoSuchMethodException(constructorName);
}
return constructor;
}
/**
* Gets all Constructors for the given class and all of its superclasses
*
* @return A {@link HashSet} containing all of the constructors of object
* the Constructors instance was instantiated with
*/
public HashSet<ReflectorConstructor> list() {
return list(ALL_CONSTRUCTORS);
}
/**
* Gets all Constructors for the given class and all of its superclasses
* where {@link Filter#apply(Object)} returns true
*
* @param filter
* The filter that determines whether or not a constructor is added to
* the result. If {@code null} all constructors will be returned.
* @return A {@link HashSet} containing all of the constructors of object
* the Constructors instance was instantiated with and that meet the
* filtering criteria
*/
public HashSet<ReflectorConstructor> list(Filter<Constructor<?>> filter) {
HashSet<ReflectorConstructor> constructors = new HashSet<ReflectorConstructor>();
if (filter == null) {
filter = ALL_CONSTRUCTORS;
}
Class<?> currentClass = obj.getClass();
while (currentClass != null) {
for (Constructor<?> c : currentClass.getDeclaredConstructors()) {
c.setAccessible(true);
if (filter.apply(c)) {
constructors.add(new ReflectorConstructor(obj, c));
}
}
currentClass = currentClass.getSuperclass();
}
return constructors;
}
/**
* Builds a new Constructors object with the context of the object given
*
* @param obj
* The object whose constructors you want to reflect over
* @return A newly created Constructors object
*/
public static Constructors forObj(Object obj) {
return new Constructors(obj);
}
}
|
making Constructors constructor private
|
src/main/java/com/zabawaba/reflector/Constructors.java
|
making Constructors constructor private
|
|
Java
|
epl-1.0
|
1ffbefa16aa46547acea8fe644b27f1be0cf0c7d
| 0
|
my76128/controller,tx1103mark/controller,opendaylight/controller,inocybe/odl-controller,522986491/controller,mandeepdhami/controller,522986491/controller,my76128/controller,inocybe/odl-controller,Sushma7785/OpenDayLight-Load-Balancer,aryantaheri/monitoring-controller,tx1103mark/controller,my76128/controller,mandeepdhami/controller,aryantaheri/monitoring-controller,Johnson-Chou/test,my76128/controller,aryantaheri/monitoring-controller,Johnson-Chou/test,mandeepdhami/controller,tx1103mark/controller,tx1103mark/controller,aryantaheri/monitoring-controller,Sushma7785/OpenDayLight-Load-Balancer,mandeepdhami/controller
|
/*
* Copyright (C) 2014 Red Hat, Inc.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*
* Authors : Dave Tucker
*/
package org.opendaylight.controller.networkconfig.neutron.northbound;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import javax.ws.rs.core.UriInfo;
import org.opendaylight.controller.networkconfig.neutron.INeutronObject;
import org.opendaylight.controller.networkconfig.neutron.NeutronNetwork;
import org.opendaylight.controller.networkconfig.neutron.NeutronPort;
import org.opendaylight.controller.networkconfig.neutron.NeutronSubnet;
import org.opendaylight.controller.northbound.commons.exception.BadRequestException;
import org.opendaylight.controller.northbound.commons.exception.ResourceNotFoundException;
public class PaginatedRequestFactory {
private static final Comparator<INeutronObject> NEUTRON_OBJECT_COMPARATOR = new Comparator<INeutronObject>() {
@Override
public int compare(INeutronObject o1, INeutronObject o2) {
return o1.getID().compareTo(o2.getID());
}
};
public static class PaginationResults<T extends INeutronObject> {
List<T> collection;
List<NeutronPageLink> links;
public PaginationResults(List<T> collection, List<NeutronPageLink> links) {
this.collection = collection;
this.links = links;
}
}
private static final class MarkerObject implements INeutronObject {
private final String id;
MarkerObject(String id) {
this.id = id;
}
@Override
public String getID() {
return id;
}
@Override
public void setID(String id) {
throw new UnsupportedOperationException("Marker has constant ID");
}
}
/*
* SuppressWarnings is needed because the compiler does not understand that we
* are actually safe here.
*
* FIXME: the only caller performs a cast back, so this is not actually necessary.
*/
@SuppressWarnings("unchecked")
public static <T extends INeutronObject> INeutronRequest<T> createRequest(Integer limit, String marker,
Boolean pageReverse,
UriInfo uriInfo,
List<T> collection,
Class<T> clazz) {
PaginationResults<T> results = _paginate(limit, marker, pageReverse, uriInfo, collection);
if (clazz.equals(NeutronNetwork.class)){
return (INeutronRequest<T>) new NeutronNetworkRequest((List<NeutronNetwork>) results.collection, results.links);
}
if (clazz.equals(NeutronSubnet.class)){
return (INeutronRequest<T>) new NeutronSubnetRequest((List<NeutronSubnet>) results.collection, results.links);
}
if (clazz.equals(NeutronPort.class)){
return (INeutronRequest<T>) new NeutronPortRequest((List<NeutronPort>) results.collection, results.links);
}
return null;
}
private static <T extends INeutronObject> PaginationResults<T> _paginate(Integer limit, String marker, Boolean pageReverse, UriInfo uriInfo, List<T> collection) {
List<NeutronPageLink> links = new ArrayList<>();
final int startPos;
String startMarker;
String endMarker;
Boolean firstPage = false;
Boolean lastPage = false;
Collections.sort(collection, NEUTRON_OBJECT_COMPARATOR);
if (marker != null) {
int offset = Collections.binarySearch(collection, new MarkerObject(marker), NEUTRON_OBJECT_COMPARATOR);
if (offset < 0) {
throw new ResourceNotFoundException("UUID for marker: " + marker + " could not be found");
}
if (!pageReverse) {
startPos = offset + 1;
}
else {
startPos = offset - limit;
}
}
else {
startPos = 0;
}
if (startPos == 0){
firstPage = true;
}
if (startPos + limit >= collection.size()) {
collection = collection.subList(startPos, collection.size());
startMarker = collection.get(0).getID();
endMarker = collection.get(collection.size() - 1).getID();
lastPage = true;
}
else if (startPos < 0) {
if (startPos + limit > 0) {
collection = collection.subList(0, startPos + limit);
startMarker = collection.get(0).getID();
endMarker = collection.get(collection.size() - 1).getID();
firstPage = true;
}
else {
throw new BadRequestException("Requested page is out of bounds. Please check the supplied limit and marker");
}
}
else {
collection = collection.subList(startPos, startPos + limit);
startMarker = collection.get(0).getID();
endMarker = collection.get(limit-1).getID();
}
if (!lastPage) {
NeutronPageLink next = new NeutronPageLink();
next.setRef("next");
next.setHref(uriInfo.getAbsolutePath().toString() + "?limit=" + limit.toString() + "&marker=" + endMarker);
links.add(next);
}
if (!firstPage) {
NeutronPageLink previous = new NeutronPageLink();
previous.setRef("previous");
previous.setHref(uriInfo.getAbsolutePath().toString() + "?limit=" + limit.toString() + "&marker=" + startMarker + "&page_reverse=True");
links.add(previous);
}
return new PaginationResults<T>(collection, links);
}
}
|
opendaylight/northbound/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/northbound/PaginatedRequestFactory.java
|
/*
* Copyright (C) 2014 Red Hat, Inc.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*
* Authors : Dave Tucker
*/
package org.opendaylight.controller.networkconfig.neutron.northbound;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import javax.ws.rs.core.UriInfo;
import org.opendaylight.controller.networkconfig.neutron.INeutronObject;
import org.opendaylight.controller.networkconfig.neutron.NeutronNetwork;
import org.opendaylight.controller.networkconfig.neutron.NeutronPort;
import org.opendaylight.controller.networkconfig.neutron.NeutronSubnet;
import org.opendaylight.controller.northbound.commons.exception.BadRequestException;
import org.opendaylight.controller.northbound.commons.exception.ResourceNotFoundException;
public class PaginatedRequestFactory {
private static final Comparator<INeutronObject> NEUTRON_OBJECT_COMPARATOR = new Comparator<INeutronObject>() {
@Override
public int compare(INeutronObject o1, INeutronObject o2) {
return o1.getID().compareTo(o2.getID());
}
};
public static class PaginationResults<T extends INeutronObject> {
List<T> collection;
List<NeutronPageLink> links;
public PaginationResults(List<T> collection, List<NeutronPageLink> links) {
this.collection = collection;
this.links = links;
}
}
/*
* SuppressWarnings is needed because the compiler does not understand that we
* are actually safe here.
*
* FIXME: the only caller performs a cast back, so this is not actually necessary.
*/
@SuppressWarnings("unchecked")
public static <T extends INeutronObject> INeutronRequest<T> createRequest(Integer limit, String marker,
Boolean pageReverse,
UriInfo uriInfo,
List<T> collection,
Class<T> clazz) {
PaginationResults<T> results = _paginate(limit, marker, pageReverse, uriInfo, collection);
if (clazz.equals(NeutronNetwork.class)){
return (INeutronRequest<T>) new NeutronNetworkRequest((List<NeutronNetwork>) results.collection, results.links);
}
if (clazz.equals(NeutronSubnet.class)){
return (INeutronRequest<T>) new NeutronSubnetRequest((List<NeutronSubnet>) results.collection, results.links);
}
if (clazz.equals(NeutronPort.class)){
return (INeutronRequest<T>) new NeutronPortRequest((List<NeutronPort>) results.collection, results.links);
}
return null;
}
private static <T extends INeutronObject> PaginationResults<T> _paginate(Integer limit, String marker, Boolean pageReverse, UriInfo uriInfo, List<T> collection) {
List<NeutronPageLink> links = new ArrayList<>();
Integer startPos = null;
String startMarker;
String endMarker;
Boolean firstPage = false;
Boolean lastPage = false;
Collections.sort(collection, NEUTRON_OBJECT_COMPARATOR);
if (marker == null) {
startPos = 0;
}
else {
class MarkerObject implements INeutronObject {
private String id;
@Override
public String getID() {
return id;
}
@Override
public void setID(String id) {
this.id = id;
}
}
INeutronObject markerObject = new MarkerObject();
markerObject.setID(marker);
startPos = Collections.binarySearch(collection, markerObject, NEUTRON_OBJECT_COMPARATOR);
if (!pageReverse){
startPos = startPos + 1;
}
else {
startPos = startPos - limit;
}
}
if (startPos == null) {
throw new ResourceNotFoundException("UUID for marker:" + marker + " could not be found");
}
if (startPos == 0){
firstPage = true;
}
if (startPos + limit >= collection.size()) {
collection = collection.subList(startPos, collection.size());
startMarker = collection.get(0).getID();
endMarker = collection.get(collection.size() - 1).getID();
lastPage = true;
}
else if (startPos < 0) {
if (startPos + limit > 0) {
collection = collection.subList(0, startPos + limit);
startMarker = collection.get(0).getID();
endMarker = collection.get(collection.size() - 1).getID();
firstPage = true;
}
else {
throw new BadRequestException("Requested page is out of bounds. Please check the supplied limit and marker");
}
}
else {
collection = collection.subList(startPos, startPos + limit);
startMarker = collection.get(0).getID();
endMarker = collection.get(limit-1).getID();
}
if (!lastPage) {
NeutronPageLink next = new NeutronPageLink();
next.setRef("next");
next.setHref(uriInfo.getAbsolutePath().toString() + "?limit=" + limit.toString() + "&marker=" + endMarker);
links.add(next);
}
if (!firstPage) {
NeutronPageLink previous = new NeutronPageLink();
previous.setRef("previous");
previous.setHref(uriInfo.getAbsolutePath().toString() + "?limit=" + limit.toString() + "&marker=" + startMarker + "&page_reverse=True");
links.add(previous);
}
return new PaginationResults<T>(collection, links);
}
}
|
Fix neutron pagination
This patch fixes a logic mistake which resulted in dead code being
reported: startPos was guaranteed to be non-null by virtue of always
being assigned, so the null check could never fire.
The real problem is that the "marker not found" detection was not
correct, as binarySearch() reports a negative value, -(insertion point) - 1,
rather than null when the element is absent.
It also improves performance by using a primitive int instead of a
boxed Integer.
Change-Id: I525c2070d2794b1e001465ea925d95d432feca29
Signed-off-by: Robert Varga <b8bd3df785fdc0ff42dd1710c5d91998513c57ef@cisco.com>
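For reference, a minimal standalone sketch (the class name BinarySearchMarker is invented for illustration) of the Collections.binarySearch() contract this fix relies on: when the key is absent, the method returns -(insertion point) - 1, so "not found" must be detected with a negative-value check rather than a null check.
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
public class BinarySearchMarker {
public static void main(String[] args) {
List<String> ids = Arrays.asList("a1", "b2", "c3"); // already sorted, as the collection is before the lookup
System.out.println(Collections.binarySearch(ids, "b2")); // found: prints 1
int miss = Collections.binarySearch(ids, "b9"); // absent: -(insertion point) - 1
System.out.println(miss); // prints -3
if (miss < 0) {
System.out.println("marker not found"); // the negative-value check the corrected code performs
}
}
}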
|
opendaylight/northbound/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/northbound/PaginatedRequestFactory.java
|
Fix neutron pagination
|
|
Java
|
mpl-2.0
|
d9984ae95a2043790598c73192f1be5f51934806
| 0
|
maxanier/Vertretungsplan
|
package com.example.vertretungsplan;
import java.io.BufferedWriter;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.NameValuePair;
import org.apache.http.StatusLine;
import org.apache.http.client.HttpClient;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.params.BasicHttpParams;
import org.apache.http.params.HttpConnectionParams;
import org.apache.http.params.HttpParams;
import org.w3c.dom.Document;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Environment;
import android.os.StrictMode;
import android.app.Activity;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.webkit.WebView;
import android.widget.Toast;
public class Anzeige extends Activity {
private static final String login_url="https://www.ratsgymnasium-bielefeld.de/index.php/intern?task=user.login";
private static final String plan_url="https://www.ratsgymnasium-bielefeld.de/index.php/intern/vertretungsplan-schueler";
private static final String loginsite_url="https://www.ratsgymnasium-bielefeld.de/index.php/intern";
public static final String newline = System.getProperty("line.separator");
private static final String PREFS_NAME = "Einstellungen";
private static final String TAG = "Anzeige_Activity";
private static final String no_username ="<html><body><p style=\"padding-top:40%;\"><div align=\"center\">Bitte Nutzernamen, Passwort und Klasse einstellen</div></p></body></html>";
private static final String no_internet="<html><body><p style=\"padding-top:40%;\"><div align=\"center\">Keine Internetverbindung</div></p></body></html>";
private WebView webview=null;
private ProgressDialog progressDialog;
private String username;
private String password;
private String klasse;
private boolean initialisiert=false; //true once the layout has been loaded
public String cookie="";
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
//Something - forgot again - some kind of error prevention (relaxes StrictMode's thread policy)
if (android.os.Build.VERSION.SDK_INT > 9) {
StrictMode.ThreadPolicy policy = new StrictMode.ThreadPolicy.Builder().permitAll().build();
StrictMode.setThreadPolicy(policy);
}
//--------------------------------------
//Load the user settings
SharedPreferences settings = getSharedPreferences(PREFS_NAME,0);
username = settings.getString("username","");
password = settings.getString("password","");
klasse = settings.getString("klasse","");
//--------------------------------------
if(username!=""&&password!=""&&klasse!=""){
if(isOnline()){
new LoadPlanTask().execute(username,password,klasse);}//Bei Internetverbindung Plan aktualiseren und anzeigen
else{ //Ohne Internet Verbindung
Toast.makeText(getApplicationContext(), "Keine Internetverbindung", Toast.LENGTH_SHORT).show(); //Anzeige von "Keine Internetverbindung
setContentView(R.layout.activity_anzeige); //Layout laden
webview = (WebView) findViewById(R.id.webView1); //Webview finden und speichern
File f=new File(Environment.getExternalStorageDirectory().getPath()+"/vertretungsplan/plan.html");
if(f.exists()){
Log.i("Anzeige ohne Internetverbindung");
webview.loadData(anzeigen(auswerten(f),username, klasse),"text/html; charset=UTF-8",null)//Gespeicherten Plan anzeigen
}
}
}
else
{
setContentView(R.layout.activity_anzeige); //Load the layout
webview = (WebView) findViewById(R.id.webView1); //Find and store the WebView
webview.loadData(no_username,"text/html; charset=UTF-8",null); //Show the "no username" error message
Log.w(TAG,"Nutzername,Passwort oder Klasse nicht eingestellt");
}
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.anzeige, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item){
switch(item.getItemId()){
case R.id.action_refresh:
SharedPreferences settings = getSharedPreferences(PREFS_NAME,0);
username = settings.getString("username","");
password = settings.getString("password","");
klasse = settings.getString("klasse","");
if(isOnline()){
if(username!=""&&password!=""&&klasse!=""){
new LoadPlanTask().execute(username,password,klasse);
}
else
{
webview.loadData(no_username,"text/html; charset=UTF-8",null);
Log.w(TAG,"Nutzername,Passwort oder Klasse nicht eingestellt");
}
}
else{
Toast.makeText(getApplicationContext(), "Keine Internetverbindung", Toast.LENGTH_SHORT).show();
}
return true;
case R.id.action_settings:
Log.i(TAG,"Optionen anzeigen");
Intent i=new Intent();
i.setClass(this, Options.class);
startActivity(i);
return true;
default:
return super.onOptionsItemSelected(item);
}
}
//Load the plan asynchronously with the help of AsyncTask
private class LoadPlanTask extends AsyncTask<String, Void, String>
{
//Runs before execution in a separate task
@Override
protected void onPreExecute(){
//New progress dialog
progressDialog = new ProgressDialog(Anzeige.this);
progressDialog.setProgressStyle(ProgressDialog.STYLE_SPINNER);
progressDialog.setTitle("Lädt...");
progressDialog.setCancelable(false);
progressDialog.setIndeterminate(false);
progressDialog.show();
}
//Background Thread
protected String doInBackground(String... params)
{
try{
//Get the current thread's token ????
synchronized(this)
{
return planAnzeigen(params[0],params[1],params[2]);
}
}
catch(Exception e){
e.printStackTrace();
}
return null;
}
//after Execution
@Override
protected void onPostExecute(String result)
{
//Dismiss the ProgressDialog
progressDialog.dismiss();
if(!initialisiert){
//Initialize the view
setContentView(R.layout.activity_anzeige);
//Find the WebView
webview = (WebView) findViewById(R.id.webView1);
initialisiert=true;}
//Display the result
webview.loadData(result,"text/html; charset=UTF-8",null);
Toast.makeText(getApplicationContext(), "Aktualisiert", Toast.LENGTH_SHORT).show();
}
}
/**
* Ruft den Vertretungsplan ab, wertet ihn aus und liefert das anzuzeigende HTML zurück.
*/
public String planAnzeigen(String username,String password,String klasse)
{
File dir = new File(Environment.getExternalStorageDirectory().getPath( )+"/vertretungsplan/");
dir.mkdirs();
Log.i(TAG,"Anfrage gestartet");
final HttpParams httpParams = new BasicHttpParams();
HttpConnectionParams.setConnectionTimeout(httpParams,10000);
HttpConnectionParams.setSoTimeout(httpParams,10000);
HttpClient httpclient = new MyHttpsClient(getApplicationContext(),httpParams);
try{
if(isOnline()){
abrufen(httpclient);
login(httpclient,username,password);
auslesen(httpclient);
}
else{
Toast.makeText(getApplicationContext(), "Keine Internetverbindung", Toast.LENGTH_SHORT).show();
}
File f=new File(Environment.getExternalStorageDirectory().getPath()+"/vertretungsplan/plan.html");
if(f.exists()){
Log.i(TAG,"Anfrage erfolgreich abgeschlo�en");
return anzeigen(auswerten(f),username, klasse);
}
else{
throw new Exception("Datei nicht gefunden");
}
}
catch(Exception e){
Log.e(TAG,"Anfrage fehlgeschlagen: ",e);
return e.getMessage();
}
}
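//Ruft die Loginseite ab und liest den Namen des versteckten Formularfelds (Token) für den Login aus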
public boolean abrufen(HttpClient httpclient) throws Exception
{
try{
Log.i(TAG,"Abrufen der Loginseite gestartet");
HttpResponse response = httpclient.execute(new HttpGet(loginsite_url));
if(response.getStatusLine().getStatusCode()==HttpStatus.SC_OK){
Log.i(TAG,"Erfolgreicher Loginseiten Abruf");
ByteArrayOutputStream out = new ByteArrayOutputStream();
response.getEntity().writeTo(out);
out.close();
String responseString = out.toString();
save(responseString,"debug_login.html");
String gesucht="<input type=\"hidden\" name=\"return\" value=\"L2luZGV4LnBocC9pbnRlcm4v\" />\n <input type=\"hidden\" name=";
int index = responseString.indexOf(gesucht);
//System.out.println(index);
char[] chars=responseString.toCharArray();
cookie=String.copyValueOf(chars,index+gesucht.length()+1,32);
Log.i(TAG,"Cookie ausgelesen. Wert: "+cookie);
return true;
}
else
{
StatusLine statusLine=response.getStatusLine();
response.getEntity().getContent().close();
throw new IOException(statusLine.getReasonPhrase());
}
}
catch (Exception e)
{
Log.e(TAG,"Fehlgeschlagener Loginseiten Abruf. Fehler: ",e);
throw new Exception(e.getMessage());
}
}
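//Meldet den Nutzer per POST mit Nutzername, Passwort und ausgelesenem Token an der Webseite an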
public boolean login(HttpClient httpclient,String username,String password) throws Exception
{
try{
Log.i(TAG,"Loginvorgang gestartet. Username: "+username+" Passwort: "+password);
HttpPost httppost = new HttpPost(login_url);
List<NameValuePair> paare = new ArrayList<NameValuePair>(2);
paare.add(new BasicNameValuePair("username",username));
paare.add(new BasicNameValuePair("password",password));
paare.add(new BasicNameValuePair("return","L2luZGV4LnBocC9pbnRlcm4v"));
paare.add(new BasicNameValuePair(cookie,"1"));
httppost.setEntity(new UrlEncodedFormEntity(paare));
HttpResponse response = httpclient.execute(httppost);
if(response.getStatusLine().getStatusCode()==HttpStatus.SC_OK){
Log.i(TAG,"Loginvorgang erfolgreich abgeschlo�en. Status aber unbekannt");
ByteArrayOutputStream out = new ByteArrayOutputStream();
response.getEntity().writeTo(out);
out.close();
String responseString = out.toString();
save(responseString,"debug_login2.html");
return true;
}
else
{
StatusLine statusLine=response.getStatusLine();
response.getEntity().getContent().close();
throw new IOException(statusLine.getReasonPhrase());
}
}
catch(Exception e)
{
Log.e(TAG,"Loginvorgang fehlgeschlagen: ",e);
throw new Exception(e.getMessage());
}
}
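//Ruft den Vertretungsplan ab und speichert ihn als plan.html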
public void auslesen(HttpClient client) throws Exception
{
try{
Log.i(TAG,"Abrufen des Plans und Auslesen gestartet");
HttpResponse response = client.execute(new HttpGet(plan_url));
if(response.getStatusLine().getStatusCode()==HttpStatus.SC_OK){
Log.i(TAG,"Abrufen des Plans erfolgreich abgeschlo�en");
ByteArrayOutputStream out = new ByteArrayOutputStream();
response.getEntity().writeTo(out);
out.close();
String responseString = out.toString();
save(responseString,"plan.html");
}
else
{
StatusLine statusLine=response.getStatusLine();
response.getEntity().getContent().close();
throw new IOException(statusLine.getReasonPhrase());
}
}
catch(Exception e)
{
Log.e(TAG,"Planabruf fehlgeschlagen: ",e);
throw new Exception(e.getMessage());
}
}
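//Wertet die gespeicherte HTML-Datei aus und erzeugt eine Liste von Vertretungen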
public ArrayList<Vertretung> auswerten(File file) throws Exception
{
try{
Log.i(TAG,"Auswerten gestartet");
DocumentBuilderFactory docBuilderFactory = DocumentBuilderFactory.newInstance();
DocumentBuilder docBuilder = docBuilderFactory.newDocumentBuilder();
Document doc = docBuilder.parse(file);
doc.getDocumentElement().normalize();
NodeList font = doc.getElementsByTagName("font");
Log.i(TAG,font.getLength()+" Font-Elemente gefunden");
ArrayList<Vertretung> vertretungen=new ArrayList<Vertretung>();
for(int j=0;j<font.getLength();j+=3){
String tag=font.item(j).getChildNodes().item(1).getChildNodes().item(0).getNodeValue();
NodeList tr=font.item(j).getChildNodes().item(3).getChildNodes();
Log.i(TAG,tag+": "+tr.getLength()+" tr-Elemente gefunden");
for(int i=2;i<tr.getLength();i++)
{
Node node = tr.item(i);
//System.out.println(node.getNodeValue()+"---"+node.getNodeName());
if(!"#text".equals(node.getNodeName())){
NamedNodeMap attr = node.getAttributes();
//System.out.println(attr.getLength());
if(attr.getLength()>0)
{
Node attrclass= attr.getNamedItem("class");
if(attrclass!=null)
{
String value=attrclass.getNodeValue();
//System.out.println(value);
if(value.indexOf("list odd")!=-1||value.indexOf("list even")!=-1)
{
NodeList childnodes = node.getChildNodes();
String klasse= childnodes.item(0).getChildNodes().item(0).getChildNodes().item(0).getNodeValue();
String stunde = childnodes.item(1).getChildNodes().item(0).getNodeValue();
String art = childnodes.item(2).getChildNodes().item(0).getNodeValue();
String fach = childnodes.item(3).getChildNodes().item(0).getNodeValue();
String raum = childnodes.item(4).getChildNodes().item(0).getNodeValue();
if(fach==null){fach="--";}
vertretungen.add(new Vertretung(klasse,stunde,art,fach,raum,tag));
}
}
}
}
}
}
Log.i(TAG,"Auswerten abgeschlo�en");
return vertretungen;
}
catch (SAXParseException err) {
String fehler="** Parsing error" + ", line "
+ err.getLineNumber () + ", uri " + err.getSystemId ()+"\n Message: "+err.getMessage ();
Log.e(TAG,"Parsen fehlgeschlagen: ",err);
throw new Exception(fehler);
}catch (SAXException e) {
Log.e(TAG,"Parsen fehlgeschlagen: ",e);
throw new Exception("Auslesefehler");
}catch (Exception t) {
Log.e(TAG,"Auslesen fehlgeschlagen: ",t);
throw t;
}
}
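//Erzeugt aus der Vertretungsliste das HTML für die Anzeige im WebView, gefiltert nach Klasse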
public String anzeigen(ArrayList<Vertretung> vertretungen,String username, String klasse) throws Exception
{
Log.i(TAG,"Anzeigen gestartet");
if(vertretungen!=null&&vertretungen.size()>0)
{
boolean gefunden=false;
String tag=vertretungen.get(0).tag;
String ergebnis="<html><head><meta http-equiv=\"content-type\" content=\"text/html; charset=utf-8\" /></head><body><div align=\"center\">Datum: "+tag+"\n<table border=\"1\"><tr><th><font size=\"-1\">Klasse</font></th> <th><font size=\"-1\">Stunde</font></th> <th><font size=\"-1\">Art</font></th> <th><font size=\"-1\">Fach</font></th> <th><font size=\"-1\">Raum</font></th></tr>\n";
for(int i=0;i<vertretungen.size();i++){
Vertretung v=vertretungen.get(i);
if(!tag.equals(v.tag)){
tag=v.tag;
ergebnis+="</table>\n";
ergebnis+=newline+"Datum: "+tag+"\n";
ergebnis+="<table border=\"1\"><tr><th><font size=\"-1\">Klasse</font></th> <th><font size=\"-1\">Stunde</font></th> <th><font size=\"-1\">Art</font></th> <th><font size=\"-1\">Fach</font></th> <th><font size=\"-1\">Raum</font></th></tr>\n";
}
//System.out.println("Gesuchte Klasse: "+klasse+" Gefundene Klasse: "+v.klasse+"|");
if(klasse.trim().equals("ALL")||v.klasse.trim().equals(klasse.trim())||v.klasse.trim().equals("("+klasse.trim()+")")){
ergebnis+="<tr>";
ergebnis+="<th><font size=\"-1\">" + v.klasse+"</font></th> ";
ergebnis+="<th><font size=\"-1\">"+v.stunde+"</th> ";
ergebnis+="<th><font size=\"-1\">"+v.art+"</font></th> ";
ergebnis+="<th><font size=\"-1\">"+v.fach+"</font></th> ";
ergebnis+="<th><font size=\"-1\">"+v.raum+"</font></th> ";
ergebnis+=("</tr>\n");
gefunden=true;
}
}
ergebnis+="</table></div></body></html>";
if(!gefunden)
{
ergebnis="<html><body><p style=\"padding-top:40%;\"><div align=\"center\">Keine Vertretungen f�r die gew�hlte Stufe/Klasse("+klasse+")</div></p></body></html>";
}
Log.i(TAG,"Anzeigen abgeschlo�en");
return ergebnis;
}
else{
Log.e(TAG,"Keine Vertretungen angekommen");
throw new Exception("Fehler: Vermutlich falscher Benutzername oder falsches Passwort gew&aum;lhlt:\n Nutzername: "+username+" Passwort: *****");
}
}
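//Speichert einen String als Datei im Ordner vertretungsplan auf dem externen Speicher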
public void save(String s,String file) throws IOException
{
Log.i(TAG,"Speichern der Datei: "+file+" gestartet");
FileWriter o=new FileWriter(Environment.getExternalStorageDirectory().getPath()+"/vertretungsplan/"+file,false);
BufferedWriter bw=new BufferedWriter(o);
bw.write(s);
bw.close();
o.close();
Log.i(TAG,"Speichern der Datei: "+file+" abgeschlo�en");
}
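//Prüft, ob eine Internetverbindung besteht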
public boolean isOnline()
{
ConnectivityManager cm = (ConnectivityManager) getSystemService(Context.CONNECTIVITY_SERVICE);
NetworkInfo netInfo = cm.getActiveNetworkInfo();
if (netInfo != null && netInfo.isConnectedOrConnecting())
{
return true;
}
return false;
}
}
|
src/com/example/vertretungsplan/Anzeige.java
|
package com.example.vertretungsplan;
import java.io.BufferedWriter;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.NameValuePair;
import org.apache.http.StatusLine;
import org.apache.http.client.HttpClient;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.params.BasicHttpParams;
import org.apache.http.params.HttpConnectionParams;
import org.apache.http.params.HttpParams;
import org.w3c.dom.Document;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Environment;
import android.os.StrictMode;
import android.app.Activity;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.webkit.WebView;
import android.widget.Toast;
public class Anzeige extends Activity {
private static final String login_url="https://www.ratsgymnasium-bielefeld.de/index.php/intern?task=user.login";
private static final String plan_url="https://www.ratsgymnasium-bielefeld.de/index.php/intern/vertretungsplan-schueler";
private static final String loginsite_url="https://www.ratsgymnasium-bielefeld.de/index.php/intern";
public static final String newline = System.getProperty("line.separator");
private static final String PREFS_NAME = "Einstellungen";
private static final String TAG = "Anzeige_Activity";
private static final String no_username ="<html><body><p style=\"padding-top:40%;\"><div align=\"center\">Bitte Nutzernamen, Passwort und Klasse einstellen</div></p></body></html>";
private static final String no_internet="<html><body><p style=\"padding-top:40%;\"><div align=\"center\">Keine Internetverbindung</div></p></body></html>";
private WebView webview=null;
private ProgressDialog progressDialog;
private String username;
private String password;
private String klasse;
private boolean initialisiert=false; //true wenn das Layout geladen wurde
public String cookie="";
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if (android.os.Build.VERSION.SDK_INT > 9) {
StrictMode.ThreadPolicy policy = new StrictMode.ThreadPolicy.Builder().permitAll().build();
StrictMode.setThreadPolicy(policy);
}
SharedPreferences settings = getSharedPreferences(PREFS_NAME,0);
username = settings.getString("username","");
password = settings.getString("password","");
klasse = settings.getString("klasse","");
if(username!=""&&password!=""&&klasse!=""){
if(isOnline())
{new LoadPlanTask().execute(username,password,klasse);}
else{
Toast.makeText(getApplicationContext(), "Keine Internetverbindung", Toast.LENGTH_SHORT).show();
setContentView(R.layout.activity_anzeige);
webview = (WebView) findViewById(R.id.webView1);
File f=new File(Environment.getExternalStorageDirectory().getPath()+"/vertretungsplan/plan.html");
if(f.exists()){
Log.i(TAG,"Anzeige ohne Internetverbindung");
try{ webview.loadData(anzeigen(auswerten(f),username, klasse),"text/html; charset=UTF-8",null); }
catch(Exception e){ Log.e(TAG,"Anzeige fehlgeschlagen: ",e); }
}
}
}
else
{
setContentView(R.layout.activity_anzeige);
webview = (WebView) findViewById(R.id.webView1);
webview.loadData(no_username,"text/html; charset=UTF-8",null);
Log.w(TAG,"Nutzername,Passwort oder Klasse nicht eingestellt");
}
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.anzeige, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item){
switch(item.getItemId()){
case R.id.action_refresh:
SharedPreferences settings = getSharedPreferences(PREFS_NAME,0);
username = settings.getString("username","");
password = settings.getString("password","");
klasse = settings.getString("klasse","");
if(isOnline()){
if(username!=""&&password!=""&&klasse!=""){
new LoadPlanTask().execute(username,password,klasse);
}
else
{
webview.loadData(no_username,"text/html; charset=UTF-8",null);
Log.w(TAG,"Nutzername,Passwort oder Klasse nicht eingestellt");
}
}
else{
Toast.makeText(getApplicationContext(), "Keine Internetverbindung", Toast.LENGTH_SHORT).show();
}
return true;
case R.id.action_settings:
Log.i(TAG,"Optionen anzeigen");
Intent i=new Intent();
i.setClass(this, Options.class);
startActivity(i);
return true;
default:
return super.onOptionsItemSelected(item);
}
}
private class LoadPlanTask extends AsyncTask<String, Void, String>
{
//Vor dem Ausführen im separaten Task
@Override
protected void onPreExecute(){
//Neuer progress dialog
progressDialog = new ProgressDialog(Anzeige.this);
progressDialog.setProgressStyle(ProgressDialog.STYLE_SPINNER);
progressDialog.setTitle("L�dt...");
progressDialog.setCancelable(false);
progressDialog.setIndeterminate(false);
progressDialog.show();
}
//Background Thread
protected String doInBackground(String... params)
{
try{
//Synchronize on this task instance while loading the plan
synchronized(this)
{
return planAnzeigen(params[0],params[1],params[2]);
}
}
catch(Exception e){
e.printStackTrace();
}
return null;
}
//after Execution
@Override
protected void onPostExecute(String result)
{
//ProgressDialog schließen
progressDialog.dismiss();
if(!initialisiert){
//initialisiere View
setContentView(R.layout.activity_anzeige);
//finde WebView
webview = (WebView) findViewById(R.id.webView1);}
//Ergebnis Anzeigen
webview.loadData(result,"text/html; charset=UTF-8",null);
Toast.makeText(getApplicationContext(), "Aktualisiert", Toast.LENGTH_SHORT).show();
}
}
public String planAnzeigen(String username,String password,String klasse)
{
File dir = new File(Environment.getExternalStorageDirectory().getPath( )+"/vertretungsplan/");
dir.mkdirs();
Log.i(TAG,"Anfrage gestartet");
final HttpParams httpParams = new BasicHttpParams();
HttpConnectionParams.setConnectionTimeout(httpParams,10000);
HttpConnectionParams.setSoTimeout(httpParams,10000);
HttpClient httpclient = new MyHttpsClient(getApplicationContext(),httpParams);
try{
if(isOnline()){
abrufen(httpclient);
login(httpclient,username,password);
auslesen(httpclient);
}
else{
Toast.makeText(getApplicationContext(), "Keine Internetverbindung", Toast.LENGTH_SHORT).show();
}
File f=new File(Environment.getExternalStorageDirectory().getPath()+"/vertretungsplan/plan.html");
if(f.exists()){
Log.i(TAG,"Anfrage erfolgreich abgeschlo�en");
return anzeigen(auswerten(f),username, klasse);
}
else{
throw new Exception("Datei nicht gefunden");
}
}
catch(Exception e){
Log.e(TAG,"Anfrage fehlgeschlagen: ",e);
return e.getMessage();
}
}
public boolean abrufen(HttpClient httpclient) throws Exception
{
try{
Log.i(TAG,"Abrufen der Loginseite gestartet");
HttpResponse response = httpclient.execute(new HttpGet(loginsite_url));
if(response.getStatusLine().getStatusCode()==HttpStatus.SC_OK){
Log.i(TAG,"Erfolgreicher Loginseiten Abruf");
ByteArrayOutputStream out = new ByteArrayOutputStream();
response.getEntity().writeTo(out);
out.close();
String responseString = out.toString();
save(responseString,"debug_login.html");
String gesucht="<input type=\"hidden\" name=\"return\" value=\"L2luZGV4LnBocC9pbnRlcm4v\" />\n <input type=\"hidden\" name=";
int index = responseString.indexOf(gesucht);
//System.out.println(index);
char[] chars=responseString.toCharArray();
cookie=String.copyValueOf(chars,index+gesucht.length()+1,32);
Log.i(TAG,"Cookie ausgelesen. Wert: "+cookie);
return true;
}
else
{
StatusLine statusLine=response.getStatusLine();
response.getEntity().getContent().close();
throw new IOException(statusLine.getReasonPhrase());
}
}
catch (Exception e)
{
Log.e(TAG,"Fehlgeschlagener Loginseiten Abruf. Fehler: ",e);
throw new Exception(e.getMessage());
}
}
public boolean login(HttpClient httpclient,String username,String password) throws Exception
{
try{
Log.i(TAG,"Loginvorgang gestartet. Username: "+username+" Passwort: "+password);
HttpPost httppost = new HttpPost(login_url);
List<NameValuePair> paare = new ArrayList<NameValuePair>(2);
paare.add(new BasicNameValuePair("username",username));
paare.add(new BasicNameValuePair("password",password));
paare.add(new BasicNameValuePair("return","L2luZGV4LnBocC9pbnRlcm4v"));
paare.add(new BasicNameValuePair(cookie,"1"));
httppost.setEntity(new UrlEncodedFormEntity(paare));
HttpResponse response = httpclient.execute(httppost);
if(response.getStatusLine().getStatusCode()==HttpStatus.SC_OK){
Log.i(TAG,"Loginvorgang erfolgreich abgeschlo�en. Status aber unbekannt");
ByteArrayOutputStream out = new ByteArrayOutputStream();
response.getEntity().writeTo(out);
out.close();
String responseString = out.toString();
save(responseString,"debug_login2.html");
return true;
}
else
{
StatusLine statusLine=response.getStatusLine();
response.getEntity().getContent().close();
throw new IOException(statusLine.getReasonPhrase());
}
}
catch(Exception e)
{
Log.e(TAG,"Loginvorgang fehlgeschlagen: ",e);
throw new Exception(e.getMessage());
}
}
public void auslesen(HttpClient client) throws Exception
{
try{
Log.i(TAG,"Abrufen des Plans und Auslesen gestartet");
HttpResponse response = client.execute(new HttpGet(plan_url));
if(response.getStatusLine().getStatusCode()==HttpStatus.SC_OK){
Log.i(TAG,"Abrufen des Plans erfolgreich abgeschlo�en");
ByteArrayOutputStream out = new ByteArrayOutputStream();
response.getEntity().writeTo(out);
out.close();
String responseString = out.toString();
save(responseString,"plan.html");
}
else
{
StatusLine statusLine=response.getStatusLine();
response.getEntity().getContent().close();
throw new IOException(statusLine.getReasonPhrase());
}
}
catch(Exception e)
{
Log.e(TAG,"Planabruf fehlgeschlagen: ",e);
throw new Exception(e.getMessage());
}
}
public ArrayList<Vertretung> auswerten(File file) throws Exception
{
try{
Log.i(TAG,"Auswerten gestartet");
DocumentBuilderFactory docBuilderFactory = DocumentBuilderFactory.newInstance();
DocumentBuilder docBuilder = docBuilderFactory.newDocumentBuilder();
Document doc = docBuilder.parse(file);
doc.getDocumentElement().normalize();
NodeList font = doc.getElementsByTagName("font");
Log.i(TAG,font.getLength()+" Font-Elemente gefunden");
ArrayList<Vertretung> vertretungen=new ArrayList<Vertretung>();
for(int j=0;j<font.getLength();j+=3){
String tag=font.item(j).getChildNodes().item(1).getChildNodes().item(0).getNodeValue();
NodeList tr=font.item(j).getChildNodes().item(3).getChildNodes();
Log.i(TAG,tag+": "+tr.getLength()+" tr-Elemente gefunden");
for(int i=2;i<tr.getLength();i++)
{
Node node = tr.item(i);
//System.out.println(node.getNodeValue()+"---"+node.getNodeName());
if(!"#text".equals(node.getNodeName())){
NamedNodeMap attr = node.getAttributes();
//System.out.println(attr.getLength());
if(attr.getLength()>0)
{
Node attrclass= attr.getNamedItem("class");
if(attrclass!=null)
{
String value=attrclass.getNodeValue();
//System.out.println(value);
if(value.indexOf("list odd")!=-1||value.indexOf("list even")!=-1)
{
NodeList childnodes = node.getChildNodes();
String klasse= childnodes.item(0).getChildNodes().item(0).getChildNodes().item(0).getNodeValue();
String stunde = childnodes.item(1).getChildNodes().item(0).getNodeValue();
String art = childnodes.item(2).getChildNodes().item(0).getNodeValue();
String fach = childnodes.item(3).getChildNodes().item(0).getNodeValue();
String raum = childnodes.item(4).getChildNodes().item(0).getNodeValue();
if(fach==null){fach="--";}
vertretungen.add(new Vertretung(klasse,stunde,art,fach,raum,tag));
}
}
}
}
}
}
Log.i(TAG,"Auswerten abgeschlo�en");
return vertretungen;
}
catch (SAXParseException err) {
String fehler="** Parsing error" + ", line "
+ err.getLineNumber () + ", uri " + err.getSystemId ()+"\n Message: "+err.getMessage ();
Log.e(TAG,"Parsen fehlgeschlagen: ",err);
throw new Exception(fehler);
}catch (SAXException e) {
Log.e(TAG,"Parsen fehlgeschlagen: ",e);
throw new Exception("Auslesefehler");
}catch (Exception t) {
Log.e(TAG,"Auslesen fehlgeschlagen: ",t);
throw t;
}
}
public String anzeigen(ArrayList<Vertretung> vertretungen,String username, String klasse) throws Exception
{
Log.i(TAG,"Anzeigen gestartet");
if(vertretungen!=null&&vertretungen.size()>0)
{
boolean gefunden=false;
String tag=vertretungen.get(0).tag;
String ergebnis="<html><head><meta http-equiv=\"content-type\" content=\"text/html; charset=utf-8\" /></head><body><div align=\"center\">Datum: "+tag+"\n<table border=\"1\"><tr><th><font size=\"-1\">Klasse</font></th> <th><font size=\"-1\">Stunde</font></th> <th><font size=\"-1\">Art</font></th> <th><font size=\"-1\">Fach</font></th> <th><font size=\"-1\">Raum</font></th></tr>\n";
for(int i=0;i<vertretungen.size();i++){
Vertretung v=vertretungen.get(i);
if(!tag.equals(v.tag)){
tag=v.tag;
ergebnis+="</table>\n";
ergebnis+=newline+"Datum: "+tag+"\n";
ergebnis+="<table border=\"1\"><tr><th><font size=\"-1\">Klasse</font></th> <th><font size=\"-1\">Stunde</font></th> <th><font size=\"-1\">Art</font></th> <th><font size=\"-1\">Fach</font></th> <th><font size=\"-1\">Raum</font></th></tr>\n";
}
//System.out.println("Gesuchte Klasse: "+klasse+" Gefundene Klasse: "+v.klasse+"|");
if(klasse.trim().equals("ALL")||v.klasse.trim().equals(klasse.trim())||v.klasse.trim().equals("("+klasse.trim()+")")){
ergebnis+="<tr>";
ergebnis+="<th><font size=\"-1\">" + v.klasse+"</font></th> ";
ergebnis+="<th><font size=\"-1\">"+v.stunde+"</th> ";
ergebnis+="<th><font size=\"-1\">"+v.art+"</font></th> ";
ergebnis+="<th><font size=\"-1\">"+v.fach+"</font></th> ";
ergebnis+="<th><font size=\"-1\">"+v.raum+"</font></th> ";
ergebnis+=("</tr>\n");
gefunden=true;
}
}
ergebnis+="</table></div></body></html>";
if(!gefunden)
{
ergebnis="<html><body><p style=\"padding-top:40%;\"><div align=\"center\">Keine Vertretungen f�r die gew�hlte Stufe/Klasse("+klasse+")</div></p></body></html>";
}
Log.i(TAG,"Anzeigen abgeschlo�en");
return ergebnis;
}
else{
Log.e(TAG,"Keine Vertretungen angekommen");
throw new Exception("Fehler: Vermutlich falscher Benutzername oder falsches Passwort gew&aum;lhlt:\n Nutzername: "+username+" Passwort: *****");
}
}
public void save(String s,String file) throws IOException
{
Log.i(TAG,"Speichern der Datei: "+file+" gestartet");
FileWriter o=new FileWriter(Environment.getExternalStorageDirectory().getPath()+"/vertretungsplan/"+file,false);
BufferedWriter bw=new BufferedWriter(o);
bw.write(s);
bw.close();
o.close();
Log.i(TAG,"Speichern der Datei: "+file+" abgeschlo�en");
}
public boolean isOnline()
{
ConnectivityManager cm = (ConnectivityManager) getSystemService(Context.CONNECTIVITY_SERVICE);
NetworkInfo netInfo = cm.getActiveNetworkInfo();
if (netInfo != null && netInfo.isConnectedOrConnecting())
{
return true;
}
return false;
}
}
|
Kommentare ergänzt
|
src/com/example/vertretungsplan/Anzeige.java
|
Kommentare ergänzt
|
|
Java
|
agpl-3.0
|
6cf0f914a579e3dbdd6a3ce03a67d0a05ac72e90
| 0
|
daveho/CloudCoder,ndrppnc/CloudCoder,jspacco/CloudCoder,daveho/CloudCoder,aayushmudgal/CloudCoder,x77686d/CloudCoder,vjpudelski/CloudCoder,wicky-info/CloudCoder,cloudcoderdotorg/CloudCoder,vjpudelski/CloudCoder,wicky-info/CloudCoder,wicky-info/CloudCoder,daveho/CloudCoder,daveho/CloudCoder,daveho/CloudCoder,cloudcoderdotorg/CloudCoder,cloudcoderdotorg/CloudCoder,jspacco/CloudCoder,x77686d/CloudCoder,ndrppnc/CloudCoder,cloudcoderdotorg/CloudCoder,cloudcoderdotorg/CloudCoder,aayushmudgal/CloudCoder,cloudcoderdotorg/CloudCoder,vjpudelski/CloudCoder,cloudcoderdotorg/CloudCoder,jspacco/CloudCoder,aayushmudgal/CloudCoder,wicky-info/CloudCoder,wicky-info/CloudCoder,vjpudelski/CloudCoder,daveho/CloudCoder,daveho/CloudCoder,aayushmudgal/CloudCoder,vjpudelski/CloudCoder,jspacco/CloudCoder,csirkeee/CloudCoder,csirkeee/CloudCoder,aayushmudgal/CloudCoder,jspacco/CloudCoder,x77686d/CloudCoder,jspacco/CloudCoder,ndrppnc/CloudCoder,jspacco/CloudCoder2,wicky-info/CloudCoder,vjpudelski/CloudCoder,aayushmudgal/CloudCoder,jspacco/CloudCoder2,ndrppnc/CloudCoder,x77686d/CloudCoder,jspacco/CloudCoder,jspacco/CloudCoder2,vjpudelski/CloudCoder,ndrppnc/CloudCoder,csirkeee/CloudCoder,jspacco/CloudCoder2,csirkeee/CloudCoder,jspacco/CloudCoder2,csirkeee/CloudCoder,wicky-info/CloudCoder,csirkeee/CloudCoder,x77686d/CloudCoder,csirkeee/CloudCoder,x77686d/CloudCoder,jspacco/CloudCoder2,x77686d/CloudCoder,jspacco/CloudCoder2
|
// CloudCoder - a web-based pedagogical programming environment
// Copyright (C) 2011-2012, Jaime Spacco <jspacco@knox.edu>
// Copyright (C) 2011-2012, David H. Hovemeyer <david.hovemeyer@gmail.com>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
package org.cloudcoder.repoapp.servlets;
import java.io.IOException;
import java.security.SecureRandom;
import java.util.LinkedHashSet;
import java.util.Properties;
import java.util.Set;
import javax.mail.Authenticator;
import javax.mail.MessagingException;
import javax.mail.PasswordAuthentication;
import javax.mail.Session;
import javax.mail.Transport;
import javax.mail.internet.InternetAddress;
import javax.mail.internet.MimeMessage;
import javax.mail.internet.MimeMessage.RecipientType;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.cloudcoder.app.server.persist.BCrypt;
import org.cloudcoder.app.server.persist.Database;
import org.cloudcoder.app.shared.model.ConvertBytesToHex;
import org.cloudcoder.app.shared.model.ModelObjectField;
import org.cloudcoder.app.shared.model.ModelObjectUtil;
import org.cloudcoder.app.shared.model.OperationResult;
import org.cloudcoder.app.shared.model.SHA1;
import org.cloudcoder.app.shared.model.User;
import org.cloudcoder.app.shared.model.UserRegistrationRequest;
import org.cloudcoder.app.shared.model.UserRegistrationRequestStatus;
import org.cloudcoder.app.shared.model.json.JSONConversion;
import org.json.simple.JSONValue;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Servlet allowing a new user to register.
*
* @author David Hovemeyer
*/
public class Register extends HttpServlet {
private static final long serialVersionUID = 1L;
private static final Logger logger = LoggerFactory.getLogger(Register.class);
private static final SecureRandom random = new SecureRandom();
private volatile Session session;
@Override
public void init() throws ServletException {
ServletContext context = getServletContext();
if (this.session == null) {
String smtpHost = context.getInitParameter("cloudcoder.repoapp.smtp.host");
String smtpUsername = context.getInitParameter("cloudcoder.repoapp.smtp.user");
String smtpPassword = context.getInitParameter("cloudcoder.repoapp.smtp.passwd");
String smtpPort = context.getInitParameter("cloudcoder.repoapp.smtp.port");
final PasswordAuthentication passwordAuthentication = new PasswordAuthentication(smtpUsername, smtpPassword);
Authenticator authenticator = new Authenticator() {
@Override
public PasswordAuthentication getPasswordAuthentication() {
return passwordAuthentication;
}
};
Properties properties = new Properties();
properties.putAll(System.getProperties());
// properties.setProperty("mail.user", smtpUsername);
properties.setProperty("mail.smtp.submitter", passwordAuthentication.getUserName());
properties.setProperty("mail.smtp.auth", "true");
properties.setProperty("mail.password", smtpPassword);
properties.setProperty("mail.smtp.host", smtpHost);
properties.setProperty("mail.smtp.port", smtpPort);
this.session = Session.getInstance(properties, authenticator);
}
}
@Override
protected void doGet(HttpServletRequest req, HttpServletResponse resp)
throws ServletException, IOException {
req.getRequestDispatcher("_view/register.jsp").forward(req, resp);
}
private static final Set<ModelObjectField<? super UserRegistrationRequest, ?>> REQUIRED_ATTRIBUTES = new LinkedHashSet<ModelObjectField<? super UserRegistrationRequest, ?>>();
static {
REQUIRED_ATTRIBUTES.add(User.USERNAME);
REQUIRED_ATTRIBUTES.add(User.FIRSTNAME);
REQUIRED_ATTRIBUTES.add(User.LASTNAME);
REQUIRED_ATTRIBUTES.add(User.EMAIL);
REQUIRED_ATTRIBUTES.add(User.WEBSITE);
}
// POST is for handling AJAX requests
@Override
protected void doPost(HttpServletRequest req, HttpServletResponse resp)
throws ServletException, IOException {
UserRegistrationRequest request = new UserRegistrationRequest();
// Convert submitted form data into a User object
for (ModelObjectField<? super UserRegistrationRequest, ?> field : User.SCHEMA.getFieldList()) {
if (!REQUIRED_ATTRIBUTES.contains(field)) {
continue;
}
String value = ServletUtil.getRequiredParam(req, "u_" + field.getName());
if (value != null) {
Object convertedValue = ModelObjectUtil.convertString(value, field.getType());
field.setUntyped(request, convertedValue);
}
}
// Get password. No need to check confirmation, since that's checked on
// the client side.
String password = ServletUtil.getRequiredParam(req, "u_password");
request.setPasswordHash(BCrypt.hashpw(password, BCrypt.gensalt(12)));
// Generate a secret.
SHA1 computeHash = new SHA1();
computeHash.update(String.valueOf(random.nextLong()).getBytes("UTF-8"));
for (ModelObjectField<? super UserRegistrationRequest, ?> field : REQUIRED_ATTRIBUTES) {
computeHash.update(field.get(request).toString().getBytes("UTF-8"));
}
request.setSecret(new ConvertBytesToHex(computeHash.digest()).convert());
// Status is PENDING.
request.setStatus(UserRegistrationRequestStatus.PENDING);
// Attempt to insert the request in the database
OperationResult result = Database.getInstance().addUserRegistrationRequest(request);
// If request was successfully added to database, then send an email
if (result.isSuccess()) {
// send email
boolean sent = sendConfirmationEmail(request);
if (sent) {
result.setMessage("Please check your email to complete the registration.");
} else {
result = new OperationResult(false, "Could not send registration email");
}
}
resp.setStatus(HttpServletResponse.SC_OK);
resp.setContentType("application/json");
JSONValue.writeJSONString(JSONConversion.convertOperationResultToJSON(result), resp.getWriter());
}
private boolean sendConfirmationEmail(UserRegistrationRequest request) {
try {
MimeMessage message = new MimeMessage(session);
message.setFrom(new InternetAddress("support@cloudcoder.org"));
message.addRecipient(RecipientType.TO, new InternetAddress(request.getEmail()));
message.setSubject("CloudCoder exercise repository user registration");
StringBuilder body = new StringBuilder();
String confirmUrl = "https://cloudcoder.org" + getServletContext().getContextPath() + "/confirm/" + request.getSecret();
System.out.println("Confirmation link: " + confirmUrl);
body.append("<h1>CloudCoder exercise repository user registration</h1>\n");
body.append("<p>Please visit the link below to confirm your user registration\n");
body.append("for the CloudCoder excercise repository:\n");
body.append("<blockquote><a href='");
body.append(confirmUrl);
body.append("'>");
body.append(confirmUrl);
body.append("</a></blockquote>\n");
message.setContent(body.toString(), "text/html");
Transport.send(message);
return true;
} catch (MessagingException e) {
logger.error("Could not send registration email", e);
return false;
}
}
}
|
CloudCoderRepository/src/org/cloudcoder/repoapp/servlets/Register.java
|
// CloudCoder - a web-based pedagogical programming environment
// Copyright (C) 2011-2012, Jaime Spacco <jspacco@knox.edu>
// Copyright (C) 2011-2012, David H. Hovemeyer <david.hovemeyer@gmail.com>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
package org.cloudcoder.repoapp.servlets;
import java.io.IOException;
import java.security.SecureRandom;
import java.util.LinkedHashSet;
import java.util.Properties;
import java.util.Set;
import javax.mail.Authenticator;
import javax.mail.MessagingException;
import javax.mail.PasswordAuthentication;
import javax.mail.Session;
import javax.mail.Transport;
import javax.mail.internet.InternetAddress;
import javax.mail.internet.MimeMessage;
import javax.mail.internet.MimeMessage.RecipientType;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.cloudcoder.app.server.persist.BCrypt;
import org.cloudcoder.app.server.persist.Database;
import org.cloudcoder.app.shared.model.ConvertBytesToHex;
import org.cloudcoder.app.shared.model.ModelObjectField;
import org.cloudcoder.app.shared.model.ModelObjectUtil;
import org.cloudcoder.app.shared.model.OperationResult;
import org.cloudcoder.app.shared.model.SHA1;
import org.cloudcoder.app.shared.model.User;
import org.cloudcoder.app.shared.model.UserRegistrationRequest;
import org.cloudcoder.app.shared.model.UserRegistrationRequestStatus;
import org.cloudcoder.app.shared.model.json.JSONConversion;
import org.json.simple.JSONValue;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Servlet allowing a new user to register.
*
* @author David Hovemeyer
*/
public class Register extends HttpServlet {
private static final long serialVersionUID = 1L;
private static final Logger logger = LoggerFactory.getLogger(Register.class);
private static final SecureRandom random = new SecureRandom();
private volatile Session session;
@Override
public void init() throws ServletException {
ServletContext context = getServletContext();
if (this.session == null) {
String smtpHost = context.getInitParameter("cloudcoder.repoapp.smtp.host");
String smtpUsername = context.getInitParameter("cloudcoder.repoapp.smtp.user");
String smtpPassword = context.getInitParameter("cloudcoder.repoapp.smtp.passwd");
String smtpPort = context.getInitParameter("cloudcoder.repoapp.smtp.port");
final PasswordAuthentication passwordAuthentication = new PasswordAuthentication(smtpUsername, smtpPassword);
Authenticator authenticator = new Authenticator() {
@Override
public PasswordAuthentication getPasswordAuthentication() {
return passwordAuthentication;
}
};
Properties properties = new Properties();
properties.putAll(System.getProperties());
// properties.setProperty("mail.user", smtpUsername);
properties.setProperty("mail.smtp.submitter", passwordAuthentication.getUserName());
properties.setProperty("mail.smtp.auth", "true");
properties.setProperty("mail.password", smtpPassword);
properties.setProperty("mail.smtp.host", smtpHost);
properties.setProperty("mail.smtp.port", smtpPort);
this.session = Session.getInstance(properties, authenticator);
}
}
@Override
protected void doGet(HttpServletRequest req, HttpServletResponse resp)
throws ServletException, IOException {
req.getRequestDispatcher("_view/register.jsp").forward(req, resp);
}
private static final Set<ModelObjectField<? super UserRegistrationRequest, ?>> REQUIRED_ATTRIBUTES = new LinkedHashSet<ModelObjectField<? super UserRegistrationRequest, ?>>();
static {
REQUIRED_ATTRIBUTES.add(User.USERNAME);
REQUIRED_ATTRIBUTES.add(User.FIRSTNAME);
REQUIRED_ATTRIBUTES.add(User.LASTNAME);
REQUIRED_ATTRIBUTES.add(User.EMAIL);
REQUIRED_ATTRIBUTES.add(User.WEBSITE);
}
// POST is for handling AJAX requests
@Override
protected void doPost(HttpServletRequest req, HttpServletResponse resp)
throws ServletException, IOException {
UserRegistrationRequest request = new UserRegistrationRequest();
// Convert submitted form data into a User object
for (ModelObjectField<? super UserRegistrationRequest, ?> field : User.SCHEMA.getFieldList()) {
if (!REQUIRED_ATTRIBUTES.contains(field)) {
continue;
}
String value = ServletUtil.getRequiredParam(req, "u_" + field.getName());
if (value != null) {
Object convertedValue = ModelObjectUtil.convertString(value, field.getType());
field.setUntyped(request, convertedValue);
}
}
// Get password. No need to check confirmation, since that's checked on
// the client side.
String password = ServletUtil.getRequiredParam(req, "u_password");
request.setPasswordHash(BCrypt.hashpw(password, BCrypt.gensalt(12)));
// Generate a secret.
SHA1 computeHash = new SHA1();
computeHash.update(String.valueOf(random.nextLong()).getBytes("UTF-8"));
for (ModelObjectField<? super UserRegistrationRequest, ?> field : REQUIRED_ATTRIBUTES) {
computeHash.update(field.get(request).toString().getBytes("UTF-8"));
}
request.setSecret(new ConvertBytesToHex(computeHash.digest()).convert());
// Status is PENDING.
request.setStatus(UserRegistrationRequestStatus.PENDING);
// Attempt to insert the request in the database
OperationResult result = Database.getInstance().addUserRegistrationRequest(request);
// If request was successfully added to database, then send an email
if (result.isSuccess()) {
// send email
boolean sent = sendConfirmationEmail(req.getScheme(), req.getServerName(), request);
if (sent) {
result.setMessage("Please check your email to complete the registration.");
} else {
result = new OperationResult(false, "Could not send registration email");
}
}
resp.setStatus(HttpServletResponse.SC_OK);
resp.setContentType("application/json");
JSONValue.writeJSONString(JSONConversion.convertOperationResultToJSON(result), resp.getWriter());
}
private boolean sendConfirmationEmail(String scheme, String host, UserRegistrationRequest request) {
try {
MimeMessage message = new MimeMessage(session);
message.setFrom(new InternetAddress("support@cloudcoder.org"));
message.addRecipient(RecipientType.TO, new InternetAddress(request.getEmail()));
message.setSubject("CloudCoder exercise repository user registration");
StringBuilder body = new StringBuilder();
String confirmUrl = scheme + "://" + host + getServletContext().getContextPath() + "/confirm/" + request.getSecret();
System.out.println("Confirmation link: " + confirmUrl);
body.append("<h1>CloudCoder exercise repository user registration</h1>\n");
body.append("<p>Please visit the link below to confirm your user registration\n");
body.append("for the CloudCoder excercise repository:\n");
body.append("<blockquote><a href='");
body.append(confirmUrl);
body.append("'>");
body.append(confirmUrl);
body.append("</a></blockquote>\n");
message.setContent(body.toString(), "text/html");
Transport.send(message);
return true;
} catch (MessagingException e) {
logger.error("Could not send registration email", e);
return false;
}
}
}
|
hard-code URL of repo webserver
There will only be one deployment, so there's no point in any
fancy code to try to figure out the correct URL.
|
CloudCoderRepository/src/org/cloudcoder/repoapp/servlets/Register.java
|
hard-code URL of repo webserver
|
|
Java
|
agpl-3.0
|
3d73d7024d929128d4e58b80aaaad4a634c0ba25
| 0
|
brsimioni/rstudio,thklaus/rstudio,john-r-mcpherson/rstudio,jar1karp/rstudio,vbelakov/rstudio,jar1karp/rstudio,more1/rstudio,sfloresm/rstudio,john-r-mcpherson/rstudio,tbarrongh/rstudio,edrogers/rstudio,maligulzar/Rstudio-instrumented,suribes/rstudio,edrogers/rstudio,jzhu8803/rstudio,pssguy/rstudio,piersharding/rstudio,more1/rstudio,jrnold/rstudio,maligulzar/Rstudio-instrumented,sfloresm/rstudio,githubfun/rstudio,jzhu8803/rstudio,piersharding/rstudio,edrogers/rstudio,pssguy/rstudio,JanMarvin/rstudio,john-r-mcpherson/rstudio,edrogers/rstudio,tbarrongh/rstudio,more1/rstudio,suribes/rstudio,pssguy/rstudio,piersharding/rstudio,jar1karp/rstudio,nvoron23/rstudio,JanMarvin/rstudio,tbarrongh/rstudio,more1/rstudio,nvoron23/rstudio,thklaus/rstudio,more1/rstudio,brsimioni/rstudio,vbelakov/rstudio,jrnold/rstudio,vbelakov/rstudio,jrnold/rstudio,more1/rstudio,jzhu8803/rstudio,piersharding/rstudio,edrogers/rstudio,jzhu8803/rstudio,sfloresm/rstudio,piersharding/rstudio,edrogers/rstudio,tbarrongh/rstudio,vbelakov/rstudio,jrnold/rstudio,nvoron23/rstudio,jrnold/rstudio,brsimioni/rstudio,thklaus/rstudio,maligulzar/Rstudio-instrumented,suribes/rstudio,piersharding/rstudio,jar1karp/rstudio,john-r-mcpherson/rstudio,suribes/rstudio,suribes/rstudio,brsimioni/rstudio,piersharding/rstudio,sfloresm/rstudio,edrogers/rstudio,more1/rstudio,maligulzar/Rstudio-instrumented,vbelakov/rstudio,brsimioni/rstudio,thklaus/rstudio,githubfun/rstudio,suribes/rstudio,john-r-mcpherson/rstudio,pssguy/rstudio,githubfun/rstudio,maligulzar/Rstudio-instrumented,jar1karp/rstudio,githubfun/rstudio,JanMarvin/rstudio,jar1karp/rstudio,maligulzar/Rstudio-instrumented,githubfun/rstudio,tbarrongh/rstudio,pssguy/rstudio,JanMarvin/rstudio,thklaus/rstudio,nvoron23/rstudio,thklaus/rstudio,JanMarvin/rstudio,jrnold/rstudio,vbelakov/rstudio,tbarrongh/rstudio,sfloresm/rstudio,pssguy/rstudio,jzhu8803/rstudio,sfloresm/rstudio,edrogers/rstudio,sfloresm/rstudio,suribes/rstudio,maligulzar/Rstudio-instrumented,jrnold/rstudio,brsimioni/rstudio,jar1karp/rstudio,jar1karp/rstudio,JanMarvin/rstudio,jar1karp/rstudio,JanMarvin/rstudio,suribes/rstudio,maligulzar/Rstudio-instrumented,nvoron23/rstudio,john-r-mcpherson/rstudio,githubfun/rstudio,thklaus/rstudio,piersharding/rstudio,thklaus/rstudio,jrnold/rstudio,pssguy/rstudio,nvoron23/rstudio,piersharding/rstudio,john-r-mcpherson/rstudio,more1/rstudio,jrnold/rstudio,nvoron23/rstudio,maligulzar/Rstudio-instrumented,vbelakov/rstudio,githubfun/rstudio,pssguy/rstudio,tbarrongh/rstudio,jzhu8803/rstudio,jzhu8803/rstudio,sfloresm/rstudio,JanMarvin/rstudio,tbarrongh/rstudio,githubfun/rstudio,brsimioni/rstudio,JanMarvin/rstudio,brsimioni/rstudio,vbelakov/rstudio,john-r-mcpherson/rstudio,jzhu8803/rstudio
|
/*
* TextEditingTargetWidget.java
*
* Copyright (C) 2009-12 by RStudio, Inc.
*
* This program is licensed to you under the terms of version 3 of the
* GNU Affero General Public License. This program is distributed WITHOUT
* ANY EXPRESS OR IMPLIED WARRANTY, INCLUDING THOSE OF NON-INFRINGEMENT,
* MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Please refer to the
* AGPL (http://www.gnu.org/licenses/agpl-3.0.txt) for more details.
*
*/
package org.rstudio.studio.client.workbench.views.source.editors.text;
import com.google.gwt.core.client.Scheduler;
import com.google.gwt.core.client.Scheduler.ScheduledCommand;
import com.google.gwt.dom.client.Style.Unit;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.event.logical.shared.ValueChangeEvent;
import com.google.gwt.event.logical.shared.ValueChangeHandler;
import com.google.gwt.event.shared.HandlerRegistration;
import com.google.gwt.resources.client.ImageResource;
import com.google.gwt.user.client.ui.*;
import org.rstudio.core.client.events.EnsureVisibleEvent;
import org.rstudio.core.client.events.EnsureVisibleHandler;
import org.rstudio.core.client.layout.RequiresVisibilityChanged;
import org.rstudio.core.client.theme.res.ThemeResources;
import org.rstudio.core.client.widget.*;
import org.rstudio.studio.client.application.events.EventBus;
import org.rstudio.studio.client.common.filetypes.FileTypeRegistry;
import org.rstudio.studio.client.common.filetypes.TextFileType;
import org.rstudio.studio.client.common.icons.StandardIcons;
import org.rstudio.studio.client.workbench.commands.Commands;
import org.rstudio.studio.client.workbench.prefs.model.UIPrefs;
import org.rstudio.studio.client.workbench.views.edit.ui.EditDialog;
import org.rstudio.studio.client.workbench.views.source.PanelWithToolbars;
import org.rstudio.studio.client.workbench.views.source.editors.EditingTargetToolbar;
import org.rstudio.studio.client.workbench.views.source.editors.text.TextEditingTarget.Display;
import org.rstudio.studio.client.workbench.views.source.editors.text.findreplace.FindReplaceBar;
import org.rstudio.studio.client.workbench.views.source.editors.text.status.StatusBar;
import org.rstudio.studio.client.workbench.views.source.editors.text.status.StatusBarWidget;
public class TextEditingTargetWidget
extends ResizeComposite
implements Display, RequiresVisibilityChanged
{
public TextEditingTargetWidget(Commands commands,
UIPrefs uiPrefs,
FileTypeRegistry fileTypeRegistry,
DocDisplay editor,
TextFileType fileType,
EventBus events)
{
commands_ = commands;
uiPrefs_ = uiPrefs;
fileTypeRegistry_ = fileTypeRegistry;
editor_ = editor;
sourceOnSave_ = new CheckBox();
srcOnSaveLabel_ =
new CheckboxLabel(sourceOnSave_, "Source on Save").getLabel();
statusBar_ = new StatusBarWidget();
findReplace_ = new TextEditingTargetFindReplace(
new TextEditingTargetFindReplace.Container()
{
@Override
public AceEditor getEditor()
{
return (AceEditor)editor_;
}
@Override
public void insertFindReplace(FindReplaceBar findReplaceBar)
{
Widget beforeWidget = null;
if (warningBar_ != null && warningBar_.isAttached())
beforeWidget = warningBar_;
panel_.insertNorth(findReplaceBar,
findReplaceBar.getHeight(),
beforeWidget);
}
@Override
public void removeFindReplace(FindReplaceBar findReplaceBar)
{
panel_.remove(findReplaceBar);
}
});
panel_ = new PanelWithToolbars(createToolbar(fileType),
editor.asWidget(),
statusBar_);
adaptToFileType(fileType);
initWidget(panel_);
}
private StatusBarWidget statusBar_;
private Toolbar createToolbar(TextFileType fileType)
{
Toolbar toolbar = new EditingTargetToolbar(commands_);
toolbar.addLeftWidget(commands_.saveSourceDoc().createToolbarButton());
sourceOnSave_.getElement().getStyle().setMarginRight(0, Unit.PX);
toolbar.addLeftWidget(sourceOnSave_);
srcOnSaveLabel_.getElement().getStyle().setMarginRight(9, Unit.PX);
toolbar.addLeftWidget(srcOnSaveLabel_);
toolbar.addLeftSeparator();
toolbar.addLeftWidget(commands_.checkSpelling().createToolbarButton());
toolbar.addLeftWidget(findReplace_.createFindReplaceButton());
toolbar.addLeftWidget(createCodeTransformMenuButton());
texSeparatorWidget_ = toolbar.addLeftSeparator();
toolbar.addLeftWidget(texToolbarButton_ = createLatexFormatButton());
toolbar.addLeftWidget(commands_.markdownHelp().createToolbarButton());
toolbar.addLeftSeparator();
toolbar.addLeftWidget(previewHTMLButton_ = commands_.previewHTML().createToolbarButton());
toolbar.addLeftWidget(knitToHTMLButton_ = commands_.knitToHTML().createToolbarButton());
toolbar.addLeftWidget(compilePdfButton_ = commands_.compilePDF().createToolbarButton());
toolbar.addLeftSeparator();
toolbar.addLeftWidget(commands_.synctexSearch().createToolbarButton());
toolbar.addRightWidget(runButton_ = commands_.executeCode().createToolbarButton());
toolbar.addRightSeparator();
toolbar.addRightWidget(commands_.executeLastCode().createToolbarButton());
toolbar.addRightSeparator();
final String SOURCE_BUTTON_TITLE = "Source the active document";
sourceButton_ = new ToolbarButton(
"Source",
commands_.sourceActiveDocument().getImageResource(),
new ClickHandler()
{
@Override
public void onClick(ClickEvent event)
{
if (uiPrefs_.sourceWithEcho().getValue())
commands_.sourceActiveDocumentWithEcho().execute();
else
commands_.sourceActiveDocument().execute();
}
});
sourceButton_.setTitle(SOURCE_BUTTON_TITLE);
toolbar.addRightWidget(sourceButton_);
uiPrefs_.sourceWithEcho().addValueChangeHandler(
new ValueChangeHandler<Boolean>() {
@Override
public void onValueChange(ValueChangeEvent<Boolean> event)
{
if (event.getValue())
sourceButton_.setTitle(SOURCE_BUTTON_TITLE + " (with echo)");
else
sourceButton_.setTitle(SOURCE_BUTTON_TITLE);
}
});
ToolbarPopupMenu sourceMenu = new ToolbarPopupMenu();
sourceMenu.addItem(commands_.sourceActiveDocument().createMenuItem(false));
sourceMenu.addItem(commands_.sourceActiveDocumentWithEcho().createMenuItem(false));
sourceMenuButton_ = new ToolbarButton(sourceMenu, true);
toolbar.addRightWidget(sourceMenuButton_);
toolbar.addRightSeparator();
toolbar.addRightWidget(commands_.compileNotebook().createToolbarButton());
ToolbarPopupMenu chunksMenu = new ToolbarPopupMenu();
chunksMenu.addItem(commands_.insertChunk().createMenuItem(false));
chunksMenu.addSeparator();
chunksMenu.addItem(commands_.jumpTo().createMenuItem(false));
chunksMenu.addSeparator();
chunksMenu.addItem(commands_.executeCurrentChunk().createMenuItem(false));
chunksMenu.addItem(commands_.executeNextChunk().createMenuItem(false));
chunksMenu.addSeparator();
chunksMenu.addItem(commands_.executeAllCode().createMenuItem(false));
chunksButton_ = new ToolbarButton(
"Chunks",
StandardIcons.INSTANCE.chunk_menu(),
chunksMenu,
true);
toolbar.addRightWidget(chunksButton_);
return toolbar;
}
private ToolbarButton createLatexFormatButton()
{
ToolbarPopupMenu texMenu = new TextEditingTargetLatexFormatMenu(editor_,
uiPrefs_);
ToolbarButton texButton = new ToolbarButton(
"Format",
fileTypeRegistry_.getIconForFilename("foo.tex"),
texMenu,
false);
return texButton;
}
private Widget createCodeTransformMenuButton()
{
if (codeTransform_ == null)
{
ImageResource icon = ThemeResources.INSTANCE.codeTransform();
ToolbarPopupMenu menu = new ToolbarPopupMenu();
menu.addItem(commands_.codeCompletion().createMenuItem(false));
menu.addSeparator();
menu.addItem(commands_.goToHelp().createMenuItem(false));
menu.addItem(commands_.goToFunctionDefinition().createMenuItem(false));
menu.addSeparator();
menu.addItem(commands_.extractFunction().createMenuItem(false));
menu.addItem(commands_.reindent().createMenuItem(false));
menu.addItem(commands_.reflowComment().createMenuItem(false));
menu.addItem(commands_.commentUncomment().createMenuItem(false));
codeTransform_ = new ToolbarButton("", icon, menu);
codeTransform_.setTitle("Code Tools");
}
return codeTransform_;
}
public void adaptToFileType(TextFileType fileType)
{
editor_.setFileType(fileType);
boolean canCompilePdf = fileType.canCompilePDF();
boolean canExecuteCode = fileType.canExecuteCode();
boolean canExecuteChunks = fileType.canExecuteChunks();
sourceOnSave_.setVisible(fileType.canSourceOnSave());
srcOnSaveLabel_.setVisible(fileType.canSourceOnSave());
if (fileType.isRd())
srcOnSaveLabel_.setText("Preview on Save");
else
srcOnSaveLabel_.setText("Source on Save");
codeTransform_.setVisible(
(canExecuteCode && !fileType.canAuthorContent()) ||
fileType.isCpp());
sourceButton_.setVisible(canExecuteCode && !canExecuteChunks);
sourceMenuButton_.setVisible(canExecuteCode && !canExecuteChunks);
texSeparatorWidget_.setVisible(canCompilePdf);
texToolbarButton_.setVisible(canCompilePdf);
compilePdfButton_.setVisible(canCompilePdf);
chunksButton_.setVisible(canExecuteChunks);
}
public HasValue<Boolean> getSourceOnSave()
{
return sourceOnSave_;
}
public void ensureVisible()
{
fireEvent(new EnsureVisibleEvent());
}
@Override
public void onResize()
{
super.onResize();
manageToolbarSizes();
}
private void manageToolbarSizes()
{
// sometimes width is passed in as 0 (not sure why)
int width = getOffsetWidth();
if (width == 0)
return;
texToolbarButton_.setText(width < 520 ? "" : "Format");
runButton_.setText(width < 480 ? "" : "Run");
compilePdfButton_.setText(width < 450 ? "" : "Compile PDF");
previewHTMLButton_.setText(width < 450 ? "" : "Preview HTML");
knitToHTMLButton_.setText(width < 450 ? "" : "Knit HTML");
if (editor_.getFileType().isRd())
srcOnSaveLabel_.setText(width < 450 ? "Preview" : "Preview on Save");
else
srcOnSaveLabel_.setText(width < 450 ? "Source" : "Source on Save");
sourceButton_.setText(width < 400 ? "" : "Source");
chunksButton_.setText(width < 400 ? "" : "Chunks");
}
public void showWarningBar(String warning)
{
if (warningBar_ == null)
{
warningBar_ = new InfoBar(InfoBar.WARNING, new ClickHandler() {
@Override
public void onClick(ClickEvent event)
{
hideWarningBar();
}
});
}
warningBar_.setText(warning);
panel_.insertNorth(warningBar_, warningBar_.getHeight(), null);
}
public void hideWarningBar()
{
if (warningBar_ != null)
{
panel_.remove(warningBar_);
}
}
public void showFindReplace(boolean defaultForward)
{
findReplace_.showFindReplace(defaultForward);
}
@Override
public void findNext()
{
findReplace_.findNext();
}
@Override
public void findPrevious()
{
findReplace_.findPrevious();
}
@Override
public void replaceAndFind()
{
findReplace_.replaceAndFind();
}
public void onActivate()
{
editor_.onActivate();
Scheduler.get().scheduleDeferred(new ScheduledCommand() {
@Override
public void execute()
{
manageToolbarSizes();
}
});
}
public void setFontSize(double size)
{
editor_.setFontSize(size);
}
public StatusBar getStatusBar()
{
return statusBar_;
}
@Override
public void debug_dumpContents()
{
String dump = editor_.debug_getDocumentDump();
new EditDialog(dump, false, false, new ProgressOperationWithInput<String>()
{
@Override
public void execute(String input, ProgressIndicator indicator)
{
indicator.onCompleted();
}
}).showModal();
}
@Override
public void debug_importDump()
{
new EditDialog("", false, false, new ProgressOperationWithInput<String>()
{
@Override
public void execute(String input, ProgressIndicator indicator)
{
indicator.onCompleted();
if (input == null)
return;
input = input.replaceAll("[ \\r\\n]+", " ");
String[] chars = input.split(" ");
StringBuilder sb = new StringBuilder();
for (String s : chars)
{
if (s.equals("."))
sb.append('\n');
else
sb.append((char)Integer.parseInt(s));
}
editor_.debug_setSessionValueDirectly(sb.toString());
}
}).showModal();
}
public HandlerRegistration addEnsureVisibleHandler(EnsureVisibleHandler handler)
{
return addHandler(handler, EnsureVisibleEvent.TYPE);
}
public void onVisibilityChanged(boolean visible)
{
editor_.onVisibilityChanged(visible);
}
private final Commands commands_;
private final UIPrefs uiPrefs_;
private final FileTypeRegistry fileTypeRegistry_;
private final DocDisplay editor_;
private CheckBox sourceOnSave_;
private PanelWithToolbars panel_;
private InfoBar warningBar_;
private final TextEditingTargetFindReplace findReplace_;
private ToolbarButton codeTransform_;
private ToolbarButton compilePdfButton_;
private ToolbarButton previewHTMLButton_;
private ToolbarButton knitToHTMLButton_;
private ToolbarButton runButton_;
private ToolbarButton sourceButton_;
private ToolbarButton sourceMenuButton_;
private ToolbarButton chunksButton_;
private Widget texSeparatorWidget_;
private ToolbarButton texToolbarButton_;
private Label srcOnSaveLabel_;
}
|
src/gwt/src/org/rstudio/studio/client/workbench/views/source/editors/text/TextEditingTargetWidget.java
|
/*
* TextEditingTargetWidget.java
*
* Copyright (C) 2009-12 by RStudio, Inc.
*
* This program is licensed to you under the terms of version 3 of the
* GNU Affero General Public License. This program is distributed WITHOUT
* ANY EXPRESS OR IMPLIED WARRANTY, INCLUDING THOSE OF NON-INFRINGEMENT,
* MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Please refer to the
* AGPL (http://www.gnu.org/licenses/agpl-3.0.txt) for more details.
*
*/
package org.rstudio.studio.client.workbench.views.source.editors.text;
import com.google.gwt.dom.client.Style.Unit;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.event.logical.shared.ValueChangeEvent;
import com.google.gwt.event.logical.shared.ValueChangeHandler;
import com.google.gwt.event.shared.HandlerRegistration;
import com.google.gwt.resources.client.ImageResource;
import com.google.gwt.user.client.ui.*;
import org.rstudio.core.client.events.EnsureVisibleEvent;
import org.rstudio.core.client.events.EnsureVisibleHandler;
import org.rstudio.core.client.layout.RequiresVisibilityChanged;
import org.rstudio.core.client.theme.res.ThemeResources;
import org.rstudio.core.client.widget.*;
import org.rstudio.studio.client.application.events.EventBus;
import org.rstudio.studio.client.common.filetypes.FileTypeRegistry;
import org.rstudio.studio.client.common.filetypes.TextFileType;
import org.rstudio.studio.client.common.icons.StandardIcons;
import org.rstudio.studio.client.workbench.commands.Commands;
import org.rstudio.studio.client.workbench.prefs.model.UIPrefs;
import org.rstudio.studio.client.workbench.views.edit.ui.EditDialog;
import org.rstudio.studio.client.workbench.views.source.PanelWithToolbars;
import org.rstudio.studio.client.workbench.views.source.editors.EditingTargetToolbar;
import org.rstudio.studio.client.workbench.views.source.editors.text.TextEditingTarget.Display;
import org.rstudio.studio.client.workbench.views.source.editors.text.findreplace.FindReplaceBar;
import org.rstudio.studio.client.workbench.views.source.editors.text.status.StatusBar;
import org.rstudio.studio.client.workbench.views.source.editors.text.status.StatusBarWidget;
public class TextEditingTargetWidget
extends ResizeComposite
implements Display, RequiresVisibilityChanged
{
public TextEditingTargetWidget(Commands commands,
UIPrefs uiPrefs,
FileTypeRegistry fileTypeRegistry,
DocDisplay editor,
TextFileType fileType,
EventBus events)
{
commands_ = commands;
uiPrefs_ = uiPrefs;
fileTypeRegistry_ = fileTypeRegistry;
editor_ = editor;
sourceOnSave_ = new CheckBox();
srcOnSaveLabel_ =
new CheckboxLabel(sourceOnSave_, "Source on Save").getLabel();
statusBar_ = new StatusBarWidget();
findReplace_ = new TextEditingTargetFindReplace(
new TextEditingTargetFindReplace.Container()
{
@Override
public AceEditor getEditor()
{
return (AceEditor)editor_;
}
@Override
public void insertFindReplace(FindReplaceBar findReplaceBar)
{
Widget beforeWidget = null;
if (warningBar_ != null && warningBar_.isAttached())
beforeWidget = warningBar_;
panel_.insertNorth(findReplaceBar,
findReplaceBar.getHeight(),
beforeWidget);
}
@Override
public void removeFindReplace(FindReplaceBar findReplaceBar)
{
panel_.remove(findReplaceBar);
}
});
panel_ = new PanelWithToolbars(createToolbar(fileType),
editor.asWidget(),
statusBar_);
adaptToFileType(fileType);
initWidget(panel_);
}
private StatusBarWidget statusBar_;
private Toolbar createToolbar(TextFileType fileType)
{
// exclude back and forward buttons for authoring file types
// (because they have too many other commands)
Toolbar toolbar = fileType.canAuthorContent() ?
new Toolbar() :
new EditingTargetToolbar(commands_);
toolbar.addLeftWidget(commands_.saveSourceDoc().createToolbarButton());
sourceOnSave_.getElement().getStyle().setMarginRight(0, Unit.PX);
toolbar.addLeftWidget(sourceOnSave_);
srcOnSaveLabel_.getElement().getStyle().setMarginRight(9, Unit.PX);
toolbar.addLeftWidget(srcOnSaveLabel_);
toolbar.addLeftSeparator();
toolbar.addLeftWidget(commands_.checkSpelling().createToolbarButton());
toolbar.addLeftWidget(findReplace_.createFindReplaceButton());
toolbar.addLeftWidget(createCodeTransformMenuButton());
texSeparatorWidget_ = toolbar.addLeftSeparator();
toolbar.addLeftWidget(texToolbarButton_ = createLatexFormatButton());
toolbar.addLeftWidget(commands_.markdownHelp().createToolbarButton());
toolbar.addLeftSeparator();
toolbar.addLeftWidget(commands_.previewHTML().createToolbarButton());
toolbar.addLeftWidget(commands_.knitToHTML().createToolbarButton());
toolbar.addLeftWidget(compilePdfButton_ = commands_.compilePDF().createToolbarButton());
toolbar.addLeftSeparator();
toolbar.addLeftWidget(commands_.synctexSearch().createToolbarButton());
toolbar.addRightWidget(runButton_ = commands_.executeCode().createToolbarButton());
toolbar.addRightSeparator();
toolbar.addRightWidget(commands_.executeLastCode().createToolbarButton());
toolbar.addRightSeparator();
final String SOURCE_BUTTON_TITLE = "Source the active document";
sourceButton_ = new ToolbarButton(
"Source",
commands_.sourceActiveDocument().getImageResource(),
new ClickHandler()
{
@Override
public void onClick(ClickEvent event)
{
if (uiPrefs_.sourceWithEcho().getValue())
commands_.sourceActiveDocumentWithEcho().execute();
else
commands_.sourceActiveDocument().execute();
}
});
sourceButton_.setTitle(SOURCE_BUTTON_TITLE);
toolbar.addRightWidget(sourceButton_);
uiPrefs_.sourceWithEcho().addValueChangeHandler(
new ValueChangeHandler<Boolean>() {
@Override
public void onValueChange(ValueChangeEvent<Boolean> event)
{
if (event.getValue())
sourceButton_.setTitle(SOURCE_BUTTON_TITLE + " (with echo)");
else
sourceButton_.setTitle(SOURCE_BUTTON_TITLE);
}
});
ToolbarPopupMenu sourceMenu = new ToolbarPopupMenu();
sourceMenu.addItem(commands_.sourceActiveDocument().createMenuItem(false));
sourceMenu.addItem(commands_.sourceActiveDocumentWithEcho().createMenuItem(false));
sourceMenuButton_ = new ToolbarButton(sourceMenu, true);
toolbar.addRightWidget(sourceMenuButton_);
toolbar.addRightSeparator();
toolbar.addRightWidget(commands_.compileNotebook().createToolbarButton());
ToolbarPopupMenu chunksMenu = new ToolbarPopupMenu();
chunksMenu.addItem(commands_.insertChunk().createMenuItem(false));
chunksMenu.addSeparator();
chunksMenu.addItem(commands_.jumpTo().createMenuItem(false));
chunksMenu.addSeparator();
chunksMenu.addItem(commands_.executeCurrentChunk().createMenuItem(false));
chunksMenu.addItem(commands_.executeNextChunk().createMenuItem(false));
chunksMenu.addSeparator();
chunksMenu.addItem(commands_.executeAllCode().createMenuItem(false));
chunksButton_ = new ToolbarButton(
"Chunks",
StandardIcons.INSTANCE.chunk_menu(),
chunksMenu,
true);
toolbar.addRightWidget(chunksButton_);
return toolbar;
}
private ToolbarButton createLatexFormatButton()
{
ToolbarPopupMenu texMenu = new TextEditingTargetLatexFormatMenu(editor_,
uiPrefs_);
ToolbarButton texButton = new ToolbarButton(
"Format",
fileTypeRegistry_.getIconForFilename("foo.tex"),
texMenu,
false);
return texButton;
}
private Widget createCodeTransformMenuButton()
{
if (codeTransform_ == null)
{
ImageResource icon = ThemeResources.INSTANCE.codeTransform();
ToolbarPopupMenu menu = new ToolbarPopupMenu();
menu.addItem(commands_.codeCompletion().createMenuItem(false));
menu.addSeparator();
menu.addItem(commands_.goToHelp().createMenuItem(false));
menu.addItem(commands_.goToFunctionDefinition().createMenuItem(false));
menu.addSeparator();
menu.addItem(commands_.extractFunction().createMenuItem(false));
menu.addItem(commands_.reindent().createMenuItem(false));
menu.addItem(commands_.reflowComment().createMenuItem(false));
menu.addItem(commands_.commentUncomment().createMenuItem(false));
codeTransform_ = new ToolbarButton("", icon, menu);
codeTransform_.setTitle("Code Tools");
}
return codeTransform_;
}
public void adaptToFileType(TextFileType fileType)
{
editor_.setFileType(fileType);
boolean canCompilePdf = fileType.canCompilePDF();
boolean canExecuteCode = fileType.canExecuteCode();
boolean canExecuteChunks = fileType.canExecuteChunks();
sourceOnSave_.setVisible(fileType.canSourceOnSave());
srcOnSaveLabel_.setVisible(fileType.canSourceOnSave());
if (fileType.isRd())
srcOnSaveLabel_.setText("Preview on Save");
else
srcOnSaveLabel_.setText("Source on Save");
codeTransform_.setVisible(
(canExecuteCode && !fileType.canAuthorContent()) ||
fileType.isCpp());
sourceButton_.setVisible(canExecuteCode && !canExecuteChunks);
sourceMenuButton_.setVisible(canExecuteCode && !canExecuteChunks);
texSeparatorWidget_.setVisible(canCompilePdf);
texToolbarButton_.setVisible(canCompilePdf);
compilePdfButton_.setVisible(canCompilePdf);
chunksButton_.setVisible(canExecuteChunks);
}
public HasValue<Boolean> getSourceOnSave()
{
return sourceOnSave_;
}
public void ensureVisible()
{
fireEvent(new EnsureVisibleEvent());
}
@Override
public void onResize()
{
super.onResize();
// sometimes width is passed in as 0 (not sure why)
int width = getOffsetWidth();
if (width == 0)
return;
texToolbarButton_.setText(width < 520 ? "" : "Format");
runButton_.setText(width < 480 ? "" : "Run");
compilePdfButton_.setText(width < 450 ? "" : "Compile PDF");
if (editor_.getFileType().isRd())
srcOnSaveLabel_.setText(width < 450 ? "Preview" : "Preview on Save");
else
srcOnSaveLabel_.setText(width < 450 ? "Source" : "Source on Save");
sourceButton_.setText(width < 400 ? "" : "Source");
}
public void showWarningBar(String warning)
{
if (warningBar_ == null)
{
warningBar_ = new InfoBar(InfoBar.WARNING, new ClickHandler() {
@Override
public void onClick(ClickEvent event)
{
hideWarningBar();
}
});
}
warningBar_.setText(warning);
panel_.insertNorth(warningBar_, warningBar_.getHeight(), null);
}
public void hideWarningBar()
{
if (warningBar_ != null)
{
panel_.remove(warningBar_);
}
}
public void showFindReplace(boolean defaultForward)
{
findReplace_.showFindReplace(defaultForward);
}
@Override
public void findNext()
{
findReplace_.findNext();
}
@Override
public void findPrevious()
{
findReplace_.findPrevious();
}
@Override
public void replaceAndFind()
{
findReplace_.replaceAndFind();
}
public void onActivate()
{
editor_.onActivate();
}
public void setFontSize(double size)
{
editor_.setFontSize(size);
}
public StatusBar getStatusBar()
{
return statusBar_;
}
@Override
public void debug_dumpContents()
{
String dump = editor_.debug_getDocumentDump();
new EditDialog(dump, false, false, new ProgressOperationWithInput<String>()
{
@Override
public void execute(String input, ProgressIndicator indicator)
{
indicator.onCompleted();
}
}).showModal();
}
@Override
public void debug_importDump()
{
new EditDialog("", false, false, new ProgressOperationWithInput<String>()
{
@Override
public void execute(String input, ProgressIndicator indicator)
{
indicator.onCompleted();
if (input == null)
return;
input = input.replaceAll("[ \\r\\n]+", " ");
String[] chars = input.split(" ");
StringBuilder sb = new StringBuilder();
for (String s : chars)
{
if (s.equals("."))
sb.append('\n');
else
sb.append((char)Integer.parseInt(s));
}
editor_.debug_setSessionValueDirectly(sb.toString());
}
}).showModal();
}
public HandlerRegistration addEnsureVisibleHandler(EnsureVisibleHandler handler)
{
return addHandler(handler, EnsureVisibleEvent.TYPE);
}
public void onVisibilityChanged(boolean visible)
{
editor_.onVisibilityChanged(visible);
}
private final Commands commands_;
private final UIPrefs uiPrefs_;
private final FileTypeRegistry fileTypeRegistry_;
private final DocDisplay editor_;
private CheckBox sourceOnSave_;
private PanelWithToolbars panel_;
private InfoBar warningBar_;
private final TextEditingTargetFindReplace findReplace_;
private ToolbarButton codeTransform_;
private ToolbarButton compilePdfButton_;
private ToolbarButton runButton_;
private ToolbarButton sourceButton_;
private ToolbarButton sourceMenuButton_;
private ToolbarButton chunksButton_;
private Widget texSeparatorWidget_;
private ToolbarButton texToolbarButton_;
private Label srcOnSaveLabel_;
}
|
always show forward and back buttons in source editor
|
src/gwt/src/org/rstudio/studio/client/workbench/views/source/editors/text/TextEditingTargetWidget.java
|
always show forward and back buttons in source editor
|
|
Java
|
agpl-3.0
|
c6fb92fe328daf7169da6d3d471738bfdaf51c20
| 0
|
ow2-proactive/scheduling,mbenguig/scheduling,sgRomaric/scheduling,jrochas/scheduling,sgRomaric/scheduling,youribonnaffe/scheduling,ShatalovYaroslav/scheduling,zeineb/scheduling,jrochas/scheduling,ow2-proactive/scheduling,mbenguig/scheduling,marcocast/scheduling,ShatalovYaroslav/scheduling,lpellegr/scheduling,laurianed/scheduling,zeineb/scheduling,yinan-liu/scheduling,lpellegr/scheduling,zeineb/scheduling,sgRomaric/scheduling,zeineb/scheduling,sandrineBeauche/scheduling,marcocast/scheduling,laurianed/scheduling,yinan-liu/scheduling,lpellegr/scheduling,sandrineBeauche/scheduling,sgRomaric/scheduling,mbenguig/scheduling,ow2-proactive/scheduling,sandrineBeauche/scheduling,fviale/scheduling,marcocast/scheduling,tobwiens/scheduling,tobwiens/scheduling,yinan-liu/scheduling,paraita/scheduling,tobwiens/scheduling,mbenguig/scheduling,sandrineBeauche/scheduling,jrochas/scheduling,paraita/scheduling,ShatalovYaroslav/scheduling,sgRomaric/scheduling,paraita/scheduling,ShatalovYaroslav/scheduling,ShatalovYaroslav/scheduling,paraita/scheduling,lpellegr/scheduling,zeineb/scheduling,sandrineBeauche/scheduling,lpellegr/scheduling,paraita/scheduling,fviale/scheduling,youribonnaffe/scheduling,lpellegr/scheduling,youribonnaffe/scheduling,fviale/scheduling,marcocast/scheduling,marcocast/scheduling,mbenguig/scheduling,sandrineBeauche/scheduling,paraita/scheduling,ow2-proactive/scheduling,jrochas/scheduling,ShatalovYaroslav/scheduling,lpellegr/scheduling,tobwiens/scheduling,tobwiens/scheduling,zeineb/scheduling,laurianed/scheduling,jrochas/scheduling,laurianed/scheduling,yinan-liu/scheduling,laurianed/scheduling,marcocast/scheduling,ow2-proactive/scheduling,ow2-proactive/scheduling,fviale/scheduling,sgRomaric/scheduling,fviale/scheduling,youribonnaffe/scheduling,laurianed/scheduling,sandrineBeauche/scheduling,jrochas/scheduling,zeineb/scheduling,sgRomaric/scheduling,jrochas/scheduling,youribonnaffe/scheduling,yinan-liu/scheduling,ow2-proactive/scheduling,tobwiens/scheduling,paraita/scheduling,laurianed/scheduling,mbenguig/scheduling,yinan-liu/scheduling,yinan-liu/scheduling,mbenguig/scheduling,ShatalovYaroslav/scheduling,fviale/scheduling,fviale/scheduling,tobwiens/scheduling,youribonnaffe/scheduling,youribonnaffe/scheduling,marcocast/scheduling
|
/*
* ################################################################
*
* ProActive Parallel Suite(TM): The Java(TM) library for
* Parallel, Distributed, Multi-Core Computing for
* Enterprise Grids & Clouds
*
* Copyright (C) 1997-2010 INRIA/University of
* Nice-Sophia Antipolis/ActiveEon
* Contact: proactive@ow2.org or contact@activeeon.com
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; version 3 of
* the License.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
* USA
*
* If needed, contact us to obtain a release under GPL Version 2
* or a different license than the GPL.
*
* Initial developer(s): The ProActive Team
* http://proactive.inria.fr/team_members.htm
* Contributor(s):
*
* ################################################################
* $$PROACTIVE_INITIAL_DEV$$
*/
package org.ow2.proactive.scheduler.job;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.Vector;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.log4j.Logger;
import org.objectweb.proactive.core.util.log.ProActiveLogger;
import org.ow2.proactive.scheduler.common.job.JobDescriptor;
import org.ow2.proactive.scheduler.common.job.JobId;
import org.ow2.proactive.scheduler.common.job.JobPriority;
import org.ow2.proactive.scheduler.common.job.JobType;
import org.ow2.proactive.scheduler.common.task.EligibleTaskDescriptor;
import org.ow2.proactive.scheduler.common.task.TaskDescriptor;
import org.ow2.proactive.scheduler.common.task.TaskId;
import org.ow2.proactive.scheduler.common.task.TaskStatus;
import org.ow2.proactive.scheduler.task.EligibleTaskDescriptorImpl;
import org.ow2.proactive.scheduler.task.internal.InternalTask;
import org.ow2.proactive.scheduler.util.SchedulerDevLoggers;
/**
* This class represents a job for the policy.
* The internal scheduler job is not sent to the policy.
 * Only a restricted set of properties of each job is sent to the policy.
*
* @author The ProActive Team
* @since ProActive Scheduling 0.9.1
*/
public class JobDescriptorImpl implements JobDescriptor {
public static final Logger logger_dev = ProActiveLogger.getLogger(SchedulerDevLoggers.CORE);
/** Job id */
private JobId id;
/** Job priority */
private JobPriority priority;
/** Job type */
private JobType type;
/** Total number of tasks. */
private int numberOfTasks;
/** Project name for this job */
protected String projectName = "";
/** Job user information */
private Map<String, String> genericInformations;
/** List that knows which task has children and which have not */
private Set<TaskId> hasChildren = new HashSet<TaskId>();
/** Job tasks that are eligible to be scheduled */
private Map<TaskId, EligibleTaskDescriptor> eligibleTasks = new ConcurrentHashMap<TaskId, EligibleTaskDescriptor>();
/** Job running tasks */
private Map<TaskId, TaskDescriptor> runningTasks = new ConcurrentHashMap<TaskId, TaskDescriptor>();
/** Job paused tasks */
private Map<TaskId, TaskDescriptor> pausedTasks = new HashMap<TaskId, TaskDescriptor>();
/**
* Create a new instance of job descriptor using an internal job.
 * It simply maps some fields between the two types of job before handing
 * the result to the policy.
 * This ensures that the policy cannot perform harmful operations on the real internal job.
*
* @param job the internal job to be lighted.
*/
public JobDescriptorImpl(InternalJob job) {
logger_dev.debug("job = " + job.getId());
id = job.getId();
priority = job.getPriority();
type = job.getType();
numberOfTasks = job.getTasks().size();
genericInformations = job.getGenericInformations();
projectName = job.getProjectName();
if (type == JobType.TASKSFLOW) {
//build dependence tree
makeTree(job);
} else {
//every task is eligible
for (InternalTask td : job.getITasks()) {
if (td.getStatus() == TaskStatus.SUBMITTED) {
eligibleTasks.put(td.getId(), new EligibleTaskDescriptorImpl(td));
}
}
}
}
/**
 * Build a dependency tree of the job's tasks according to the dependency list
 * stored in each taskDescriptor.
* This list represents the ordered TaskDescriptor list of its parent tasks.
*/
private void makeTree(InternalJob job) {
logger_dev.debug("job = " + job.getId());
Map<InternalTask, TaskDescriptor> mem = new HashMap<InternalTask, TaskDescriptor>();
//create task descriptor list
for (InternalTask td : job.getITasks()) {
//if this task is a first task, put it in eligible tasks list
EligibleTaskDescriptor lt = new EligibleTaskDescriptorImpl(td);
if (!td.hasDependences()) {
eligibleTasks.put(td.getId(), lt);
}
mem.put(td, lt);
}
//now for each taskDescriptor, set the parents and children list
for (InternalTask td : job.getITasks()) {
if (td.getDependences() != null) {
TaskDescriptor taskDescriptor = mem.get(td);
for (InternalTask depends : td.getIDependences()) {
((EligibleTaskDescriptorImpl) taskDescriptor).addParent(mem.get(depends));
}
for (TaskDescriptor lt : taskDescriptor.getParents()) {
((EligibleTaskDescriptorImpl) lt).addChild(taskDescriptor);
hasChildren.add(lt.getId());
}
}
}
}
/**
* Return true if the task represented by the given taskId has children, false if not.
*
* @param taskId the id representing the real task.
* @return true if the task represented by the given taskId has children, false if not.
*/
public boolean hasChildren(TaskId taskId) {
return hasChildren.contains(taskId);
}
/**
* Delete this task from eligible task view and add it to running view.
* Visibility is package because user cannot use this method.
*
* @param taskId the task that has just been started.
*/
public void start(TaskId taskId) {
runningTasks.put(taskId, eligibleTasks.remove(taskId));
}
/**
* Delete this task from running task view and add it to eligible view.
* Visibility is package because user cannot use this method.
*
* @param taskId the task that has just been started.
*/
public void reStart(TaskId taskId) {
eligibleTasks.put(taskId, (EligibleTaskDescriptor) runningTasks.remove(taskId));
}
/**
* Update the eligible list of task and dependencies if necessary.
* This function considered that the taskId is in eligible task list.
* Visibility is package because user cannot use this method.
*
* @param taskId the task to remove from running task.
*/
public void terminate(TaskId taskId) {
logger_dev.debug("task = " + taskId);
if (type == JobType.TASKSFLOW) {
TaskDescriptor lt = runningTasks.get(taskId);
if (lt != null) {
for (TaskDescriptor task : lt.getChildren()) {
((EligibleTaskDescriptorImpl) task).setCount(((EligibleTaskDescriptorImpl) task)
.getCount() - 1);
if (((EligibleTaskDescriptorImpl) task).getCount() == 0) {
eligibleTasks.put(task.getId(), (EligibleTaskDescriptor) task);
}
}
for (TaskDescriptor task : lt.getParents()) {
((EligibleTaskDescriptorImpl) task).setChildrenCount(task.getChildrenCount() - 1);
}
}
}
runningTasks.remove(taskId);
}
/**
 * Fail this job descriptor by removing every task from the eligible and running lists.
 * This method assumes that the taskIds are in the eligible tasks list.
 * Visibility is package-private so that users cannot call this method.
*/
public void failed() {
eligibleTasks.clear();
runningTasks.clear();
}
/**
* Update the list of eligible tasks according to the status of each task.
 * This method is called only when a user pauses a job.
*
* @param taskStatus the taskId with their current status.
*/
public void update(Map<TaskId, TaskStatus> taskStatus) {
logger_dev.debug(" ");
for (Entry<TaskId, TaskStatus> tid : taskStatus.entrySet()) {
if (tid.getValue() == TaskStatus.PAUSED) {
TaskDescriptor lt = eligibleTasks.get(tid.getKey());
if (lt != null) {
pausedTasks.put(tid.getKey(), eligibleTasks.remove(tid.getKey()));
}
} else if ((tid.getValue() == TaskStatus.PENDING) || (tid.getValue() == TaskStatus.SUBMITTED)) {
EligibleTaskDescriptor lt = (EligibleTaskDescriptor) pausedTasks.get(tid.getKey());
if (lt != null) {
eligibleTasks.put(tid.getKey(), lt);
pausedTasks.remove(tid.getKey());
}
}
}
}
/**
* Get a task descriptor that is in the running task.
*
* @param id the id of the task descriptor to retrieve.
* @return the task descriptor associated to this id, or null if not running.
*/
public TaskDescriptor GetRunningTaskDescriptor(TaskId id) {
return runningTasks.get(id);
}
/**
* Set the priority of this job descriptor.
*
* @param priority the new priority.
*/
public void setPriority(JobPriority priority) {
this.priority = priority;
}
/**
* To get the id
*
* @return the id
*/
public JobId getId() {
return id;
}
/**
* To get the priority
*
* @return the priority
*/
public JobPriority getPriority() {
return priority;
}
/**
* To get the tasks.
*
* @return the tasks.
*/
public Collection<EligibleTaskDescriptor> getEligibleTasks() {
return new Vector<EligibleTaskDescriptor>(eligibleTasks.values());
}
/**
* To get the type
*
* @return the type
*/
public JobType getType() {
return type;
}
/**
* Returns the number Of Tasks.
*
* @return the number Of Tasks.
*/
public int getNumberOfTasks() {
return numberOfTasks;
}
/**
 * Return the generic information as a Map.
 *
 * @return the generic information as a Map.
*/
public Map<String, String> getGenericInformations() {
return genericInformations;
}
/**
* @see java.lang.Comparable#compareTo(java.lang.Object)
* @param job the job to be compared.
* @return a negative integer, zero, or a positive integer as this object
* is less than, equal to, or greater than the specified object.
*/
public int compareTo(JobDescriptor job) {
return job.getPriority().compareTo(priority);
}
/**
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
return "JobDescriptor(" + getId() + ")";
}
/**
* Returns the projectName.
* @return the projectName.
*/
public String getProjectName() {
return projectName;
}
}
|
src/scheduler/src/org/ow2/proactive/scheduler/job/JobDescriptorImpl.java
|
/*
* ################################################################
*
* ProActive Parallel Suite(TM): The Java(TM) library for
* Parallel, Distributed, Multi-Core Computing for
* Enterprise Grids & Clouds
*
* Copyright (C) 1997-2010 INRIA/University of
* Nice-Sophia Antipolis/ActiveEon
* Contact: proactive@ow2.org or contact@activeeon.com
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; version 3 of
* the License.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
* USA
*
* If needed, contact us to obtain a release under GPL Version 2
* or a different license than the GPL.
*
* Initial developer(s): The ProActive Team
* http://proactive.inria.fr/team_members.htm
* Contributor(s):
*
* ################################################################
* $$PROACTIVE_INITIAL_DEV$$
*/
package org.ow2.proactive.scheduler.job;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.Vector;
import java.util.Map.Entry;
import org.apache.log4j.Logger;
import org.objectweb.proactive.core.util.log.ProActiveLogger;
import org.ow2.proactive.scheduler.common.job.JobDescriptor;
import org.ow2.proactive.scheduler.common.job.JobId;
import org.ow2.proactive.scheduler.common.job.JobPriority;
import org.ow2.proactive.scheduler.common.job.JobType;
import org.ow2.proactive.scheduler.common.task.EligibleTaskDescriptor;
import org.ow2.proactive.scheduler.common.task.TaskDescriptor;
import org.ow2.proactive.scheduler.common.task.TaskId;
import org.ow2.proactive.scheduler.common.task.TaskStatus;
import org.ow2.proactive.scheduler.task.EligibleTaskDescriptorImpl;
import org.ow2.proactive.scheduler.task.internal.InternalTask;
import org.ow2.proactive.scheduler.util.SchedulerDevLoggers;
/**
* This class represents a job for the policy.
* The internal scheduler job is not sent to the policy.
 * Only a restricted set of properties of each job is sent to the policy.
*
* @author The ProActive Team
* @since ProActive Scheduling 0.9.1
*/
public class JobDescriptorImpl implements JobDescriptor {
public static final Logger logger_dev = ProActiveLogger.getLogger(SchedulerDevLoggers.CORE);
/** Job id */
private JobId id;
/** Job priority */
private JobPriority priority;
/** Job type */
private JobType type;
/** Total number of tasks. */
private int numberOfTasks;
/** Project name for this job */
protected String projectName = "";
/** Job user information */
private Map<String, String> genericInformations;
/** List that knows which task has children and which have not */
private Set<TaskId> hasChildren = new HashSet<TaskId>();
/** Job tasks that are eligible to be scheduled */
private Map<TaskId, EligibleTaskDescriptor> eligibleTasks = new HashMap<TaskId, EligibleTaskDescriptor>();
/** Job running tasks */
private Map<TaskId, TaskDescriptor> runningTasks = new HashMap<TaskId, TaskDescriptor>();
/** Job paused tasks */
private Map<TaskId, TaskDescriptor> pausedTasks = new HashMap<TaskId, TaskDescriptor>();
/**
* Create a new instance of job descriptor using an internal job.
 * It simply maps some fields between the two types of job before handing
 * the result to the policy.
 * This ensures that the policy cannot perform harmful operations on the real internal job.
*
* @param job the internal job to be lighted.
*/
public JobDescriptorImpl(InternalJob job) {
logger_dev.debug("job = " + job.getId());
id = job.getId();
priority = job.getPriority();
type = job.getType();
numberOfTasks = job.getTasks().size();
genericInformations = job.getGenericInformations();
projectName = job.getProjectName();
if (type == JobType.TASKSFLOW) {
//build dependence tree
makeTree(job);
} else {
//every task is eligible
for (InternalTask td : job.getITasks()) {
if (td.getStatus() == TaskStatus.SUBMITTED) {
eligibleTasks.put(td.getId(), new EligibleTaskDescriptorImpl(td));
}
}
}
}
/**
 * Build a dependency tree of the job's tasks according to the dependency list
 * stored in each taskDescriptor.
* This list represents the ordered TaskDescriptor list of its parent tasks.
*/
private void makeTree(InternalJob job) {
logger_dev.debug("job = " + job.getId());
Map<InternalTask, TaskDescriptor> mem = new HashMap<InternalTask, TaskDescriptor>();
//create task descriptor list
for (InternalTask td : job.getITasks()) {
//if this task is a first task, put it in eligible tasks list
EligibleTaskDescriptor lt = new EligibleTaskDescriptorImpl(td);
if (!td.hasDependences()) {
eligibleTasks.put(td.getId(), lt);
}
mem.put(td, lt);
}
//now for each taskDescriptor, set the parents and children list
for (InternalTask td : job.getITasks()) {
if (td.getDependences() != null) {
TaskDescriptor taskDescriptor = mem.get(td);
for (InternalTask depends : td.getIDependences()) {
((EligibleTaskDescriptorImpl) taskDescriptor).addParent(mem.get(depends));
}
for (TaskDescriptor lt : taskDescriptor.getParents()) {
((EligibleTaskDescriptorImpl) lt).addChild(taskDescriptor);
hasChildren.add(lt.getId());
}
}
}
}
/**
* Return true if the task represented by the given taskId has children, false if not.
*
* @param taskId the id representing the real task.
* @return true if the task represented by the given taskId has children, false if not.
*/
public boolean hasChildren(TaskId taskId) {
return hasChildren.contains(taskId);
}
/**
* Delete this task from eligible task view and add it to running view.
* Visibility is package because user cannot use this method.
*
* @param taskId the task that has just been started.
*/
public void start(TaskId taskId) {
runningTasks.put(taskId, eligibleTasks.remove(taskId));
}
/**
* Delete this task from running task view and add it to eligible view.
* Visibility is package because user cannot use this method.
*
* @param taskId the task that has just been started.
*/
public void reStart(TaskId taskId) {
eligibleTasks.put(taskId, (EligibleTaskDescriptor) runningTasks.remove(taskId));
}
/**
* Update the eligible list of task and dependencies if necessary.
* This function considered that the taskId is in eligible task list.
* Visibility is package because user cannot use this method.
*
* @param taskId the task to remove from running task.
*/
public void terminate(TaskId taskId) {
logger_dev.debug("task = " + taskId);
if (type == JobType.TASKSFLOW) {
TaskDescriptor lt = runningTasks.get(taskId);
for (TaskDescriptor task : lt.getChildren()) {
((EligibleTaskDescriptorImpl) task)
.setCount(((EligibleTaskDescriptorImpl) task).getCount() - 1);
if (((EligibleTaskDescriptorImpl) task).getCount() == 0) {
eligibleTasks.put(task.getId(), (EligibleTaskDescriptor) task);
}
}
for (TaskDescriptor task : lt.getParents()) {
((EligibleTaskDescriptorImpl) task).setChildrenCount(task.getChildrenCount() - 1);
}
}
runningTasks.remove(taskId);
}
/**
 * Fail this job descriptor by removing every task from the eligible and running lists.
 * This method assumes that the taskIds are in the eligible tasks list.
 * Visibility is package-private so that users cannot call this method.
*/
public void failed() {
eligibleTasks.clear();
runningTasks.clear();
}
/**
* Update the list of eligible tasks according to the status of each task.
 * This method is called only when a user pauses a job.
*
* @param taskStatus the taskId with their current status.
*/
public void update(Map<TaskId, TaskStatus> taskStatus) {
logger_dev.debug(" ");
for (Entry<TaskId, TaskStatus> tid : taskStatus.entrySet()) {
if (tid.getValue() == TaskStatus.PAUSED) {
TaskDescriptor lt = eligibleTasks.get(tid.getKey());
if (lt != null) {
pausedTasks.put(tid.getKey(), eligibleTasks.remove(tid.getKey()));
}
} else if ((tid.getValue() == TaskStatus.PENDING) || (tid.getValue() == TaskStatus.SUBMITTED)) {
EligibleTaskDescriptor lt = (EligibleTaskDescriptor) pausedTasks.get(tid.getKey());
if (lt != null) {
eligibleTasks.put(tid.getKey(), lt);
pausedTasks.remove(tid.getKey());
}
}
}
}
/**
* Get a task descriptor that is in the running task.
*
* @param id the id of the task descriptor to retrieve.
* @return the task descriptor associated to this id, or null if not running.
*/
public TaskDescriptor GetRunningTaskDescriptor(TaskId id) {
return runningTasks.get(id);
}
/**
* Set the priority of this job descriptor.
*
* @param priority the new priority.
*/
public void setPriority(JobPriority priority) {
this.priority = priority;
}
/**
* To get the id
*
* @return the id
*/
public JobId getId() {
return id;
}
/**
* To get the priority
*
* @return the priority
*/
public JobPriority getPriority() {
return priority;
}
/**
* To get the tasks.
*
* @return the tasks.
*/
public Collection<EligibleTaskDescriptor> getEligibleTasks() {
return new Vector<EligibleTaskDescriptor>(eligibleTasks.values());
}
/**
* To get the type
*
* @return the type
*/
public JobType getType() {
return type;
}
/**
* Returns the number Of Tasks.
*
* @return the number Of Tasks.
*/
public int getNumberOfTasks() {
return numberOfTasks;
}
/**
 * Return the generic information as a Map.
 *
 * @return the generic information as a Map.
*/
public Map<String, String> getGenericInformations() {
return genericInformations;
}
/**
* @see java.lang.Comparable#compareTo(java.lang.Object)
* @param job the job to be compared.
* @return a negative integer, zero, or a positive integer as this object
* is less than, equal to, or greater than the specified object.
*/
public int compareTo(JobDescriptor job) {
return job.getPriority().compareTo(priority);
}
/**
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
return "JobDescriptor(" + getId() + ")";
}
/**
* Returns the projectName.
* @return the projectName.
*/
public String getProjectName() {
return projectName;
}
}
|
Fix SCHEDULING-596:
Use ConcurrentHashMap instead of a standard map to avoid inconsistent state inside each map
git-svn-id: 27916816d6cfa57849e9a885196bf7392b80e1ac@15746 28e8926c-6b08-0410-baaa-805c5e19b8d6
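A minimal sketch of the behaviour this fix relies on (the class name ConcurrentMapSketch and the toy data are invented for illustration, not part of the scheduler sources): iterating a plain java.util.HashMap while another thread mutates it can throw ConcurrentModificationException or expose a corrupted table, whereas ConcurrentHashMap supports concurrent reads and writes with weakly consistent iteration.
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
public class ConcurrentMapSketch {
    public static void main(String[] args) throws InterruptedException {
        // With a plain java.util.HashMap this pattern can throw
        // ConcurrentModificationException or leave the table inconsistent;
        // ConcurrentHashMap tolerates concurrent mutation during iteration.
        final Map<Integer, String> tasks = new ConcurrentHashMap<Integer, String>();
        for (int i = 0; i < 1000; i++) {
            tasks.put(i, "task-" + i);
        }
        Thread writer = new Thread(new Runnable() {
            public void run() {
                // concurrent removal, as a call like terminate(taskId) would do
                for (int i = 0; i < 1000; i++) {
                    tasks.remove(i);
                }
            }
        });
        writer.start();
        int seen = 0;
        // weakly consistent iteration: never fails, and may or may not
        // reflect removals that happen while iterating
        for (String t : tasks.values()) {
            seen++;
        }
        writer.join();
        System.out.println("iterated over " + seen + " entries without failure");
    }
}
Because ConcurrentHashMap never locks the whole table for reads, the eligible and running task maps remain safely readable while other threads add or remove tasks.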
|
src/scheduler/src/org/ow2/proactive/scheduler/job/JobDescriptorImpl.java
|
Fix SCHEDULING-596: Use ConcurrentHashMap instead of a standard map to avoid inconsistent state inside each map
|
|
Java
|
agpl-3.0
|
2fe887190ee9a75c6ae714ebf3ed3e80236e670f
| 0
|
imCodePartnerAB/imcms,imCodePartnerAB/imcms,imCodePartnerAB/imcms
|
import java.io.*;
import java.util.*;
import javax.servlet.*;
import javax.servlet.http.*;
import imcode.external.diverse.* ;
import imcode.server.* ;
import imcode.util.* ;
public class AdminQuestionsFile extends Administrator {
private final static String CVS_REV = "$Revision$" ;
private final static String CVS_DATE = "$Date$" ;
String HTML_TEMPLATE ;
/**
The GET method creates the html page when the request has been
redirected here from somewhere else.
**/
public void doGet(HttpServletRequest req, HttpServletResponse res)
throws ServletException, IOException
{
this.doPost(req,res);
} // End doGet
/**
doPost
*/
public void doPost(HttpServletRequest req, HttpServletResponse res)
throws ServletException, IOException
{
// Lets get the server this request was aimed for
String host = req.getHeader("Host") ;
String imcServer = Utility.getDomainPref("adminserver",host) ;
HttpSession session = req.getSession();
res.setContentType("text/html");
PrintWriter out = res.getWriter();
String whichFile = (String)session.getAttribute("file") ;
if (req.getParameter("back")!=null)
{
String url = MetaInfo.getServletPath(req) + "AdminQuestions";
res.sendRedirect(url) ;
return;
}
String date1 = " ";
String date2 = " ";
String text = " ";
if (req.getParameter("save")!=null)
{
//out.println("okbuttonpushed <br> ");
//save the text to the file
//String fortune_path = Utility.getDomainPref("FortunePath",host);
File fortune_path = Utility.getDomainPrefPath("FortunePath",host);
String readFile = (String)session.getAttribute("file");
File file = new File(fortune_path,readFile + "enkat.txt");
BufferedWriter fileW = new BufferedWriter( new FileWriter(file) );
//out.println("file: " + file + "<br>");
//fetch the current rows
Map lines = (Map)session.getAttribute("lines");
date1 = (req.getParameter("date1")).trim();
date2 = (req.getParameter("date2")).trim();
text = (req.getParameter("text")).trim();
if( checkDate(date1) && checkDate(date2) && text.length()>1 )
{
String fullLine = date1 + " " + date2 + " " + text;
//find the highest row number
int last = 0;//( (Integer)lines.lastKey() ).intValue();
Set keys = lines.keySet();
Iterator rowI = keys.iterator();
while(rowI.hasNext())
{
int temp = (((Integer)rowI.next()).intValue());
last = temp>last?temp:last;
}
lines.put(new Integer(last+1),fullLine);
}
boolean dates = true;//this.checkDates(lines,res);
if (!dates)
{
text = "FEL: Enkäternas datum måste vara unika! ";
date1 = " ";
date2 = " ";
String options = "<option value=\"No_Choice\" selected>-- Välj Rad --</option>";
Set keyRows = lines.keySet();
Iterator rowIt = keyRows.iterator();
while(rowIt.hasNext())
{
Integer row = (Integer)rowIt.next();
String fullLine = (String)lines.get(row);
int stop = fullLine.length();
if (fullLine.indexOf("<BR>") != -1 ){ stop = fullLine.indexOf("<BR>");}
if (fullLine.indexOf("<br>") != -1 ){ stop = fullLine.indexOf("<br>");}
options = options + "<option value=\"" + row + "\" > " + fullLine.substring(0,stop) + "</option>";
// out.println(" lines: " + lines.get(row) + "<br>" );
//options = options + "<option value=\"" + row + "\" > " + lines.get(row) + "</option>";
}
Vector values = new Vector();
values.add("#date1#");
values.add(date1);
values.add("#date2#");
values.add(date2);
values.add("#text#");
values.add(text);
values.add("#file#");
values.add(whichFile);
values.add("#options#");
values.add(options);
String parsed = IMCServiceRMI.parseExternalDoc(imcServer, values, "AdminQuestionsFile.htm" , "se", "admin");
out.print(parsed);
return;
}
else
{
Set keyRows = lines.keySet();
Iterator rowIt = keyRows.iterator();
while(rowIt.hasNext())
{
Integer row = (Integer)rowIt.next();
// out.println(" lines2: " + lines.get(row) + "<br>" );
//FIX so the line is ok with #
String fullLine = ((String)lines.get(row)).trim();
date1= fullLine.substring(0,6);
date2 = fullLine.substring(7,13);
text = HTMLConv.toHTML(fullLine.substring(14));
//out.println(date1 + "#" + date2 + "#" + text + "#" + "<br>");
fileW.write(date1 + "#" + date2 + "#" + text + "#" );
fileW.newLine();
}
fileW.flush();
fileW.close();
//back to
String url = MetaInfo.getServletPath(req) + "AdminQuestions" ;
res.sendRedirect(url) ;
return;
}
}
else
{
String options = "<option value=\"No_Choice\" selected>-- Välj Rad --</option>";
date1 = " ";
date2 = " ";
text = " ";
if ((req.getParameter("add")).equals("add"))
{
//fetch the current rows
Map lines = (Map)session.getAttribute("lines");
//fetch parameters
date1 = (req.getParameter("date1")).trim();
date2 = (req.getParameter("date2")).trim();
text = (req.getParameter("text")).trim();
boolean wrong = false;
if( !checkDate(date1) )
{
date1="Fel datum!";
wrong = true;
}
if( !checkDate(date2) )
{
date2="Fel datum!";
wrong = true;
}
if( text.length()<1 )
{
text="Fel: Du har inte angett texten!";
wrong = true;
}
if( !wrong )
{
//assemble the new row
String fullLine = date1 + " " + date2 + " " + text;
//find the highest row number
int last = 0;//( (Integer)lines.lastKey() ).intValue();
Set keys = lines.keySet();
Iterator rowI = keys.iterator();
while(rowI.hasNext())
{
int temp = (((Integer)rowI.next()).intValue());
last = temp>last?temp:last;
}
//add the new row
lines.put(new Integer(last+1),fullLine);
//save the new rows
session.setAttribute("lines",lines);
date1 = " ";
date2 = " ";
text = " ";
}
Set keyRows = lines.keySet();
Iterator rowIt = keyRows.iterator();
while(rowIt.hasNext())
{
Integer row = (Integer)rowIt.next();
// out.println(" lines2: " + lines.get(row) + "<br>" );
String fullLine = (String)lines.get(row);
int stop = fullLine.length();
if (fullLine.indexOf("<BR>") != -1 ){ stop = fullLine.indexOf("<BR>");}
if (fullLine.indexOf("<br>") != -1 ){ stop = fullLine.indexOf("<br>");}
options = options + "<option value=\"" + row + "\" > " + fullLine.substring(0,stop) + "</option>";
// options = options + "<option value=\"" + row + "\" > " + lines.get(row) + "</option>";
}
}
if (req.getParameter("edit")!=null)
{
//fetch the selected row
String row = req.getParameter("AdminFile") ;
//out.println("row: " + row);
Map lines = (Map)session.getAttribute("lines");
//add a row that may already have been moved up
if(req.getParameter("date1")!=null && req.getParameter("date2")!=null && req.getParameter("text")!=null )
{ //add a row that may already have been moved up
//fetch parameters
date1 = (req.getParameter("date1")).trim();
date2 = (req.getParameter("date2")).trim();
text = (req.getParameter("text")).trim();
boolean wrong = false;
if( !checkDate(date1) || !checkDate(date2) || text.length()<1 ){wrong = true;}
if( !wrong )
{
//assemble the new row
String fullLine = date1 + " " + date2 + " " + text;
//find the highest row number
int last = 0;//( (Integer)lines.lastKey() ).intValue();
Set keys = lines.keySet();
Iterator rowI = keys.iterator();
while(rowI.hasNext())
{
int temp = (((Integer)rowI.next()).intValue());
last = temp>last?temp:last;
}
//add the new row
lines.put(new Integer(last+1),fullLine);
}
}
if (!row.equals("No_Choice"))
{
Integer theRow = Integer.decode(row);
// out.println("row: " + row + "therow: " + theRow + "<br>");
String fullLine = ((String)lines.get(theRow)).trim();
date1 = fullLine.substring(0,6);
date2 = fullLine.substring(7,13);
text = fullLine.substring(14);
lines.remove(theRow);
}
Set keyRows = lines.keySet();
Iterator rowIt = keyRows.iterator();
while(rowIt.hasNext())
{
//out.println(" lines2: " + lines.get(rowIt.next()) + "<br>" );
Integer rad = (Integer)rowIt.next();
String fullLine = (String)lines.get(rad);
int stop = fullLine.length();
if (fullLine.indexOf("<BR>") != -1 ){ stop = fullLine.indexOf("<BR>");}
if (fullLine.indexOf("<br>") != -1 ){ stop = fullLine.indexOf("<br>");}
options = options + "<option value=\"" + rad + "\" > " + fullLine.substring(0,stop) + "</option>";
}
}
if (req.getParameter("remove")!=null)
{
//fetch the current rows
Map lines = (Map)session.getAttribute("lines");
//out.println("removebuttonpushed <br> ");
if(req.getParameter("date1")!=null && req.getParameter("date2")!=null && req.getParameter("text")!=null )
{ //add a row that may already have been moved up
//fetch parameters
date1 = (req.getParameter("date1")).trim();
date2 = (req.getParameter("date2")).trim();
text = (req.getParameter("text")).trim();
}
//fetch the rows to be removed
String rows[] = req.getParameterValues("AdminFile") ;
//delete the ones marked for removal
for(int i=0;i<rows.length;i++)
{
// out.println("rows: " + rows[i] + "<br>");
if (!rows[i].equals("No_Choice"))
{
lines.remove(new Integer(rows[i]));
}
}
//save the new rows
session.setAttribute("lines",lines);
Set keyRows = lines.keySet();
Iterator rowIt = keyRows.iterator();
while(rowIt.hasNext())
{
Integer row = (Integer)rowIt.next();
String fullLine = (String)lines.get(row);
int stop = fullLine.length();
if (fullLine.indexOf("<BR>") != -1 ){ stop = fullLine.indexOf("<BR>");}
if (fullLine.indexOf("<br>") != -1 ){ stop = fullLine.indexOf("<br>");}
options = options + "<option value=\"" + row + "\" > " + fullLine.substring(0,stop) + "</option>";
// out.println(" lines: " + lines.get(row) + "<br>" );
//options = options + "<option value=\"" + row + "\" > " + lines.get(row) + "</option>";
}
}
//Add info for parsing to a Vector and parse it with a template to a htmlString that is printed
Vector values = new Vector();
values.add("#date1#");
values.add(date1);
values.add("#date2#");
values.add(date2);
values.add("#text#");
values.add(text);
values.add("#file#");
values.add(whichFile);
values.add("#options#");
values.add(options);
String parsed = IMCServiceRMI.parseExternalDoc(imcServer, values, "AdminQuestionsFile.htm" , "se", "admin");
out.print(parsed);
return;
}
}
public boolean checkDate(String date)
{
boolean check = true;
// the date must consist of 6 characters
if( date.length()!=6 ){check = false;}
//all characters must be digits
for(int i=0;i<date.length();i++)
{
char tec = date.charAt(i);
if (!Character.isDigit(tec))
{check = false;}
}
return check;
}
public boolean checkDates(Map lines,HttpServletResponse res)
throws ServletException, IOException
{
boolean check = true;
/*res.setContentType("text/html");
PrintWriter out = res.getWriter();
String[] dates =new String[lines.size()*2];
out.println("lines.size()*2: " + lines.size()*2);
//hmta alla datum
Set keyRows = lines.keySet();
Iterator dateIt = keyRows.iterator();
while (dateIt.hasNext())
{
Integer row = (Integer)dateIt.next();
out.println("row: " + row + "<br>");
String fullLine = ((String)lines.get(row)).trim();
//out.println("row.intValue()*2: " + (row.intValue()*2));
//out.println("fullLine.substring(0,6): " + fullLine.substring(0,6));
dates[row.intValue()*2] = fullLine.substring(0,6);
//out.println("row.intValue()*2+1: " + (row.intValue()*2+1));
//out.println("fullLine.substring(7,13): " + fullLine.substring(7,13) +"<br>");
dates[row.intValue()*2+1] = fullLine.substring(7,13);
}
Iterator rowIt = keyRows.iterator();
while (rowIt.hasNext())
{
//hmta frsta datumparet
Integer row = (Integer)rowIt.next();
String fullLine = ((String)lines.get(row)).trim();
String date1 = fullLine.substring(0,6);
String date2 = fullLine.substring(7,13);
//kolla att inga andra datum ligger i det intervallet
for(int i = 0;i<=dates.length;i++)
{
if(( Integer.parseInt(dates[i])>=Integer.parseInt(date1)) && (Integer.parseInt(dates[i])<=Integer.parseInt(date2) ))
{
//om det gr det , returnera false
check = false;
return check;
}
}
}
*/
return check;
}
/**
Log function, will work for both servletexec and Apache
**/
public void log( String str) {
super.log(str) ;
System.out.println("AdminQuestionsFile: " + str ) ;
}
} // End of class
|
servlets/AdminQuestionsFile.java
|
import java.io.*;
import java.util.*;
import javax.servlet.*;
import javax.servlet.http.*;
import imcode.external.diverse.* ;
import imcode.server.* ;
import imcode.util.* ;
public class AdminQuestionsFile extends Administrator {
private final static String CVS_REV = "$Revision$" ;
private final static String CVS_DATE = "$Date$" ;
String HTML_TEMPLATE ;
/**
The GET method creates the html page when the request has been
redirected here from somewhere else.
**/
public void doGet(HttpServletRequest req, HttpServletResponse res)
throws ServletException, IOException
{
this.doPost(req,res);
} // End doGet
/**
doPost
*/
public void doPost(HttpServletRequest req, HttpServletResponse res)
throws ServletException, IOException
{
// Lets get the server this request was aimed for
String host = req.getHeader("Host") ;
String imcServer = Utility.getDomainPref("adminserver",host) ;
HttpSession session = req.getSession();
res.setContentType("text/html");
PrintWriter out = res.getWriter();
String whichFile = (String)session.getAttribute("file") ;
if (req.getParameter("back")!=null)
{
String url = MetaInfo.getServletPath(req) + "AdminQuestions";
res.sendRedirect(url) ;
return;
}
if (req.getParameter("save")!=null)
{
//out.println("okbuttonpushed <br> ");
//save the text to the file
//String fortune_path = Utility.getDomainPref("FortunePath",host);
File fortune_path = Utility.getDomainPrefPath("FortunePath",host);
String readFile = (String)session.getAttribute("file");
File file = new File(fortune_path,readFile + "enkat.txt");
BufferedWriter fileW = new BufferedWriter( new FileWriter(file) );
//out.println("file: " + file + "<br>");
//fetch the current rows
Map lines = (Map)session.getAttribute("lines");
boolean dates = true;//this.checkDates(lines,res);
if (!dates)
{
String text = "FEL: Enkäternas datum måste vara unika! ";
String date1 = " ";
String date2 = " ";
String options = "<option value=\"No_Choice\" selected>-- Välj Rad --</option>";
Set keyRows = lines.keySet();
Iterator rowIt = keyRows.iterator();
while(rowIt.hasNext())
{
Integer row = (Integer)rowIt.next();
String fullLine = (String)lines.get(row);
int stop = fullLine.length();
if (fullLine.indexOf("<BR>") != -1 ){ stop = fullLine.indexOf("<BR>");}
if (fullLine.indexOf("<br>") != -1 ){ stop = fullLine.indexOf("<br>");}
options = options + "<option value=\"" + row + "\" > " + fullLine.substring(0,stop) + "</option>";
// out.println(" lines: " + lines.get(row) + "<br>" );
//options = options + "<option value=\"" + row + "\" > " + lines.get(row) + "</option>";
}
Vector values = new Vector();
values.add("#date1#");
values.add(date1);
values.add("#date2#");
values.add(date2);
values.add("#text#");
values.add(text);
values.add("#file#");
values.add(whichFile);
values.add("#options#");
values.add(options);
String parsed = IMCServiceRMI.parseExternalDoc(imcServer, values, "AdminQuestionsFile.htm" , "se", "admin");
out.print(parsed);
return;
}
else
{
Set keyRows = lines.keySet();
Iterator rowIt = keyRows.iterator();
while(rowIt.hasNext())
{
Integer row = (Integer)rowIt.next();
// out.println(" lines2: " + lines.get(row) + "<br>" );
//FIX so the line is ok with #
String fullLine = ((String)lines.get(row)).trim();
String date1 = fullLine.substring(0,6);
String date2 = fullLine.substring(7,13);
String text = HTMLConv.toHTML(fullLine.substring(14));
//out.println(date1 + "#" + date2 + "#" + text + "#" + "<br>");
fileW.write(date1 + "#" + date2 + "#" + text + "#" );
fileW.newLine();
}
fileW.flush();
fileW.close();
//back to
String url = MetaInfo.getServletPath(req) + "AdminQuestions" ;
res.sendRedirect(url) ;
return;
}
}
else
{
String options = "<option value=\"No_Choice\" selected>-- Välj Rad --</option>";
String date1 = " ";
String date2 = " ";
String text = " ";
if ((req.getParameter("add")).equals("add"))
{
//fetch the current rows
Map lines = (Map)session.getAttribute("lines");
//fetch parameters
date1 = (req.getParameter("date1")).trim();
date2 = (req.getParameter("date2")).trim();
text = (req.getParameter("text")).trim();
boolean wrong = false;
if( !checkDate(date1) )
{
date1="Fel datum!";
wrong = true;
}
if( !checkDate(date2) )
{
date2="Fel datum!";
wrong = true;
}
if( text.length()<1 )
{
text="Fel: Du har inte angett texten!";
wrong = true;
}
if( !wrong )
{
//assemble the new row
String fullLine = date1 + " " + date2 + " " + text;
//find the highest row number
int last = 0;//( (Integer)lines.lastKey() ).intValue();
Set keys = lines.keySet();
Iterator rowI = keys.iterator();
while(rowI.hasNext())
{
int temp = (((Integer)rowI.next()).intValue());
last = temp>last?temp:last;
}
//add the new row
lines.put(new Integer(last+1),fullLine);
//save the new rows
session.setAttribute("lines",lines);
date1 = " ";
date2 = " ";
text = " ";
}
Set keyRows = lines.keySet();
Iterator rowIt = keyRows.iterator();
while(rowIt.hasNext())
{
Integer row = (Integer)rowIt.next();
// out.println(" lines2: " + lines.get(row) + "<br>" );
String fullLine = (String)lines.get(row);
int stop = fullLine.length();
if (fullLine.indexOf("<BR>") != -1 ){ stop = fullLine.indexOf("<BR>");}
if (fullLine.indexOf("<br>") != -1 ){ stop = fullLine.indexOf("<br>");}
options = options + "<option value=\"" + row + "\" > " + fullLine.substring(0,stop) + "</option>";
// options = options + "<option value=\"" + row + "\" > " + lines.get(row) + "</option>";
}
}
if (req.getParameter("edit")!=null)
{
//fetch the selected row
String row = req.getParameter("AdminFile") ;
//out.println("row: " + row);
Map lines = (Map)session.getAttribute("lines");
//add a row that may already have been moved up
if(req.getParameter("date1")!=null && req.getParameter("date2")!=null && req.getParameter("text")!=null )
{ //add a row that may already have been moved up
//fetch parameters
date1 = (req.getParameter("date1")).trim();
date2 = (req.getParameter("date2")).trim();
text = (req.getParameter("text")).trim();
boolean wrong = false;
if( !checkDate(date1) || !checkDate(date2) || text.length()<1 ){wrong = true;}
if( !wrong )
{
//assemble the new row
String fullLine = date1 + " " + date2 + " " + text;
//find the highest row number
int last = 0;//( (Integer)lines.lastKey() ).intValue();
Set keys = lines.keySet();
Iterator rowI = keys.iterator();
while(rowI.hasNext())
{
int temp = (((Integer)rowI.next()).intValue());
last = temp>last?temp:last;
}
//add the new row
lines.put(new Integer(last+1),fullLine);
}
}
if (!row.equals("No_Choice"))
{
Integer theRow = Integer.decode(row);
// out.println("row: " + row + "therow: " + theRow + "<br>");
String fullLine = ((String)lines.get(theRow)).trim();
date1 = fullLine.substring(0,6);
date2 = fullLine.substring(7,13);
text = fullLine.substring(14);
lines.remove(theRow);
}
Set keyRows = lines.keySet();
Iterator rowIt = keyRows.iterator();
while(rowIt.hasNext())
{
//out.println(" lines2: " + lines.get(rowIt.next()) + "<br>" );
Integer rad = (Integer)rowIt.next();
String fullLine = (String)lines.get(rad);
int stop = fullLine.length();
if (fullLine.indexOf("<BR>") != -1 ){ stop = fullLine.indexOf("<BR>");}
if (fullLine.indexOf("<br>") != -1 ){ stop = fullLine.indexOf("<br>");}
options = options + "<option value=\"" + rad + "\" > " + fullLine.substring(0,stop) + "</option>";
}
}
if (req.getParameter("remove")!=null)
{
//fetch the current rows
Map lines = (Map)session.getAttribute("lines");
//out.println("removebuttonpushed <br> ");
if(req.getParameter("date1")!=null && req.getParameter("date2")!=null && req.getParameter("text")!=null )
{ //add a row that may already have been moved up
//fetch parameters
date1 = (req.getParameter("date1")).trim();
date2 = (req.getParameter("date2")).trim();
text = (req.getParameter("text")).trim();
}
//fetch the rows to be removed
String rows[] = req.getParameterValues("AdminFile") ;
//delete the ones marked for removal
for(int i=0;i<rows.length;i++)
{
// out.println("rows: " + rows[i] + "<br>");
if (!rows[i].equals("No_Choice"))
{
lines.remove(new Integer(rows[i]));
}
}
//save the new rows
session.setAttribute("lines",lines);
Set keyRows = lines.keySet();
Iterator rowIt = keyRows.iterator();
while(rowIt.hasNext())
{
Integer row = (Integer)rowIt.next();
String fullLine = (String)lines.get(row);
int stop = fullLine.length();
if (fullLine.indexOf("<BR>") != -1 ){ stop = fullLine.indexOf("<BR>");}
if (fullLine.indexOf("<br>") != -1 ){ stop = fullLine.indexOf("<br>");}
options = options + "<option value=\"" + row + "\" > " + fullLine.substring(0,stop) + "</option>";
// out.println(" lines: " + lines.get(row) + "<br>" );
//options = options + "<option value=\"" + row + "\" > " + lines.get(row) + "</option>";
}
}
//Add info for parsing to a Vector and parse it with a template to a htmlString that is printed
Vector values = new Vector();
values.add("#date1#");
values.add(date1);
values.add("#date2#");
values.add(date2);
values.add("#text#");
values.add(text);
values.add("#file#");
values.add(whichFile);
values.add("#options#");
values.add(options);
String parsed = IMCServiceRMI.parseExternalDoc(imcServer, values, "AdminQuestionsFile.htm" , "se", "admin");
out.print(parsed);
return;
}
}
public boolean checkDate(String date)
{
boolean check = true;
// the date must consist of 6 characters
if( date.length()!=6 ){check = false;}
//all characters must be digits
for(int i=0;i<date.length();i++)
{
char tec = date.charAt(i);
if (!Character.isDigit(tec))
{check = false;}
}
return check;
}
public boolean checkDates(Map lines,HttpServletResponse res)
throws ServletException, IOException
{
boolean check = true;
/*res.setContentType("text/html");
PrintWriter out = res.getWriter();
String[] dates =new String[lines.size()*2];
out.println("lines.size()*2: " + lines.size()*2);
//hmta alla datum
Set keyRows = lines.keySet();
Iterator dateIt = keyRows.iterator();
while (dateIt.hasNext())
{
Integer row = (Integer)dateIt.next();
out.println("row: " + row + "<br>");
String fullLine = ((String)lines.get(row)).trim();
//out.println("row.intValue()*2: " + (row.intValue()*2));
//out.println("fullLine.substring(0,6): " + fullLine.substring(0,6));
dates[row.intValue()*2] = fullLine.substring(0,6);
//out.println("row.intValue()*2+1: " + (row.intValue()*2+1));
//out.println("fullLine.substring(7,13): " + fullLine.substring(7,13) +"<br>");
dates[row.intValue()*2+1] = fullLine.substring(7,13);
}
Iterator rowIt = keyRows.iterator();
while (rowIt.hasNext())
{
// fetch the first date pair
Integer row = (Integer)rowIt.next();
String fullLine = ((String)lines.get(row)).trim();
String date1 = fullLine.substring(0,6);
String date2 = fullLine.substring(7,13);
// check that no other dates fall within that interval
for(int i = 0;i<=dates.length;i++)
{
if(( Integer.parseInt(dates[i])>=Integer.parseInt(date1)) && (Integer.parseInt(dates[i])<=Integer.parseInt(date2) ))
{
// if any do, return false
check = false;
return check;
}
}
}
*/
return check;
}
/**
Log function, will work for both servletexec and Apache
**/
public void log( String str) {
super.log(str) ;
System.out.println("AdminMQuestionsFile: " + str ) ;
}
} // End of class
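As a side note on the checkDates() stub above: its commented-out body sketches a check that no row's date range overlaps any other row's dates, but as written its loop bound (i <= dates.length) would run past the end of the array. A small standalone sketch of that overlap check, using illustrative names and yyMMdd-style integer dates (not part of the servlet above), could look like this:
// Minimal sketch of the overlap check that checkDates() intends; all names
// here are illustrative and the dates are yyMMdd integers.
public class DateOverlapSketch {
    static boolean hasOverlap(int[][] intervals) {
        for (int i = 0; i < intervals.length; i++) {
            for (int j = 0; j < intervals.length; j++) {
                if (i == j) continue;
                // another row's start or end lies inside interval i
                for (int d : intervals[j]) {
                    if (d >= intervals[i][0] && d <= intervals[i][1]) {
                        return true;
                    }
                }
            }
        }
        return false;
    }
    public static void main(String[] args) {
        int[][] ok = { {200101, 200131}, {200201, 200228} };
        int[][] clash = { {200101, 200131}, {200120, 200228} };
        System.out.println(hasOverlap(ok));    // false
        System.out.println(hasOverlap(clash)); // true
    }
}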
|
bug 432 Added fkn for adding a new question when saving
git-svn-id: b7e9aa1d6cd963481915708f70423d437278b157@1220 bd66a97b-2aff-0310-9095-89ca5cabf5a6
|
servlets/AdminQuestionsFile.java
|
bug 432 Added fkn for adding a new question when saving
|
|
Java
|
lgpl-2.1
|
6cc35274af4121c183b32f412c179516f1ba2bc4
| 0
|
BEAST2-Dev/codonmodels
|
package test.beast.util;
import beast.util.DistributionUtils;
import beast.util.RandomUtils;
import beast.util.Randomizer;
import org.junit.Before;
import org.junit.Test;
import java.util.Arrays;
import static org.junit.Assert.assertArrayEquals;
/**
* {@link Randomizer#randomChoice(double[])} seems faster than
* {@link RandomUtils#binarySearchSampling(double[], double)}
* at about 60 states.
* But the input will be unnormalized probabilities, and
* the former has to take a normalized cpd, so the overall
* speed is slower.
*
* @author Walter Xie
*/
public class BinarySearchSamplingTest {
final int nrOfStates = 60;
final int ite = 100000000; // 100 million
double[] prob;
double[] cf;
@Before
public void setUp() {
// unnormalized probabilities
double[] freq = new double[nrOfStates];
for (int i = 0; i < nrOfStates; i++)
freq[i] = Randomizer.nextDouble(); // may have 0
// normalized probabilities
prob = new double[nrOfStates];
// compute distribution
DistributionUtils.computeDistribution(freq, prob);
System.out.println("\nTrue probability distribution : " + Arrays.toString(prob) + "\n");
// unnormalized cumulative probabilities
cf = new double[nrOfStates];
cf[0] = freq[0];
for (int i = 1; i < freq.length; i++) {
cf[i] = cf[i-1] + freq[i];
}
}
@Test
public void linearSampling() {
//++++++ Linear time sampling ++++++//
int w;
int[] freq1 = new int[nrOfStates];
long start = System.currentTimeMillis();
for (int i = 0; i < ite; i++) {
w = RandomUtils.linearTimeSampling(prob, -1, false);
freq1[w]++;
}
long timeLinear = System.currentTimeMillis() - start;
System.out.println("Linear sampling time : " + timeLinear + " milliseconds.");
System.out.println("Freq : " + Arrays.toString(freq1) + "\n");
double[] prob1 = new double[nrOfStates];
DistributionUtils.computeDistribution(freq1, prob1);
System.out.println("Normalized probability : " + Arrays.toString(prob1) + "\n");
assertArrayEquals(prob, prob1, 1E-4);
}
@Test
public void linearSampling2() {
//++++++ Linear time sampling ++++++//
int w;
int[] freq1 = new int[nrOfStates];
long start = System.currentTimeMillis();
double[] cpd = new double[cf.length];
for (int i = 0; i < ite; i++) {
// count the time to normalise
for (int j = 0; j < cf.length; j++)
cpd[j] = cf[j] / cf[cf.length-1];
w = Randomizer.randomChoice(cpd);
freq1[w]++;
}
long timeLinear = System.currentTimeMillis() - start;
System.out.println("Linear sampling time : " + timeLinear + " milliseconds.");
System.out.println("Freq : " + Arrays.toString(freq1) + "\n");
double[] prob1 = new double[nrOfStates];
DistributionUtils.computeDistribution(freq1, prob1);
System.out.println("Normalized probability : " + Arrays.toString(prob1) + "\n");
assertArrayEquals(prob, prob1, 1E-4);
}
@Test
public void binarySearch() {
int w;
//++++++ Binary search sampling ++++++//
double random;
int[] freq2 = new int[nrOfStates];
long start = System.currentTimeMillis();
for (int i = 0; i < ite; i++) {
random = Randomizer.nextDouble() * cf[cf.length-1];
w = RandomUtils.binarySearchSampling(cf, random);
freq2[w]++;
}
long timeBiSearch = System.currentTimeMillis() - start;
System.out.println("Binary search sampling time : " + timeBiSearch + " milliseconds.");
System.out.println("Freq : " + Arrays.toString(freq2) + "\n");
double[] prob2 = new double[nrOfStates];
DistributionUtils.computeDistribution(freq2, prob2);
System.out.println("Normalized probability : " + Arrays.toString(prob2) + "\n");
assertArrayEquals(prob, prob2, 1E-4);
}
}
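The test above compares linear-time sampling with binary search over an unnormalized cumulative array. As a minimal, self-contained sketch of that binary-search idea (independent of the beast.util helpers; all names below are illustrative), an index can be drawn from unnormalized weights like this:
import java.util.Random;
// Minimal sketch of cumulative-sum + binary-search sampling; the class and
// method names are illustrative and not part of the project's API.
final class CumulativeSamplingSketch {
    private static final Random RNG = new Random();
    // Draw index i with probability weight[i] / sum(weight), O(log n) per draw.
    static int sample(double[] weight) {
        double[] cumulative = new double[weight.length];
        double total = 0.0;
        for (int i = 0; i < weight.length; i++) {
            total += weight[i];
            cumulative[i] = total;
        }
        double u = RNG.nextDouble() * total;      // uniform in [0, total)
        int lo = 0, hi = cumulative.length - 1;
        while (lo < hi) {                         // first index with cumulative[i] > u
            int mid = (lo + hi) >>> 1;
            if (cumulative[mid] > u) {
                hi = mid;
            } else {
                lo = mid + 1;
            }
        }
        return lo;
    }
    public static void main(String[] args) {
        double[] weights = {0.5, 1.5, 3.0};
        int[] counts = new int[weights.length];
        for (int i = 0; i < 100_000; i++) {
            counts[sample(weights)]++;
        }
        System.out.println(java.util.Arrays.toString(counts)); // roughly 10%, 30%, 60%
    }
}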
|
src/test/beast/util/BinarySearchSamplingTest.java
|
package test.beast.util;
import beast.util.DistributionUtils;
import beast.util.RandomUtils;
import beast.util.Randomizer;
import org.junit.Before;
import org.junit.Test;
import java.util.Arrays;
import static org.junit.Assert.assertArrayEquals;
/**
* @author Walter Xie
*/
public class BinarySearchSamplingTest {
final int nrOfStates = 60;
final int ite = 100000000; // 100 million
double[] prob;
double[] cf;
@Before
public void setUp() {
// unnormalized probabilities
double[] freq = new double[nrOfStates];
for (int i = 0; i < nrOfStates; i++)
freq[i] = Randomizer.nextDouble(); // may have 0
// normalized probabilities
prob = new double[nrOfStates];
// compute distribution
DistributionUtils.computeDistribution(freq, prob);
System.out.println("\nTrue probability distribution : " + Arrays.toString(prob) + "\n");
// unnormalized cumulative probabilities
cf = new double[nrOfStates];
cf[0] = freq[0];
for (int i = 1; i < freq.length; i++) {
cf[i] = cf[i-1] + freq[i];
}
}
@Test
public void linearSampling() {
//++++++ Linear time sampling ++++++//
int w;
int[] freq1 = new int[nrOfStates];
long start = System.currentTimeMillis();
for (int i = 0; i < ite; i++) {
w = RandomUtils.linearTimeSampling(prob, -1, false);
freq1[w]++;
}
long timeLinear = System.currentTimeMillis() - start;
System.out.println("Linear sampling time : " + timeLinear + " milliseconds.");
System.out.println("Freq : " + Arrays.toString(freq1) + "\n");
double[] prob1 = new double[nrOfStates];
DistributionUtils.computeDistribution(freq1, prob1);
System.out.println("Normalized probability : " + Arrays.toString(prob1) + "\n");
assertArrayEquals(prob, prob1, 1E-4);
}
@Test
public void linearSampling2() {
//++++++ Linear time sampling ++++++//
int w;
int[] freq1 = new int[nrOfStates];
long start = System.currentTimeMillis();
double[] cpd = new double[cf.length];
for (int i = 0; i < ite; i++) {
// count the time to normalise
for (int j = 0; j < cf.length; j++)
cpd[j] = cf[j] / cf[cf.length-1];
w = Randomizer.randomChoice(cpd);
freq1[w]++;
}
long timeLinear = System.currentTimeMillis() - start;
System.out.println("Linear sampling time : " + timeLinear + " milliseconds.");
System.out.println("Freq : " + Arrays.toString(freq1) + "\n");
double[] prob1 = new double[nrOfStates];
DistributionUtils.computeDistribution(freq1, prob1);
System.out.println("Normalized probability : " + Arrays.toString(prob1) + "\n");
assertArrayEquals(prob, prob1, 1E-4);
}
@Test
public void binarySearch() {
int w;
//++++++ Binary search sampling ++++++//
double random;
int[] freq2 = new int[nrOfStates];
long start = System.currentTimeMillis();
for (int i = 0; i < ite; i++) {
random = Randomizer.nextDouble() * cf[cf.length-1];
w = RandomUtils.binarySearchSampling(cf, random);
freq2[w]++;
}
long timeBiSearch = System.currentTimeMillis() - start;
System.out.println("Binary search sampling time : " + timeBiSearch + " milliseconds.");
System.out.println("Freq : " + Arrays.toString(freq2) + "\n");
double[] prob2 = new double[nrOfStates];
DistributionUtils.computeDistribution(freq2, prob2);
System.out.println("Normalized probability : " + Arrays.toString(prob2) + "\n");
assertArrayEquals(prob, prob2, 1E-4);
}
}
|
add comment
|
src/test/beast/util/BinarySearchSamplingTest.java
|
add comment
|
|
Java
|
lgpl-2.1
|
62588692656accb2dc7f33f947a36ffd09f32bf9
| 0
|
modius/railo,getrailo/railo,getrailo/railo,getrailo/railo,JordanReiter/railo,modius/railo,JordanReiter/railo
|
package railo.commons.io.res.type.compress;
import java.io.IOException;
import java.util.Map;
import railo.commons.io.res.Resource;
import railo.commons.io.res.ResourceProvider;
import railo.commons.io.res.Resources;
import railo.commons.io.res.util.ResourceLockImpl;
import railo.commons.io.res.util.ResourceUtil;
import railo.commons.lang.SizeOf;
import railo.commons.lang.StringUtil;
import railo.runtime.PageContext;
import railo.runtime.engine.ThreadLocalPageContext;
import railo.runtime.op.Caster;
import railo.runtime.type.Sizeable;
public abstract class CompressResourceProvider implements ResourceProvider,Sizeable {
private static final long serialVersionUID = 5930090603192203086L;
private Resources resources;
protected String scheme=null;
protected boolean caseSensitive=true;
boolean async=true;
private long lockTimeout=10000;
private final ResourceLockImpl lock=new ResourceLockImpl(lockTimeout,caseSensitive);
private Map arguments;
/**
* @see railo.commons.io.res.ResourceProvider#init(java.lang.String, java.util.Map)
*/
public ResourceProvider init(String scheme, Map arguments) {// FUTURE Map<String,String> arguments
if(!StringUtil.isEmpty(scheme))this.scheme=scheme;
if(arguments!=null) {
this.arguments=arguments;
// case-sensitive
String strCaseSensitive=(String) arguments.get("case-sensitive");
if(strCaseSensitive!=null) {
caseSensitive=Caster.toBooleanValue(strCaseSensitive,true);
}
// sync
String strASync=(String) arguments.get("asynchronus");
if(strASync==null)strASync=(String) arguments.get("async");
if(strASync!=null) {
async=Caster.toBooleanValue(strASync,true);
}
// lock-timeout
String strTimeout = (String) arguments.get("lock-timeout");
if(strTimeout!=null) {
lockTimeout=Caster.toLongValue(arguments.get("lock-timeout"),lockTimeout);
}
}
lock.setLockTimeout(lockTimeout);
lock.setCaseSensitive(caseSensitive);
return this;
}
public ResourceProvider init(String scheme, boolean caseSensitive, boolean async) {
if(!StringUtil.isEmpty(scheme))this.scheme=scheme;
this.caseSensitive=caseSensitive;
this.async=async;
return this;
}
/**
* @see railo.commons.io.res.ResourceProvider#getResource(java.lang.String)
*/
public Resource getResource(String path) {
path=ResourceUtil.removeScheme(scheme,path);
int index=path.lastIndexOf('!');
if(index!=-1) {
Resource file = toResource(path.substring(0,index));//resources.getResource(path.substring(0,index));
return new CompressResource(this,getCompress(file),path.substring(index+1),caseSensitive);
}
Resource file = toResource(path);//resources.getResource(path);
return new CompressResource(this,getCompress(file),"/",caseSensitive);
}
private Resource toResource(String path) {
PageContext pc = ThreadLocalPageContext.get();
if(pc!=null) {
return ResourceUtil.toResourceNotExisting(ThreadLocalPageContext.get(), path,true);
}
return resources.getResource(path);
}
public abstract Compress getCompress(Resource file);
/**
* @see railo.commons.io.res.ResourceProvider#getScheme()
*/
public String getScheme() {
return scheme;
}
public void setResources(Resources resources) {
this.resources=resources;
}
/**
* @throws IOException
* @see railo.commons.io.res.ResourceProvider#lock(railo.commons.io.res.Resource)
*/
public void lock(Resource res) throws IOException {
lock.lock(res);
}
/**
* @see railo.commons.io.res.ResourceProvider#unlock(railo.commons.io.res.Resource)
*/
public void unlock(Resource res) {
lock.unlock(res);
}
/**
* @see railo.commons.io.res.ResourceProvider#read(railo.commons.io.res.Resource)
*/
public void read(Resource res) throws IOException {
lock.read(res);
}
/**
* @see railo.commons.io.res.ResourceProvider#getArguments()
*/
public Map getArguments() {
return arguments;
}
/*public static void main(String[] args) throws IOException {
Resources rs=ResourcesImpl.getGlobal();
rs.registerResourceProvider(new ZipResourceProvider().init("zip", null));
rs.registerResourceProvider(new RamResourceProvider().init("ram", null));
Resource ra = rs.getResource("zip:///Users/mic/temp/test/ras111.zip!/dd/");
print.ln(ra);
print.ln(ra.getParent());
ra = rs.getResource("ram:///dd/");
print.ln(ra);
print.ln(ra.getParent());
Resource org = rs.getResource("/Users/mic/temp/test/org.zip");
Resource trg = rs.getResource("/Users/mic/temp/test/trg.zip");
Resource ras = rs.getResource("/Users/mic/temp/test/ras.zip");
ResourceUtil.copy(org, ras);
Resource res1 = rs.getResource("zip:///Users/mic/temp/test/rasx.zip!/dd");
Resource res2 = rs.getResource("zip:///Users/mic/temp/test/ras.zip!/ddd"+Math.random()+".txt");
res1.mkdirs();
res2.createNewFile();
ResourceUtil.copy(ras, trg);
print.ln("copy");
//Resource org2 = rs.getResource("/Users/mic/temp/test/org.zip");
Resource railotmp = rs.getResource("/Users/mic/temp/railotmp/");
Resource trg2 = rs.getResource("zip:///Users/mic/temp/railotmp.zip!");
trg2.delete();
long start=System.currentTimeMillis();
ResourceUtil.copyRecursive(railotmp, trg2);
print.ln("ende "+(System.currentTimeMillis()-start));
//print(res3);
}
private static void print(Resource r) {
print.ln("****************************************");
print.ln(r);
if(r==null) return;
print.ln("path:"+r.getPath());
print.ln("name:"+r.getName());
print.ln("parent:"+r.getParent());
print.ln("parent-res:"+r.getParentResource());
print.ln("exists:"+r.exists());
print.ln("isDirectory:"+r.isDirectory());
print.ln("isFile:"+r.isFile());
print.ln("lastModified:"+r.lastModified());
if(r.isFile()) {
//print.ln("->"+IOUtil.toString(r.getI nputStream(),null)+"<-");
}
if(r.isDirectory()) {
print.ln(" - children");
String[] children = r.list();
Resource[] ch2 = r.listResources();
for(int i=0;i<children.length;i++) {
print.ln(" - "+children[i]);
print.ln(" - "+ch2[i]);
}
}
}*/
/**
* @see railo.runtime.type.Sizeable#sizeOf()
*/
public long sizeOf() {
return SizeOf.size(lock);
}
}
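getResource() above treats everything after the last '!' in a path as the entry inside the compressed archive. A minimal sketch of just that split, with illustrative names rather than the Railo resource API:
// Minimal sketch of splitting "outer.zip!/inner/path" into archive and entry
// parts; class and method names here are illustrative, not the Railo API.
final class ArchivePathSketch {
    static String[] split(String path) {
        int index = path.lastIndexOf('!');
        if (index == -1) {
            return new String[] { path, "/" };   // whole archive, root entry
        }
        return new String[] { path.substring(0, index), path.substring(index + 1) };
    }
    public static void main(String[] args) {
        String[] parts = split("/tmp/test/ras.zip!/dd/file.txt");
        System.out.println(parts[0]); // /tmp/test/ras.zip
        System.out.println(parts[1]); // /dd/file.txt
    }
}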
|
railo-java/railo-core/src/railo/commons/io/res/type/compress/CompressResourceProvider.java
|
package railo.commons.io.res.type.compress;
import java.io.IOException;
import java.util.Map;
import railo.commons.io.res.Resource;
import railo.commons.io.res.ResourceProvider;
import railo.commons.io.res.Resources;
import railo.commons.io.res.util.ResourceLockImpl;
import railo.commons.io.res.util.ResourceUtil;
import railo.commons.lang.SizeOf;
import railo.commons.lang.StringUtil;
import railo.runtime.op.Caster;
import railo.runtime.type.Sizeable;
public abstract class CompressResourceProvider implements ResourceProvider,Sizeable {
private Resources resources;
protected String scheme=null;
protected boolean caseSensitive=true;
boolean async=true;
private long lockTimeout=10000;
private final ResourceLockImpl lock=new ResourceLockImpl(lockTimeout,caseSensitive);
private Map arguments;
/**
* @see railo.commons.io.res.ResourceProvider#init(java.lang.String, java.util.Map)
*/
public ResourceProvider init(String scheme, Map arguments) {
if(!StringUtil.isEmpty(scheme))this.scheme=scheme;
if(arguments!=null) {
this.arguments=arguments;
// case-sensitive
String strCaseSensitive=(String) arguments.get("case-sensitive");
if(strCaseSensitive!=null) {
caseSensitive=Caster.toBooleanValue(strCaseSensitive,true);
}
// sync
String strASync=(String) arguments.get("asynchronus");
if(strASync==null)strASync=(String) arguments.get("async");
if(strASync!=null) {
async=Caster.toBooleanValue(strASync,true);
}
// lock-timeout
String strTimeout = (String) arguments.get("lock-timeout");
if(strTimeout!=null) {
lockTimeout=Caster.toLongValue(arguments.get("lock-timeout"),lockTimeout);
}
}
lock.setLockTimeout(lockTimeout);
lock.setCaseSensitive(caseSensitive);
return this;
}
public ResourceProvider init(String scheme, boolean caseSensitive, boolean async) {
if(!StringUtil.isEmpty(scheme))this.scheme=scheme;
this.caseSensitive=caseSensitive;
this.async=async;
return this;
}
/**
* @see railo.commons.io.res.ResourceProvider#getResource(java.lang.String)
*/
public Resource getResource(String path) {
path=ResourceUtil.removeScheme(scheme,path);
int index=path.lastIndexOf('!');
if(index!=-1) {
Resource file = resources.getResource(path.substring(0,index));
return new CompressResource(this,getCompress(file),path.substring(index+1),caseSensitive);
}
Resource file = resources.getResource(path);
return new CompressResource(this,getCompress(file),"/",caseSensitive);
}
public abstract Compress getCompress(Resource file);
/**
* @see railo.commons.io.res.ResourceProvider#getScheme()
*/
public String getScheme() {
return scheme;
}
public void setResources(Resources resources) {
this.resources=resources;
}
/**
* @throws IOException
* @see railo.commons.io.res.ResourceProvider#lock(railo.commons.io.res.Resource)
*/
public void lock(Resource res) throws IOException {
lock.lock(res);
}
/**
* @see railo.commons.io.res.ResourceProvider#unlock(railo.commons.io.res.Resource)
*/
public void unlock(Resource res) {
lock.unlock(res);
}
/**
* @see railo.commons.io.res.ResourceProvider#read(railo.commons.io.res.Resource)
*/
public void read(Resource res) throws IOException {
lock.read(res);
}
/**
* @see railo.commons.io.res.ResourceProvider#getArguments()
*/
public Map getArguments() {
return arguments;
}
/*public static void main(String[] args) throws IOException {
Resources rs=ResourcesImpl.getGlobal();
rs.registerResourceProvider(new ZipResourceProvider().init("zip", null));
rs.registerResourceProvider(new RamResourceProvider().init("ram", null));
Resource ra = rs.getResource("zip:///Users/mic/temp/test/ras111.zip!/dd/");
print.ln(ra);
print.ln(ra.getParent());
ra = rs.getResource("ram:///dd/");
print.ln(ra);
print.ln(ra.getParent());
Resource org = rs.getResource("/Users/mic/temp/test/org.zip");
Resource trg = rs.getResource("/Users/mic/temp/test/trg.zip");
Resource ras = rs.getResource("/Users/mic/temp/test/ras.zip");
ResourceUtil.copy(org, ras);
Resource res1 = rs.getResource("zip:///Users/mic/temp/test/rasx.zip!/dd");
Resource res2 = rs.getResource("zip:///Users/mic/temp/test/ras.zip!/ddd"+Math.random()+".txt");
res1.mkdirs();
res2.createNewFile();
ResourceUtil.copy(ras, trg);
print.ln("copy");
//Resource org2 = rs.getResource("/Users/mic/temp/test/org.zip");
Resource railotmp = rs.getResource("/Users/mic/temp/railotmp/");
Resource trg2 = rs.getResource("zip:///Users/mic/temp/railotmp.zip!");
trg2.delete();
long start=System.currentTimeMillis();
ResourceUtil.copyRecursive(railotmp, trg2);
print.ln("ende "+(System.currentTimeMillis()-start));
//print(res3);
}
private static void print(Resource r) {
print.ln("****************************************");
print.ln(r);
if(r==null) return;
print.ln("path:"+r.getPath());
print.ln("name:"+r.getName());
print.ln("parent:"+r.getParent());
print.ln("parent-res:"+r.getParentResource());
print.ln("exists:"+r.exists());
print.ln("isDirectory:"+r.isDirectory());
print.ln("isFile:"+r.isFile());
print.ln("lastModified:"+r.lastModified());
if(r.isFile()) {
//print.ln("->"+IOUtil.toString(r.getI nputStream(),null)+"<-");
}
if(r.isDirectory()) {
print.ln(" - children");
String[] children = r.list();
Resource[] ch2 = r.listResources();
for(int i=0;i<children.length;i++) {
print.ln(" - "+children[i]);
print.ln(" - "+ch2[i]);
}
}
}*/
/**
* @see railo.runtime.type.Sizeable#sizeOf()
*/
public long sizeOf() {
return SizeOf.size(lock);
}
}
|
solved ticket https://issues.jboss.org/browse/RAILO-1133
|
railo-java/railo-core/src/railo/commons/io/res/type/compress/CompressResourceProvider.java
|
solved ticket https://issues.jboss.org/browse/RAILO-1133
|
|
Java
|
apache-2.0
|
204998ff9d17a6816d418c2c38306b43ef400232
| 0
|
consulo/consulo-properties
|
package icons;
import javax.swing.Icon;
import com.intellij.openapi.util.IconLoader;
/**
* NOTE THIS FILE IS AUTO-GENERATED by the build/scripts/icons.gant
* Don't repeat mistakes of others ;-)
*/
public class PropertiesIcons {
private static Icon load(String path) {
return IconLoader.getIcon(path, PropertiesIcons.class);
}
public static final Icon XmlProperties = load("/icons/xmlProperties.png"); // 16x16
}
|
src/icons/PropertiesIcons.java
|
package icons;
import com.intellij.openapi.util.IconLoader;
import javax.swing.*;
/**
* NOTE THIS FILE IS AUTO-GENERATED by the build/scripts/icons.gant
* Don't repeat mistakes of others ;-)
*/
public class PropertiesIcons {
private static Icon load(String path) {
return IconLoader.getIcon(path, PropertiesIcons.class);
}
public static final Icon XmlProperties = load("/resources/icons/xmlProperties.png"); // 16x16
}
|
fixed https://github.com/consulo/consulo/issues/123
|
src/icons/PropertiesIcons.java
|
fixed https://github.com/consulo/consulo/issues/123
|
|
Java
|
apache-2.0
|
383e1dfe44dc473e4d43eeeed5a4f6af16080552
| 0
|
cfriedt/sliderule,cfriedt/sliderule
|
/*
* Copyright (C) 2015 Christopher Friedt <chrisfriedt@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sliderule.runner;
import java.lang.annotation.*;
import java.lang.reflect.*;
import java.util.*;
class AnnotatedClass {
private static final int FIELD_PARAM;
private static final int METHOD_AFTER_EXPERIMENT;
private static final int METHOD_BEFORE_EXPERIMENT;
private static final int METHOD_BENCHMARK;
private static final int METHOD_AFTER_REP;
private static final int METHOD_BEFORE_REP;
private static final int METHOD_MACROBENCHMARK;
private static final Map<Class<?>,Integer> field_map;
private static final Map<Class<?>,Integer> method_map;
static {
int field=0;
FIELD_PARAM = field++;
field_map = new HashMap<Class<?>,Integer>();
field_map.put( org.sliderule.Param.class, FIELD_PARAM );
int method=0;
METHOD_AFTER_EXPERIMENT = method++;
METHOD_BEFORE_EXPERIMENT = method++;
METHOD_BENCHMARK = method++;
METHOD_AFTER_REP = method++;
METHOD_BEFORE_REP = method++;
METHOD_MACROBENCHMARK = method++;
method_map = new HashMap<Class<?>,Integer>();
method_map.put( org.sliderule.AfterExperiment.class, METHOD_AFTER_EXPERIMENT );
method_map.put( org.sliderule.BeforeExperiment.class, METHOD_BEFORE_EXPERIMENT );
method_map.put( org.sliderule.Benchmark.class, METHOD_BENCHMARK );
method_map.put( org.sliderule.api.AfterRep.class, METHOD_AFTER_REP );
method_map.put( org.sliderule.api.BeforeRep.class, METHOD_BEFORE_REP );
method_map.put( org.sliderule.api.Macrobenchmark.class, METHOD_MACROBENCHMARK );
}
private final Class<?> klass;
private final HashSet<Field>[] field_array;
private final HashSet<Method>[] method_array;
@SuppressWarnings("unchecked")
public AnnotatedClass( Class<?> klass ) {
this.klass = klass;
field_array = new HashSet[ field_map.size() ];
for( int i=0; i < field_array.length; i++ ) {
field_array[i] = new HashSet<Field>();
}
method_array = new HashSet[ method_map.size() ];
for( int i=0; i < method_array.length; i++ ) {
method_array[i] = new HashSet<Method>();
}
}
public Class<?> getAnnotatedClass() {
return klass;
}
@SuppressWarnings("unchecked")
public void filterField( Field f ) {
List<Annotation> anna;
for( Map.Entry<Class<?>,Integer> e: field_map.entrySet() ) {
Class<Annotation> klass = (Class<Annotation>) e.getKey();
int val = (int)(Integer)e.getValue();
anna = new ArrayList<Annotation>();
anna.addAll( Arrays.asList( getAnnotationsByType( f, klass ) ) );
anna.addAll( Arrays.asList( getDeclaredAnnotationsByType( f, klass ) ) );
if ( ! anna.isEmpty() ) {
field_array[ val ].add( f );
}
}
}
@SuppressWarnings("unchecked")
public void filterMethod( Method m ) {
List<Annotation> anna;
for( Map.Entry<Class<?>,Integer> e: method_map.entrySet() ) {
Class<Annotation> klass = (Class<Annotation>) e.getKey();
int val = (int)(Integer)e.getValue();
anna = new ArrayList<Annotation>();
anna.addAll( Arrays.asList( getAnnotationsByType( m, klass ) ) );
anna.addAll( Arrays.asList( getDeclaredAnnotationsByType( m, klass ) ) );
if ( ! anna.isEmpty() ) {
method_array[ val ].add( m );
}
}
}
@SuppressWarnings("unchecked")
private static <T extends Annotation> T[] getAnnotations( boolean declared, Object o, Class<T> klass ) {
T[] a = null;
if ( o instanceof Field ) {
Field f = (Field)o;
a = (T[]) ( declared ? f.getDeclaredAnnotations() : f.getAnnotations() );
} else if ( o instanceof Method ) {
Method m = (Method)o;
a = (T[]) ( declared ? m.getDeclaredAnnotations() : m.getAnnotations() );
} else {
throw new IllegalArgumentException();
}
ArrayList<T> ala = new ArrayList<T>();
for( T an: a ) {
Class<? extends Annotation> ant = an.annotationType();
if ( ant == klass ) {
ala.add( (T) an );
}
}
return ala.toArray( a );
}
private static <T extends Annotation> T[] getAnnotationsByType( Object o, Class<T> klass ) {
return getAnnotations( false, o, klass );
}
private static <T extends Annotation> T[] getDeclaredAnnotationsByType( Object o, Class<T> klass ) {
return getAnnotations( true, o, klass );
}
public Set<Field> getParamFields() {
return field_array[ FIELD_PARAM ];
}
public Set<Method> getAfterExperimentMethods() {
return method_array[ METHOD_AFTER_EXPERIMENT ];
}
public Set<Method> getBeforeExperimentMethods() {
return method_array[ METHOD_BEFORE_EXPERIMENT ];
}
public Set<Method> getBenchmarkMethods() {
return method_array[ METHOD_BENCHMARK ];
}
public Set<Method> getAfterRepMethods() {
return method_array[ METHOD_AFTER_REP ];
}
public Set<Method> getBeforeRepMethods() {
return method_array[ METHOD_BEFORE_REP ];
}
public Set<Method> getMacrobenchmarkMethods() {
return method_array[ METHOD_MACROBENCHMARK ];
}
}
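The filterField()/filterMethod() logic above buckets class members by the annotations they carry. A minimal standalone sketch of the underlying reflection step, collecting the methods that carry a given runtime annotation (the annotation and example class below are illustrative, not the sliderule API):
import java.lang.annotation.Annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
// Minimal sketch of collecting methods annotated with a given annotation;
// the @Benchmark annotation and Example class below are illustrative only.
public class AnnotationScanSketch {
    @Retention(RetentionPolicy.RUNTIME)
    @Target(ElementType.METHOD)
    @interface Benchmark {}
    static class Example {
        @Benchmark public void timeSomething() {}
        public void helper() {}
    }
    static List<Method> methodsAnnotatedWith(Class<?> klass, Class<? extends Annotation> annotation) {
        List<Method> result = new ArrayList<Method>();
        for (Method m : klass.getDeclaredMethods()) {
            if (m.isAnnotationPresent(annotation)) {
                result.add(m);
            }
        }
        return result;
    }
    public static void main(String[] args) {
        // prints only the timeSomething() method
        System.out.println(methodsAnnotatedWith(Example.class, Benchmark.class));
    }
}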
|
src/org/sliderule/runner/AnnotatedClass.java
|
/*
* Copyright (C) 2015 Christopher Friedt <chrisfriedt@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sliderule.runner;
import java.lang.annotation.*;
import java.lang.reflect.*;
import java.util.*;
class AnnotatedClass {
private static final int FIELD_PARAM;
private static final int METHOD_AFTER_EXPERIMENT;
private static final int METHOD_BEFORE_EXPERIMENT;
private static final int METHOD_BENCHMARK;
private static final int METHOD_AFTER_REP;
private static final int METHOD_BEFORE_REP;
private static final int METHOD_MACROBENCHMARK;
private static final Map<Class<?>,Integer> field_map;
private static final Map<Class<?>,Integer> method_map;
static {
int field=0;
FIELD_PARAM = field++;
field_map = new HashMap<Class<?>,Integer>();
field_map.put( org.sliderule.Param.class, FIELD_PARAM );
int method=0;
METHOD_AFTER_EXPERIMENT = method++;
METHOD_BEFORE_EXPERIMENT = method++;
METHOD_BENCHMARK = method++;
METHOD_AFTER_REP = method++;
METHOD_BEFORE_REP = method++;
METHOD_MACROBENCHMARK = method++;
method_map = new HashMap<Class<?>,Integer>();
method_map.put( org.sliderule.AfterExperiment.class, METHOD_AFTER_EXPERIMENT );
method_map.put( org.sliderule.BeforeExperiment.class, METHOD_BEFORE_EXPERIMENT );
method_map.put( org.sliderule.Benchmark.class, METHOD_BENCHMARK );
method_map.put( org.sliderule.api.AfterRep.class, METHOD_AFTER_REP );
method_map.put( org.sliderule.api.BeforeRep.class, METHOD_BEFORE_REP );
method_map.put( org.sliderule.api.Macrobenchmark.class, METHOD_MACROBENCHMARK );
}
private final Class<?> klass;
private final HashSet<Field>[] field_array;
private final HashSet<Method>[] method_array;
@SuppressWarnings("unchecked")
public AnnotatedClass( Class<?> klass ) {
this.klass = klass;
field_array = new HashSet[ field_map.size() ];
for( int i=0; i < field_array.length; i++ ) {
field_array[i] = new HashSet<Field>();
}
method_array = new HashSet[ method_map.size() ];
for( int i=0; i < method_array.length; i++ ) {
method_array[i] = new HashSet<Method>();
}
}
public Class<?> getAnnotatedClass() {
return klass;
}
@SuppressWarnings("unchecked")
public void filterField( Field f ) {
List<Annotation> anna;
for( Map.Entry<Class<?>,Integer> e: field_map.entrySet() ) {
Class<Annotation> klass = (Class<Annotation>) e.getKey();
int val = (int)(Integer)e.getValue();
anna = new ArrayList<Annotation>();
anna.addAll( Arrays.asList( getAnnotationsByType( f, klass ) ) );
anna.addAll( Arrays.asList( getDeclaredAnnotationsByType( f, klass ) ) );
if ( ! anna.isEmpty() ) {
field_array[ val ].add( f );
}
}
}
@SuppressWarnings("unchecked")
public void filterMethod( Method m ) {
List<Annotation> anna;
for( Map.Entry<Class<?>,Integer> e: method_map.entrySet() ) {
Class<Annotation> klass = (Class<Annotation>) e.getKey();
int val = (int)(Integer)e.getValue();
anna = new ArrayList<Annotation>();
anna.addAll( Arrays.asList( getAnnotationsByType( m, klass ) ) );
anna.addAll( Arrays.asList( getDeclaredAnnotationsByType( m, klass ) ) );
if ( ! anna.isEmpty() ) {
method_array[ val ].add( m );
}
}
}
private static Annotation[] getAnnotations( boolean declared, Object o, Class<?> klass ) {
final Annotation[] template = new Annotation[0];
ArrayList<Annotation> ala = new ArrayList<Annotation>();
Annotation[] a = template;
if ( o instanceof Field ) {
Field f = (Field)o;
a = declared ? f.getDeclaredAnnotations() : f.getAnnotations();
} else if ( o instanceof Method ) {
Method m = (Method)o;
a = declared ? m.getDeclaredAnnotations() : m.getAnnotations();
} else {
throw new IllegalArgumentException();
}
for( Annotation an: a ) {
if ( an.getClass() == klass ) {
ala.add( an );
}
}
return ala.toArray( template );
}
private static Annotation[] getAnnotationsByType( Object o, Class<?> klass ) {
return getAnnotations( false, o, klass );
}
private static Annotation[] getDeclaredAnnotationsByType( Object o, Class<?> klass ) {
return getAnnotations( true, o, klass );
}
public Set<Field> getParamFields() {
return field_array[ FIELD_PARAM ];
}
public Set<Method> getAfterExperimentMethods() {
return method_array[ METHOD_AFTER_EXPERIMENT ];
}
public Set<Method> getBeforeExperimentMethods() {
return method_array[ METHOD_BEFORE_EXPERIMENT ];
}
public Set<Method> getBenchmarkMethods() {
return method_array[ METHOD_BENCHMARK ];
}
public Set<Method> getAfterRepMethods() {
return method_array[ METHOD_AFTER_REP ];
}
public Set<Method> getBeforeRepMethods() {
return method_array[ METHOD_BEFORE_REP ];
}
public Set<Method> getMacrobenchmarkMethods() {
return method_array[ METHOD_MACROBENCHMARK ];
}
}
|
fixed for compatibility with jdk 7
|
src/org/sliderule/runner/AnnotatedClass.java
|
fixed for compatibility with jdk 7
|
|
Java
|
apache-2.0
|
82731af46089a0b7ab4654e6716b82276ac4a2ee
| 0
|
ctripcorp/dal,ctripcorp/dal,ctripcorp/dal
|
package com.ctrip.platform.dal.dao.configure;
import com.ctrip.platform.dal.dao.helper.ConnectionStringKeyHelper;
import org.apache.commons.lang.StringUtils;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class ConnectionStringParser {
private static ConnectionStringParser parser = null;
public synchronized static ConnectionStringParser getInstance() {
if (parser == null) {
parser = new ConnectionStringParser();
}
return parser;
}
private static final Pattern dburlPattern =
Pattern.compile("(data\\ssource|server|address|addr|network)=([^;]+)", Pattern.CASE_INSENSITIVE);
private static final Pattern dbuserPattern = Pattern.compile("(uid|user\\sid)=([^;]+)", Pattern.CASE_INSENSITIVE);
private static final Pattern dbpasswdPattern = Pattern.compile("(password|pwd)=([^;]+)", Pattern.CASE_INSENSITIVE);
private static final Pattern dbnamePattern =
Pattern.compile("(database|initial\\scatalog)=([^;]+)", Pattern.CASE_INSENSITIVE);
private static final Pattern dbcharsetPattern = Pattern.compile("(charset)=([^;]+)", Pattern.CASE_INSENSITIVE);
private static final Pattern dbportPattern = Pattern.compile("(port)=([^;]+)", Pattern.CASE_INSENSITIVE);
private static final Pattern versionPattern = Pattern.compile("(version)=([^;]+)", Pattern.CASE_INSENSITIVE);
private static final String PORT_SPLIT = ",";
public static final String MYSQL_URL_PREFIX="jdbc:mysql://";
public static final String SQLSERVER_URL_PREFIX="jdbc:sqlserver://";
public static final String REPLICATION_MYSQL_URL_PREFIX = "jdbc:mysql:replication://";
public static final String DBURL_SQLSERVER = SQLSERVER_URL_PREFIX+"%s:%s;DatabaseName=%s";
public static final String DBURL_MYSQL = MYSQL_URL_PREFIX+"%s:%s/%s?useUnicode=true&characterEncoding=%s";
public static final String DEFAULT_ENCODING = "UTF-8";
public static final String DEFAULT_PORT = "3306";
public static final String DRIVER_MYSQL = "com.mysql.jdbc.Driver";
public static final String DRIVER_SQLSERVRE = "com.microsoft.sqlserver.jdbc.SQLServerDriver";
private static final Pattern hostPortPatternInMySQLURL = Pattern.compile("(jdbc:mysql://)([[^\\f\\n\\r\\t\\v=/]:]+):([^/]+)");
private static final Pattern complexHostPatternInMySQLURL = Pattern.compile("(\\(host|,host)=([^\\),]+)", Pattern.CASE_INSENSITIVE);
private static final Pattern complexPortPatternInMySQLURL = Pattern.compile("(\\(port|,port)=([^\\),]+)", Pattern.CASE_INSENSITIVE);
private static final Pattern hostPortPatternInSQLServerURL = Pattern.compile("(jdbc:sqlserver://)([\\S:]+):([^;]+)");
private static final Pattern urlReplacePatternInMySQLURL = Pattern.compile("jdbc:mysql://([^/]+)");
private static final Pattern urlReplacePatternInMySQLMgrURL = Pattern.compile("jdbc:mysql:replication://([^/]+)");
private static final Pattern urlReplacePatternInSQLServerURL = Pattern.compile("jdbc:sqlserver://([^;]+)");
/**
* parse "Data Source=127.0.0.1,28747;UID=sa;password=sa;database=test;"
*
* @return DataSourceConfigure
*/
public DalConnectionStringConfigure parse(String name, String connectionString) {
DataSourceConfigure config = new DataSourceConfigure();
if (connectionString == null)
return config;
String version = null;
Matcher matcher = versionPattern.matcher(connectionString);
if (matcher.find()) {
version = matcher.group(2);
}
String dbname = null;
matcher = dbnamePattern.matcher(connectionString);
if (matcher.find()) {
dbname = matcher.group(2);
}
String dbhost = null;
String url = null;
String charset = null;
String driverClass = null;
String port = null;
matcher = dburlPattern.matcher(connectionString);
boolean isSqlServer;
if (matcher.find()) {
String[] dburls = matcher.group(2).split(PORT_SPLIT);
dbhost = dburls[0];
if (dburls.length == 2) {// is sqlserver
isSqlServer = true;
port = dburls[1];
url = String.format(DBURL_SQLSERVER, dbhost, port, dbname);
} else {// should be mysql
isSqlServer = false;
matcher = dbcharsetPattern.matcher(connectionString);
if (matcher.find()) {
charset = matcher.group(2);
} else {
charset = DEFAULT_ENCODING;
}
matcher = dbportPattern.matcher(connectionString);
if (matcher.find()) {
port = matcher.group(2);
} else {
port = DEFAULT_PORT;
}
url = String.format(DBURL_MYSQL, dbhost, port, dbname, charset);
}
driverClass = isSqlServer ? DRIVER_SQLSERVRE : DRIVER_MYSQL;
} else {
throw new RuntimeException("The format of connection string is incorrect for " + name);
}
String userName = null;
matcher = dbuserPattern.matcher(connectionString);
if (matcher.find()) {
userName = matcher.group(2);
}
String password = null;
matcher = dbpasswdPattern.matcher(connectionString);
if (matcher.find()) {
password = matcher.group(2);
}
String keyName = ConnectionStringKeyHelper.getKeyName(name);
config.setName(keyName);
config.setConnectionUrl(url);
config.setUserName(userName != null ? userName : "");
config.setPassword(password != null ? password : "");
config.setDriverClass(driverClass);
config.setVersion(version);
config.setHostName(dbhost);
return config;
}
public static String replaceHostAndPort(String url, String newHost, String newPort) {
Matcher matcher = null;
if (url.toLowerCase().startsWith(MYSQL_URL_PREFIX)) {
matcher = urlReplacePatternInMySQLURL.matcher(url);
if (matcher.find())
url = matcher.replaceFirst(String.format("jdbc:mysql://%s:%s", newHost, newPort));
}
else if (url.toLowerCase().startsWith(REPLICATION_MYSQL_URL_PREFIX)) {
matcher = urlReplacePatternInMySQLMgrURL.matcher(url);
if (matcher.find()) {
url = matcher.replaceFirst(String.format("jdbc:mysql://%s:%s", newHost, newPort));
}
}
else if (url.toLowerCase().startsWith(SQLSERVER_URL_PREFIX)) {
matcher = urlReplacePatternInSQLServerURL.matcher(url);
if (matcher.find())
url = matcher.replaceFirst(String.format("jdbc:sqlserver://%s:%s", newHost, newPort));
}
return url;
}
public static HostAndPort parseHostPortFromURL(String url) {
if (StringUtils.isEmpty(url)) {
return new HostAndPort();
}
if (url.toLowerCase().startsWith(MYSQL_URL_PREFIX)) {
String host = null;
Integer port = null;
// jdbc:mysql://host:port/db
Matcher matcher = hostPortPatternInMySQLURL.matcher(url);
if (matcher.find()) {
host = matcher.group(2);
port = parseInt(matcher.group(3));
}
if (host == null && port == null) {
// jdbc:mysql://address=(host=host)(port=port)/db
// jdbc:mysql://(host=host,port=port)/db
matcher = complexHostPatternInMySQLURL.matcher(url);
if (matcher.find())
host = matcher.group(2);
matcher = complexPortPatternInMySQLURL.matcher(url);
if (matcher.find())
port = parseInt(matcher.group(2));
}
return new HostAndPort(url, host, port);
}
if (url.toLowerCase().startsWith(SQLSERVER_URL_PREFIX)) {
Matcher matcher = hostPortPatternInSQLServerURL.matcher(url);
if (matcher.find())
return new HostAndPort(url, matcher.group(2), parseInt(matcher.group(3)));
}
return new HostAndPort(url);
}
private static Integer parseInt(String str) {
try {
return Integer.parseInt(str);
} catch (Throwable t) {
return null;
}
}
}
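The parser above extracts each setting from a semicolon-delimited connection string with a case-insensitive regex per key. A minimal sketch of that extraction for the server/port portion, using only the example string from the javadoc (this is an illustrative fragment, not the full parser):
import java.util.regex.Matcher;
import java.util.regex.Pattern;
// Minimal sketch of pulling one key=value token out of a semicolon-delimited
// connection string with a case-insensitive regex.
public class ConnectionStringSketch {
    private static final Pattern SERVER =
            Pattern.compile("(data\\ssource|server)=([^;]+)", Pattern.CASE_INSENSITIVE);
    public static void main(String[] args) {
        String cs = "Data Source=127.0.0.1,28747;UID=sa;password=sa;database=test;";
        Matcher m = SERVER.matcher(cs);
        if (m.find()) {
            String[] hostAndPort = m.group(2).split(",");   // "127.0.0.1" and "28747"
            System.out.println(hostAndPort[0] + ":" + hostAndPort[1]);
        }
    }
}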
|
dal-client/src/main/java/com/ctrip/platform/dal/dao/configure/ConnectionStringParser.java
|
package com.ctrip.platform.dal.dao.configure;
import com.ctrip.platform.dal.dao.helper.ConnectionStringKeyHelper;
import org.apache.commons.lang.StringUtils;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class ConnectionStringParser {
private static ConnectionStringParser parser = null;
public synchronized static ConnectionStringParser getInstance() {
if (parser == null) {
parser = new ConnectionStringParser();
}
return parser;
}
private static final Pattern dburlPattern =
Pattern.compile("(data\\ssource|server|address|addr|network)=([^;]+)", Pattern.CASE_INSENSITIVE);
private static final Pattern dbuserPattern = Pattern.compile("(uid|user\\sid)=([^;]+)", Pattern.CASE_INSENSITIVE);
private static final Pattern dbpasswdPattern = Pattern.compile("(password|pwd)=([^;]+)", Pattern.CASE_INSENSITIVE);
private static final Pattern dbnamePattern =
Pattern.compile("(database|initial\\scatalog)=([^;]+)", Pattern.CASE_INSENSITIVE);
private static final Pattern dbcharsetPattern = Pattern.compile("(charset)=([^;]+)", Pattern.CASE_INSENSITIVE);
private static final Pattern dbportPattern = Pattern.compile("(port)=([^;]+)", Pattern.CASE_INSENSITIVE);
private static final Pattern versionPattern = Pattern.compile("(version)=([^;]+)", Pattern.CASE_INSENSITIVE);
private static final Pattern connectTimeoutPattern = Pattern.compile("(connectTimeout)=([^;]+)", Pattern.CASE_INSENSITIVE);
private static final String PORT_SPLIT = ",";
public static final String MYSQL_URL_PREFIX="jdbc:mysql://";
public static final String SQLSERVER_URL_PREFIX="jdbc:sqlserver://";
public static final String REPLICATION_MYSQL_URL_PREFIX = "jdbc:mysql:replication://";
public static final String DBURL_SQLSERVER = SQLSERVER_URL_PREFIX+"%s:%s;DatabaseName=%s";
public static final String DBURL_MYSQL = MYSQL_URL_PREFIX+"%s:%s/%s?useUnicode=true&characterEncoding=%s";
public static final String DEFAULT_ENCODING = "UTF-8";
public static final String DEFAULT_PORT = "3306";
public static final String DRIVER_MYSQL = "com.mysql.jdbc.Driver";
public static final String DRIVER_SQLSERVRE = "com.microsoft.sqlserver.jdbc.SQLServerDriver";
private static final Pattern hostPortPatternInMySQLURL = Pattern.compile("(jdbc:mysql://)([[^\\f\\n\\r\\t\\v=/]:]+):([^/]+)");
private static final Pattern complexHostPatternInMySQLURL = Pattern.compile("(\\(host|,host)=([^\\),]+)", Pattern.CASE_INSENSITIVE);
private static final Pattern complexPortPatternInMySQLURL = Pattern.compile("(\\(port|,port)=([^\\),]+)", Pattern.CASE_INSENSITIVE);
private static final Pattern hostPortPatternInSQLServerURL = Pattern.compile("(jdbc:sqlserver://)([\\S:]+):([^;]+)");
private static final Pattern urlReplacePatternInMySQLURL = Pattern.compile("jdbc:mysql://([^/]+)");
private static final Pattern urlReplacePatternInMySQLMgrURL = Pattern.compile("jdbc:mysql:replication://([^/]+)");
private static final Pattern urlReplacePatternInSQLServerURL = Pattern.compile("jdbc:sqlserver://([^;]+)");
/**
* parse "Data Source=127.0.0.1,28747;UID=sa;password=sa;database=test;"
*
* @return DataSourceConfigure
*/
public DalConnectionStringConfigure parse(String name, String connectionString) {
DataSourceConfigure config = new DataSourceConfigure();
if (connectionString == null)
return config;
String version = null;
Matcher matcher = versionPattern.matcher(connectionString);
if (matcher.find()) {
version = matcher.group(2);
}
String dbname = null;
matcher = dbnamePattern.matcher(connectionString);
if (matcher.find()) {
dbname = matcher.group(2);
}
String dbhost = null;
String url = null;
String charset = null;
String driverClass = null;
String port = null;
matcher = dburlPattern.matcher(connectionString);
boolean isSqlServer;
if (matcher.find()) {
String[] dburls = matcher.group(2).split(PORT_SPLIT);
dbhost = dburls[0];
if (dburls.length == 2) {// is sqlserver
isSqlServer = true;
port = dburls[1];
url = String.format(DBURL_SQLSERVER, dbhost, port, dbname);
} else {// should be mysql
isSqlServer = false;
matcher = dbcharsetPattern.matcher(connectionString);
if (matcher.find()) {
charset = matcher.group(2);
} else {
charset = DEFAULT_ENCODING;
}
matcher = dbportPattern.matcher(connectionString);
if (matcher.find()) {
port = matcher.group(2);
} else {
port = DEFAULT_PORT;
}
url = String.format(DBURL_MYSQL, dbhost, port, dbname, charset);
}
driverClass = isSqlServer ? DRIVER_SQLSERVRE : DRIVER_MYSQL;
} else {
throw new RuntimeException("The format of connection string is incorrect for " + name);
}
String userName = null;
matcher = dbuserPattern.matcher(connectionString);
if (matcher.find()) {
userName = matcher.group(2);
}
String password = null;
matcher = dbpasswdPattern.matcher(connectionString);
if (matcher.find()) {
password = matcher.group(2);
}
String keyName = ConnectionStringKeyHelper.getKeyName(name);
config.setName(keyName);
config.setConnectionUrl(url);
config.setUserName(userName != null ? userName : "");
config.setPassword(password != null ? password : "");
config.setDriverClass(driverClass);
config.setVersion(version);
config.setHostName(dbhost);
return config;
}
public static String replaceHostAndPort(String url, String newHost, String newPort) {
Matcher matcher = null;
if (url.toLowerCase().startsWith(MYSQL_URL_PREFIX)) {
matcher = urlReplacePatternInMySQLURL.matcher(url);
if (matcher.find())
url = matcher.replaceFirst(String.format("jdbc:mysql://%s:%s", newHost, newPort));
}
else if (url.toLowerCase().startsWith(REPLICATION_MYSQL_URL_PREFIX)) {
matcher = urlReplacePatternInMySQLMgrURL.matcher(url);
if (matcher.find()) {
url = matcher.replaceFirst(String.format("jdbc:mysql://%s:%s", newHost, newPort));
}
}
else if (url.toLowerCase().startsWith(SQLSERVER_URL_PREFIX)) {
matcher = urlReplacePatternInSQLServerURL.matcher(url);
if (matcher.find())
url = matcher.replaceFirst(String.format("jdbc:sqlserver://%s:%s", newHost, newPort));
}
return url;
}
public static HostAndPort parseHostPortFromURL(String url) {
if (StringUtils.isEmpty(url)) {
return new HostAndPort();
}
if (url.toLowerCase().startsWith(MYSQL_URL_PREFIX)) {
String host = null;
Integer port = null;
// jdbc:mysql://host:port/db
Matcher matcher = hostPortPatternInMySQLURL.matcher(url);
if (matcher.find()) {
host = matcher.group(2);
port = parseInt(matcher.group(3));
}
if (host == null && port == null) {
// jdbc:mysql://address=(host=host)(port=port)/db
// jdbc:mysql://(host=host,port=port)/db
matcher = complexHostPatternInMySQLURL.matcher(url);
if (matcher.find())
host = matcher.group(2);
matcher = complexPortPatternInMySQLURL.matcher(url);
if (matcher.find())
port = parseInt(matcher.group(2));
}
return new HostAndPort(url, host, port);
}
if (url.toLowerCase().startsWith(SQLSERVER_URL_PREFIX)) {
Matcher matcher = hostPortPatternInSQLServerURL.matcher(url);
if (matcher.find())
return new HostAndPort(url, matcher.group(2), parseInt(matcher.group(3)));
}
return new HostAndPort(url);
}
private static Integer parseInt(String str) {
try {
return Integer.parseInt(str);
} catch (Throwable t) {
return null;
}
}
}
|
remove useless connectTimeoutPattern
|
dal-client/src/main/java/com/ctrip/platform/dal/dao/configure/ConnectionStringParser.java
|
remove useless connectTimeoutPattern
|
|
Java
|
apache-2.0
|
6dfe73b548f59b4866af53f6823e1e1d1e0d4076
| 0
|
googleinterns/step126-2020,googleinterns/step126-2020,googleinterns/step126-2020,googleinterns/step126-2020,googleinterns/step126-2020
|
package com.google.sps.servlets;
import com.google.gson.Gson;
import com.google.sps.data.AssociationData;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.ServletException;
/** Servlet to fetch association data with most positive and negative associations */
@WebServlet("/associations")
public class AssociationServlet extends HttpServlet {
private static final String OUTPUT_TYPE = "applications/json;";
private final ArrayList<String> positive = new ArrayList(Arrays.asList("hi", "test1", "test2"));
private final ArrayList<String> negative = new ArrayList(Arrays.asList("yeet", "hi", "think"));
@Override
public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException {
response.setContentType(OUTPUT_TYPE);
AssociationData output = new AssociationData(positive, negative);
Gson gson = new Gson();
response.getWriter().println(gson.toJson(output));
}
}
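The servlet above hands a small holder object to Gson and writes the resulting JSON to the response. A minimal sketch of that serialization step, assuming Gson is on the classpath and using an illustrative holder class rather than the project's AssociationData:
import com.google.gson.Gson;
import java.util.Arrays;
import java.util.List;
// Minimal sketch of serializing a simple holder to JSON with Gson;
// the Associations class here is illustrative, not the project's AssociationData.
public class GsonSketch {
    static class Associations {
        List<String> positive;
        List<String> negative;
        Associations(List<String> positive, List<String> negative) {
            this.positive = positive;
            this.negative = negative;
        }
    }
    public static void main(String[] args) {
        Associations data = new Associations(
                Arrays.asList("hi", "test1"), Arrays.asList("yeet", "think"));
        // e.g. {"positive":["hi","test1"],"negative":["yeet","think"]}
        System.out.println(new Gson().toJson(data));
    }
}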
|
capstone/src/main/java/com/google/sps/servlets/AssociationServlet.java
|
package com.google.sps.servlets;
<<<<<<< HEAD:capstone/src/main/java/com/google/sps/servlets/AssociationServlet.java
import com.google.gson.Gson;
import com.google.sps.data.AssociationData;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.ServletException;
/** Servlet to fetch association data with most positive and negative associations */
@WebServlet("/associations")
public class AssociationServlet extends HttpServlet {
private static final String OUTPUT_TYPE = "applications/json;";
private final ArrayList<String> positive = new ArrayList(Arrays.asList("hi", "test1", "test2"));
private final ArrayList<String> negative = new ArrayList(Arrays.asList("yeet", "hi", "think"));
@Override
public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException {
response.setContentType(OUTPUT_TYPE);
AssociationData output = new AssociationData(positive, negative);
Gson gson = new Gson();
response.getWriter().println(gson.toJson(output));
}
}
|
Removes conflict indicators from servlet
|
capstone/src/main/java/com/google/sps/servlets/AssociationServlet.java
|
Removes conflict indicators from servlet
|
|
Java
|
apache-2.0
|
f9659b2a213ae36747e4963227b9cde3dfbfe688
| 0
|
sguilhen/wildfly-elytron,wildfly-security/wildfly-elytron,sguilhen/wildfly-elytron,wildfly-security/wildfly-elytron
|
/*
* JBoss, Home of Professional Open Source.
* Copyright 2014 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wildfly.security.ssl;
import java.security.KeyStore;
import java.security.NoSuchAlgorithmException;
import java.security.Provider;
import java.security.Provider.Service;
import java.util.IdentityHashMap;
import java.util.Map;
import java.util.Set;
import java.util.function.Supplier;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLEngine;
import javax.net.ssl.SSLSession;
import javax.net.ssl.TrustManager;
import javax.net.ssl.TrustManagerFactory;
import javax.net.ssl.X509TrustManager;
import org.wildfly.common.Assert;
import org.wildfly.security.OneTimeSecurityFactory;
import org.wildfly.security.SecurityFactory;
import org.wildfly.security._private.ElytronMessages;
import org.wildfly.security.auth.server.SecurityIdentity;
/**
* SSL factories and utilities.
*
* @author <a href="mailto:david.lloyd@redhat.com">David M. Lloyd</a>
*/
public final class SSLUtils {
private SSLUtils() {}
private static final String serviceType = SSLContext.class.getSimpleName();
/**
* The key used to store the authenticated {@link SecurityIdentity} onto the {@link SSLSession}.
*/
public static final String SSL_SESSION_IDENTITY_KEY = "org.wildfly.security.ssl.identity";
/**
* Create an SSL context factory which locates the best context by searching the preferred providers in order using
* the rules established in the given protocol selector. If there are no matches, a factory is returned which fails with a {@code NoSuchAlgorithmException} when an SSL context is requested from it.
*
* @param protocolSelector the protocol selector
* @param providerSupplier the provider supplier
* @return the SSL context factory
*/
public static SecurityFactory<SSLContext> createSslContextFactory(ProtocolSelector protocolSelector, Supplier<Provider[]> providerSupplier) {
Provider[] providers = providerSupplier.get();
Map<String, Provider> preferredProviderByAlgorithm = new IdentityHashMap<>();
// compile all the providers that support SSLContext.
for (Provider provider : providers) {
Set<Service> services = provider.getServices();
if (services != null) {
for (Provider.Service service : services) {
if (serviceType.equals(service.getType())) {
String protocolName = service.getAlgorithm();
if (!preferredProviderByAlgorithm.containsKey(protocolName)) {
preferredProviderByAlgorithm.put(protocolName, provider);
}
}
}
}
}
// now figure out the supported protocol set.
String[] supportedProtocols = protocolSelector.evaluate(preferredProviderByAlgorithm.keySet().toArray(new String[preferredProviderByAlgorithm.size()]));
for (String supportedProtocol : supportedProtocols) {
Provider provider = preferredProviderByAlgorithm.get(supportedProtocol);
if (provider != null) {
return createSimpleSslContextFactory(supportedProtocol, provider);
}
}
return SSLUtils::throwIt;
}
private static SSLContext throwIt() throws NoSuchAlgorithmException {
throw ElytronMessages.log.noAlgorithmForSslProtocol();
}
/**
* Create a simple security factory for SSL contexts.
*
* @param protocol the protocol name
* @param provider the provider to use
* @return the SSL context factory
*/
public static SecurityFactory<SSLContext> createSimpleSslContextFactory(String protocol, Provider provider) {
return () -> SSLContext.getInstance(protocol, provider);
}
/**
* Create a configured SSL context from an outside SSL context.
*
* @param original the original SSL context
* @param sslConfigurator the SSL configurator
* @return the configured SSL context
*/
public static SSLContext createConfiguredSslContext(SSLContext original, final SSLConfigurator sslConfigurator) {
return new DelegatingSSLContext(new ConfiguredSSLContextSpi(original, sslConfigurator));
}
/**
* Create a configured SSL context factory from an outside SSL context. The returned factory will create new instances
* for every call, so it might be necessary to wrap with a {@link OneTimeSecurityFactory} instance.
*
* @param originalFactory the original SSL context factory
* @param sslConfigurator the SSL configurator
* @return the configured SSL context
*/
public static SecurityFactory<SSLContext> createConfiguredSslContextFactory(SecurityFactory<SSLContext> originalFactory, final SSLConfigurator sslConfigurator) {
return () -> createConfiguredSslContext(originalFactory.create(), sslConfigurator);
}
private static final SecurityFactory<X509TrustManager> DEFAULT_TRUST_MANAGER_SECURITY_FACTORY = new OneTimeSecurityFactory<>(() -> {
final TrustManagerFactory trustManagerFactory = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
trustManagerFactory.init((KeyStore) null);
for (TrustManager trustManager : trustManagerFactory.getTrustManagers()) {
if (trustManager instanceof X509TrustManager) {
return (X509TrustManager) trustManager;
}
}
throw ElytronMessages.log.noDefaultTrustManager();
});
/**
* Get the platform's default X.509 trust manager security factory. The factory caches the instance.
*
* @return the security factory for the default trust manager
*/
public static SecurityFactory<X509TrustManager> getDefaultX509TrustManagerSecurityFactory() {
return DEFAULT_TRUST_MANAGER_SECURITY_FACTORY;
}
/**
* Get a server SSL engine which dispatches to the appropriate SSL context based on the SNI information in the
* SSL greeting.
*
* @param selector the context selector to use (cannot be {@code null})
* @return the SSL engine (not {@code null})
*/
public static SSLEngine createSNIDispatchingSSLEngine(SNIServerSSLContextSelector selector) {
Assert.checkNotNullParam("selector", selector);
return new SNIServerSSLEngine(selector);
}
/**
* Get a factory which produces SSL engines which dispatch to the appropriate SSL context based on the SNI information
* in the SSL greeting.
*
* @param selector the context selector to use (cannot be {@code null})
* @return the SSL engine factory (not {@code null})
*/
public static SecurityFactory<SSLEngine> createSNIDispatchingSSLEngineFactory(SNIServerSSLContextSelector selector) {
Assert.checkNotNullParam("selector", selector);
return () -> new SNIServerSSLEngine(selector);
}
}
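createSslContextFactory() above scans the supplied providers for SSLContext services and keeps the first provider seen per protocol. A minimal sketch of the same provider/service scan against the JVM's installed providers (illustrative class name, standard java.security API only):
import java.security.Provider;
import java.security.Security;
// Minimal sketch of enumerating which SSLContext protocols each installed
// provider advertises, mirroring the provider/service scan done above.
public class SslProviderScanSketch {
    public static void main(String[] args) {
        for (Provider provider : Security.getProviders()) {
            for (Provider.Service service : provider.getServices()) {
                if ("SSLContext".equals(service.getType())) {
                    System.out.println(provider.getName() + " supports " + service.getAlgorithm());
                }
            }
        }
    }
}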
|
src/main/java/org/wildfly/security/ssl/SSLUtils.java
|
/*
* JBoss, Home of Professional Open Source.
* Copyright 2014 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wildfly.security.ssl;
import java.security.KeyStore;
import java.security.NoSuchAlgorithmException;
import java.security.Provider;
import java.util.IdentityHashMap;
import java.util.Map;
import java.util.function.Supplier;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLEngine;
import javax.net.ssl.SSLSession;
import javax.net.ssl.TrustManager;
import javax.net.ssl.TrustManagerFactory;
import javax.net.ssl.X509TrustManager;
import org.wildfly.common.Assert;
import org.wildfly.security.OneTimeSecurityFactory;
import org.wildfly.security.SecurityFactory;
import org.wildfly.security._private.ElytronMessages;
import org.wildfly.security.auth.server.SecurityIdentity;
/**
* SSL factories and utilities.
*
* @author <a href="mailto:david.lloyd@redhat.com">David M. Lloyd</a>
*/
public final class SSLUtils {
private SSLUtils() {}
private static final String serviceType = SSLContext.class.getSimpleName();
/**
* The key used to store the authenticated {@link SecurityIdentity} onto the {@link SSLSession}.
*/
public static final String SSL_SESSION_IDENTITY_KEY = "org.wildfly.security.ssl.identity";
/**
* Create an SSL context factory which locates the best context by searching the preferred providers in order using
     * the rules established in the given protocol selector. If there are no matches, a factory is returned which
     * throws a {@link NoSuchAlgorithmException} from its {@code create()} method.
     *
* @param protocolSelector the protocol selector
* @param providerSupplier the provider supplier
* @return the SSL context factory
*/
public static SecurityFactory<SSLContext> createSslContextFactory(ProtocolSelector protocolSelector, Supplier<Provider[]> providerSupplier) {
Provider[] providers = providerSupplier.get();
Map<String, Provider> preferredProviderByAlgorithm = new IdentityHashMap<>();
// compile all the providers that support SSLContext.
for (Provider provider : providers) {
for (Provider.Service service : provider.getServices()) {
if (serviceType.equals(service.getType())) {
String protocolName = service.getAlgorithm();
if (! preferredProviderByAlgorithm.containsKey(protocolName)) {
preferredProviderByAlgorithm.put(protocolName, provider);
}
}
}
}
// now figure out the supported protocol set.
String[] supportedProtocols = protocolSelector.evaluate(preferredProviderByAlgorithm.keySet().toArray(new String[preferredProviderByAlgorithm.size()]));
for (String supportedProtocol : supportedProtocols) {
Provider provider = preferredProviderByAlgorithm.get(supportedProtocol);
if (provider != null) {
return createSimpleSslContextFactory(supportedProtocol, provider);
}
}
return SSLUtils::throwIt;
}
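    // Illustrative sketch (not part of the original source): driving the lookup with the installed
    // JCA providers. The ProtocolSelector is taken as a parameter because its construction is defined
    // elsewhere in this package.
    private static SSLContext exampleLocateSslContext(final ProtocolSelector protocolSelector)
            throws java.security.GeneralSecurityException {
        return createSslContextFactory(protocolSelector, java.security.Security::getProviders).create();
    }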
private static SSLContext throwIt() throws NoSuchAlgorithmException {
throw ElytronMessages.log.noAlgorithmForSslProtocol();
}
/**
* Create a simple security factory for SSL contexts.
*
* @param protocol the protocol name
* @param provider the provider to use
* @return the SSL context factory
*/
public static SecurityFactory<SSLContext> createSimpleSslContextFactory(String protocol, Provider provider) {
return () -> SSLContext.getInstance(protocol, provider);
}
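    // Illustrative sketch (not part of the original source): a fixed protocol/provider pair. The
    // protocol name and the provider name are hypothetical choices, not values mandated by this class.
    private static SecurityFactory<SSLContext> exampleSimpleTlsFactory() {
        return createSimpleSslContextFactory("TLSv1.2", java.security.Security.getProvider("SunJSSE"));
    }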
/**
* Create a configured SSL context from an outside SSL context.
*
* @param original the original SSL context
* @param sslConfigurator the SSL configurator
* @return the configured SSL context
*/
public static SSLContext createConfiguredSslContext(SSLContext original, final SSLConfigurator sslConfigurator) {
return new DelegatingSSLContext(new ConfiguredSSLContextSpi(original, sslConfigurator));
}
/**
* Create a configured SSL context factory from an outside SSL context. The returned factory will create new instances
* for every call, so it might be necessary to wrap with a {@link OneTimeSecurityFactory} instance.
*
* @param originalFactory the original SSL context factory
* @param sslConfigurator the SSL configurator
* @return the configured SSL context
*/
public static SecurityFactory<SSLContext> createConfiguredSslContextFactory(SecurityFactory<SSLContext> originalFactory, final SSLConfigurator sslConfigurator) {
return () -> createConfiguredSslContext(originalFactory.create(), sslConfigurator);
}
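    // Illustrative sketch (not part of the original source): caching a configured context factory with
    // OneTimeSecurityFactory, as the javadoc above suggests. The SSLConfigurator is assumed to come
    // from elsewhere in this package.
    private static SecurityFactory<SSLContext> exampleCachedConfiguredFactory(
            final SecurityFactory<SSLContext> original, final SSLConfigurator configurator) {
        return new OneTimeSecurityFactory<>(createConfiguredSslContextFactory(original, configurator));
    }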
private static final SecurityFactory<X509TrustManager> DEFAULT_TRUST_MANAGER_SECURITY_FACTORY = new OneTimeSecurityFactory<>(() -> {
final TrustManagerFactory trustManagerFactory = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
trustManagerFactory.init((KeyStore) null);
for (TrustManager trustManager : trustManagerFactory.getTrustManagers()) {
if (trustManager instanceof X509TrustManager) {
return (X509TrustManager) trustManager;
}
}
throw ElytronMessages.log.noDefaultTrustManager();
});
/**
* Get the platform's default X.509 trust manager security factory. The factory caches the instance.
*
* @return the security factory for the default trust manager
*/
public static SecurityFactory<X509TrustManager> getDefaultX509TrustManagerSecurityFactory() {
return DEFAULT_TRUST_MANAGER_SECURITY_FACTORY;
}
/**
* Get a server SSL engine which dispatches to the appropriate SSL context based on the SNI information in the
* SSL greeting.
*
* @param selector the context selector to use (cannot be {@code null})
* @return the SSL engine (not {@code null})
*/
public static SSLEngine createSNIDispatchingSSLEngine(SNIServerSSLContextSelector selector) {
Assert.checkNotNullParam("selector", selector);
return new SNIServerSSLEngine(selector);
}
/**
* Get a factory which produces SSL engines which dispatch to the appropriate SSL context based on the SNI information
* in the SSL greeting.
*
* @param selector the context selector to use (cannot be {@code null})
* @return the SSL engine factory (not {@code null})
*/
public static SecurityFactory<SSLEngine> createSNIDispatchingSSLEngineFactory(SNIServerSSLContextSelector selector) {
Assert.checkNotNullParam("selector", selector);
return () -> new SNIServerSSLEngine(selector);
}
}
|
[ELY-424] Add a null check for the Set of Services returned from the Provider.
|
src/main/java/org/wildfly/security/ssl/SSLUtils.java
|
[ELY-424] Add a null check for the Set of Services returned from the Provider.
|
|
Java
|
apache-2.0
|
323b8a19db20c00a7a4e583207690a112172fa66
| 0
|
SergeyTravin/pentaho-kettle,sajeetharan/pentaho-kettle,mdamour1976/pentaho-kettle,AliaksandrShuhayeu/pentaho-kettle,SergeyTravin/pentaho-kettle,yshakhau/pentaho-kettle,graimundo/pentaho-kettle,AlexanderBuloichik/pentaho-kettle,drndos/pentaho-kettle,dkincade/pentaho-kettle,ma459006574/pentaho-kettle,aminmkhan/pentaho-kettle,ddiroma/pentaho-kettle,skofra0/pentaho-kettle,aminmkhan/pentaho-kettle,drndos/pentaho-kettle,pentaho/pentaho-kettle,YuryBY/pentaho-kettle,skofra0/pentaho-kettle,GauravAshara/pentaho-kettle,birdtsai/pentaho-kettle,DFieldFL/pentaho-kettle,mbatchelor/pentaho-kettle,emartin-pentaho/pentaho-kettle,HiromuHota/pentaho-kettle,sajeetharan/pentaho-kettle,dkincade/pentaho-kettle,sajeetharan/pentaho-kettle,zlcnju/kettle,e-cuellar/pentaho-kettle,matrix-stone/pentaho-kettle,marcoslarsen/pentaho-kettle,yshakhau/pentaho-kettle,cjsonger/pentaho-kettle,IvanNikolaychuk/pentaho-kettle,IvanNikolaychuk/pentaho-kettle,kurtwalker/pentaho-kettle,pminutillo/pentaho-kettle,tmcsantos/pentaho-kettle,mdamour1976/pentaho-kettle,zlcnju/kettle,YuryBY/pentaho-kettle,matthewtckr/pentaho-kettle,pentaho/pentaho-kettle,wseyler/pentaho-kettle,emartin-pentaho/pentaho-kettle,pentaho/pentaho-kettle,alina-ipatina/pentaho-kettle,Advent51/pentaho-kettle,pminutillo/pentaho-kettle,e-cuellar/pentaho-kettle,brosander/pentaho-kettle,nicoben/pentaho-kettle,yshakhau/pentaho-kettle,AlexanderBuloichik/pentaho-kettle,jbrant/pentaho-kettle,gretchiemoran/pentaho-kettle,mkambol/pentaho-kettle,mattyb149/pentaho-kettle,birdtsai/pentaho-kettle,stepanovdg/pentaho-kettle,DFieldFL/pentaho-kettle,ivanpogodin/pentaho-kettle,tkafalas/pentaho-kettle,drndos/pentaho-kettle,eayoungs/pentaho-kettle,nantunes/pentaho-kettle,pavel-sakun/pentaho-kettle,graimundo/pentaho-kettle,eayoungs/pentaho-kettle,EcoleKeine/pentaho-kettle,ma459006574/pentaho-kettle,pentaho/pentaho-kettle,skofra0/pentaho-kettle,ivanpogodin/pentaho-kettle,graimundo/pentaho-kettle,DFieldFL/pentaho-kettle,pymjer/pentaho-kettle,pavel-sakun/pentaho-kettle,IvanNikolaychuk/pentaho-kettle,pedrofvteixeira/pentaho-kettle,mdamour1976/pentaho-kettle,rmansoor/pentaho-kettle,brosander/pentaho-kettle,EcoleKeine/pentaho-kettle,nantunes/pentaho-kettle,SergeyTravin/pentaho-kettle,brosander/pentaho-kettle,matthewtckr/pentaho-kettle,lgrill-pentaho/pentaho-kettle,denisprotopopov/pentaho-kettle,stepanovdg/pentaho-kettle,ccaspanello/pentaho-kettle,jbrant/pentaho-kettle,emartin-pentaho/pentaho-kettle,CapeSepias/pentaho-kettle,tmcsantos/pentaho-kettle,GauravAshara/pentaho-kettle,TatsianaKasiankova/pentaho-kettle,ccaspanello/pentaho-kettle,flbrino/pentaho-kettle,matrix-stone/pentaho-kettle,jbrant/pentaho-kettle,alina-ipatina/pentaho-kettle,stepanovdg/pentaho-kettle,graimundo/pentaho-kettle,akhayrutdinov/pentaho-kettle,CapeSepias/pentaho-kettle,HiromuHota/pentaho-kettle,pminutillo/pentaho-kettle,mkambol/pentaho-kettle,bmorrise/pentaho-kettle,mkambol/pentaho-kettle,stepanovdg/pentaho-kettle,Advent51/pentaho-kettle,marcoslarsen/pentaho-kettle,cjsonger/pentaho-kettle,ddiroma/pentaho-kettle,rmansoor/pentaho-kettle,ccaspanello/pentaho-kettle,MikhailHubanau/pentaho-kettle,pymjer/pentaho-kettle,IvanNikolaychuk/pentaho-kettle,eayoungs/pentaho-kettle,hudak/pentaho-kettle,andrei-viaryshka/pentaho-kettle,bmorrise/pentaho-kettle,kurtwalker/pentaho-kettle,lgrill-pentaho/pentaho-kettle,marcoslarsen/pentaho-kettle,nanata1115/pentaho-kettle,bmorrise/pentaho-kettle,TatsianaKasiankova/pentaho-kettle,ivanpogodin/pentaho-kettle,eayoungs/pentaho-kettle,dkincade/pentaho-kettle,lgrill-pentaho/pentaho-kettle,TatsianaKasiankova
/pentaho-kettle,stevewillcock/pentaho-kettle,kurtwalker/pentaho-kettle,cjsonger/pentaho-kettle,emartin-pentaho/pentaho-kettle,flbrino/pentaho-kettle,denisprotopopov/pentaho-kettle,birdtsai/pentaho-kettle,birdtsai/pentaho-kettle,YuryBY/pentaho-kettle,nantunes/pentaho-kettle,pedrofvteixeira/pentaho-kettle,roboguy/pentaho-kettle,AliaksandrShuhayeu/pentaho-kettle,tkafalas/pentaho-kettle,nicoben/pentaho-kettle,ma459006574/pentaho-kettle,DFieldFL/pentaho-kettle,gretchiemoran/pentaho-kettle,CapeSepias/pentaho-kettle,pymjer/pentaho-kettle,codek/pentaho-kettle,denisprotopopov/pentaho-kettle,mattyb149/pentaho-kettle,pavel-sakun/pentaho-kettle,matthewtckr/pentaho-kettle,tmcsantos/pentaho-kettle,airy-ict/pentaho-kettle,ivanpogodin/pentaho-kettle,EcoleKeine/pentaho-kettle,e-cuellar/pentaho-kettle,YuryBY/pentaho-kettle,ViswesvarSekar/pentaho-kettle,codek/pentaho-kettle,mbatchelor/pentaho-kettle,flbrino/pentaho-kettle,jbrant/pentaho-kettle,wseyler/pentaho-kettle,EcoleKeine/pentaho-kettle,akhayrutdinov/pentaho-kettle,lgrill-pentaho/pentaho-kettle,denisprotopopov/pentaho-kettle,rmansoor/pentaho-kettle,CapeSepias/pentaho-kettle,HiromuHota/pentaho-kettle,mbatchelor/pentaho-kettle,airy-ict/pentaho-kettle,AlexanderBuloichik/pentaho-kettle,roboguy/pentaho-kettle,ddiroma/pentaho-kettle,mkambol/pentaho-kettle,AliaksandrShuhayeu/pentaho-kettle,hudak/pentaho-kettle,mbatchelor/pentaho-kettle,hudak/pentaho-kettle,ccaspanello/pentaho-kettle,sajeetharan/pentaho-kettle,bmorrise/pentaho-kettle,nantunes/pentaho-kettle,tkafalas/pentaho-kettle,HiromuHota/pentaho-kettle,ma459006574/pentaho-kettle,pavel-sakun/pentaho-kettle,pedrofvteixeira/pentaho-kettle,marcoslarsen/pentaho-kettle,airy-ict/pentaho-kettle,MikhailHubanau/pentaho-kettle,roboguy/pentaho-kettle,cjsonger/pentaho-kettle,dkincade/pentaho-kettle,skofra0/pentaho-kettle,rmansoor/pentaho-kettle,mdamour1976/pentaho-kettle,airy-ict/pentaho-kettle,brosander/pentaho-kettle,andrei-viaryshka/pentaho-kettle,mattyb149/pentaho-kettle,yshakhau/pentaho-kettle,ViswesvarSekar/pentaho-kettle,drndos/pentaho-kettle,gretchiemoran/pentaho-kettle,AlexanderBuloichik/pentaho-kettle,GauravAshara/pentaho-kettle,akhayrutdinov/pentaho-kettle,matrix-stone/pentaho-kettle,matrix-stone/pentaho-kettle,wseyler/pentaho-kettle,alina-ipatina/pentaho-kettle,zlcnju/kettle,andrei-viaryshka/pentaho-kettle,ViswesvarSekar/pentaho-kettle,ddiroma/pentaho-kettle,hudak/pentaho-kettle,akhayrutdinov/pentaho-kettle,pymjer/pentaho-kettle,nicoben/pentaho-kettle,nanata1115/pentaho-kettle,mattyb149/pentaho-kettle,pedrofvteixeira/pentaho-kettle,ViswesvarSekar/pentaho-kettle,gretchiemoran/pentaho-kettle,stevewillcock/pentaho-kettle,TatsianaKasiankova/pentaho-kettle,codek/pentaho-kettle,e-cuellar/pentaho-kettle,roboguy/pentaho-kettle,pminutillo/pentaho-kettle,kurtwalker/pentaho-kettle,zlcnju/kettle,Advent51/pentaho-kettle,stevewillcock/pentaho-kettle,aminmkhan/pentaho-kettle,nanata1115/pentaho-kettle,matthewtckr/pentaho-kettle,nicoben/pentaho-kettle,MikhailHubanau/pentaho-kettle,codek/pentaho-kettle,nanata1115/pentaho-kettle,SergeyTravin/pentaho-kettle,GauravAshara/pentaho-kettle,tkafalas/pentaho-kettle,wseyler/pentaho-kettle,AliaksandrShuhayeu/pentaho-kettle,aminmkhan/pentaho-kettle,tmcsantos/pentaho-kettle,alina-ipatina/pentaho-kettle,stevewillcock/pentaho-kettle,flbrino/pentaho-kettle,Advent51/pentaho-kettle
|
//CHECKSTYLE:FileLength:OFF
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.ui.spoon;
import java.beans.PropertyChangeListener;
import java.beans.PropertyChangeSupport;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.lang.reflect.Method;
import java.net.MalformedURLException;
import java.net.URL;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.swing.UIManager;
import javax.swing.plaf.metal.MetalLookAndFeel;
import org.apache.commons.vfs.FileObject;
import org.eclipse.jface.action.Action;
import org.eclipse.jface.action.MenuManager;
import org.eclipse.jface.dialogs.MessageDialog;
import org.eclipse.jface.dialogs.MessageDialogWithToggle;
import org.eclipse.jface.window.ApplicationWindow;
import org.eclipse.jface.window.DefaultToolTip;
import org.eclipse.jface.window.ToolTip;
import org.eclipse.jface.wizard.Wizard;
import org.eclipse.jface.wizard.WizardDialog;
import org.eclipse.swt.SWT;
import org.eclipse.swt.SWTException;
import org.eclipse.swt.browser.LocationEvent;
import org.eclipse.swt.browser.LocationListener;
import org.eclipse.swt.custom.CTabFolder;
import org.eclipse.swt.custom.CTabItem;
import org.eclipse.swt.custom.SashForm;
import org.eclipse.swt.dnd.Clipboard;
import org.eclipse.swt.dnd.DND;
import org.eclipse.swt.dnd.DropTarget;
import org.eclipse.swt.dnd.DropTargetEvent;
import org.eclipse.swt.dnd.DropTargetListener;
import org.eclipse.swt.dnd.FileTransfer;
import org.eclipse.swt.dnd.Transfer;
import org.eclipse.swt.events.KeyAdapter;
import org.eclipse.swt.events.KeyEvent;
import org.eclipse.swt.events.MenuDetectEvent;
import org.eclipse.swt.events.MenuDetectListener;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.MouseAdapter;
import org.eclipse.swt.events.MouseEvent;
import org.eclipse.swt.events.MouseMoveListener;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.TreeAdapter;
import org.eclipse.swt.events.TreeEvent;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.graphics.Cursor;
import org.eclipse.swt.graphics.DeviceData;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.graphics.Rectangle;
import org.eclipse.swt.layout.FillLayout;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.printing.Printer;
import org.eclipse.swt.program.Program;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.FileDialog;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.Menu;
import org.eclipse.swt.widgets.MenuItem;
import org.eclipse.swt.widgets.MessageBox;
import org.eclipse.swt.widgets.Sash;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Text;
import org.eclipse.swt.widgets.ToolBar;
import org.eclipse.swt.widgets.ToolItem;
import org.eclipse.swt.widgets.Tree;
import org.eclipse.swt.widgets.TreeItem;
import org.pentaho.di.cluster.ClusterSchema;
import org.pentaho.di.cluster.SlaveServer;
import org.pentaho.di.core.AddUndoPositionInterface;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.DBCache;
import org.pentaho.di.core.EngineMetaInterface;
import org.pentaho.di.core.JndiUtil;
import org.pentaho.di.core.KettleClientEnvironment;
import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.core.LastUsedFile;
import org.pentaho.di.core.NotePadMeta;
import org.pentaho.di.core.ObjectUsageCount;
import org.pentaho.di.core.Props;
import org.pentaho.di.core.RowMetaAndData;
import org.pentaho.di.core.SourceToTargetMapping;
import org.pentaho.di.core.changed.ChangedFlagInterface;
import org.pentaho.di.core.changed.PDIObserver;
import org.pentaho.di.core.clipboard.ImageDataTransfer;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.encryption.Encr;
import org.pentaho.di.core.exception.KettleAuthException;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleMissingPluginsException;
import org.pentaho.di.core.exception.KettleRowException;
import org.pentaho.di.core.exception.KettleValueException;
import org.pentaho.di.core.exception.KettleXMLException;
import org.pentaho.di.core.gui.GUIFactory;
import org.pentaho.di.core.gui.OverwritePrompter;
import org.pentaho.di.core.gui.Point;
import org.pentaho.di.core.gui.SpoonFactory;
import org.pentaho.di.core.gui.SpoonInterface;
import org.pentaho.di.core.gui.UndoInterface;
import org.pentaho.di.core.lifecycle.LifeEventHandler;
import org.pentaho.di.core.lifecycle.LifeEventInfo;
import org.pentaho.di.core.lifecycle.LifecycleException;
import org.pentaho.di.core.lifecycle.LifecycleSupport;
import org.pentaho.di.core.logging.DefaultLogLevel;
import org.pentaho.di.core.logging.FileLoggingEventListener;
import org.pentaho.di.core.logging.KettleLogStore;
import org.pentaho.di.core.logging.LogChannel;
import org.pentaho.di.core.logging.LogChannelInterface;
import org.pentaho.di.core.logging.LogLevel;
import org.pentaho.di.core.logging.LoggingObjectInterface;
import org.pentaho.di.core.logging.LoggingObjectType;
import org.pentaho.di.core.logging.SimpleLoggingObject;
import org.pentaho.di.core.parameters.NamedParams;
import org.pentaho.di.core.plugins.JobEntryPluginType;
import org.pentaho.di.core.plugins.LifecyclePluginType;
import org.pentaho.di.core.plugins.PartitionerPluginType;
import org.pentaho.di.core.plugins.PluginFolder;
import org.pentaho.di.core.plugins.PluginInterface;
import org.pentaho.di.core.plugins.PluginRegistry;
import org.pentaho.di.core.plugins.PluginTypeInterface;
import org.pentaho.di.core.plugins.PluginTypeListener;
import org.pentaho.di.core.plugins.RepositoryPluginType;
import org.pentaho.di.core.plugins.StepPluginType;
import org.pentaho.di.core.reflection.StringSearchResult;
import org.pentaho.di.core.row.RowBuffer;
import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMeta;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.undo.TransAction;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.core.variables.Variables;
import org.pentaho.di.core.vfs.KettleVFS;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.imp.ImportRules;
import org.pentaho.di.job.Job;
import org.pentaho.di.job.JobExecutionConfiguration;
import org.pentaho.di.job.JobMeta;
import org.pentaho.di.job.entries.job.JobEntryJob;
import org.pentaho.di.job.entries.trans.JobEntryTrans;
import org.pentaho.di.job.entry.JobEntryCopy;
import org.pentaho.di.job.entry.JobEntryDialogInterface;
import org.pentaho.di.job.entry.JobEntryInterface;
import org.pentaho.di.laf.BasePropertyHandler;
import org.pentaho.di.metastore.MetaStoreConst;
import org.pentaho.di.pan.CommandLineOption;
import org.pentaho.di.partition.PartitionSchema;
import org.pentaho.di.pkg.JarfileGenerator;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.RepositoriesMeta;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.repository.RepositoryCapabilities;
import org.pentaho.di.repository.RepositoryDirectory;
import org.pentaho.di.repository.RepositoryDirectoryInterface;
import org.pentaho.di.repository.RepositoryElementInterface;
import org.pentaho.di.repository.RepositoryMeta;
import org.pentaho.di.repository.RepositoryObjectType;
import org.pentaho.di.repository.RepositoryOperation;
import org.pentaho.di.repository.RepositorySecurityManager;
import org.pentaho.di.repository.RepositorySecurityProvider;
import org.pentaho.di.resource.ResourceExportInterface;
import org.pentaho.di.resource.ResourceUtil;
import org.pentaho.di.resource.TopLevelResource;
import org.pentaho.di.shared.SharedObjectInterface;
import org.pentaho.di.shared.SharedObjects;
import org.pentaho.di.trans.DatabaseImpact;
import org.pentaho.di.trans.HasDatabasesInterface;
import org.pentaho.di.trans.HasSlaveServersInterface;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransExecutionConfiguration;
import org.pentaho.di.trans.TransHopMeta;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.RowDistributionInterface;
import org.pentaho.di.trans.step.RowDistributionPluginType;
import org.pentaho.di.trans.step.StepDialogInterface;
import org.pentaho.di.trans.step.StepErrorMeta;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInterface;
import org.pentaho.di.trans.step.StepPartitioningMeta;
import org.pentaho.di.trans.steps.selectvalues.SelectValuesMeta;
import org.pentaho.di.ui.cluster.dialog.ClusterSchemaDialog;
import org.pentaho.di.ui.cluster.dialog.SlaveServerDialog;
import org.pentaho.di.ui.core.ConstUI;
import org.pentaho.di.ui.core.PrintSpool;
import org.pentaho.di.ui.core.PropsUI;
import org.pentaho.di.ui.core.auth.AuthProviderDialog;
import org.pentaho.di.ui.core.database.wizard.CreateDatabaseWizard;
import org.pentaho.di.ui.core.dialog.CheckResultDialog;
import org.pentaho.di.ui.core.dialog.EnterMappingDialog;
import org.pentaho.di.ui.core.dialog.EnterOptionsDialog;
import org.pentaho.di.ui.core.dialog.EnterSearchDialog;
import org.pentaho.di.ui.core.dialog.EnterSelectionDialog;
import org.pentaho.di.ui.core.dialog.EnterStringsDialog;
import org.pentaho.di.ui.core.dialog.EnterTextDialog;
import org.pentaho.di.ui.core.dialog.ErrorDialog;
import org.pentaho.di.ui.core.dialog.KettlePropertiesFileDialog;
import org.pentaho.di.ui.core.dialog.PopupOverwritePrompter;
import org.pentaho.di.ui.core.dialog.PreviewRowsDialog;
import org.pentaho.di.ui.core.dialog.ShowBrowserDialog;
import org.pentaho.di.ui.core.dialog.ShowMessageDialog;
import org.pentaho.di.ui.core.dialog.Splash;
import org.pentaho.di.ui.core.dialog.SubjectDataBrowserDialog;
import org.pentaho.di.ui.core.gui.GUIResource;
import org.pentaho.di.ui.core.gui.WindowProperty;
import org.pentaho.di.ui.core.widget.OsHelper;
import org.pentaho.di.ui.core.widget.TreeMemory;
import org.pentaho.di.ui.imp.ImportRulesDialog;
import org.pentaho.di.ui.job.dialog.JobDialogPluginType;
import org.pentaho.di.ui.job.dialog.JobLoadProgressDialog;
import org.pentaho.di.ui.partition.dialog.PartitionSchemaDialog;
import org.pentaho.di.ui.repository.ILoginCallback;
import org.pentaho.di.ui.repository.RepositoriesDialog;
import org.pentaho.di.ui.repository.RepositorySecurityUI;
import org.pentaho.di.ui.repository.dialog.RepositoryDialogInterface;
import org.pentaho.di.ui.repository.dialog.RepositoryExportProgressDialog;
import org.pentaho.di.ui.repository.dialog.RepositoryImportProgressDialog;
import org.pentaho.di.ui.repository.dialog.RepositoryRevisionBrowserDialogInterface;
import org.pentaho.di.ui.repository.dialog.SelectDirectoryDialog;
import org.pentaho.di.ui.repository.dialog.SelectObjectDialog;
import org.pentaho.di.ui.repository.repositoryexplorer.RepositoryExplorer;
import org.pentaho.di.ui.repository.repositoryexplorer.RepositoryExplorerCallback;
import org.pentaho.di.ui.repository.repositoryexplorer.UISupportRegistery;
import org.pentaho.di.ui.repository.repositoryexplorer.model.UIRepositoryContent;
import org.pentaho.di.ui.repository.repositoryexplorer.uisupport.BaseRepositoryExplorerUISupport;
import org.pentaho.di.ui.repository.repositoryexplorer.uisupport.ManageUserUISupport;
import org.pentaho.di.ui.spoon.SpoonLifecycleListener.SpoonLifeCycleEvent;
import org.pentaho.di.ui.spoon.TabMapEntry.ObjectType;
import org.pentaho.di.ui.spoon.delegates.SpoonDelegates;
import org.pentaho.di.ui.spoon.dialog.AnalyseImpactProgressDialog;
import org.pentaho.di.ui.spoon.dialog.CheckTransProgressDialog;
import org.pentaho.di.ui.spoon.dialog.LogSettingsDialog;
import org.pentaho.di.ui.spoon.dialog.MetaStoreExplorerDialog;
import org.pentaho.di.ui.spoon.dialog.SaveProgressDialog;
import org.pentaho.di.ui.spoon.dialog.TipsDialog;
import org.pentaho.di.ui.spoon.job.JobGraph;
import org.pentaho.di.ui.spoon.trans.TransGraph;
import org.pentaho.di.ui.spoon.wizards.CopyTableWizardPage1;
import org.pentaho.di.ui.spoon.wizards.CopyTableWizardPage2;
import org.pentaho.di.ui.trans.dialog.TransDialogPluginType;
import org.pentaho.di.ui.trans.dialog.TransHopDialog;
import org.pentaho.di.ui.trans.dialog.TransLoadProgressDialog;
import org.pentaho.di.ui.util.HelpUtils;
import org.pentaho.di.ui.util.ThreadGuiResources;
import org.pentaho.di.ui.xul.KettleXulLoader;
import org.pentaho.di.version.BuildVersion;
import org.pentaho.metastore.api.IMetaStore;
import org.pentaho.metastore.api.exceptions.MetaStoreException;
import org.pentaho.metastore.stores.delegate.DelegatingMetaStore;
import org.pentaho.ui.xul.XulComponent;
import org.pentaho.ui.xul.XulDomContainer;
import org.pentaho.ui.xul.XulEventSource;
import org.pentaho.ui.xul.binding.BindingFactory;
import org.pentaho.ui.xul.binding.DefaultBindingFactory;
import org.pentaho.ui.xul.components.WaitBoxRunnable;
import org.pentaho.ui.xul.components.XulMenuitem;
import org.pentaho.ui.xul.components.XulMenuseparator;
import org.pentaho.ui.xul.components.XulToolbarbutton;
import org.pentaho.ui.xul.components.XulWaitBox;
import org.pentaho.ui.xul.containers.XulMenupopup;
import org.pentaho.ui.xul.containers.XulToolbar;
import org.pentaho.ui.xul.impl.XulEventHandler;
import org.pentaho.ui.xul.jface.tags.ApplicationWindowLocal;
import org.pentaho.ui.xul.jface.tags.JfaceMenuitem;
import org.pentaho.ui.xul.jface.tags.JfaceMenupopup;
import org.pentaho.ui.xul.swt.SwtXulLoader;
import org.pentaho.ui.xul.swt.tags.SwtDeck;
import org.pentaho.vfs.ui.VfsFileChooserDialog;
import org.pentaho.xul.swt.tab.TabItem;
import org.pentaho.xul.swt.tab.TabListener;
import org.pentaho.xul.swt.tab.TabSet;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
/**
* This class handles the main window of the Spoon graphical transformation editor.
*
* @author Matt
* @since 16-may-2003, i18n at 07-Feb-2006, redesign 01-Dec-2006
*/
public class Spoon extends ApplicationWindow implements AddUndoPositionInterface, TabListener, SpoonInterface,
OverwritePrompter, PDIObserver, LifeEventHandler, XulEventSource, XulEventHandler {
private static Class<?> PKG = Spoon.class;
public static final LoggingObjectInterface loggingObject = new SimpleLoggingObject( "Spoon", LoggingObjectType.SPOON,
null );
public static final String STRING_TRANSFORMATIONS = BaseMessages.getString( PKG, "Spoon.STRING_TRANSFORMATIONS" );
public static final String STRING_JOBS = BaseMessages.getString( PKG, "Spoon.STRING_JOBS" );
public static final String STRING_BUILDING_BLOCKS = BaseMessages.getString( PKG, "Spoon.STRING_BUILDING_BLOCKS" );
public static final String STRING_ELEMENTS = BaseMessages.getString( PKG, "Spoon.STRING_ELEMENTS" );
public static final String STRING_CONNECTIONS = BaseMessages.getString( PKG, "Spoon.STRING_CONNECTIONS" );
public static final String STRING_STEPS = BaseMessages.getString( PKG, "Spoon.STRING_STEPS" );
public static final String STRING_JOB_ENTRIES = BaseMessages.getString( PKG, "Spoon.STRING_JOB_ENTRIES" );
public static final String STRING_HOPS = BaseMessages.getString( PKG, "Spoon.STRING_HOPS" );
public static final String STRING_PARTITIONS = BaseMessages.getString( PKG, "Spoon.STRING_PARTITIONS" );
public static final String STRING_SLAVES = BaseMessages.getString( PKG, "Spoon.STRING_SLAVES" );
public static final String STRING_CLUSTERS = BaseMessages.getString( PKG, "Spoon.STRING_CLUSTERS" );
public static final String STRING_TRANS_BASE = BaseMessages.getString( PKG, "Spoon.STRING_BASE" );
public static final String STRING_HISTORY = BaseMessages.getString( PKG, "Spoon.STRING_HISTORY" );
public static final String STRING_TRANS_NO_NAME = BaseMessages.getString( PKG, "Spoon.STRING_TRANS_NO_NAME" );
public static final String STRING_JOB_NO_NAME = BaseMessages.getString( PKG, "Spoon.STRING_JOB_NO_NAME" );
public static final String STRING_TRANSFORMATION = BaseMessages.getString( PKG, "Spoon.STRING_TRANSFORMATION" );
public static final String STRING_JOB = BaseMessages.getString( PKG, "Spoon.STRING_JOB" );
private static final String SYNC_TRANS = "sync_trans_name_to_file_name";
public static final String APP_NAME = BaseMessages.getString( PKG, "Spoon.Application.Name" );
private static final String STRING_SPOON_MAIN_TREE = BaseMessages.getString( PKG, "Spoon.MainTree.Label" );
private static final String STRING_SPOON_CORE_OBJECTS_TREE = BaseMessages
.getString( PKG, "Spoon.CoreObjectsTree.Label" );
public static final String XML_TAG_TRANSFORMATION_STEPS = "transformation-steps";
public static final String XML_TAG_JOB_JOB_ENTRIES = "job-jobentries";
private static final String XML_TAG_STEPS = "steps";
public static final int MESSAGE_DIALOG_WITH_TOGGLE_YES_BUTTON_ID = 256;
public static final int MESSAGE_DIALOG_WITH_TOGGLE_NO_BUTTON_ID = 257;
public static final int MESSAGE_DIALOG_WITH_TOGGLE_CUSTOM_DISTRIBUTION_BUTTON_ID = 258;
private static Spoon staticSpoon;
private static LogChannelInterface log;
private Display display;
private Shell shell;
private static Splash splash;
private static FileLoggingEventListener fileLoggingEventListener;
private boolean destroy;
private SashForm sashform;
public TabSet tabfolder;
// THE HANDLERS
public SpoonDelegates delegates = new SpoonDelegates( this );
public RowMetaAndData variables = new RowMetaAndData( new RowMeta() );
/**
* These are the arguments that were given at Spoon launch time...
*/
private String[] arguments;
private boolean stopped;
private Cursor cursor_hourglass, cursor_hand;
public PropsUI props;
public Repository rep;
// private RepositorySecurityManager securityManager;
public RepositoryCapabilities capabilities;
// Save the last directory saved to for new files
// TODO: Save the last saved position to the defaultSaveLocation
private RepositoryDirectoryInterface defaultSaveLocation = null;
// Associate the defaultSaveLocation with a given repository; We should clear this out on a repo change
private Repository defaultSaveLocationRepository = null;
private CTabItem view, design;
private Label selectionLabel;
public Text selectionFilter;
private org.eclipse.swt.widgets.Menu fileMenus;
private static final String APP_TITLE = APP_NAME;
private static final String STRING_WELCOME_TAB_NAME = BaseMessages.getString( PKG, "Spoon.Title.STRING_WELCOME" );
private static final String STRING_DOCUMENT_TAB_NAME = BaseMessages.getString( PKG, "Spoon.Documentation" );
// "docs/English/welcome/index.html";
private static final String FILE_WELCOME_PAGE = Const
.safeAppendDirectory( BasePropertyHandler.getProperty( "documentationDirBase", "docs/" ),
BaseMessages.getString( PKG, "Spoon.Title.STRING_DOCUMENT_WELCOME" ) );
// "docs/English/welcome/index.html";
private static final String FILE_DOCUMENT_MAP = Const
.safeAppendDirectory( BasePropertyHandler.getProperty( "documentationDirBase", "docs/" ),
BaseMessages.getString( PKG, "Spoon.Title.STRING_DOCUMENT_MAP" ) );
private static final String UNDO_MENU_ITEM = "edit-undo";
private static final String REDO_MENU_ITEM = "edit-redo";
// "Undo : not available \tCTRL-Z"
private static final String UNDO_UNAVAILABLE = BaseMessages.getString( PKG, "Spoon.Menu.Undo.NotAvailable" );
// "Redo : not available \tCTRL-Y"
private static final String REDO_UNAVAILABLE = BaseMessages.getString( PKG, "Spoon.Menu.Redo.NotAvailable" );
private Composite tabComp;
private Tree selectionTree;
private Tree coreObjectsTree;
private TransExecutionConfiguration transExecutionConfiguration;
private TransExecutionConfiguration transPreviewExecutionConfiguration;
private TransExecutionConfiguration transDebugExecutionConfiguration;
private JobExecutionConfiguration jobExecutionConfiguration;
// private Menu spoonMenu; // Connections,
private int coreObjectsState = STATE_CORE_OBJECTS_NONE;
protected Map<String, FileListener> fileExtensionMap = new HashMap<String, FileListener>();
private List<Object[]> menuListeners = new ArrayList<Object[]>();
// loads the lifecycle listeners
private LifecycleSupport lifecycleSupport = new LifecycleSupport();
private Composite mainComposite;
private boolean viewSelected;
private boolean designSelected;
private Composite variableComposite;
private Map<String, String> coreStepToolTipMap;
private Map<String, String> coreJobToolTipMap;
private DefaultToolTip toolTip;
public Map<String, SharedObjects> sharedObjectsFileMap;
/**
* We can use this to set a default filter path in the open and save dialogs
*/
public String lastDirOpened;
private List<FileListener> fileListeners = new ArrayList<FileListener>();
private XulDomContainer mainSpoonContainer;
// Menu controllers to modify the main spoon menu
private List<ISpoonMenuController> menuControllers = new ArrayList<ISpoonMenuController>();
private XulToolbar mainToolbar;
private SwtDeck deck;
public static final String XUL_FILE_MAIN = "ui/spoon.xul";
private Map<String, XulComponent> menuMap = new HashMap<String, XulComponent>();
private RepositoriesDialog loginDialog;
private VfsFileChooserDialog vfsFileChooserDialog;
// the id of the perspective to start in, if any
protected String startupPerspective = null;
private CommandLineOption[] commandLineOptions;
public DelegatingMetaStore metaStore;
/**
* This is the main procedure for Spoon.
*
* @param a
* Arguments are available in the "Get System Info" step.
*/
public static void main( String[] a ) throws KettleException {
ExecutorService executor = Executors.newCachedThreadPool();
Future<KettleException> pluginRegistryFuture = executor.submit( new Callable<KettleException>() {
@Override
public KettleException call() throws Exception {
registerUIPluginObjectTypes();
try {
KettleEnvironment.init();
} catch ( KettleException e ) {
return e;
}
KettleClientEnvironment.getInstance().setClient( KettleClientEnvironment.ClientType.SPOON );
return null;
}
} );
try {
OsHelper.setAppName();
// Bootstrap Kettle
//
Display display;
if ( System.getProperties().containsKey( "SLEAK" ) ) {
DeviceData data = new DeviceData();
data.tracking = true;
display = new Display( data );
Sleak sleak = new Sleak();
Shell sleakShell = new Shell( display );
sleakShell.setText( "S-Leak" );
org.eclipse.swt.graphics.Point size = sleakShell.getSize();
sleakShell.setSize( size.x / 2, size.y / 2 );
sleak.create( sleakShell );
sleakShell.open();
} else {
display = new Display();
}
// Note: this needs to be done before the look and feel is set
OsHelper.initOsHandlers( display );
UIManager.setLookAndFeel( new MetalLookAndFeel() );
// The core plugin types don't know about UI classes. Add them in now
// before the PluginRegistry is inited.
splash = new Splash( display );
List<String> args = new ArrayList<String>( Arrays.asList( a ) );
CommandLineOption[] commandLineOptions = getCommandLineArgs( args );
KettleException registryException = pluginRegistryFuture.get();
if ( registryException != null ) {
throw registryException;
}
PropsUI.init( display, Props.TYPE_PROPERTIES_SPOON );
KettleLogStore
.init( PropsUI.getInstance().getMaxNrLinesInLog(), PropsUI.getInstance().getMaxLogLineTimeoutMinutes() );
initLogging( commandLineOptions );
// remember...
staticSpoon = new Spoon();
staticSpoon.commandLineOptions = commandLineOptions;
// pull the startup perspective id from the command line options and hand it to Spoon
String pId;
StringBuffer perspectiveIdBuff = Spoon.getCommandLineOption( commandLineOptions, "perspective" ).getArgument();
pId = perspectiveIdBuff.toString();
if ( !Const.isEmpty( pId ) ) {
Spoon.staticSpoon.startupPerspective = pId;
}
SpoonFactory.setSpoonInstance( staticSpoon );
staticSpoon.setDestroy( true );
GUIFactory.setThreadDialogs( new ThreadGuiResources() );
staticSpoon.setArguments( args.toArray( new String[ args.size() ] ) );
staticSpoon.start();
} catch ( Throwable t ) {
// avoid calls to Messages i18n method getString() in this block
// We do this to (hopefully) also catch Out of Memory Exceptions
//
t.printStackTrace();
if ( staticSpoon != null ) {
log.logError( "Fatal error : " + Const.NVL( t.toString(), Const.NVL( t.getMessage(), "Unknown error" ) ) );
log.logError( Const.getStackTracker( t ) );
}
}
// Kill all remaining things in this VM!
System.exit( 0 );
}
private static void initLogging( CommandLineOption[] options ) throws KettleException {
StringBuffer optionLogFile = getCommandLineOption( options, "logfile" ).getArgument();
StringBuffer optionLogLevel = getCommandLineOption( options, "level" ).getArgument();
// Set default Locale:
Locale.setDefault( Const.DEFAULT_LOCALE );
if ( !Const.isEmpty( optionLogFile ) ) {
fileLoggingEventListener = new FileLoggingEventListener( optionLogFile.toString(), true );
if ( log.isBasic() ) {
String filename = fileLoggingEventListener.getFilename();
log.logBasic( BaseMessages.getString( PKG, "Spoon.Log.LoggingToFile" ) + filename );
}
KettleLogStore.getAppender().addLoggingEventListener( fileLoggingEventListener );
} else {
fileLoggingEventListener = null;
}
if ( !Const.isEmpty( optionLogLevel ) ) {
log.setLogLevel( LogLevel.getLogLevelForCode( optionLogLevel.toString() ) );
if ( log.isBasic() ) {
// "Logging is at level : "
log.logBasic( BaseMessages.getString( PKG, "Spoon.Log.LoggingAtLevel" ) + log.getLogLevel().getDescription() );
}
}
}
public Spoon() {
this( null );
}
public Spoon( Repository rep ) {
super( null );
this.addMenuBar();
log = new LogChannel( APP_NAME );
SpoonFactory.setSpoonInstance( this );
// Load at least one local Pentaho metastore and add it to the delegating metastore
//
metaStore = new DelegatingMetaStore();
try {
IMetaStore localMetaStore = MetaStoreConst.openLocalPentahoMetaStore();
metaStore.addMetaStore( localMetaStore );
metaStore.setActiveMetaStoreName( localMetaStore.getName() );
if ( rep != null ) {
metaStore.addMetaStore( 0, rep.getMetaStore() );
metaStore.setActiveMetaStoreName( rep.getMetaStore().getName() );
}
} catch ( MetaStoreException e ) {
new ErrorDialog( shell, "Error opening Pentaho Metastore", "Unable to open local Pentaho Metastore", e );
}
setRepository( rep );
props = PropsUI.getInstance();
sharedObjectsFileMap = new Hashtable<String, SharedObjects>();
Thread uiThread = Thread.currentThread();
display = Display.findDisplay( uiThread );
staticSpoon = this;
try {
JndiUtil.initJNDI();
} catch ( Exception e ) {
new ErrorDialog( shell, "Unable to init simple JNDI", "Unable to init simple JNDI", e );
}
}
/**
* The core plugin types don't know about UI classes. This method adds those in before initialization.
*
* TODO: create a SpoonLifecycle listener that can notify interested parties of a pre-initialization state so this can
* happen in those listeners.
*/
private static void registerUIPluginObjectTypes() {
RepositoryPluginType.getInstance()
.addObjectType( RepositoryRevisionBrowserDialogInterface.class, "version-browser-classname" );
RepositoryPluginType.getInstance().addObjectType( RepositoryDialogInterface.class, "dialog-classname" );
PluginRegistry.addPluginType( SpoonPluginType.getInstance() );
SpoonPluginType.getInstance().getPluginFolders().add( new PluginFolder( "plugins/repositories", false, true ) );
LifecyclePluginType.getInstance().getPluginFolders().add( new PluginFolder( "plugins/spoon", false, true ) );
LifecyclePluginType.getInstance().getPluginFolders().add( new PluginFolder( "plugins/repositories", false, true ) );
PluginRegistry.addPluginType( JobDialogPluginType.getInstance() );
PluginRegistry.addPluginType( TransDialogPluginType.getInstance() );
}
public void init( TransMeta ti ) {
FormLayout layout = new FormLayout();
layout.marginWidth = 0;
layout.marginHeight = 0;
shell.setLayout( layout );
addFileListener( new TransFileListener() );
addFileListener( new JobFileListener() );
// INIT Data structure
if ( ti != null ) {
delegates.trans.addTransformation( ti );
}
// Load settings in the props
loadSettings();
transExecutionConfiguration = new TransExecutionConfiguration();
transExecutionConfiguration.setGatheringMetrics( true );
transPreviewExecutionConfiguration = new TransExecutionConfiguration();
transPreviewExecutionConfiguration.setGatheringMetrics( true );
transDebugExecutionConfiguration = new TransExecutionConfiguration();
transDebugExecutionConfiguration.setGatheringMetrics( true );
jobExecutionConfiguration = new JobExecutionConfiguration();
// Clean out every time we start, auto-loading etc, is not a good idea
// If they are needed that often, set them in the kettle.properties file
//
variables = new RowMetaAndData( new RowMeta() );
// props.setLook(shell);
Image[] images = { GUIResource.getInstance().getImageSpoonHigh(), GUIResource.getInstance().getImageSpoon() };
shell.setImages( images );
// shell.setImage(GUIResource.getInstance().getImageSpoon());
cursor_hourglass = new Cursor( display, SWT.CURSOR_WAIT );
cursor_hand = new Cursor( display, SWT.CURSOR_HAND );
Composite sashComposite = null;
MainSpoonPerspective mainPerspective = null;
try {
SwtXulLoader xulLoader = new KettleXulLoader();
xulLoader.setOuterContext( shell );
xulLoader.setSettingsManager( XulSpoonSettingsManager.getInstance() );
ApplicationWindowLocal.setApplicationWindow( this );
mainSpoonContainer = xulLoader.loadXul( XUL_FILE_MAIN, new XulSpoonResourceBundle() );
BindingFactory bf = new DefaultBindingFactory();
bf.setDocument( mainSpoonContainer.getDocumentRoot() );
mainSpoonContainer.addEventHandler( this );
/* menuBar = (XulMenubar) */
mainSpoonContainer.getDocumentRoot().getElementById( "spoon-menubar" );
mainToolbar = (XulToolbar) mainSpoonContainer.getDocumentRoot().getElementById( "main-toolbar" );
/* canvas = (XulVbox) */
mainSpoonContainer.getDocumentRoot().getElementById( "trans-job-canvas" );
deck = (SwtDeck) mainSpoonContainer.getDocumentRoot().getElementById( "canvas-deck" );
final Composite tempSashComposite = new Composite( shell, SWT.None );
sashComposite = tempSashComposite;
mainPerspective = new MainSpoonPerspective( tempSashComposite, tabfolder );
if ( startupPerspective == null ) {
startupPerspective = mainPerspective.getId();
}
SpoonPerspectiveManager.getInstance().setStartupPerspective( startupPerspective );
SpoonPerspectiveManager.getInstance().addPerspective( mainPerspective );
SpoonPluginManager.getInstance().applyPluginsForContainer( "spoon", mainSpoonContainer );
SpoonPerspectiveManager.getInstance().setDeck( deck );
SpoonPerspectiveManager.getInstance().setXulDoc( mainSpoonContainer );
SpoonPerspectiveManager.getInstance().initialize();
} catch ( Exception e ) {
LogChannel.GENERAL.logError( "Error initializing transformation", e );
}
// addBar();
// Set the shell size, based upon previous time...
WindowProperty windowProperty = props.getScreen( APP_TITLE );
if ( windowProperty != null ) {
windowProperty.setShell( shell );
} else {
shell.pack();
shell.setMaximized( true ); // Default = maximized!
}
layout = new FormLayout();
layout.marginWidth = 0;
layout.marginHeight = 0;
GridData data = new GridData();
data.grabExcessHorizontalSpace = true;
data.grabExcessVerticalSpace = true;
data.verticalAlignment = SWT.FILL;
data.horizontalAlignment = SWT.FILL;
sashComposite.setLayoutData( data );
sashComposite.setLayout( layout );
sashform = new SashForm( sashComposite, SWT.HORIZONTAL );
FormData fdSash = new FormData();
fdSash.left = new FormAttachment( 0, 0 );
// fdSash.top = new FormAttachment((org.eclipse.swt.widgets.ToolBar)
// toolbar.getNativeObject(), 0);
fdSash.top = new FormAttachment( 0, 0 );
fdSash.bottom = new FormAttachment( 100, 0 );
fdSash.right = new FormAttachment( 100, 0 );
sashform.setLayoutData( fdSash );
createPopupMenus();
addTree();
addTabs();
mainPerspective.setTabset( this.tabfolder );
( (Composite) deck.getManagedObject() ).layout( true, true );
SpoonPluginManager.getInstance().notifyLifecycleListeners( SpoonLifeCycleEvent.STARTUP );
// Add a browser widget
if ( props.showWelcomePageOnStartup() ) {
showWelcomePage();
}
// Allow data to be copied or moved to the drop target
int operations = DND.DROP_COPY | DND.DROP_DEFAULT;
DropTarget target = new DropTarget( shell, operations );
// Receive data in File format
final FileTransfer fileTransfer = FileTransfer.getInstance();
Transfer[] types = new Transfer[] { fileTransfer };
target.setTransfer( types );
target.addDropListener( new DropTargetListener() {
public void dragEnter( DropTargetEvent event ) {
if ( event.detail == DND.DROP_DEFAULT ) {
if ( ( event.operations & DND.DROP_COPY ) != 0 ) {
event.detail = DND.DROP_COPY;
} else {
event.detail = DND.DROP_NONE;
}
}
}
public void dragOver( DropTargetEvent event ) {
event.feedback = DND.FEEDBACK_SELECT | DND.FEEDBACK_SCROLL;
}
public void dragOperationChanged( DropTargetEvent event ) {
if ( event.detail == DND.DROP_DEFAULT ) {
if ( ( event.operations & DND.DROP_COPY ) != 0 ) {
event.detail = DND.DROP_COPY;
} else {
event.detail = DND.DROP_NONE;
}
}
}
public void dragLeave( DropTargetEvent event ) {
}
public void dropAccept( DropTargetEvent event ) {
}
public void drop( DropTargetEvent event ) {
if ( fileTransfer.isSupportedType( event.currentDataType ) ) {
String[] files = (String[]) event.data;
for ( String file : files ) {
openFile( file, false );
}
}
}
} );
// listen for steps being added or removed
PluginRegistry.getInstance().addPluginListener( StepPluginType.class, new PluginTypeListener() {
@Override
public void pluginAdded( Object serviceObject ) {
previousShowTrans = false; // hack to get the tree to reload
Display.getDefault().asyncExec( new Runnable() {
@Override
public void run() {
refreshCoreObjects();
}
} );
}
@Override
public void pluginRemoved( Object serviceObject ) {
previousShowTrans = false; // hack to get the tree to reload
Display.getDefault().asyncExec( new Runnable() {
@Override
public void run() {
refreshCoreObjects();
}
} );
}
@Override
public void pluginChanged( Object serviceObject ) {
}
} );
}
public XulDomContainer getMainSpoonContainer() {
return mainSpoonContainer;
}
public void loadPerspective( String id ) {
List<SpoonPerspective> perspectives = SpoonPerspectiveManager.getInstance().getPerspectives();
for ( int pos = 0; pos < perspectives.size(); pos++ ) {
SpoonPerspective perspective = perspectives.get( pos );
if ( perspective.getId().equals( id ) ) {
loadPerspective( pos );
return;
}
}
}
public void loadPerspective( int pos ) {
try {
SpoonPerspectiveManager.getInstance().activatePerspective(
SpoonPerspectiveManager.getInstance().getPerspectives().get( pos ).getClass() );
} catch ( KettleException e ) {
log.logError( "Error loading perspective", e );
}
}
public static Spoon getInstance() {
return staticSpoon;
}
public VfsFileChooserDialog getVfsFileChooserDialog( FileObject rootFile, FileObject initialFile ) {
if ( vfsFileChooserDialog == null ) {
vfsFileChooserDialog = new VfsFileChooserDialog( shell, KettleVFS.getInstance().getFileSystemManager(), rootFile,
initialFile );
}
vfsFileChooserDialog.setRootFile( rootFile );
vfsFileChooserDialog.setInitialFile( initialFile );
return vfsFileChooserDialog;
}
public boolean closeFile() {
boolean closed = true;
EngineMetaInterface meta = getActiveMeta();
if ( meta != null ) {
// If a transformation is the current active tab, close it
closed = tabCloseSelected();
}
return closed;
}
public boolean closeAllFiles() {
int numTabs = delegates.tabs.getTabs().size();
for ( int i = numTabs - 1; i >= 0; i-- ) {
tabfolder.setSelected( i );
if ( !closeFile() ) {
return false; // A single cancel aborts the rest of the operation
}
}
return true;
}
/**
* Prompt user to close all open Jobs & Transformations if they have execute permissions.
* If they don't have execute permission then warn user if they really want to disconnect
* from repository. If yes, close all tabs.
*
* @return If user agrees with closing of tabs then return true so we can disconnect from the repo.
*/
public boolean closeAllJobsAndTransformations() {
// Check to see if there are any open jobs/trans. If there are not any then we don't need to close anything.
// Keep in mind that the 'Welcome' tab can be active.
final List<TransMeta> transList = delegates.trans.getTransformationList();
final List<JobMeta> jobList = delegates.jobs.getJobList();
if ( ( transList.size() == 0 ) && ( jobList.size() == 0 ) ) {
return true;
}
boolean createPerms = !RepositorySecurityUI
.verifyOperations( shell, rep, false, RepositoryOperation.MODIFY_TRANSFORMATION,
RepositoryOperation.MODIFY_JOB );
boolean executePerms = !RepositorySecurityUI
.verifyOperations( shell, rep, false, RepositoryOperation.EXECUTE_TRANSFORMATION,
RepositoryOperation.EXECUTE_JOB );
boolean readPerms = !RepositorySecurityUI
.verifyOperations( shell, rep, false, RepositoryOperation.READ_TRANSFORMATION, RepositoryOperation.READ_JOB );
// Check to see if display of warning dialog has been disabled
String warningTitle = BaseMessages.getString( PKG, "Spoon.Dialog.WarnToCloseAllForce.Disconnect.Title" );
String warningText = BaseMessages.getString( PKG, "Spoon.Dialog.WarnToCloseAllForce.Disconnect.Message" );
int buttons = SWT.OK;
if ( readPerms && createPerms && executePerms ) {
warningTitle = BaseMessages.getString( PKG, "Spoon.Dialog.WarnToCloseAllOption.Disconnect.Title" );
warningText = BaseMessages.getString( PKG, "Spoon.Dialog.WarnToCloseAllOption.Disconnect.Message" );
buttons = SWT.YES | SWT.NO;
}
MessageBox mb = new MessageBox( Spoon.getInstance().getShell(), buttons | SWT.ICON_WARNING );
mb.setMessage( warningText );
mb.setText( warningTitle );
final int isCloseAllFiles = mb.open();
if ( ( isCloseAllFiles == SWT.YES ) || ( isCloseAllFiles == SWT.OK ) ) {
// Yes - User specified that they want to close all.
return Spoon.getInstance().closeAllFiles();
} else if ( ( isCloseAllFiles == SWT.NO ) && ( executePerms ) ) {
      // No - leave the tabs open; this choice is only offered when the user has execute permissions.
// Return true so we can disconnect from repo
return true;
} else {
// Cancel - don't close tabs and don't disconnect from repo
return false;
}
}
public void closeSpoonBrowser() {
TabMapEntry browserTab = delegates.tabs.findTabMapEntry( STRING_WELCOME_TAB_NAME, ObjectType.BROWSER );
if ( browserTab != null ) {
delegates.tabs.removeTab( browserTab );
}
}
/**
* Search the transformation meta-data.
*
*/
public void searchMetaData() {
TransMeta[] transMetas = getLoadedTransformations();
JobMeta[] jobMetas = getLoadedJobs();
if ( ( transMetas == null || transMetas.length == 0 ) && ( jobMetas == null || jobMetas.length == 0 ) ) {
return;
}
EnterSearchDialog esd = new EnterSearchDialog( shell );
if ( !esd.open() ) {
return;
}
List<Object[]> rows = new ArrayList<Object[]>();
for ( TransMeta transMeta : transMetas ) {
String filter = esd.getFilterString();
if ( filter != null ) {
filter = filter.toUpperCase();
}
List<StringSearchResult> stringList =
transMeta.getStringList( esd.isSearchingSteps(), esd.isSearchingDatabases(), esd.isSearchingNotes() );
for ( StringSearchResult result : stringList ) {
boolean add = Const.isEmpty( filter );
if ( filter != null && result.getString().toUpperCase().contains( filter ) ) {
add = true;
}
if ( filter != null && result.getFieldName().toUpperCase().contains( filter ) ) {
add = true;
}
if ( filter != null && result.getParentObject().toString().toUpperCase().contains( filter ) ) {
add = true;
}
if ( filter != null && result.getGrandParentObject().toString().toUpperCase().contains( filter ) ) {
add = true;
}
if ( add ) {
rows.add( result.toRow() );
}
}
}
for ( JobMeta jobMeta : jobMetas ) {
String filter = esd.getFilterString();
if ( filter != null ) {
filter = filter.toUpperCase();
}
List<StringSearchResult> stringList =
jobMeta.getStringList( esd.isSearchingSteps(), esd.isSearchingDatabases(), esd.isSearchingNotes() );
for ( StringSearchResult result : stringList ) {
boolean add = Const.isEmpty( filter );
if ( filter != null && result.getString().toUpperCase().contains( filter ) ) {
add = true;
}
if ( filter != null && result.getFieldName().toUpperCase().contains( filter ) ) {
add = true;
}
if ( filter != null && result.getParentObject().toString().toUpperCase().contains( filter ) ) {
add = true;
}
if ( filter != null && result.getGrandParentObject().toString().toUpperCase().contains( filter ) ) {
add = true;
}
if ( add ) {
rows.add( result.toRow() );
}
}
}
if ( rows.size() != 0 ) {
PreviewRowsDialog prd =
new PreviewRowsDialog( shell, Variables.getADefaultVariableSpace(), SWT.NONE, BaseMessages.getString(
PKG, "Spoon.StringSearchResult.Subtitle" ), StringSearchResult.getResultRowMeta(), rows );
String title = BaseMessages.getString( PKG, "Spoon.StringSearchResult.Title" );
String message = BaseMessages.getString( PKG, "Spoon.StringSearchResult.Message" );
prd.setTitleMessage( title, message );
prd.open();
} else {
MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_INFORMATION );
mb.setMessage( BaseMessages.getString( PKG, "Spoon.Dialog.NothingFound.Message" ) );
mb.setText( BaseMessages.getString( PKG, "Spoon.Dialog.NothingFound.Title" ) ); // Sorry!
mb.open();
}
}
public void showArguments() {
RowMetaAndData allArgs = new RowMetaAndData();
for ( int ii = 0; ii < arguments.length; ++ii ) {
allArgs.addValue( new ValueMeta(
Props.STRING_ARGUMENT_NAME_PREFIX + ( 1 + ii ), ValueMetaInterface.TYPE_STRING ), arguments[ii] );
}
    // Now ask the user for more info on these!
EnterStringsDialog esd = new EnterStringsDialog( shell, SWT.NONE, allArgs );
esd.setTitle( BaseMessages.getString( PKG, "Spoon.Dialog.ShowArguments.Title" ) );
esd.setMessage( BaseMessages.getString( PKG, "Spoon.Dialog.ShowArguments.Message" ) );
esd.setReadOnly( true );
esd.setShellImage( GUIResource.getInstance().getImageLogoSmall() );
esd.open();
}
private void fillVariables( RowMetaAndData vars ) {
TransMeta[] transMetas = getLoadedTransformations();
JobMeta[] jobMetas = getLoadedJobs();
if ( ( transMetas == null || transMetas.length == 0 ) && ( jobMetas == null || jobMetas.length == 0 ) ) {
return;
}
Properties sp = new Properties();
sp.putAll( System.getProperties() );
VariableSpace space = Variables.getADefaultVariableSpace();
String[] keys = space.listVariables();
for ( String key : keys ) {
sp.put( key, space.getVariable( key ) );
}
for ( TransMeta transMeta : transMetas ) {
List<String> list = transMeta.getUsedVariables();
for ( String varName : list ) {
String varValue = sp.getProperty( varName, "" );
if ( vars.getRowMeta().indexOfValue( varName ) < 0 && !varName.startsWith( Const.INTERNAL_VARIABLE_PREFIX ) ) {
vars.addValue( new ValueMeta( varName, ValueMetaInterface.TYPE_STRING ), varValue );
}
}
}
for ( JobMeta jobMeta : jobMetas ) {
List<String> list = jobMeta.getUsedVariables();
for ( String varName : list ) {
String varValue = sp.getProperty( varName, "" );
if ( vars.getRowMeta().indexOfValue( varName ) < 0 && !varName.startsWith( Const.INTERNAL_VARIABLE_PREFIX ) ) {
vars.addValue( new ValueMeta( varName, ValueMetaInterface.TYPE_STRING ), varValue );
}
}
}
}
public void setVariables() {
fillVariables( variables );
    // Now ask the user for more info on these!
EnterStringsDialog esd = new EnterStringsDialog( shell, SWT.NONE, variables );
esd.setTitle( BaseMessages.getString( PKG, "Spoon.Dialog.SetVariables.Title" ) );
esd.setMessage( BaseMessages.getString( PKG, "Spoon.Dialog.SetVariables.Message" ) );
esd.setReadOnly( false );
esd.setShellImage( GUIResource.getInstance().getImageVariable() );
if ( esd.open() != null ) {
applyVariables();
}
}
public void applyVariables() {
for ( int i = 0; i < variables.size(); i++ ) {
try {
String name = variables.getValueMeta( i ).getName();
String value = variables.getString( i, "" );
applyVariableToAllLoadedObjects( name, value );
} catch ( KettleValueException e ) {
// Just eat the exception. getString() should never give an
// exception.
log.logDebug( "Unexpected exception occurred : " + e.getMessage() );
}
}
}
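  /**
   * Sets the given variable on all loaded transformations and jobs as well as on the transformation, job and
   * transformation-debug execution configurations.
   *
   * @param name
   *          the variable name
   * @param value
   *          the variable value (a null value is stored as an empty string on the loaded metadata)
   */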
public void applyVariableToAllLoadedObjects( String name, String value ) {
// We want to insert the variables into all loaded jobs and
// transformations
//
for ( TransMeta transMeta : getLoadedTransformations() ) {
transMeta.setVariable( name, Const.NVL( value, "" ) );
}
for ( JobMeta jobMeta : getLoadedJobs() ) {
jobMeta.setVariable( name, Const.NVL( value, "" ) );
}
// Not only that, we also want to set the variables in the
// execution configurations...
//
transExecutionConfiguration.getVariables().put( name, value );
jobExecutionConfiguration.getVariables().put( name, value );
transDebugExecutionConfiguration.getVariables().put( name, value );
}
public void showVariables() {
fillVariables( variables );
    // Now ask the user for more info on these!
EnterStringsDialog esd = new EnterStringsDialog( shell, SWT.NONE, variables );
esd.setTitle( BaseMessages.getString( PKG, "Spoon.Dialog.ShowVariables.Title" ) );
esd.setMessage( BaseMessages.getString( PKG, "Spoon.Dialog.ShowVariables.Message" ) );
esd.setReadOnly( true );
esd.setShellImage( GUIResource.getInstance().getImageVariable() );
esd.open();
}
public void openSpoon() {
shell = getShell();
shell.setText( APP_TITLE );
mainComposite.setRedraw( true );
mainComposite.setVisible( false );
mainComposite.setVisible( true );
mainComposite.redraw();
// Perhaps the transformation contains elements at startup?
refreshTree(); // Do a complete refresh then...
setShellText();
}
public boolean readAndDispatch() {
return display.readAndDispatch();
}
/**
   * @return whether or not the application was stopped.
*/
public boolean isStopped() {
return stopped;
}
/**
* @param stopped
* True to stop this application.
*/
public void setStopped( boolean stopped ) {
this.stopped = stopped;
}
/**
* @param destroy
* Whether or not to destroy the display.
*/
public void setDestroy( boolean destroy ) {
this.destroy = destroy;
}
/**
* @return Returns whether or not we should destroy the display.
*/
public boolean doDestroy() {
return destroy;
}
/**
* @param arguments
* The arguments to set.
*/
public void setArguments( String[] arguments ) {
this.arguments = arguments;
}
/**
* @return Returns the arguments.
*/
public String[] getArguments() {
return arguments;
}
public synchronized void dispose() {
setStopped( true );
cursor_hand.dispose();
cursor_hourglass.dispose();
if ( destroy && ( display != null ) && !display.isDisposed() ) {
try {
display.dispose();
} catch ( SWTException e ) {
// dispose errors
}
}
}
public boolean isDisposed() {
return display.isDisposed();
}
public void sleep() {
display.sleep();
}
public void undoAction() {
undoAction( getActiveUndoInterface() );
}
public void redoAction() {
redoAction( getActiveUndoInterface() );
}
/**
* It's called copySteps, but the job entries also arrive at this location
*/
public void copySteps() {
TransMeta transMeta = getActiveTransformation();
if ( transMeta != null ) {
copySelected( transMeta, transMeta.getSelectedSteps(), transMeta.getSelectedNotes() );
}
JobMeta jobMeta = getActiveJob();
if ( jobMeta != null ) {
copyJobentries();
}
}
public void copyJobentries() {
JobMeta jobMeta = getActiveJob();
if ( jobMeta != null ) {
if ( RepositorySecurityUI.verifyOperations( shell, rep,
RepositoryOperation.MODIFY_JOB, RepositoryOperation.EXECUTE_JOB ) ) {
return;
}
delegates.jobs.copyJobEntries( jobMeta, jobMeta.getSelectedEntries() );
}
}
public void copy() {
TransMeta transMeta = getActiveTransformation();
JobMeta jobMeta = getActiveJob();
boolean transActive = transMeta != null;
boolean jobActive = jobMeta != null;
if ( transActive ) {
if ( transMeta.getSelectedSteps().size() > 0 ) {
copySteps();
} else {
copyTransformation();
}
} else if ( jobActive ) {
if ( jobMeta.getSelectedEntries().size() > 0 ) {
copyJobentries();
} else {
copyJob();
}
}
}
public void copyFile() {
TransMeta transMeta = getActiveTransformation();
JobMeta jobMeta = getActiveJob();
boolean transActive = transMeta != null;
boolean jobActive = jobMeta != null;
if ( transActive ) {
copyTransformation();
} else if ( jobActive ) {
copyJob();
}
}
public void cut() {
TransMeta transMeta = getActiveTransformation();
JobMeta jobMeta = getActiveJob();
boolean transActive = transMeta != null;
boolean jobActive = jobMeta != null;
if ( transActive ) {
List<StepMeta> stepMetas = transMeta.getSelectedSteps();
if ( stepMetas != null && stepMetas.size() > 0 ) {
copySteps();
for ( StepMeta stepMeta : stepMetas ) {
delStep( transMeta, stepMeta );
}
}
} else if ( jobActive ) {
List<JobEntryCopy> jobEntryCopies = jobMeta.getSelectedEntries();
if ( jobEntryCopies != null && jobEntryCopies.size() > 0 ) {
copyJobentries();
for ( JobEntryCopy jobEntryCopy : jobEntryCopies ) {
deleteJobEntryCopies( jobMeta, jobEntryCopy );
}
}
}
}
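  /**
   * Removes the menu item with the given id from the main Spoon menu. When requested, a trailing separator that is
   * left at the end of the parent menu is removed as well.
   *
   * @param itemid
   *          the id of the menu item to remove
   * @param removeTrailingSeparators
   *          true to also remove a trailing separator from the parent menu
   */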
public void removeMenuItem( String itemid, boolean removeTrailingSeparators ) {
XulMenuitem item = (XulMenuitem) mainSpoonContainer.getDocumentRoot().getElementById( itemid );
if ( item != null ) {
XulComponent menu = item.getParent();
item.getParent().removeChild( item );
if ( removeTrailingSeparators ) {
List<XulComponent> children = menu.getChildNodes();
if ( children.size() > 0 ) {
XulComponent lastMenuItem = children.get( children.size() - 1 );
if ( lastMenuItem instanceof XulMenuseparator ) {
menu.removeChild( lastMenuItem );
// above call should work, but doesn't for some reason, removing separator by force
// the menu separators seem to not be modeled as individual objects in XUL
try {
Menu swtm = (Menu) menu.getManagedObject();
swtm.getItems()[swtm.getItemCount() - 1].dispose();
} catch ( Throwable t ) {
LogChannel.GENERAL.logError( "Error removing XUL menu item", t );
}
}
}
}
} else {
log.logError( "Could not find menu item with id " + itemid + " to remove from Spoon menu" );
}
}
public void createPopupMenus() {
try {
menuMap.put( "trans-class", mainSpoonContainer.getDocumentRoot().getElementById( "trans-class" ) );
menuMap.put( "trans-class-new", mainSpoonContainer.getDocumentRoot().getElementById( "trans-class-new" ) );
menuMap.put( "job-class", mainSpoonContainer.getDocumentRoot().getElementById( "job-class" ) );
menuMap.put( "trans-hop-class", mainSpoonContainer.getDocumentRoot().getElementById( "trans-hop-class" ) );
menuMap.put( "database-class", mainSpoonContainer.getDocumentRoot().getElementById( "database-class" ) );
menuMap.put( "partition-schema-class", mainSpoonContainer.getDocumentRoot().getElementById(
"partition-schema-class" ) );
menuMap.put( "cluster-schema-class", mainSpoonContainer.getDocumentRoot().getElementById(
"cluster-schema-class" ) );
menuMap.put( "slave-cluster-class", mainSpoonContainer.getDocumentRoot().getElementById(
"slave-cluster-class" ) );
menuMap.put( "trans-inst", mainSpoonContainer.getDocumentRoot().getElementById( "trans-inst" ) );
menuMap.put( "job-inst", mainSpoonContainer.getDocumentRoot().getElementById( "job-inst" ) );
menuMap.put( "step-plugin", mainSpoonContainer.getDocumentRoot().getElementById( "step-plugin" ) );
menuMap.put( "database-inst", mainSpoonContainer.getDocumentRoot().getElementById( "database-inst" ) );
menuMap.put( "step-inst", mainSpoonContainer.getDocumentRoot().getElementById( "step-inst" ) );
menuMap.put( "job-entry-copy-inst", mainSpoonContainer.getDocumentRoot().getElementById(
"job-entry-copy-inst" ) );
menuMap.put( "trans-hop-inst", mainSpoonContainer.getDocumentRoot().getElementById( "trans-hop-inst" ) );
menuMap.put( "partition-schema-inst", mainSpoonContainer.getDocumentRoot().getElementById(
"partition-schema-inst" ) );
menuMap.put( "cluster-schema-inst", mainSpoonContainer.getDocumentRoot().getElementById(
"cluster-schema-inst" ) );
      menuMap.put( "slave-server-inst",
        mainSpoonContainer.getDocumentRoot().getElementById( "slave-server-inst" ) );
} catch ( Throwable t ) {
new ErrorDialog(
shell, BaseMessages.getString( PKG, "Spoon.Exception.ErrorReadingXULFile.Title" ), BaseMessages
.getString( PKG, "Spoon.Exception.ErrorReadingXULFile.Message", XUL_FILE_MAIN ), new Exception( t ) );
}
addMenuLast();
}
public void executeTransformation() {
executeTransformation(
getActiveTransformation(), true, false, false, false, false, transExecutionConfiguration.getReplayDate(),
false, transExecutionConfiguration.getLogLevel() );
}
public void previewTransformation() {
executeTransformation(
getActiveTransformation(), true, false, false, true, false, transDebugExecutionConfiguration
.getReplayDate(), true, transDebugExecutionConfiguration.getLogLevel() );
}
public void debugTransformation() {
executeTransformation(
getActiveTransformation(), true, false, false, false, true, transPreviewExecutionConfiguration
.getReplayDate(), true, transPreviewExecutionConfiguration.getLogLevel() );
}
public void checkTrans() {
checkTrans( getActiveTransformation() );
}
public void analyseImpact() {
analyseImpact( getActiveTransformation() );
}
public void showLastImpactAnalyses() {
showLastImpactAnalyses( getActiveTransformation() );
}
public void showLastTransPreview() {
TransGraph transGraph = getActiveTransGraph();
if ( transGraph != null ) {
transGraph.showLastPreviewResults();
}
}
public void showExecutionResults() {
TransGraph transGraph = getActiveTransGraph();
if ( transGraph != null ) {
transGraph.showExecutionResults();
enableMenus();
} else {
JobGraph jobGraph = getActiveJobGraph();
if ( jobGraph != null ) {
jobGraph.showExecutionResults();
enableMenus();
}
}
}
public boolean isExecutionResultsPaneVisible() {
TransGraph transGraph = getActiveTransGraph();
return ( transGraph != null ) && ( transGraph.isExecutionResultsPaneVisible() );
}
public void copyTransformation() {
copyTransformation( getActiveTransformation() );
}
public void copyTransformationImage() {
copyTransformationImage( getActiveTransformation() );
}
public boolean editTransformationProperties() {
return TransGraph.editProperties( getActiveTransformation(), this, rep, true );
}
public boolean editProperties() {
if ( getActiveTransformation() != null ) {
return editTransformationProperties();
} else if ( getActiveJob() != null ) {
return editJobProperties( "job-settings" );
}
// no properties were edited, so no cancel was clicked
return true;
}
public void executeJob() {
executeJob( getActiveJob(), true, false, null, false, null, 0 );
}
public void copyJob() {
copyJob( getActiveJob() );
}
public void showTips() {
new TipsDialog( shell ).open();
}
public void showWelcomePage() {
try {
LocationListener listener = new LocationListener() {
public void changing( LocationEvent event ) {
if ( event.location.endsWith( ".pdf" ) ) {
Program.launch( event.location );
event.doit = false;
} else if ( event.location.contains( "samples/transformations" )
|| event.location.contains( "samples/jobs" ) || event.location.contains( "samples/mapping" ) ) {
try {
FileObject fileObject = KettleVFS.getFileObject( event.location );
if ( fileObject.exists() ) {
if ( event.location.endsWith( ".ktr" ) || event.location.endsWith( ".kjb" ) ) {
openFile( event.location, false );
} else {
lastDirOpened = KettleVFS.getFilename( fileObject );
openFile( true );
}
event.doit = false;
}
} catch ( Exception e ) {
log.logError( "Error handling samples location: " + event.location, e );
}
}
}
public void changed( LocationEvent event ) {
// System.out.println("Changed to: " + event.location);
}
};
// see if we are in webstart mode
String webstartRoot = System.getProperty( "spoon.webstartroot" );
if ( webstartRoot != null ) {
URL url = new URL( webstartRoot + '/' + FILE_WELCOME_PAGE );
addSpoonBrowser( STRING_WELCOME_TAB_NAME, url.toString(), listener ); // ./docs/English/tips/index.htm
} else {
// see if we can find the welcome file on the file system
File file = new File( FILE_WELCOME_PAGE );
if ( file.exists() ) {
// ./docs/English/tips/index.htm
addSpoonBrowser( STRING_WELCOME_TAB_NAME, file.toURI().toURL().toString(), listener );
}
}
} catch ( MalformedURLException e1 ) {
log.logError( Const.getStackTracker( e1 ) );
}
}
public void showDocumentMap() {
try {
LocationListener listener = new LocationListener() {
public void changing( LocationEvent event ) {
if ( event.location.endsWith( ".pdf" ) ) {
Program.launch( event.location );
event.doit = false;
}
}
public void changed( LocationEvent event ) {
System.out.println( "Changed to: " + event.location );
}
};
// see if we are in webstart mode
String webstartRoot = System.getProperty( "spoon.webstartroot" );
if ( webstartRoot != null ) {
URL url = new URL( webstartRoot + '/' + FILE_DOCUMENT_MAP );
addSpoonBrowser( STRING_DOCUMENT_TAB_NAME, url.toString(), listener ); // ./docs/English/tips/index.htm
} else {
// see if we can find the welcome file on the file system
File file = new File( FILE_DOCUMENT_MAP );
if ( file.exists() ) {
// ./docs/English/tips/index.htm
addSpoonBrowser( STRING_DOCUMENT_TAB_NAME, file.toURI().toURL().toString(), listener );
}
}
} catch ( MalformedURLException e1 ) {
log.logError( Const.getStackTracker( e1 ) );
}
}
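  /**
   * Rebuilds the "recently opened files" menu from the last used files stored in the Spoon properties, shortening
   * long file names and assigning CTRL-1..CTRL-9 accelerators to the first nine entries.
   */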
public void addMenuLast() {
org.pentaho.ui.xul.dom.Document doc = mainSpoonContainer.getDocumentRoot();
JfaceMenupopup recentFilesPopup = (JfaceMenupopup) doc.getElementById( "file-open-recent-popup" );
recentFilesPopup.removeChildren();
// Previously loaded files...
List<LastUsedFile> lastUsedFiles = props.getLastUsedFiles();
for ( int i = 0; i < lastUsedFiles.size(); i++ ) {
final LastUsedFile lastUsedFile = lastUsedFiles.get( i );
char chr = (char) ( '1' + i );
String accessKey = "ctrl-" + chr;
String accessText = "CTRL-" + chr;
String text = lastUsedFile.toString();
String id = "last-file-" + i;
if ( i > 8 ) {
accessKey = null;
accessText = null;
}
final String lastFileId = Integer.toString( i );
Action action = new Action( "open-last-file-" + ( i + 1 ), Action.AS_DROP_DOWN_MENU ) {
public void run() {
lastFileSelect( lastFileId );
}
};
// shorten the filename if necessary
int targetLength = 40;
if ( text.length() > targetLength ) {
int lastSep = text.replace( '\\', '/' ).lastIndexOf( '/' );
if ( lastSep != -1 ) {
String fileName = "..." + text.substring( lastSep );
if ( fileName.length() < targetLength ) {
// add the start of the file path
int leadSize = targetLength - fileName.length();
text = text.substring( 0, leadSize ) + fileName;
} else {
text = fileName;
}
}
}
JfaceMenuitem miFileLast = new JfaceMenuitem( null, recentFilesPopup, mainSpoonContainer, text, 0, action );
miFileLast.setLabel( text );
miFileLast.setId( id );
if ( accessText != null && accessKey != null ) {
miFileLast.setAcceltext( accessText );
miFileLast.setAccesskey( accessKey );
}
if ( lastUsedFile.isTransformation() ) {
miFileLast.setImage( GUIResource.getInstance().getImageTransGraph() );
} else if ( lastUsedFile.isJob() ) {
miFileLast.setImage( GUIResource.getInstance().getImageJobGraph() );
}
miFileLast.setCommand( "spoon.lastFileSelect('" + i + "')" );
}
}
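  /**
   * Opens the last used file at the given index. If the file was stored in a repository other than the one we are
   * currently connected to, a repository login dialog is shown first.
   *
   * @param id
   *          the index of the last used file, as a String
   */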
public void lastFileSelect( String id ) {
int idx = Integer.parseInt( id );
List<LastUsedFile> lastUsedFiles = props.getLastUsedFiles();
final LastUsedFile lastUsedFile = lastUsedFiles.get( idx );
// If the file comes from a repository and it's not the same as
// the one we're connected to, ask for a username/password!
//
if ( lastUsedFile.isSourceRepository()
&& ( rep == null || !rep.getName().equalsIgnoreCase( lastUsedFile.getRepositoryName() ) ) ) {
// Ask for a username password to get the required repository access
//
loginDialog = new RepositoriesDialog( shell, lastUsedFile.getRepositoryName(), new ILoginCallback() {
public void onSuccess( Repository repository ) {
// Close the previous connection...
if ( rep != null ) {
rep.disconnect();
SpoonPluginManager
.getInstance().notifyLifecycleListeners( SpoonLifeCycleEvent.REPOSITORY_DISCONNECTED );
}
setRepository( repository );
try {
loadLastUsedFile( lastUsedFile, rep == null ? null : rep.getName() );
addMenuLast();
} catch ( KettleException ke ) {
// "Error loading transformation", "I was unable to load this
// transformation from the
// XML file because of an error"
new ErrorDialog( loginDialog.getShell(),
BaseMessages.getString( PKG, "Spoon.Dialog.LoadTransformationError.Title" ),
BaseMessages.getString( PKG, "Spoon.Dialog.LoadTransformationError.Message" ), ke );
}
}
public void onError( Throwable t ) {
onLoginError( t );
}
public void onCancel() {
}
} );
loginDialog.show();
} else if ( !lastUsedFile.isSourceRepository() ) {
// This file must have been on the file system.
openFile( lastUsedFile.getFilename(), false );
} else {
// read from a repository...
//
try {
loadLastUsedFile( lastUsedFile, rep == null ? null : rep.getName() );
addMenuLast();
} catch ( KettleException ke ) {
// "Error loading transformation", "I was unable to load this
// transformation from the
// XML file because of an error"
new ErrorDialog( loginDialog.getShell(),
BaseMessages.getString( PKG, "Spoon.Dialog.LoadTransformationError.Title" ),
BaseMessages.getString( PKG, "Spoon.Dialog.LoadTransformationError.Message" ), ke );
}
}
}
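  /**
   * Builds the left-hand panel of the Spoon window: the View/Design tab folder, the selection filter text box, the
   * expand/collapse tool bar and the composite that hosts the selection and core objects trees.
   */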
private void addTree() {
mainComposite = new Composite( sashform, SWT.BORDER );
mainComposite.setLayout( new FormLayout() );
// int mainMargin = 4;
// TODO: add i18n keys
//
Label sep0 = new Label( mainComposite, SWT.SEPARATOR | SWT.HORIZONTAL );
sep0.setBackground( GUIResource.getInstance().getColorWhite() );
FormData fdSep0 = new FormData();
fdSep0.left = new FormAttachment( 0, 0 );
fdSep0.right = new FormAttachment( 100, 0 );
fdSep0.top = new FormAttachment( 0, 0 );
sep0.setLayoutData( fdSep0 );
// empty panel to correct background color.
Composite tabWrapper = new Composite( mainComposite, SWT.NONE );
tabWrapper.setLayout( new FormLayout() );
tabWrapper.setBackground( GUIResource.getInstance().getColorWhite() );
FormData fdTabWrapper = new FormData();
fdTabWrapper.left = new FormAttachment( 0, 0 );
fdTabWrapper.top = new FormAttachment( sep0, 0 );
fdTabWrapper.right = new FormAttachment( 100, 0 );
tabWrapper.setLayoutData( fdTabWrapper );
CTabFolder tabFolder = new CTabFolder( tabWrapper, SWT.HORIZONTAL | SWT.FLAT );
    tabFolder.setSimple( false ); // use the "stylish" (curvy borders) tab style rather than the simple one
tabFolder.setBackground( GUIResource.getInstance().getColorWhite() );
tabFolder.setBorderVisible( false );
tabFolder.setSelectionBackground( new Color[] {
display.getSystemColor( SWT.COLOR_WIDGET_NORMAL_SHADOW ),
display.getSystemColor( SWT.COLOR_WIDGET_LIGHT_SHADOW ), }, new int[] { 55, }, true );
FormData fdTab = new FormData();
fdTab.left = new FormAttachment( 0, 0 );
fdTab.top = new FormAttachment( sep0, 0 );
fdTab.right = new FormAttachment( 100, 0 );
fdTab.height = 0;
tabFolder.setLayoutData( fdTab );
view = new CTabItem( tabFolder, SWT.NONE );
view.setControl( new Composite( tabFolder, SWT.NONE ) );
view.setText( STRING_SPOON_MAIN_TREE );
view.setImage( GUIResource.getInstance().getImageExploreSolutionSmall() );
design = new CTabItem( tabFolder, SWT.NONE );
design.setText( STRING_SPOON_CORE_OBJECTS_TREE );
design.setControl( new Composite( tabFolder, SWT.NONE ) );
design.setImage( GUIResource.getInstance().getImageEditSmall() );
Label sep3 = new Label( mainComposite, SWT.SEPARATOR | SWT.HORIZONTAL );
sep3.setBackground( GUIResource.getInstance().getColorWhite() );
FormData fdSep3 = new FormData();
fdSep3.left = new FormAttachment( 0, 0 );
fdSep3.right = new FormAttachment( 100, 0 );
fdSep3.top = new FormAttachment( tabWrapper, 0 );
sep3.setLayoutData( fdSep3 );
selectionLabel = new Label( mainComposite, SWT.HORIZONTAL );
FormData fdsLabel = new FormData();
fdsLabel.left = new FormAttachment( 0, 0 );
fdsLabel.top = new FormAttachment( sep3, 5 );
selectionLabel.setLayoutData( fdsLabel );
ToolBar treeTb = new ToolBar( mainComposite, SWT.HORIZONTAL | SWT.FLAT | SWT.BORDER );
    /*
     * Tool bar with expand-all / collapse-all buttons for the trees
     */
ToolItem expandAll = new ToolItem( treeTb, SWT.PUSH );
expandAll.setImage( GUIResource.getInstance().getImageExpandAll() );
ToolItem collapseAll = new ToolItem( treeTb, SWT.PUSH );
collapseAll.setImage( GUIResource.getInstance().getImageCollapseAll() );
FormData fdTreeToolbar = new FormData();
fdTreeToolbar.top = new FormAttachment( sep3, 0 );
fdTreeToolbar.right = new FormAttachment( 95, 5 );
treeTb.setLayoutData( fdTreeToolbar );
selectionFilter =
new Text( mainComposite, SWT.SINGLE
| SWT.BORDER | SWT.LEFT | SWT.SEARCH | SWT.ICON_SEARCH | SWT.ICON_CANCEL );
selectionFilter.setToolTipText( BaseMessages.getString( PKG, "Spoon.SelectionFilter.Tooltip" ) );
FormData fdSelectionFilter = new FormData();
fdSelectionFilter.top =
new FormAttachment( treeTb, -( GUIResource.getInstance().getImageExpandAll().getBounds().height + 5 ) );
fdSelectionFilter.right = new FormAttachment( 95, -55 );
fdSelectionFilter.left = new FormAttachment( selectionLabel, 10 );
selectionFilter.setLayoutData( fdSelectionFilter );
selectionFilter.addModifyListener( new ModifyListener() {
public void modifyText( ModifyEvent arg0 ) {
if ( coreObjectsTree != null && !coreObjectsTree.isDisposed() ) {
previousShowTrans = false;
previousShowJob = false;
refreshCoreObjects();
if ( !Const.isEmpty( selectionFilter.getText() ) ) {
tidyBranches( coreObjectsTree.getItems(), true ); // expand all
} else { // no filter: collapse all
tidyBranches( coreObjectsTree.getItems(), false );
}
}
if ( selectionTree != null && !selectionTree.isDisposed() ) {
refreshTree();
if ( !Const.isEmpty( selectionFilter.getText() ) ) {
tidyBranches( selectionTree.getItems(), true ); // expand all
} else { // no filter: collapse all
tidyBranches( selectionTree.getItems(), false );
}
selectionFilter.setFocus();
}
}
} );
expandAll.addSelectionListener( new SelectionAdapter() {
public void widgetSelected( SelectionEvent event ) {
if ( designSelected ) {
tidyBranches( coreObjectsTree.getItems(), true );
}
if ( viewSelected ) {
tidyBranches( selectionTree.getItems(), true );
}
}
} );
collapseAll.addSelectionListener( new SelectionAdapter() {
public void widgetSelected( SelectionEvent event ) {
if ( designSelected ) {
tidyBranches( coreObjectsTree.getItems(), false );
}
if ( viewSelected ) {
tidyBranches( selectionTree.getItems(), false );
}
}
} );
tabFolder.addSelectionListener( new SelectionAdapter() {
@Override
public void widgetSelected( SelectionEvent arg0 ) {
if ( arg0.item == view ) {
setViewMode();
} else {
setDesignMode();
}
}
} );
Label sep4 = new Label( mainComposite, SWT.SEPARATOR | SWT.HORIZONTAL );
sep4.setBackground( GUIResource.getInstance().getColorWhite() );
FormData fdSep4 = new FormData();
fdSep4.left = new FormAttachment( 0, 0 );
fdSep4.right = new FormAttachment( 100, 0 );
fdSep4.top = new FormAttachment( treeTb, 5 );
sep4.setLayoutData( fdSep4 );
variableComposite = new Composite( mainComposite, SWT.NONE );
variableComposite.setBackground( GUIResource.getInstance().getColorBackground() );
variableComposite.setLayout( new FillLayout() );
FormData fdVariableComposite = new FormData();
fdVariableComposite.left = new FormAttachment( 0, 0 );
fdVariableComposite.right = new FormAttachment( 100, 0 );
fdVariableComposite.top = new FormAttachment( sep4, 0 );
fdVariableComposite.bottom = new FormAttachment( 100, 0 );
variableComposite.setLayoutData( fdVariableComposite );
disposeVariableComposite( true, false, false, false );
coreStepToolTipMap = new Hashtable<String, String>();
coreJobToolTipMap = new Hashtable<String, String>();
addDefaultKeyListeners( tabFolder );
addDefaultKeyListeners( mainComposite );
}
public void addDefaultKeyListeners( Control control ) {
control.addKeyListener( new KeyAdapter() {
@Override
public void keyPressed( KeyEvent e ) {
// CTRL-W or CTRL-F4 : close tab
//
if ( ( e.keyCode == 'w' && ( e.stateMask & SWT.CONTROL ) != 0 )
|| ( e.keyCode == SWT.F4 && ( e.stateMask & SWT.CONTROL ) != 0 ) ) {
closeFile();
}
// CTRL-F5 : metastore explorer
//
if ( e.keyCode == SWT.F5 && ( e.stateMask & SWT.CONTROL ) != 0 ) {
new MetaStoreExplorerDialog( shell, metaStore ).open();
}
}
} );
}
public boolean setViewMode() {
if ( viewSelected ) {
return true;
}
selectionFilter.setText( "" ); // reset filter when switched to view
disposeVariableComposite( true, false, false, false );
refreshTree();
return false;
}
public boolean setDesignMode() {
if ( designSelected ) {
return true;
}
selectionFilter.setText( "" ); // reset filter when switched to design
disposeVariableComposite( false, false, true, false );
refreshCoreObjects();
return false;
}
private void tidyBranches( TreeItem[] items, boolean expand ) {
for ( TreeItem item : items ) {
item.setExpanded( expand );
tidyBranches( item.getItems(), expand );
}
}
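  /**
   * Switches between the View and Design tabs and disposes the controls that are currently shown in the variable
   * composite so they can be rebuilt. The shared and history flags are currently unused.
   *
   * @param tree
   *          true to select the View (explorer) tab
   * @param core
   *          true to select the Design (core objects) tab
   */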
public void disposeVariableComposite( boolean tree, boolean shared, boolean core, boolean history ) {
viewSelected = tree;
view.getParent().setSelection( viewSelected ? view : design );
designSelected = core;
// historySelected = history;
// sharedSelected = shared;
for ( Control control : variableComposite.getChildren() ) {
// PDI-1247 - these menus are coded for reuse, so make sure
// they don't get disposed of here (alert: dirty design)
if ( control instanceof Tree ) {
        control.setMenu( null );
}
control.dispose();
}
previousShowTrans = false;
previousShowJob = false;
// stepHistoryChanged=true;
selectionLabel.setText( tree ? BaseMessages.getString( PKG, "Spoon.Explorer" ) : BaseMessages.getString(
PKG, "Spoon.Steps" ) );
}
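  /**
   * Creates the core objects tree on the Design tab, including auto-collapse behaviour, plugin tool tips, drag
   * support and double-click handling, and configures the shared tool tip instance.
   */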
public void addCoreObjectsTree() {
// Now create a new expand bar inside that item
// We're going to put the core object in there
//
coreObjectsTree = new Tree( variableComposite, SWT.V_SCROLL | SWT.SINGLE );
props.setLook( coreObjectsTree );
coreObjectsTree.addSelectionListener( new SelectionAdapter() {
public void widgetSelected( SelectionEvent event ) {
// expand the selected tree item, collapse the rest
//
if ( props.getAutoCollapseCoreObjectsTree() ) {
TreeItem[] selection = coreObjectsTree.getSelection();
if ( selection.length == 1 ) {
            // expand if clicked on the top-level entry only...
//
TreeItem top = selection[0];
while ( top.getParentItem() != null ) {
top = top.getParentItem();
}
if ( top == selection[0] ) {
boolean expanded = top.getExpanded();
for ( TreeItem item : coreObjectsTree.getItems() ) {
item.setExpanded( false );
}
top.setExpanded( !expanded );
}
}
}
}
} );
coreObjectsTree.addTreeListener( new TreeAdapter() {
public void treeExpanded( TreeEvent treeEvent ) {
if ( props.getAutoCollapseCoreObjectsTree() ) {
TreeItem treeItem = (TreeItem) treeEvent.item;
/*
           * Trick for SWT on Windows systems: a SelectionEvent is fired after the TreeEvent if setSelection() is not
           * used here. Otherwise the first item in the list is selected by default and incorrectly collapsed again,
           * see PDI-1480
*/
coreObjectsTree.setSelection( treeItem );
// expand the selected tree item, collapse the rest
//
for ( TreeItem item : coreObjectsTree.getItems() ) {
if ( item != treeItem ) {
item.setExpanded( false );
} else {
treeItem.setExpanded( true );
}
}
}
}
} );
coreObjectsTree.addMouseMoveListener( new MouseMoveListener() {
public void mouseMove( MouseEvent move ) {
// don't show tooltips in the tree if the option is not set
if ( !getProperties().showToolTips() ) {
return;
}
toolTip.hide();
TreeItem item = searchMouseOverTreeItem( coreObjectsTree.getItems(), move.x, move.y );
if ( item != null ) {
String name = item.getText();
String tip = coreStepToolTipMap.get( name );
if ( tip != null ) {
PluginInterface plugin = PluginRegistry.getInstance().findPluginWithName( StepPluginType.class, name );
if ( plugin != null ) {
Image image = GUIResource.getInstance().getImagesSteps().get( plugin.getIds()[0] );
if ( image == null ) {
toolTip.hide();
}
toolTip.setImage( image );
toolTip.setText( name + Const.CR + Const.CR + tip );
toolTip.show( new org.eclipse.swt.graphics.Point( move.x + 10, move.y + 10 ) );
}
}
tip = coreJobToolTipMap.get( name );
if ( tip != null ) {
PluginInterface plugin =
PluginRegistry.getInstance().findPluginWithName( JobEntryPluginType.class, name );
if ( plugin != null ) {
Image image = GUIResource.getInstance().getImagesJobentries().get( plugin.getIds()[0] );
toolTip.setImage( image );
toolTip.setText( name + Const.CR + Const.CR + tip );
toolTip.show( new org.eclipse.swt.graphics.Point( move.x + 10, move.y + 10 ) );
}
}
}
}
} );
addDragSourceToTree( coreObjectsTree );
addDefaultKeyListeners( coreObjectsTree );
coreObjectsTree.addMouseListener( new MouseAdapter() {
@Override
public void mouseDoubleClick( MouseEvent event ) {
boolean shift = ( event.stateMask & SWT.SHIFT ) != 0;
doubleClickedInTree( coreObjectsTree, shift );
}
} );
toolTip = new DefaultToolTip( variableComposite, ToolTip.RECREATE, true );
toolTip.setRespectMonitorBounds( true );
toolTip.setRespectDisplayBounds( true );
toolTip.setPopupDelay( 350 );
toolTip.setHideDelay( 5000 );
toolTip.setShift( new org.eclipse.swt.graphics.Point( ConstUI.TOOLTIP_OFFSET, ConstUI.TOOLTIP_OFFSET ) );
}
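  /**
   * Recursively searches the given tree items (and their children) for the item whose bounds contain the given
   * coordinates.
   *
   * @return the tree item under the mouse position, or null if none was found
   */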
protected TreeItem searchMouseOverTreeItem( TreeItem[] treeItems, int x, int y ) {
for ( TreeItem treeItem : treeItems ) {
if ( treeItem.getBounds().contains( x, y ) ) {
return treeItem;
}
if ( treeItem.getItemCount() > 0 ) {
treeItem = searchMouseOverTreeItem( treeItem.getItems(), x, y );
if ( treeItem != null ) {
return treeItem;
}
}
}
return null;
}
private boolean previousShowTrans;
private boolean previousShowJob;
public boolean showTrans;
public boolean showJob;
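  /**
   * Repopulates the core objects tree: step plugins grouped by category plus a "History" branch with the most used
   * steps when a transformation is active, or job entry plugins grouped by category (with the special Start and
   * Dummy entries) when a job is active. Does nothing when the Design tab is not selected or when the active file
   * type did not change since the last refresh.
   */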
public void refreshCoreObjects() {
if ( shell.isDisposed() ) {
return;
}
if ( !designSelected ) {
return;
}
if ( coreObjectsTree == null || coreObjectsTree.isDisposed() ) {
addCoreObjectsTree();
}
showTrans = getActiveTransformation() != null;
showJob = getActiveJob() != null;
if ( showTrans == previousShowTrans && showJob == previousShowJob ) {
return;
}
// First remove all the entries that where present...
//
TreeItem[] expandItems = coreObjectsTree.getItems();
for ( TreeItem item : expandItems ) {
item.dispose();
}
if ( showTrans ) {
selectionLabel.setText( BaseMessages.getString( PKG, "Spoon.Steps" ) );
// Fill the base components...
//
// ////////////////////////////////////////////////////////////////////////////////////////////////
// TRANSFORMATIONS
// ////////////////////////////////////////////////////////////////////////////////////////////////
PluginRegistry registry = PluginRegistry.getInstance();
final List<PluginInterface> baseSteps = registry.getPlugins( StepPluginType.class );
final List<String> baseCategories = registry.getCategories( StepPluginType.class );
for ( String baseCategory : baseCategories ) {
TreeItem item = new TreeItem( coreObjectsTree, SWT.NONE );
item.setText( baseCategory );
item.setImage( GUIResource.getInstance().getImageArrow() );
for ( PluginInterface baseStep : baseSteps ) {
if ( baseStep.getCategory().equalsIgnoreCase( baseCategory ) ) {
final Image stepImage =
GUIResource.getInstance().getImagesStepsSmall().get( baseStep.getIds()[ 0 ] );
String pluginName = baseStep.getName();
String pluginDescription = baseStep.getDescription();
if ( !filterMatch( pluginName ) && !filterMatch( pluginDescription ) ) {
continue;
}
TreeItem stepItem = new TreeItem( item, SWT.NONE );
stepItem.setImage( stepImage );
stepItem.setText( pluginName );
stepItem.addListener( SWT.Selection, new Listener() {
public void handleEvent( Event event ) {
System.out.println( "Tree item Listener fired" );
}
} );
coreStepToolTipMap.put( pluginName, pluginDescription );
}
}
}
// Add History Items...
TreeItem item = new TreeItem( coreObjectsTree, SWT.NONE );
item.setText( BaseMessages.getString( PKG, "Spoon.History" ) );
item.setImage( GUIResource.getInstance().getImageArrow() );
List<ObjectUsageCount> pluginHistory = props.getPluginHistory();
// The top 10 at most, the rest is not interesting anyway
//
for ( int i = 0; i < pluginHistory.size() && i < 10; i++ ) {
ObjectUsageCount usage = pluginHistory.get( i );
PluginInterface stepPlugin =
PluginRegistry.getInstance().findPluginWithId( StepPluginType.class, usage.getObjectName() );
if ( stepPlugin != null ) {
final Image stepImage = GUIResource.getInstance().getImagesSteps().get( stepPlugin.getIds()[0] );
String pluginName = Const.NVL( stepPlugin.getName(), "" );
String pluginDescription = Const.NVL( stepPlugin.getDescription(), "" );
if ( !filterMatch( pluginName ) && !filterMatch( pluginDescription ) ) {
continue;
}
TreeItem stepItem = new TreeItem( item, SWT.NONE );
stepItem.setImage( stepImage );
stepItem.setText( pluginName );
stepItem.addListener( SWT.Selection, new Listener() {
public void handleEvent( Event event ) {
System.out.println( "Tree item Listener fired" );
}
} );
          coreStepToolTipMap.put( pluginName, pluginDescription + " (" + usage.getNrUses() + ")" );
}
}
}
if ( showJob ) {
// Fill the base components...
//
// ////////////////////////////////////////////////////////////////////////////////////////////////
// JOBS
// ////////////////////////////////////////////////////////////////////////////////////////////////
selectionLabel.setText( BaseMessages.getString( PKG, "Spoon.Entries" ) );
PluginRegistry registry = PluginRegistry.getInstance();
List<PluginInterface> baseJobEntries = registry.getPlugins( JobEntryPluginType.class );
List<String> baseCategories = registry.getCategories( JobEntryPluginType.class );
TreeItem generalItem = null;
for ( String baseCategory : baseCategories ) {
TreeItem item = new TreeItem( coreObjectsTree, SWT.NONE );
item.setText( baseCategory );
item.setImage( GUIResource.getInstance().getImageArrow() );
if ( baseCategory.equalsIgnoreCase( JobEntryPluginType.GENERAL_CATEGORY ) ) {
generalItem = item;
}
for ( int j = 0; j < baseJobEntries.size(); j++ ) {
if ( !baseJobEntries.get( j ).getIds()[ 0 ].equals( "SPECIAL" ) ) {
if ( baseJobEntries.get( j ).getCategory().equalsIgnoreCase( baseCategory ) ) {
final Image jobEntryImage =
GUIResource.getInstance().getImagesJobentriesSmall().get( baseJobEntries.get( j ).getIds()[ 0 ] );
String pluginName = Const.NVL( baseJobEntries.get( j ).getName(), "" );
String pluginDescription = Const.NVL( baseJobEntries.get( j ).getDescription(), "" );
if ( !filterMatch( pluginName ) && !filterMatch( pluginDescription ) ) {
continue;
}
TreeItem stepItem = new TreeItem( item, SWT.NONE );
stepItem.setImage( jobEntryImage );
stepItem.setText( pluginName );
stepItem.addListener( SWT.Selection, new Listener() {
public void handleEvent( Event arg0 ) {
System.out.println( "Tree item Listener fired" );
}
} );
// if (isPlugin)
// stepItem.setFont(GUIResource.getInstance().getFontBold());
coreJobToolTipMap.put( pluginName, pluginDescription );
}
}
}
}
      // First add a few "Special" entries: Start, Dummy, OK, ERROR.
      // We add these to the top of the base category; we don't care about
      // the sort order here.
//
JobEntryCopy startEntry = JobMeta.createStartEntry();
JobEntryCopy dummyEntry = JobMeta.createDummyEntry();
String[] specialText = new String[] { startEntry.getName(), dummyEntry.getName(), };
String[] specialTooltip = new String[] { startEntry.getDescription(), dummyEntry.getDescription(), };
Image[] specialImage =
new Image[] {
GUIResource.getInstance().getImageStartSmall(), GUIResource.getInstance().getImageDummySmall() };
for ( int i = 0; i < specialText.length; i++ ) {
TreeItem specialItem = new TreeItem( generalItem, SWT.NONE, i );
specialItem.setImage( specialImage[i] );
specialItem.setText( specialText[i] );
specialItem.addListener( SWT.Selection, new Listener() {
public void handleEvent( Event arg0 ) {
System.out.println( "Tree item Listener fired" );
}
} );
coreJobToolTipMap.put( specialText[i], specialTooltip[i] );
}
}
variableComposite.layout( true, true );
previousShowTrans = showTrans;
previousShowJob = showJob;
}
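  /**
   * Marks the given object as shared, stores it in the shared objects file of the active transformation or job and
   * refreshes the tree.
   */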
protected void shareObject( SharedObjectInterface sharedObject ) {
sharedObject.setShared( true );
EngineMetaInterface meta = getActiveMeta();
try {
if ( meta != null ) {
SharedObjects sharedObjects = null;
if ( meta instanceof TransMeta ) {
sharedObjects = ( (TransMeta) meta ).getSharedObjects();
}
if ( meta instanceof JobMeta ) {
sharedObjects = ( (JobMeta) meta ).getSharedObjects();
}
if ( sharedObjects != null ) {
sharedObjects.storeObject( sharedObject );
sharedObjects.saveToFile();
}
}
} catch ( Exception e ) {
new ErrorDialog(
shell, BaseMessages.getString( PKG, "Spoon.Dialog.ErrorWritingSharedObjects.Title" ), BaseMessages
.getString( PKG, "Spoon.Dialog.ErrorWritingSharedObjects.Message" ), e );
}
refreshTree();
}
protected void unShareObject( SharedObjectInterface sharedObject ) {
MessageBox mb = new MessageBox( shell, SWT.YES | SWT.NO | SWT.ICON_WARNING );
// "Are you sure you want to stop sharing?"
mb.setMessage( BaseMessages.getString( PKG, "Spoon.Dialog.StopSharing.Message" ) );
mb.setText( BaseMessages.getString( PKG, "Spoon.Dialog.StopSharing.Title" ) ); // Warning!
int answer = mb.open();
if ( answer == SWT.YES ) {
sharedObject.setShared( false );
EngineMetaInterface meta = getActiveMeta();
try {
if ( meta != null ) {
SharedObjects sharedObjects = null;
if ( meta instanceof TransMeta ) {
sharedObjects = ( (TransMeta) meta ).getSharedObjects();
}
if ( meta instanceof JobMeta ) {
sharedObjects = ( (JobMeta) meta ).getSharedObjects();
}
if ( sharedObjects != null ) {
sharedObjects.removeObject( sharedObject );
sharedObjects.saveToFile();
}
}
} catch ( Exception e ) {
new ErrorDialog(
shell, BaseMessages.getString( PKG, "Spoon.Dialog.ErrorWritingSharedObjects.Title" ), BaseMessages
.getString( PKG, "Spoon.Dialog.ErrorWritingSharedObjects.Message" ), e );
}
refreshTree();
}
}
/**
* @return The object that is selected in the tree or null if we couldn't figure it out. (titles etc. == null)
*/
public TreeSelection[] getTreeObjects( final Tree tree ) {
return delegates.tree.getTreeObjects( tree, selectionTree, coreObjectsTree );
}
private void addDragSourceToTree( final Tree tree ) {
delegates.tree.addDragSourceToTree( tree, selectionTree, coreObjectsTree );
}
public void hideToolTips() {
if ( toolTip != null ) {
toolTip.hide();
}
}
/**
* If you click in the tree, you might want to show the corresponding window.
*/
public void showSelection() {
TreeSelection[] objects = getTreeObjects( selectionTree );
if ( objects.length != 1 ) {
return; // not yet supported, we can do this later when the OSX bug
// goes away
}
TreeSelection object = objects[0];
final Object selection = object.getSelection();
final Object parent = object.getParent();
TransMeta transMeta = null;
if ( selection instanceof TransMeta ) {
transMeta = (TransMeta) selection;
}
if ( parent instanceof TransMeta ) {
transMeta = (TransMeta) parent;
}
if ( transMeta != null ) {
TabMapEntry entry = delegates.tabs.findTabMapEntry( transMeta );
if ( entry != null ) {
int current = tabfolder.getSelectedIndex();
int desired = tabfolder.indexOf( entry.getTabItem() );
if ( current != desired ) {
tabfolder.setSelected( desired );
}
transMeta.setInternalKettleVariables();
if ( getCoreObjectsState() != STATE_CORE_OBJECTS_SPOON ) {
// Switch the core objects in the lower left corner to the
// spoon trans types
refreshCoreObjects();
}
}
}
JobMeta jobMeta = null;
if ( selection instanceof JobMeta ) {
jobMeta = (JobMeta) selection;
}
if ( parent instanceof JobMeta ) {
jobMeta = (JobMeta) parent;
}
if ( jobMeta != null ) {
      TabMapEntry entry = delegates.tabs.findTabMapEntry( jobMeta );
if ( entry != null ) {
int current = tabfolder.getSelectedIndex();
int desired = tabfolder.indexOf( entry.getTabItem() );
if ( current != desired ) {
tabfolder.setSelected( desired );
}
jobMeta.setInternalKettleVariables();
if ( getCoreObjectsState() != STATE_CORE_OBJECTS_CHEF ) {
// Switch the core objects in the lower left corner to the
// spoon job types
//
refreshCoreObjects();
}
}
}
}
private Object selectionObjectParent = null;
private Object selectionObject = null;
public void newHop() {
newHop( (TransMeta) selectionObjectParent );
}
public void sortHops() {
( (TransMeta) selectionObjectParent ).sortHops();
refreshTree();
}
public void newDatabasePartitioningSchema() {
TransMeta transMeta = getActiveTransformation();
if ( transMeta != null ) {
newPartitioningSchema( transMeta );
}
}
public void newClusteringSchema() {
TransMeta transMeta = getActiveTransformation();
if ( transMeta != null ) {
newClusteringSchema( transMeta );
}
}
public void newSlaveServer() {
newSlaveServer( (HasSlaveServersInterface) selectionObjectParent );
}
public void editTransformationPropertiesPopup() {
TransGraph.editProperties( (TransMeta) selectionObject, this, rep, true );
}
public void addTransLog() {
TransGraph activeTransGraph = getActiveTransGraph();
if ( activeTransGraph != null ) {
activeTransGraph.transLogDelegate.addTransLog();
activeTransGraph.transGridDelegate.addTransGrid();
}
}
public void addTransHistory() {
TransGraph activeTransGraph = getActiveTransGraph();
if ( activeTransGraph != null ) {
activeTransGraph.transHistoryDelegate.addTransHistory();
}
}
public boolean editJobProperties( String id ) {
if ( "job-settings".equals( id ) ) {
return JobGraph.editProperties( getActiveJob(), this, rep, true );
} else if ( "job-inst-settings".equals( id ) ) {
return JobGraph.editProperties( (JobMeta) selectionObject, this, rep, true );
}
return false;
}
public void editJobPropertiesPopup() {
JobGraph.editProperties( (JobMeta) selectionObject, this, rep, true );
}
public void addJobLog() {
JobGraph activeJobGraph = getActiveJobGraph();
if ( activeJobGraph != null ) {
activeJobGraph.jobLogDelegate.addJobLog();
activeJobGraph.jobGridDelegate.addJobGrid();
}
}
public void addJobHistory() {
addJobHistory( (JobMeta) selectionObject, true );
}
public void newStep() {
newStep( getActiveTransformation() );
}
public void editConnection() {
if ( RepositorySecurityUI.verifyOperations( shell, rep, RepositoryOperation.MODIFY_DATABASE ) ) {
return;
}
final DatabaseMeta databaseMeta = (DatabaseMeta) selectionObject;
delegates.db.editConnection( databaseMeta );
}
public void dupeConnection() {
final DatabaseMeta databaseMeta = (DatabaseMeta) selectionObject;
final HasDatabasesInterface hasDatabasesInterface = (HasDatabasesInterface) selectionObjectParent;
delegates.db.dupeConnection( hasDatabasesInterface, databaseMeta );
}
public void clipConnection() {
final DatabaseMeta databaseMeta = (DatabaseMeta) selectionObject;
delegates.db.clipConnection( databaseMeta );
}
public void delConnection() {
if ( RepositorySecurityUI.verifyOperations( shell, rep, RepositoryOperation.DELETE_DATABASE ) ) {
return;
}
final DatabaseMeta databaseMeta = (DatabaseMeta) selectionObject;
MessageBox mb = new MessageBox( shell, SWT.YES | SWT.NO | SWT.ICON_QUESTION );
mb.setMessage( BaseMessages.getString(
PKG, "Spoon.ExploreDB.DeleteConnectionAsk.Message", databaseMeta.getName() ) );
mb.setText( BaseMessages.getString( PKG, "Spoon.ExploreDB.DeleteConnectionAsk.Title" ) );
int response = mb.open();
if ( response != SWT.YES ) {
return;
}
final HasDatabasesInterface hasDatabasesInterface = (HasDatabasesInterface) selectionObjectParent;
delegates.db.delConnection( hasDatabasesInterface, databaseMeta );
}
public void sqlConnection() {
final DatabaseMeta databaseMeta = (DatabaseMeta) selectionObject;
delegates.db.sqlConnection( databaseMeta );
}
public void clearDBCache( String id ) {
if ( "database-class-clear-cache".equals( id ) ) {
delegates.db.clearDBCache( null );
}
if ( "database-inst-clear-cache".equals( id ) ) {
final DatabaseMeta databaseMeta = (DatabaseMeta) selectionObject;
delegates.db.clearDBCache( databaseMeta );
}
}
public void exploreDatabase() {
if ( RepositorySecurityUI.verifyOperations( shell, rep, RepositoryOperation.EXPLORE_DATABASE ) ) {
return;
}
// Show a minimal window to allow you to quickly select the database
// connection to explore
//
List<DatabaseMeta> databases = new ArrayList<DatabaseMeta>();
// First load the connections from the loaded file
//
HasDatabasesInterface databasesInterface = getActiveHasDatabasesInterface();
if ( databasesInterface != null ) {
databases.addAll( databasesInterface.getDatabases() );
}
// Overwrite the information with the connections from the repository
//
if ( rep != null ) {
try {
List<DatabaseMeta> list = rep.readDatabases();
for ( DatabaseMeta databaseMeta : list ) {
int index = databases.indexOf( databaseMeta );
if ( index < 0 ) {
databases.add( databaseMeta );
} else {
databases.set( index, databaseMeta );
}
}
} catch ( KettleException e ) {
log.logError( "Unexpected repository error", e.getMessage() );
}
}
if ( databases.size() == 0 ) {
return;
}
// OK, get a list of all the database names...
//
String[] databaseNames = new String[databases.size()];
for ( int i = 0; i < databases.size(); i++ ) {
databaseNames[i] = databases.get( i ).getName();
}
// show the shell...
//
EnterSelectionDialog dialog = new EnterSelectionDialog( shell, databaseNames,
BaseMessages.getString( PKG, "Spoon.ExploreDB.SelectDB.Title" ),
BaseMessages.getString( PKG, "Spoon.ExploreDB.SelectDB.Message" ) );
String name = dialog.open();
if ( name != null ) {
selectionObject = DatabaseMeta.findDatabase( databases, name );
exploreDB();
}
}
public void exploreDB() {
final DatabaseMeta databaseMeta = (DatabaseMeta) selectionObject;
delegates.db.exploreDB( databaseMeta, true );
}
public void editStep() {
final TransMeta transMeta = (TransMeta) selectionObjectParent;
final StepMeta stepMeta = (StepMeta) selectionObject;
delegates.steps.editStep( transMeta, stepMeta );
}
public void dupeStep() {
final TransMeta transMeta = (TransMeta) selectionObjectParent;
final StepMeta stepMeta = (StepMeta) selectionObject;
delegates.steps.dupeStep( transMeta, stepMeta );
}
public void delStep() {
final TransMeta transMeta = (TransMeta) selectionObjectParent;
final StepMeta stepMeta = (StepMeta) selectionObject;
delegates.steps.delStep( transMeta, stepMeta );
}
public void helpStep() {
final StepMeta stepMeta = (StepMeta) selectionObject;
PluginInterface stepPlugin =
PluginRegistry.getInstance().findPluginWithId( StepPluginType.class, stepMeta.getStepID() );
HelpUtils.openHelpDialog( shell, stepPlugin );
}
public void shareObject( String id ) {
if ( "database-inst-share".equals( id ) ) {
final DatabaseMeta databaseMeta = (DatabaseMeta) selectionObject;
if ( databaseMeta.isShared() ) {
unShareObject( databaseMeta );
} else {
shareObject( databaseMeta );
}
}
if ( "step-inst-share".equals( id ) ) {
final StepMeta stepMeta = (StepMeta) selectionObject;
shareObject( stepMeta );
}
if ( "partition-schema-inst-share".equals( id ) ) {
final PartitionSchema partitionSchema = (PartitionSchema) selectionObject;
shareObject( partitionSchema );
}
if ( "cluster-schema-inst-share".equals( id ) ) {
final ClusterSchema clusterSchema = (ClusterSchema) selectionObject;
shareObject( clusterSchema );
}
if ( "slave-server-inst-share".equals( id ) ) {
final SlaveServer slaveServer = (SlaveServer) selectionObject;
shareObject( slaveServer );
}
}
public void editJobEntry() {
final JobMeta jobMeta = (JobMeta) selectionObjectParent;
final JobEntryCopy jobEntry = (JobEntryCopy) selectionObject;
editJobEntry( jobMeta, jobEntry );
}
public void dupeJobEntry() {
final JobMeta jobMeta = (JobMeta) selectionObjectParent;
final JobEntryCopy jobEntry = (JobEntryCopy) selectionObject;
delegates.jobs.dupeJobEntry( jobMeta, jobEntry );
}
public void deleteJobEntryCopies() {
final JobMeta jobMeta = (JobMeta) selectionObjectParent;
final JobEntryCopy jobEntry = (JobEntryCopy) selectionObject;
deleteJobEntryCopies( jobMeta, jobEntry );
}
public void helpJobEntry() {
final JobEntryCopy jobEntry = (JobEntryCopy) selectionObject;
String jobName = jobEntry.getName();
PluginInterface jobEntryPlugin =
PluginRegistry.getInstance().findPluginWithName( JobEntryPluginType.class, jobName );
HelpUtils.openHelpDialog( shell, jobEntryPlugin );
}
public void editHop() {
final TransMeta transMeta = (TransMeta) selectionObjectParent;
final TransHopMeta transHopMeta = (TransHopMeta) selectionObject;
editHop( transMeta, transHopMeta );
}
public void delHop() {
final TransMeta transMeta = (TransMeta) selectionObjectParent;
final TransHopMeta transHopMeta = (TransHopMeta) selectionObject;
delHop( transMeta, transHopMeta );
}
public void editPartitionSchema() {
final TransMeta transMeta = (TransMeta) selectionObjectParent;
final PartitionSchema partitionSchema = (PartitionSchema) selectionObject;
editPartitionSchema( transMeta, partitionSchema );
}
public void delPartitionSchema() {
final TransMeta transMeta = (TransMeta) selectionObjectParent;
final PartitionSchema partitionSchema = (PartitionSchema) selectionObject;
delPartitionSchema( transMeta, partitionSchema );
}
public void editClusterSchema() {
final TransMeta transMeta = (TransMeta) selectionObjectParent;
final ClusterSchema clusterSchema = (ClusterSchema) selectionObject;
editClusterSchema( transMeta, clusterSchema );
}
public void delClusterSchema() {
final TransMeta transMeta = (TransMeta) selectionObjectParent;
final ClusterSchema clusterSchema = (ClusterSchema) selectionObject;
delClusterSchema( transMeta, clusterSchema );
}
public void monitorClusterSchema() throws KettleException {
final ClusterSchema clusterSchema = (ClusterSchema) selectionObject;
monitorClusterSchema( clusterSchema );
}
public void editSlaveServer() {
final SlaveServer slaveServer = (SlaveServer) selectionObject;
editSlaveServer( slaveServer );
}
public void delSlaveServer() {
final HasSlaveServersInterface hasSlaveServersInterface = (HasSlaveServersInterface) selectionObjectParent;
final SlaveServer slaveServer = (SlaveServer) selectionObject;
delSlaveServer( hasSlaveServersInterface, slaveServer );
}
public void addSpoonSlave() {
final SlaveServer slaveServer = (SlaveServer) selectionObject;
addSpoonSlave( slaveServer );
}
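  /**
   * Determines which popup menu applies to the current tree selection (a class node or a concrete object such as a
   * transformation, job, database, step, hop, schema or slave server), adjusts a few database menu items on the fly
   * and displays the menu, or clears the tree menu when nothing matches.
   */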
private synchronized void setMenu( Tree tree ) {
TreeSelection[] objects = getTreeObjects( tree );
if ( objects.length != 1 ) {
return; // not yet supported, we can do this later when the OSX bug
// goes away
}
TreeSelection object = objects[0];
selectionObject = object.getSelection();
Object selection = selectionObject;
selectionObjectParent = object.getParent();
// Not clicked on a real object: returns a class
XulMenupopup spoonMenu = null;
if ( selection instanceof Class<?> ) {
if ( selection.equals( TransMeta.class ) ) {
// New
spoonMenu = (XulMenupopup) menuMap.get( "trans-class" );
} else if ( selection.equals( JobMeta.class ) ) {
// New
spoonMenu = (XulMenupopup) menuMap.get( "job-class" );
} else if ( selection.equals( TransHopMeta.class ) ) {
// New
spoonMenu = (XulMenupopup) menuMap.get( "trans-hop-class" );
} else if ( selection.equals( DatabaseMeta.class ) ) {
spoonMenu = (XulMenupopup) menuMap.get( "database-class" );
} else if ( selection.equals( PartitionSchema.class ) ) {
// New
spoonMenu = (XulMenupopup) menuMap.get( "partition-schema-class" );
} else if ( selection.equals( ClusterSchema.class ) ) {
spoonMenu = (XulMenupopup) menuMap.get( "cluster-schema-class" );
} else if ( selection.equals( SlaveServer.class ) ) {
spoonMenu = (XulMenupopup) menuMap.get( "slave-cluster-class" );
} else {
spoonMenu = null;
}
} else {
if ( selection instanceof TransMeta ) {
spoonMenu = (XulMenupopup) menuMap.get( "trans-inst" );
} else if ( selection instanceof JobMeta ) {
spoonMenu = (XulMenupopup) menuMap.get( "job-inst" );
} else if ( selection instanceof PluginInterface ) {
spoonMenu = (XulMenupopup) menuMap.get( "step-plugin" );
} else if ( selection instanceof DatabaseMeta ) {
spoonMenu = (XulMenupopup) menuMap.get( "database-inst" );
// disable for now if the connection is an SAP ERP type of database...
//
XulMenuitem item =
(XulMenuitem) mainSpoonContainer.getDocumentRoot().getElementById( "database-inst-explore" );
if ( item != null ) {
final DatabaseMeta databaseMeta = (DatabaseMeta) selection;
item.setDisabled( !databaseMeta.isExplorable() );
}
item = (XulMenuitem) mainSpoonContainer.getDocumentRoot().getElementById( "database-inst-clear-cache" );
if ( item != null ) {
final DatabaseMeta databaseMeta = (DatabaseMeta) selectionObject;
item.setLabel( BaseMessages.getString( PKG, "Spoon.Menu.Popup.CONNECTIONS.ClearDBCache" )
+ databaseMeta.getName() ); // Clear
}
item = (XulMenuitem) mainSpoonContainer.getDocumentRoot().getElementById( "database-inst-share" );
if ( item != null ) {
final DatabaseMeta databaseMeta = (DatabaseMeta) selection;
if ( databaseMeta.isShared() ) {
item.setLabel( BaseMessages.getString( PKG, "Spoon.Menu.Popup.CONNECTIONS.UnShare" ) );
} else {
item.setLabel( BaseMessages.getString( PKG, "Spoon.Menu.Popup.CONNECTIONS.Share" ) );
}
}
} else if ( selection instanceof StepMeta ) {
spoonMenu = (XulMenupopup) menuMap.get( "step-inst" );
} else if ( selection instanceof JobEntryCopy ) {
spoonMenu = (XulMenupopup) menuMap.get( "job-entry-copy-inst" );
} else if ( selection instanceof TransHopMeta ) {
spoonMenu = (XulMenupopup) menuMap.get( "trans-hop-inst" );
} else if ( selection instanceof PartitionSchema ) {
spoonMenu = (XulMenupopup) menuMap.get( "partition-schema-inst" );
} else if ( selection instanceof ClusterSchema ) {
spoonMenu = (XulMenupopup) menuMap.get( "cluster-schema-inst" );
} else if ( selection instanceof SlaveServer ) {
spoonMenu = (XulMenupopup) menuMap.get( "slave-server-inst" );
}
}
if ( spoonMenu != null ) {
ConstUI.displayMenu( spoonMenu, tree );
} else {
tree.setMenu( null );
}
}
/**
* Reaction to double click
*
*/
private void doubleClickedInTree( Tree tree ) {
doubleClickedInTree( tree, false );
}
/**
* Reaction to double click
*
*/
private void doubleClickedInTree( Tree tree, boolean shift ) {
TreeSelection[] objects = getTreeObjects( tree );
if ( objects.length != 1 ) {
return; // not yet supported, we can do this later when the OSX bug
// goes away
}
TreeSelection object = objects[0];
final Object selection = object.getSelection();
final Object parent = object.getParent();
if ( selection instanceof Class<?> ) {
if ( selection.equals( TransMeta.class ) ) {
newTransFile();
}
if ( selection.equals( JobMeta.class ) ) {
newJobFile();
}
if ( selection.equals( TransHopMeta.class ) ) {
newHop( (TransMeta) parent );
}
if ( selection.equals( DatabaseMeta.class ) ) {
delegates.db.newConnection();
}
if ( selection.equals( PartitionSchema.class ) ) {
newPartitioningSchema( (TransMeta) parent );
}
if ( selection.equals( ClusterSchema.class ) ) {
newClusteringSchema( (TransMeta) parent );
}
if ( selection.equals( SlaveServer.class ) ) {
newSlaveServer( (HasSlaveServersInterface) parent );
}
} else {
if ( selection instanceof TransMeta ) {
TransGraph.editProperties( (TransMeta) selection, this, rep, true );
}
if ( selection instanceof JobMeta ) {
JobGraph.editProperties( (JobMeta) selection, this, rep, true );
}
if ( selection instanceof PluginInterface ) {
PluginInterface plugin = (PluginInterface) selection;
if ( plugin.getPluginType().equals( StepPluginType.class ) ) {
TransGraph transGraph = getActiveTransGraph();
if ( transGraph != null ) {
transGraph.addStepToChain( plugin, shift );
}
}
if ( plugin.getPluginType().equals( JobEntryPluginType.class ) ) {
JobGraph jobGraph = getActiveJobGraph();
if ( jobGraph != null ) {
jobGraph.addJobEntryToChain( object.getItemText(), shift );
}
}
// newStep( getActiveTransformation() );
}
if ( selection instanceof DatabaseMeta ) {
delegates.db.editConnection( (DatabaseMeta) selection );
}
if ( selection instanceof StepMeta ) {
delegates.steps.editStep( (TransMeta) parent, (StepMeta) selection );
}
if ( selection instanceof JobEntryCopy ) {
editJobEntry( (JobMeta) parent, (JobEntryCopy) selection );
}
if ( selection instanceof TransHopMeta ) {
editHop( (TransMeta) parent, (TransHopMeta) selection );
}
if ( selection instanceof PartitionSchema ) {
editPartitionSchema( (TransMeta) parent, (PartitionSchema) selection );
}
if ( selection instanceof ClusterSchema ) {
editClusterSchema( (TransMeta) parent, (ClusterSchema) selection );
}
if ( selection instanceof SlaveServer ) {
editSlaveServer( (SlaveServer) selection );
}
}
}
protected void monitorClusterSchema( ClusterSchema clusterSchema ) throws KettleException {
for ( int i = 0; i < clusterSchema.getSlaveServers().size(); i++ ) {
SlaveServer slaveServer = clusterSchema.getSlaveServers().get( i );
addSpoonSlave( slaveServer );
}
}
protected void editSlaveServer( SlaveServer slaveServer ) {
// slaveServer.getVariable("MASTER_HOST")
SlaveServerDialog dialog = new SlaveServerDialog( shell, slaveServer );
if ( dialog.open() ) {
refreshTree();
refreshGraph();
}
}
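  /**
   * Creates the tab folder on the right-hand side of the sash form, adds a context menu with "close", "close all"
   * and "close others" actions, constrains the sash so the left panel stays visible and registers this class as the
   * tab listener.
   */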
private void addTabs() {
if ( tabComp != null ) {
tabComp.dispose();
}
tabComp = new Composite( sashform, SWT.BORDER );
props.setLook( tabComp );
tabComp.setLayout( new FillLayout() );
tabfolder = new TabSet( tabComp );
tabfolder.setChangedFont( GUIResource.getInstance().getFontBold() );
props.setLook( tabfolder.getSwtTabset(), Props.WIDGET_STYLE_TAB );
final CTabFolder cTabFolder = tabfolder.getSwtTabset();
cTabFolder.addMenuDetectListener( new MenuDetectListener() {
@Override
public void menuDetected( MenuDetectEvent event ) {
org.eclipse.swt.graphics.Point real = new org.eclipse.swt.graphics.Point( event.x, event.y );
org.eclipse.swt.graphics.Point point = display.map( null, cTabFolder, real );
final CTabItem item = cTabFolder.getItem( point );
if ( item != null ) {
Menu menu = new Menu( cTabFolder );
MenuItem closeItem = new MenuItem( menu, SWT.NONE );
closeItem.setText( BaseMessages.getString( PKG, "Spoon.Tab.Close" ) );
closeItem.addSelectionListener( new SelectionAdapter() {
@Override
public void widgetSelected( SelectionEvent event ) {
int index = tabfolder.getSwtTabset().indexOf( item );
if ( index >= 0 ) {
TabMapEntry entry = delegates.tabs.getTabs().get( index );
tabClose( entry.getTabItem() );
}
}
} );
MenuItem closeAllItems = new MenuItem( menu, SWT.NONE );
closeAllItems.setText( BaseMessages.getString( PKG, "Spoon.Tab.CloseAll" ) );
closeAllItems.addSelectionListener( new SelectionAdapter() {
@Override
public void widgetSelected( SelectionEvent event ) {
for ( TabMapEntry entry : delegates.tabs.getTabs() ) {
tabClose( entry.getTabItem() );
}
}
} );
MenuItem closeOtherItems = new MenuItem( menu, SWT.NONE );
closeOtherItems.setText( BaseMessages.getString( PKG, "Spoon.Tab.CloseOthers" ) );
closeOtherItems.addSelectionListener( new SelectionAdapter() {
@Override
public void widgetSelected( SelectionEvent event ) {
int index = tabfolder.getSwtTabset().indexOf( item );
if ( index >= 0 ) {
TabMapEntry entry = delegates.tabs.getTabs().get( index );
for ( TabMapEntry closeEntry : delegates.tabs.getTabs() ) {
if ( !closeEntry.equals( entry ) ) {
tabClose( closeEntry.getTabItem() );
}
}
}
}
} );
menu.setLocation( real );
menu.setVisible( true );
}
}
} );
int[] weights = props.getSashWeights();
sashform.setWeights( weights );
sashform.setVisible( true );
// Set a minimum width on the sash so that the view and design buttons
// on the left panel are always visible.
//
Control[] comps = sashform.getChildren();
for ( Control comp : comps ) {
if ( comp instanceof Sash ) {
int limit = 10;
final int SASH_LIMIT = Const.isOSX() ? 150 : limit;
final Sash sash = (Sash) comp;
sash.addSelectionListener( new SelectionAdapter() {
public void widgetSelected( SelectionEvent event ) {
Rectangle rect = sash.getParent().getClientArea();
event.x = Math.min( Math.max( event.x, SASH_LIMIT ), rect.width - SASH_LIMIT );
if ( event.detail != SWT.DRAG ) {
sash.setBounds( event.x, event.y, event.width, event.height );
sashform.layout();
}
}
} );
}
}
tabfolder.addListener( this ); // methods: tabDeselected, tabClose,
// tabSelected
}
public void tabDeselected( TabItem item ) {
}
public boolean tabCloseSelected() {
// this gets called by the file-close menu item
String activePerspectiveId = SpoonPerspectiveManager.getInstance().getActivePerspective().getId();
boolean etlPerspective = activePerspectiveId.equals( MainSpoonPerspective.ID );
if ( etlPerspective ) {
return tabClose( tabfolder.getSelected() );
}
// hack to make the plugins see file-close commands
// this should be resolved properly when resolving PDI-6054
// maybe by extending the SpoonPerspectiveInterface to register event handlers from Spoon?
try {
SpoonPerspective activePerspective = SpoonPerspectiveManager.getInstance().getActivePerspective();
Class<? extends SpoonPerspective> cls = activePerspective.getClass();
Method m = cls.getMethod( "onFileClose" );
return (Boolean) m.invoke( activePerspective );
} catch ( Exception e ) {
// ignore any errors resulting from the hack
// e.printStackTrace();
}
return false;
}
public boolean tabClose( TabItem item ) {
try {
return delegates.tabs.tabClose( item );
} catch ( Exception e ) {
new ErrorDialog( shell, "Error", "Unexpected error closing tab!", e );
return false;
}
}
public TabSet getTabSet() {
return tabfolder;
}
public void tabSelected( TabItem item ) {
delegates.tabs.tabSelected( item );
enableMenus();
}
public String getRepositoryName() {
if ( rep == null ) {
return null;
}
return rep.getName();
}
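/**
 * Pastes steps, hops, notepads and step error handling from the given clipboard XML into the
 * transformation, offset to the given location. Pasted steps are renamed when needed, selected
 * afterwards and undo information is registered.
 *
 * A typical invocation from the graph would look roughly like the (hypothetical) sketch
 * spoon.pasteXML( transMeta, clipboardContent, mouseLocation ), where clipboardContent holds
 * the XML produced by copySelected().
 *
 * @param transMeta the transformation to paste into
 * @param clipcontent the clipboard XML to paste
 * @param loc the location (offset) at which to paste
 */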
public void pasteXML( TransMeta transMeta, String clipcontent, Point loc ) {
if ( RepositorySecurityUI.verifyOperations( shell, rep,
RepositoryOperation.MODIFY_TRANSFORMATION, RepositoryOperation.EXECUTE_TRANSFORMATION ) ) {
return;
}
try {
Document doc = XMLHandler.loadXMLString( clipcontent );
Node transNode = XMLHandler.getSubNode( doc, Spoon.XML_TAG_TRANSFORMATION_STEPS );
// De-select all, re-select pasted steps...
transMeta.unselectAll();
Node stepsNode = XMLHandler.getSubNode( transNode, "steps" );
int nr = XMLHandler.countNodes( stepsNode, "step" );
if ( getLog().isDebug() ) {
// "I found "+nr+" steps to paste on location: "
getLog().logDebug( BaseMessages.getString( PKG, "Spoon.Log.FoundSteps", "" + nr ) + loc );
}
StepMeta[] steps = new StepMeta[nr];
ArrayList<String> stepOldNames = new ArrayList<String>( nr );
// Point min = new Point(loc.x, loc.y);
Point min = new Point( 99999999, 99999999 );
// Load the steps...
for ( int i = 0; i < nr; i++ ) {
Node stepNode = XMLHandler.getSubNodeByNr( stepsNode, "step", i );
steps[i] = new StepMeta( stepNode, transMeta.getDatabases(), metaStore );
if ( loc != null ) {
Point p = steps[i].getLocation();
if ( min.x > p.x ) {
min.x = p.x;
}
if ( min.y > p.y ) {
min.y = p.y;
}
}
}
// Load the hops...
Node hopsNode = XMLHandler.getSubNode( transNode, "order" );
nr = XMLHandler.countNodes( hopsNode, "hop" );
if ( getLog().isDebug() ) {
// "I found "+nr+" hops to paste."
getLog().logDebug( BaseMessages.getString( PKG, "Spoon.Log.FoundHops", "" + nr ) );
}
TransHopMeta[] hops = new TransHopMeta[nr];
ArrayList<StepMeta> alSteps = new ArrayList<StepMeta>();
Collections.addAll( alSteps, steps );
for ( int i = 0; i < nr; i++ ) {
Node hopNode = XMLHandler.getSubNodeByNr( hopsNode, "hop", i );
hops[i] = new TransHopMeta( hopNode, alSteps );
}
// This is the offset:
//
Point offset = new Point( loc.x - min.x, loc.y - min.y );
// Undo/redo object positions...
int[] position = new int[steps.length];
for ( int i = 0; i < steps.length; i++ ) {
Point p = steps[i].getLocation();
String name = steps[i].getName();
steps[i].setLocation( p.x + offset.x, p.y + offset.y );
steps[i].setDraw( true );
// Check the name, find alternative...
stepOldNames.add( name );
steps[i].setName( transMeta.getAlternativeStepname( name ) );
transMeta.addStep( steps[i] );
position[i] = transMeta.indexOfStep( steps[i] );
steps[i].setSelected( true );
}
// Add the hops too...
for ( TransHopMeta hop : hops ) {
transMeta.addTransHop( hop );
}
// Load the notes...
Node notesNode = XMLHandler.getSubNode( transNode, "notepads" );
nr = XMLHandler.countNodes( notesNode, "notepad" );
if ( getLog().isDebug() ) {
// "I found "+nr+" notepads to paste."
getLog().logDebug( BaseMessages.getString( PKG, "Spoon.Log.FoundNotepads", "" + nr ) );
}
NotePadMeta[] notes = new NotePadMeta[nr];
for ( int i = 0; i < notes.length; i++ ) {
Node noteNode = XMLHandler.getSubNodeByNr( notesNode, "notepad", i );
notes[i] = new NotePadMeta( noteNode );
Point p = notes[i].getLocation();
notes[i].setLocation( p.x + offset.x, p.y + offset.y );
transMeta.addNote( notes[i] );
notes[i].setSelected( true );
}
// Set the source and target steps ...
for ( StepMeta step : steps ) {
StepMetaInterface smi = step.getStepMetaInterface();
smi.searchInfoAndTargetSteps( transMeta.getSteps() );
}
// Set the error handling hops
Node errorHandlingNode = XMLHandler.getSubNode( transNode, TransMeta.XML_TAG_STEP_ERROR_HANDLING );
int nrErrorHandlers = XMLHandler.countNodes( errorHandlingNode, StepErrorMeta.XML_TAG );
for ( int i = 0; i < nrErrorHandlers; i++ ) {
Node stepErrorMetaNode = XMLHandler.getSubNodeByNr( errorHandlingNode, StepErrorMeta.XML_TAG, i );
StepErrorMeta stepErrorMeta =
new StepErrorMeta( transMeta.getParentVariableSpace(), stepErrorMetaNode, transMeta.getSteps() );
// Handle pasting multiple times, need to update source and target step names
int srcStepPos = stepOldNames.indexOf( stepErrorMeta.getSourceStep().getName() );
int tgtStepPos = stepOldNames.indexOf( stepErrorMeta.getTargetStep().getName() );
StepMeta sourceStep = transMeta.findStep( steps[srcStepPos].getName() );
if ( sourceStep != null ) {
sourceStep.setStepErrorMeta( stepErrorMeta );
}
StepMeta targetStep = transMeta.findStep( steps[tgtStepPos].getName() );
stepErrorMeta.setSourceStep( sourceStep );
stepErrorMeta.setTargetStep( targetStep );
}
// Save undo information too...
addUndoNew( transMeta, steps, position, false );
int[] hopPos = new int[hops.length];
for ( int i = 0; i < hops.length; i++ ) {
hopPos[i] = transMeta.indexOfTransHop( hops[i] );
}
addUndoNew( transMeta, hops, hopPos, true );
int[] notePos = new int[notes.length];
for ( int i = 0; i < notes.length; i++ ) {
notePos[i] = transMeta.indexOfNote( notes[i] );
}
addUndoNew( transMeta, notes, notePos, true );
if ( transMeta.haveStepsChanged() ) {
refreshTree();
refreshGraph();
}
} catch ( KettleException e ) {
// "Error pasting steps...",
// "I was unable to paste steps to this transformation"
new ErrorDialog( shell, BaseMessages.getString( PKG, "Spoon.Dialog.UnablePasteSteps.Title" ), BaseMessages
.getString( PKG, "Spoon.Dialog.UnablePasteSteps.Message" ), e );
}
}
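/**
 * Copies the selected steps, the hops between them, the given notepads and any step error handling
 * to the clipboard as XML (the counterpart of pasteXML).
 *
 * @param transMeta the transformation the selection belongs to
 * @param steps the selected steps to copy
 * @param notes the selected notepads to copy (may be null)
 */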
public void copySelected( TransMeta transMeta, List<StepMeta> steps, List<NotePadMeta> notes ) {
if ( steps == null || steps.size() == 0 ) {
return;
}
if ( RepositorySecurityUI.verifyOperations( shell, rep,
RepositoryOperation.MODIFY_TRANSFORMATION, RepositoryOperation.EXECUTE_TRANSFORMATION ) ) {
return;
}
StringBuilder xml = new StringBuilder( 5000 ).append( XMLHandler.getXMLHeader() );
try {
xml.append( XMLHandler.openTag( Spoon.XML_TAG_TRANSFORMATION_STEPS ) ).append( Const.CR );
xml.append( XMLHandler.openTag( Spoon.XML_TAG_STEPS ) ).append( Const.CR );
for ( StepMeta step : steps ) {
xml.append( step.getXML() );
}
xml.append( XMLHandler.closeTag( Spoon.XML_TAG_STEPS ) ).append( Const.CR );
// Also check for the hops in between the selected steps...
xml.append( XMLHandler.openTag( TransMeta.XML_TAG_ORDER ) ).append( Const.CR );
for ( StepMeta step1 : steps ) {
for ( StepMeta step2 : steps ) {
if ( step1 != step2 ) {
TransHopMeta hop = transMeta.findTransHop( step1, step2, true );
if ( hop != null ) {
// Ok, we found one...
xml.append( hop.getXML() ).append( Const.CR );
}
}
}
}
xml.append( XMLHandler.closeTag( TransMeta.XML_TAG_ORDER ) ).append( Const.CR );
xml.append( XMLHandler.openTag( TransMeta.XML_TAG_NOTEPADS ) ).append( Const.CR );
if ( notes != null ) {
for ( NotePadMeta note : notes ) {
xml.append( note.getXML() );
}
}
xml.append( XMLHandler.closeTag( TransMeta.XML_TAG_NOTEPADS ) ).append( Const.CR );
xml.append( XMLHandler.openTag( TransMeta.XML_TAG_STEP_ERROR_HANDLING ) ).append( Const.CR );
for ( StepMeta step : steps ) {
if ( step.getStepErrorMeta() != null ) {
xml.append( step.getStepErrorMeta().getXML() ).append( Const.CR );
}
}
xml.append( XMLHandler.closeTag( TransMeta.XML_TAG_STEP_ERROR_HANDLING ) ).append( Const.CR );
xml.append( XMLHandler.closeTag( Spoon.XML_TAG_TRANSFORMATION_STEPS ) ).append( Const.CR );
toClipboard( xml.toString() );
} catch ( Exception ex ) {
new ErrorDialog( getShell(), "Error", "Error encoding to XML", ex );
}
}
public void editHop( TransMeta transMeta, TransHopMeta transHopMeta ) {
// Backup situation BEFORE edit:
String name = transHopMeta.toString();
TransHopMeta before = (TransHopMeta) transHopMeta.clone();
TransHopDialog hd = new TransHopDialog( shell, SWT.NONE, transHopMeta, transMeta );
if ( hd.open() != null ) {
// Backup situation for redo/undo:
TransHopMeta after = (TransHopMeta) transHopMeta.clone();
addUndoChange( transMeta, new TransHopMeta[] { before }, new TransHopMeta[] { after }, new int[] { transMeta
.indexOfTransHop( transHopMeta ) } );
String newName = transHopMeta.toString();
if ( !name.equalsIgnoreCase( newName ) ) {
refreshTree();
refreshGraph(); // color, nr of copies...
}
}
setShellText();
}
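/**
 * Deletes the given hop from the transformation, registers undo information and, when the hop
 * carried error handling rows to the error target step, disables that error handling.
 *
 * @param transMeta the transformation to remove the hop from
 * @param transHopMeta the hop to remove
 */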
public void delHop( TransMeta transMeta, TransHopMeta transHopMeta ) {
int index = transMeta.indexOfTransHop( transHopMeta );
addUndoDelete( transMeta, new Object[] { (TransHopMeta) transHopMeta.clone() }, new int[] { index } );
transMeta.removeTransHop( index );
// If this is an error handling hop, disable it
//
if ( transHopMeta.getFromStep().isDoingErrorHandling() ) {
StepErrorMeta stepErrorMeta = transHopMeta.getFromStep().getStepErrorMeta();
// We can only disable error handling if the target of the hop is the same as the target of the error handling.
//
if ( stepErrorMeta.getTargetStep() != null
&& stepErrorMeta.getTargetStep().equals( transHopMeta.getToStep() ) ) {
StepMeta stepMeta = transHopMeta.getFromStep();
// Only if the target step is where the error handling is going to...
//
StepMeta before = (StepMeta) stepMeta.clone();
stepErrorMeta.setEnabled( false );
index = transMeta.indexOfStep( stepMeta );
addUndoChange( transMeta, new Object[] { before }, new Object[] { stepMeta }, new int[] { index } );
}
}
refreshTree();
refreshGraph();
}
public void newHop( TransMeta transMeta, StepMeta fr, StepMeta to ) {
TransHopMeta hi = new TransHopMeta( fr, to );
TransHopDialog hd = new TransHopDialog( shell, SWT.NONE, hi, transMeta );
if ( hd.open() != null ) {
newHop( transMeta, hi );
}
}
public void newHop( TransMeta transMeta, TransHopMeta transHopMeta ) {
if ( checkIfHopAlreadyExists( transMeta, transHopMeta ) ) {
transMeta.addTransHop( transHopMeta );
int idx = transMeta.indexOfTransHop( transHopMeta );
if ( !performNewTransHopChecks( transMeta, transHopMeta ) ) {
// Some error occurred: loops, existing hop, etc.
// Remove it again...
//
transMeta.removeTransHop( idx );
} else {
addUndoNew( transMeta, new TransHopMeta[] { transHopMeta }, new int[] { transMeta
.indexOfTransHop( transHopMeta ) } );
}
// Just to make sure
transHopMeta.getFromStep().drawStep();
transHopMeta.getToStep().drawStep();
refreshTree();
refreshGraph();
}
}
/**
* @param transMeta transformation's meta
* @param newHop hop to be checked
* @return true when no hop between the two steps exists yet, false when such a hop already exists (an error dialog is shown)
*/
public boolean checkIfHopAlreadyExists( TransMeta transMeta, TransHopMeta newHop ) {
boolean ok = true;
if ( transMeta.findTransHop( newHop.getFromStep(), newHop.getToStep() ) != null ) {
MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR );
mb.setMessage( BaseMessages.getString( PKG, "Spoon.Dialog.HopExists.Message" ) ); // "This hop already exists!"
mb.setText( BaseMessages.getString( PKG, "Spoon.Dialog.HopExists.Title" ) ); // Error!
mb.open();
ok = false;
}
return ok;
}
/**
* @param transMeta transformation's meta
* @param newHop hop to be checked
* @return true when the new hop passes the checks (it does not create a loop), false otherwise
*/
public boolean performNewTransHopChecks( TransMeta transMeta, TransHopMeta newHop ) {
boolean ok = true;
if ( transMeta.hasLoop( newHop.getFromStep() ) || transMeta.hasLoop( newHop.getToStep() ) ) {
MessageBox mb = new MessageBox( shell, SWT.YES | SWT.ICON_WARNING );
mb.setMessage( BaseMessages.getString( PKG, "TransGraph.Dialog.HopCausesLoop.Message" ) );
mb.setText( BaseMessages.getString( PKG, "TransGraph.Dialog.HopCausesLoop.Title" ) );
mb.open();
ok = false;
}
if ( ok ) { // only do the following checks, e.g. checkRowMixingStatically
// when not looping, otherwise we get a loop with
// StackOverflow there ;-)
try {
if ( !newHop.getToStep().getStepMetaInterface().excludeFromRowLayoutVerification() ) {
transMeta.checkRowMixingStatically( newHop.getToStep(), null );
}
} catch ( KettleRowException re ) {
// Show warning about mixing rows with conflicting layouts...
new ErrorDialog(
shell, BaseMessages.getString( PKG, "TransGraph.Dialog.HopCausesRowMixing.Title" ), BaseMessages
.getString( PKG, "TransGraph.Dialog.HopCausesRowMixing.Message" ), re );
}
verifyCopyDistribute( transMeta, newHop.getFromStep() );
}
return ok;
}
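/**
 * When a step ends up with exactly two outgoing hops, optionally asks the user whether rows should
 * be distributed, copied or (when a row distribution plugin is installed) custom distributed, and
 * applies the chosen mode to the step.
 *
 * @param transMeta the transformation the step belongs to
 * @param fr the step that just received an extra outgoing hop
 */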
public void verifyCopyDistribute( TransMeta transMeta, StepMeta fr ) {
List<StepMeta> nextSteps = transMeta.findNextSteps( fr );
int nrNextSteps = nextSteps.size();
// don't show it for 3 or more hops, by then you should have had the
// message
if ( nrNextSteps == 2 ) {
boolean distributes = fr.getStepMetaInterface().excludeFromCopyDistributeVerification();
boolean customDistribution = false;
if ( props.showCopyOrDistributeWarning()
&& !fr.getStepMetaInterface().excludeFromCopyDistributeVerification() ) {
MessageDialogWithToggle md =
new MessageDialogWithToggle(
shell, BaseMessages.getString( PKG, "System.Warning" ), null, BaseMessages.getString(
PKG, "Spoon.Dialog.CopyOrDistribute.Message", fr.getName(), Integer.toString( nrNextSteps ) ),
MessageDialog.WARNING, getRowDistributionLabels(), 0, BaseMessages.getString(
PKG, "Spoon.Message.Warning.NotShowWarning" ), !props.showCopyOrDistributeWarning() );
MessageDialogWithToggle.setDefaultImage( GUIResource.getInstance().getImageSpoon() );
int idx = md.open();
props.setShowCopyOrDistributeWarning( !md.getToggleState() );
props.saveProps();
distributes = idx == Spoon.MESSAGE_DIALOG_WITH_TOGGLE_YES_BUTTON_ID;
customDistribution = idx == Spoon.MESSAGE_DIALOG_WITH_TOGGLE_CUSTOM_DISTRIBUTION_BUTTON_ID;
}
if ( distributes ) {
fr.setDistributes( true );
fr.setRowDistribution( null );
} else if ( customDistribution ) {
RowDistributionInterface rowDistribution = getActiveTransGraph().askUserForCustomDistributionMethod();
fr.setDistributes( true );
fr.setRowDistribution( rowDistribution );
} else {
fr.setDistributes( false );
}
refreshTree();
refreshGraph();
}
}
private String[] getRowDistributionLabels() {
ArrayList<String> labels = new ArrayList<String>();
labels.add( BaseMessages.getString( PKG, "Spoon.Dialog.CopyOrDistribute.Distribute" ) );
labels.add( BaseMessages.getString( PKG, "Spoon.Dialog.CopyOrDistribute.Copy" ) );
if ( PluginRegistry.getInstance().getPlugins( RowDistributionPluginType.class ).size() > 0 ) {
labels.add( BaseMessages.getString( PKG, "Spoon.Dialog.CopyOrDistribute.CustomRowDistribution" ) );
}
return labels.toArray( new String[labels.size()] );
}
public void newHop( TransMeta transMeta ) {
newHop( transMeta, null, null );
}
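/**
 * Shows the repository login dialog. Connecting is refused while there are unsaved tabs; on a
 * successful login the previous repository (if any) is disconnected, the new one becomes the active
 * repository and the session information is reloaded.
 */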
public void openRepository() {
// Check whether tabs are dirty and warn the user that they must save them prior to connecting. Don't connect!
if ( Spoon.getInstance().isTabsChanged() ) {
MessageBox mb = new MessageBox( Spoon.getInstance().getShell(), SWT.OK );
mb.setMessage( BaseMessages.getString( PKG, "Spoon.Dialog.WarnToSaveAllPriorToConnect.Message" ) );
mb.setText( BaseMessages.getString( PKG, "Spoon.Dialog.WarnToCloseAllForce.Disconnect.Title" ) );
mb.open();
// Don't connect, user will need to save all their dirty tabs.
return;
}
loginDialog = new RepositoriesDialog( shell, null, new ILoginCallback() {
public void onSuccess( Repository repository ) {
// Close previous repository...
if ( rep != null ) {
rep.disconnect();
SpoonPluginManager.getInstance().notifyLifecycleListeners( SpoonLifeCycleEvent.REPOSITORY_DISCONNECTED );
}
setRepository( repository );
loadSessionInformation( repository, true );
refreshTree();
setShellText();
SpoonPluginManager.getInstance().notifyLifecycleListeners( SpoonLifeCycleEvent.REPOSITORY_CONNECTED );
}
public void onError( Throwable t ) {
closeRepository();
onLoginError( t );
}
public void onCancel() {
}
} );
loginDialog.show();
}
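/**
 * Re-matches the shared objects (databases, slave servers, partition and cluster schemas) of all
 * loaded jobs and transformations against the given repository, or against the local shared objects
 * file when the repository is null, and relocates each one to the corresponding repository directory.
 *
 * @param repository the repository to read the shared objects from (null when disconnecting)
 * @param saveOldDatabases when true, connections that are missing in the new repository are kept so
 *          they get saved along with the job or transformation
 */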
private void loadSessionInformation( Repository repository, boolean saveOldDatabases ) {
JobMeta[] jobMetas = getLoadedJobs();
for ( JobMeta jobMeta : jobMetas ) {
for ( int i = 0; i < jobMeta.nrDatabases(); i++ ) {
jobMeta.getDatabase( i ).setObjectId( null );
}
// Clear the object ID of the existing job.
jobMeta.setObjectId( null );
// Keep track of the old databases for now.
List<DatabaseMeta> oldDatabases = jobMeta.getDatabases();
// In order to re-match the databases on name (not content), we
// need to load the databases from the new repository.
// NOTE: for purposes such as DEVELOP - TEST - PRODUCTION
// cycles.
// first clear the list of databases and slave servers
jobMeta.setDatabases( new ArrayList<DatabaseMeta>() );
jobMeta.setSlaveServers( new ArrayList<SlaveServer>() );
// Read them from the new repository.
try {
SharedObjects sharedObjects =
repository != null ? repository.readJobMetaSharedObjects( jobMeta ) : jobMeta.readSharedObjects();
sharedObjectsFileMap.put( sharedObjects.getFilename(), sharedObjects );
} catch ( KettleException e ) {
new ErrorDialog(
shell, BaseMessages.getString( PKG, "Spoon.Dialog.ErrorReadingSharedObjects.Title" ), BaseMessages
.getString( PKG, "Spoon.Dialog.ErrorReadingSharedObjects.Message", makeTabName( jobMeta, true ) ),
e
);
}
// Then we need to re-match the databases at save time...
for ( DatabaseMeta oldDatabase : oldDatabases ) {
DatabaseMeta newDatabase = DatabaseMeta.findDatabase( jobMeta.getDatabases(), oldDatabase.getName() );
// If it exists, change the settings...
if ( newDatabase != null ) {
//
// A database connection with the same name exists in
// the new repository.
// Change the old connections to reflect the settings in
// the new repository
//
oldDatabase.setDatabaseInterface( newDatabase.getDatabaseInterface() );
} else {
if ( saveOldDatabases ) {
//
// The old database is not present in the new
// repository: simply add it to the list.
// When the job gets saved, it will be added
// to the repository.
//
jobMeta.addDatabase( oldDatabase );
}
}
}
if ( repository != null ) {
try {
// For the existing job, change the directory too:
// Try to find the same directory in the new repository...
RepositoryDirectoryInterface rdi =
repository.findDirectory( jobMeta.getRepositoryDirectory().getPath() );
if ( rdi != null ) {
jobMeta.setRepositoryDirectory( rdi );
} else {
// the root is the default!
jobMeta.setRepositoryDirectory( repository.loadRepositoryDirectoryTree() );
}
} catch ( KettleException ke ) {
rep = null;
new ErrorDialog(
shell, BaseMessages.getString( PKG, "Spoon.Dialog.ErrorConnectingRepository.Title" ), BaseMessages
.getString( PKG, "Spoon.Dialog.ErrorConnectingRepository.Message", Const.CR ), ke
);
}
}
}
TransMeta[] transMetas = getLoadedTransformations();
for ( TransMeta transMeta : transMetas ) {
for ( int i = 0; i < transMeta.nrDatabases(); i++ ) {
transMeta.getDatabase( i ).setObjectId( null );
}
// Clear the object ID of the existing transformation.
transMeta.setObjectId( null );
// Keep track of the old databases for now.
List<DatabaseMeta> oldDatabases = transMeta.getDatabases();
// In order to re-match the databases on name (not content), we
// need to load the databases from the new repository.
// NOTE: for purposes such as DEVELOP - TEST - PRODUCTION
// cycles.
// first clear the list of databases, partition schemas, slave
// servers, clusters
transMeta.setDatabases( new ArrayList<DatabaseMeta>() );
transMeta.setPartitionSchemas( new ArrayList<PartitionSchema>() );
transMeta.setSlaveServers( new ArrayList<SlaveServer>() );
transMeta.setClusterSchemas( new ArrayList<ClusterSchema>() );
// Read them from the new repository.
try {
SharedObjects sharedObjects =
repository != null ? repository.readTransSharedObjects( transMeta ) : transMeta.readSharedObjects();
sharedObjectsFileMap.put( sharedObjects.getFilename(), sharedObjects );
} catch ( KettleException e ) {
new ErrorDialog(
shell, BaseMessages.getString( PKG, "Spoon.Dialog.ErrorReadingSharedObjects.Title" ),
BaseMessages.getString( PKG, "Spoon.Dialog.ErrorReadingSharedObjects.Message", makeTabName(
transMeta, true ) ), e
);
}
// Then we need to re-match the databases at save time...
for ( DatabaseMeta oldDatabase : oldDatabases ) {
DatabaseMeta newDatabase = DatabaseMeta.findDatabase( transMeta.getDatabases(), oldDatabase.getName() );
// If it exists, change the settings...
if ( newDatabase != null ) {
//
// A database connection with the same name exists in
// the new repository.
// Change the old connections to reflect the settings in
// the new repository
//
oldDatabase.setDatabaseInterface( newDatabase.getDatabaseInterface() );
} else {
if ( saveOldDatabases ) {
//
// The old database is not present in the new
// repository: simply add it to the list.
// When the transformation gets saved, it will be added
// to the repository.
//
transMeta.addDatabase( oldDatabase );
}
}
}
if ( repository != null ) {
try {
// For the existing transformation, change the directory too:
// Try to find the same directory in the new repository...
RepositoryDirectoryInterface rdi =
repository.findDirectory( transMeta.getRepositoryDirectory().getPath() );
if ( rdi != null ) {
transMeta.setRepositoryDirectory( rdi );
} else {
// the root is the default!
transMeta.setRepositoryDirectory( repository.loadRepositoryDirectoryTree() );
}
} catch ( KettleException ke ) {
rep = null;
new ErrorDialog(
shell, BaseMessages.getString( PKG, "Spoon.Dialog.ErrorConnectingRepository.Title" ), BaseMessages
.getString( PKG, "Spoon.Dialog.ErrorConnectingRepository.Message", Const.CR ), ke
);
}
}
}
}
public void clearSharedObjectCache() throws KettleException {
if ( rep != null ) {
rep.clearSharedObjectCache();
TransMeta transMeta = getActiveTransformation();
if ( transMeta != null ) {
rep.readTransSharedObjects( transMeta );
}
JobMeta jobMeta = getActiveJob();
if ( jobMeta != null ) {
rep.readJobMetaSharedObjects( jobMeta );
}
}
}
public void exploreRepository() {
if ( rep != null ) {
final RepositoryExplorerCallback cb = new RepositoryExplorerCallback() {
public boolean open( UIRepositoryContent element, String revision ) {
String objName = element.getName();
if ( objName != null ) {
RepositoryObjectType objectType = element.getRepositoryElementType();
RepositoryDirectory repDir = element.getRepositoryDirectory();
if ( element.getObjectId() != null ) { // new way
loadObjectFromRepository( element.getObjectId(), objectType, revision );
} else { // old way
loadObjectFromRepository( objName, objectType, repDir, revision );
}
}
return false; // do not close explorer
}
};
try {
final XulWaitBox box = (XulWaitBox) this.mainSpoonContainer.getDocumentRoot().createElement( "waitbox" );
box.setIndeterminate( true );
box.setCanCancel( false );
box.setTitle( BaseMessages.getString(
RepositoryDialogInterface.class, "RepositoryExplorerDialog.Connection.Wait.Title" ) );
box.setMessage( BaseMessages.getString(
RepositoryDialogInterface.class, "RepositoryExplorerDialog.Explorer.Wait.Message" ) );
box.setDialogParent( shell );
box.setRunnable( new WaitBoxRunnable( box ) {
@Override
public void run() {
shell.getDisplay().syncExec( new Runnable() {
public void run() {
try {
RepositoryExplorer explorer =
new RepositoryExplorer( shell, rep, cb, Variables.getADefaultVariableSpace() );
box.stop();
explorer.show();
explorer.dispose();
} catch ( final Throwable e ) {
shell.getDisplay().asyncExec( new Runnable() {
public void run() {
new ErrorDialog( shell, BaseMessages.getString( PKG, "Spoon.Error" ), e.getMessage(), e );
}
} );
}
}
} );
}
@Override
public void cancel() {
}
} );
box.start();
} catch ( Throwable e ) {
new ErrorDialog( shell, BaseMessages.getString( PKG, "Spoon.Error" ), e.getMessage(), e );
}
}
}
private void loadObjectFromRepository( ObjectId objectId, RepositoryObjectType objectType, String revision ) {
// Try to open the selected transformation.
if ( objectType.equals( RepositoryObjectType.TRANSFORMATION ) ) {
try {
TransLoadProgressDialog progressDialog = new TransLoadProgressDialog( shell, rep, objectId, revision );
TransMeta transMeta = progressDialog.open();
if ( transMeta != null ) {
transMeta.clearChanged();
if ( log.isDetailed() ) {
log.logDetailed( BaseMessages.getString(
PKG, "Spoon.Log.LoadToTransformation", transMeta.getName(), transMeta
.getRepositoryDirectory().getName() ) );
}
props.addLastFile( LastUsedFile.FILE_TYPE_TRANSFORMATION, transMeta.getName(), transMeta
.getRepositoryDirectory().getPath(), true, rep.getName() );
addMenuLast();
addTransGraph( transMeta );
}
refreshTree();
refreshGraph();
} catch ( Exception e ) {
new ErrorDialog( ( (Spoon) SpoonFactory.getInstance() ).getShell(), BaseMessages.getString(
Spoon.class, "Spoon.Dialog.ErrorOpeningById.Message", objectId ), e.getMessage(), e );
}
} else if ( objectType.equals( RepositoryObjectType.JOB ) ) {
try {
JobLoadProgressDialog progressDialog = new JobLoadProgressDialog( shell, rep, objectId, revision );
JobMeta jobMeta = progressDialog.open();
if ( jobMeta != null ) {
jobMeta.clearChanged();
props.addLastFile( LastUsedFile.FILE_TYPE_JOB, jobMeta.getName(), jobMeta
.getRepositoryDirectory().getPath(), true, rep.getName() );
saveSettings();
addMenuLast();
addJobGraph( jobMeta );
}
refreshTree();
refreshGraph();
} catch ( Exception e ) {
new ErrorDialog( ( (Spoon) SpoonFactory.getInstance() ).getShell(), BaseMessages.getString(
Spoon.class, "Spoon.Dialog.ErrorOpeningById.Message", objectId ), e.getMessage(), e );
}
}
}
public void loadObjectFromRepository( String objName, RepositoryObjectType objectType,
RepositoryDirectoryInterface repDir, String versionLabel ) {
// Try to open the selected transformation.
if ( objectType.equals( RepositoryObjectType.TRANSFORMATION ) ) {
try {
TransLoadProgressDialog progressDialog =
new TransLoadProgressDialog( shell, rep, objName, repDir, versionLabel );
TransMeta transMeta = progressDialog.open();
transMeta.clearChanged();
if ( transMeta != null ) {
if ( log.isDetailed() ) {
log.logDetailed( BaseMessages.getString( PKG, "Spoon.Log.LoadToTransformation", objName, repDir
.getName() ) );
}
props
.addLastFile( LastUsedFile.FILE_TYPE_TRANSFORMATION, objName, repDir.getPath(), true, rep.getName() );
addMenuLast();
addTransGraph( transMeta );
}
refreshTree();
refreshGraph();
} catch ( Exception e ) {
MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR );
mb.setMessage( BaseMessages.getString( PKG, "Spoon.Dialog.ErrorOpening.Message" )
+ objName + Const.CR + e.getMessage() ); // "Error opening : "
mb.setText( BaseMessages.getString( PKG, "Spoon.Dialog.ErrorOpening.Title" ) );
mb.open();
}
} else if ( objectType.equals( RepositoryObjectType.JOB ) ) {
// Try to open the selected job.
try {
JobLoadProgressDialog progressDialog =
new JobLoadProgressDialog( shell, rep, objName, repDir, versionLabel );
JobMeta jobMeta = progressDialog.open();
jobMeta.clearChanged();
if ( jobMeta != null ) {
props.addLastFile( LastUsedFile.FILE_TYPE_JOB, objName, repDir.getPath(), true, rep.getName() );
saveSettings();
addMenuLast();
addJobGraph( jobMeta );
}
refreshTree();
refreshGraph();
} catch ( Exception e ) {
MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR );
mb.setMessage( BaseMessages.getString( PKG, "Spoon.Dialog.ErrorOpening.Message" )
+ objName + Const.CR + e.getMessage() ); // "Error opening : "
mb.setText( BaseMessages.getString( PKG, "Spoon.Dialog.ErrorOpening.Title" ) );
mb.open();
}
}
}
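/**
 * Disconnects from the current repository after prompting the user to close all open jobs and
 * transformations, drops the repository metastore from the metastore list (when more than one is
 * registered) and notifies the lifecycle listeners of the disconnect.
 */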
public void closeRepository() {
if ( rep != null ) {
// Prompt and close all tabs as user disconnected from the repo
boolean shouldDisconnect = Spoon.getInstance().closeAllJobsAndTransformations();
if ( shouldDisconnect ) {
loadSessionInformation( null, false );
rep.disconnect();
if ( metaStore.getMetaStoreList().size() > 1 ) {
try {
metaStore.getMetaStoreList().remove( 0 );
metaStore.setActiveMetaStoreName( metaStore.getMetaStoreList().get( 0 ).getName() );
} catch ( MetaStoreException e ) {
new ErrorDialog( shell, BaseMessages.getString( PKG, "Spoon.ErrorRemovingMetaStore.Title" ),
BaseMessages.getString( PKG, "Spoon.ErrorRemovingMetaStore.Message" ), e );
}
}
setRepository( null );
setShellText();
SpoonPluginManager.getInstance().notifyLifecycleListeners( SpoonLifeCycleEvent.REPOSITORY_DISCONNECTED );
enableMenus();
}
}
}
public void openFile() {
openFile( false );
}
public void importFile() {
openFile( true );
}
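/**
 * Opens a file, either from the file system via a file dialog or, when connected to a repository in
 * the ETL perspective and not importing, from the repository via the object selection dialog.
 *
 * @param importfile true to always import from an XML file, even when a repository connection is active
 */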
public void openFile( boolean importfile ) {
SpoonPerspective activePerspective = SpoonPerspectiveManager.getInstance().getActivePerspective();
// In case the perspective wants to handle open/save itself, let it...
//
if ( !importfile ) {
if ( activePerspective instanceof SpoonPerspectiveOpenSaveInterface ) {
( (SpoonPerspectiveOpenSaveInterface) activePerspective ).open();
return;
}
}
String activePerspectiveId = activePerspective.getId();
boolean etlPerspective = activePerspectiveId.equals( MainSpoonPerspective.ID );
if ( rep == null || importfile || !etlPerspective ) { // Load from XML
FileDialog dialog = new FileDialog( shell, SWT.OPEN );
LinkedHashSet<String> extensions = new LinkedHashSet<String>();
LinkedHashSet<String> extensionNames = new LinkedHashSet<String>();
StringBuilder allExtensions = new StringBuilder();
for ( FileListener l : fileListeners ) {
for ( String ext : l.getSupportedExtensions() ) {
extensions.add( "*." + ext );
allExtensions.append( "*." ).append( ext ).append( ";" );
}
Collections.addAll( extensionNames, l.getFileTypeDisplayNames( Locale.getDefault() ) );
}
extensions.add( "*" );
extensionNames.add( BaseMessages.getString( PKG, "Spoon.Dialog.OpenFile.AllFiles" ) );
String[] exts = new String[extensions.size() + 1];
exts[0] = allExtensions.toString();
System.arraycopy( extensions.toArray( new String[extensions.size()] ), 0, exts, 1, extensions.size() );
String[] extNames = new String[extensionNames.size() + 1];
extNames[0] = BaseMessages.getString( PKG, "Spoon.Dialog.OpenFile.AllTypes" );
System.arraycopy(
extensionNames.toArray( new String[extensionNames.size()] ), 0, extNames, 1, extensionNames.size() );
dialog.setFilterExtensions( exts );
setFilterPath( dialog );
String filename = dialog.open();
if ( filename != null ) {
if ( importfile ) {
if ( activePerspective instanceof SpoonPerspectiveOpenSaveInterface ) {
( (SpoonPerspectiveOpenSaveInterface) activePerspective ).importFile( filename );
return;
}
}
lastDirOpened = dialog.getFilterPath();
openFile( filename, importfile );
}
} else {
SelectObjectDialog sod = new SelectObjectDialog( shell, rep );
if ( sod.open() != null ) {
RepositoryObjectType type = sod.getObjectType();
String name = sod.getObjectName();
RepositoryDirectoryInterface repDir = sod.getDirectory();
// Load a transformation
if ( RepositoryObjectType.TRANSFORMATION.equals( type ) ) {
TransLoadProgressDialog tlpd = new TransLoadProgressDialog( shell, rep, name, repDir, null ); // Loads the last version
TransMeta transMeta = tlpd.open();
if ( transMeta != null ) {
sharedObjectsFileMap.put( transMeta.getSharedObjects().getFilename(), transMeta.getSharedObjects() );
setTransMetaVariables( transMeta );
if ( log.isDetailed() ) {
log.logDetailed( BaseMessages.getString( PKG, "Spoon.Log.LoadToTransformation", name, repDir
.getName() ) );
}
props.addLastFile( LastUsedFile.FILE_TYPE_TRANSFORMATION, name, repDir.getPath(), true, rep.getName() );
addMenuLast();
transMeta.clearChanged();
// transMeta.setFilename(name); // Don't do it, it's a bad idea!
addTransGraph( transMeta );
}
refreshGraph();
refreshTree();
} else if ( RepositoryObjectType.JOB.equals( type ) ) {
// Load a job
JobLoadProgressDialog jlpd = new JobLoadProgressDialog( shell, rep, name, repDir, null ); // Loads the last version
JobMeta jobMeta = jlpd.open();
if ( jobMeta != null ) {
sharedObjectsFileMap.put( jobMeta.getSharedObjects().getFilename(), jobMeta.getSharedObjects() );
setJobMetaVariables( jobMeta );
props.addLastFile( LastUsedFile.FILE_TYPE_JOB, name, repDir.getPath(), true, rep.getName() );
saveSettings();
addMenuLast();
addJobGraph( jobMeta );
}
refreshGraph();
refreshTree();
}
}
}
}
private void setFilterPath( FileDialog dialog ) {
if ( !Const.isEmpty( lastDirOpened ) ) {
if ( new File( lastDirOpened ).exists() ) {
dialog.setFilterPath( lastDirOpened );
}
}
}
private String lastFileOpened = null;
public String getLastFileOpened() {
if ( lastFileOpened == null ) {
lastFileOpened = System.getProperty( "org.pentaho.di.defaultVFSPath", "" );
}
return lastFileOpened;
}
public void setLastFileOpened( String inLastFileOpened ) {
lastFileOpened = inLastFileOpened;
}
public void displayCmdLine() {
String cmdFile = getCmdLine();
if ( Const.isEmpty( cmdFile ) ) {
MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_INFORMATION );
mb.setMessage( BaseMessages.getString( PKG, "ExportCmdLine.JobOrTransformationMissing.Message" ) );
mb.setText( BaseMessages.getString( PKG, "ExportCmdLine.JobOrTransformationMissing.Title" ) );
mb.open();
} else {
ShowBrowserDialog sbd =
new ShowBrowserDialog( shell, BaseMessages.getString( PKG, "ExportCmdLine.CommandLine.Title" ), cmdFile );
sbd.open();
}
}
public void createCmdLineFile() {
String cmdFile = getCmdLine();
if ( Const.isEmpty( cmdFile ) ) {
MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_INFORMATION );
mb.setMessage( BaseMessages.getString( PKG, "ExportCmdLine.JobOrTransformationMissing.Message" ) );
mb.setText( BaseMessages.getString( PKG, "ExportCmdLine.JobOrTransformationMissing.Title" ) );
mb.open();
} else {
boolean export = true;
FileDialog dialog = new FileDialog( shell, SWT.SAVE );
dialog.setFilterExtensions( new String[] { "*.bat", "*.sh", "*.*" } );
dialog.setFilterNames( new String[] {
BaseMessages.getString( PKG, "ExportCmdLine.BatFiles" ),
BaseMessages.getString( PKG, "ExportCmdLineShFiles" ),
BaseMessages.getString( PKG, "ExportCmdLine.AllFiles" ) } );
String filename = dialog.open();
if ( filename != null ) {
// See if the file already exists...
int id = SWT.YES;
try {
FileObject f = KettleVFS.getFileObject( filename );
if ( f.exists() ) {
MessageBox mb = new MessageBox( shell, SWT.NO | SWT.YES | SWT.ICON_WARNING );
mb.setMessage( BaseMessages.getString( PKG, "ExportCmdLineShFiles.FileExistsReplace", filename ) );
mb.setText( BaseMessages.getString( PKG, "ExportCmdLineShFiles.ConfirmOverwrite" ) );
id = mb.open();
}
} catch ( Exception e ) {
// Ignore errors
}
if ( id == SWT.NO ) {
export = false;
}
if ( export ) {
java.io.FileWriter out = null;
try {
out = new java.io.FileWriter( filename );
out.write( cmdFile );
out.flush();
} catch ( Exception e ) {
new ErrorDialog(
shell, BaseMessages.getString( PKG, "ExportCmdLineShFiles.ErrorWritingFile.Title" ), BaseMessages
.getString( PKG, "ExportCmdLineShFiles.ErrorWritingFile.Message", filename ), e );
} finally {
if ( out != null ) {
try {
out.close();
} catch ( Exception e ) {
// Ignore errors
}
}
}
MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_INFORMATION );
mb.setMessage( BaseMessages.getString( PKG, "ExportCmdLineShFiles.CmdExported.Message", filename ) );
mb.setText( BaseMessages.getString( PKG, "ExportCmdLineShFiles.CmdExported.Title" ) );
mb.open();
}
}
}
}
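/**
 * Builds the Pan or Kitchen command line for the active transformation or job: repository arguments
 * (rep, user, pass, trans or job, dir) when connected, a file argument otherwise, always at logging
 * level Basic. Returns an empty string when nothing is active.
 *
 * For example (illustrative sketch, Windows with a repository connection), the result looks like:
 *   kitchen /rep:"repo" /user:"admin" /pass:"..." /job:"My job" /dir:"/" /level:Basic
 */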
private String getCmdLine() {
TransMeta transMeta = getActiveTransformation();
JobMeta jobMeta = getActiveJob();
String cmdFile = "";
if ( rep != null && ( jobMeta != null || transMeta != null ) ) {
if ( jobMeta != null ) {
if ( jobMeta.getName() != null ) {
if ( Const.isWindows() ) {
cmdFile =
"kitchen "
+ "/rep:\"" + rep.getName() + "\"" + " /user:\""
+ ( rep.getUserInfo() != null ? rep.getUserInfo().getLogin() : "" ) + "\"" + " /pass:\""
+ Encr.encryptPasswordIfNotUsingVariables( rep.getUserInfo() != null ? rep.getUserInfo().getPassword() : "" ) + "\""
+ " /job:\"" + jobMeta.getName() + '"' + " /dir:\""
+ jobMeta.getRepositoryDirectory().getPath() + "\"" + " /level:Basic";
} else {
cmdFile =
"sh kitchen.sh "
+ "-rep='"
+ rep.getName()
+ "'"
+ " -user='"
+ ( rep.getUserInfo() != null ? rep.getUserInfo().getLogin() : "" )
+ "'"
+ " -pass='"
+ Encr.encryptPasswordIfNotUsingVariables( rep.getUserInfo() != null ? rep
.getUserInfo().getPassword() : "" ) + "'" + " -job='" + jobMeta.getName() + "'"
+ " -dir='" + jobMeta.getRepositoryDirectory().getPath() + "'" + " -level=Basic";
}
}
} else {
if ( transMeta.getName() != null ) {
if ( Const.isWindows() ) {
cmdFile =
"pan "
+ "/rep:\""
+ rep.getName()
+ "\""
+ " /user:\""
+ ( rep.getUserInfo() != null ? rep.getUserInfo().getLogin() : "" )
+ "\""
+ " /pass:\""
+ Encr.encryptPasswordIfNotUsingVariables( rep.getUserInfo() != null ? rep
.getUserInfo().getPassword() : "" ) + "\"" + " /trans:\"" + transMeta.getName() + "\""
+ " /dir:\"" + transMeta.getRepositoryDirectory().getPath() + "\"" + " /level:Basic";
} else {
cmdFile =
"sh pan.sh "
+ "-rep='"
+ rep.getName()
+ "'"
+ " -user='"
+ ( rep.getUserInfo() != null ? rep.getUserInfo().getLogin() : "" )
+ "'"
+ " -pass='"
+ Encr.encryptPasswordIfNotUsingVariables( rep.getUserInfo() != null ? rep
.getUserInfo().getPassword() : "" ) + "'" + " -trans='" + transMeta.getName() + "'"
+ " -dir='" + transMeta.getRepositoryDirectory().getPath() + "'" + " -level=Basic";
}
}
}
} else if ( rep == null && ( jobMeta != null || transMeta != null ) ) {
if ( jobMeta != null ) {
if ( jobMeta.getFilename() != null ) {
if ( Const.isWindows() ) {
cmdFile = "kitchen " + "/file:\"" + jobMeta.getFilename() + "\"" + " /level:Basic";
} else {
cmdFile = "sh kitchen.sh " + "-file='" + jobMeta.getFilename() + "'" + " -level=Basic";
}
}
} else {
if ( transMeta.getFilename() != null ) {
if ( Const.isWindows() ) {
cmdFile = "pan " + "/file:\"" + transMeta.getFilename() + "\"" + " /level:Basic";
} else {
cmdFile = "sh pan.sh " + "-file:'" + transMeta.getFilename() + "'" + " -level=Basic";
}
}
}
}
return cmdFile;
}
// private String lastVfsUsername="";
// private String lastVfsPassword="";
public void openFileVFSFile() {
FileObject initialFile;
FileObject rootFile;
try {
initialFile = KettleVFS.getFileObject( getLastFileOpened() );
rootFile = initialFile.getFileSystem().getRoot();
} catch ( Exception e ) {
String message = Const.getStackTracker( e );
new ErrorDialog( shell, BaseMessages.getString( PKG, "Spoon.Error" ), message, e );
return;
}
FileObject selectedFile =
getVfsFileChooserDialog( rootFile, initialFile ).open(
shell, null, Const.STRING_TRANS_AND_JOB_FILTER_EXT, Const.getTransformationAndJobFilterNames(),
VfsFileChooserDialog.VFS_DIALOG_OPEN_FILE );
if ( selectedFile != null ) {
setLastFileOpened( selectedFile.getName().getFriendlyURI() );
openFile( selectedFile.getName().getFriendlyURI(), false );
}
}
public void addFileListener( FileListener listener ) {
this.fileListeners.add( listener );
for ( String s : listener.getSupportedExtensions() ) {
if ( !fileExtensionMap.containsKey( s ) ) {
fileExtensionMap.put( s, listener );
}
}
}
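/**
 * Opens the given file by delegating to the first registered FileListener that accepts it, matching
 * on the file name/extension first and, when the file parses as XML, on the root node name as a
 * fallback. Missing plugins are offered to the marketplace when it is installed.
 *
 * @param filename the file to open
 * @param importfile true when the file is being imported rather than opened
 */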
public void openFile( String filename, boolean importfile ) {
try {
// Open the XML and see what's in there.
// We expect a single <transformation> or <job> root at this time...
boolean loaded = false;
FileListener listener = null;
Node root = null;
// match by extension first
int idx = filename.lastIndexOf( '.' );
if ( idx != -1 ) {
for ( FileListener li : fileListeners ) {
if ( li.accepts( filename ) ) {
listener = li;
break;
}
}
}
// Attempt to find a root XML node name. Fails gracefully for non-XML file
// types.
try {
Document document = XMLHandler.loadXMLFile( filename );
root = document.getDocumentElement();
} catch ( KettleXMLException e ) {
if ( log.isDetailed() ) {
log.logDetailed( BaseMessages.getString( PKG, "Spoon.File.Xml.Parse.Error" ) );
}
}
// otherwise try by looking at the root node if we were able to parse file
// as XML
if ( listener == null && root != null ) {
for ( FileListener li : fileListeners ) {
if ( li.acceptsXml( root.getNodeName() ) ) {
listener = li;
break;
}
}
}
// You've got to have a file name!
//
if ( !Const.isEmpty( filename ) ) {
if ( listener != null ) {
loaded = listener.open( root, filename, importfile );
}
if ( !loaded ) {
// Give error back
hideSplash();
MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR );
mb.setMessage( BaseMessages.getString( PKG, "Spoon.UnknownFileType.Message", filename ) );
mb.setText( BaseMessages.getString( PKG, "Spoon.UnknownFileType.Title" ) );
mb.open();
} else {
applyVariables(); // set variables in the newly loaded
// transformation(s) and job(s).
}
}
} catch ( KettleMissingPluginsException e ) {
// There are missing plugins, let's try to handle them in the marketplace...
//
if ( marketPluginIsAvailable() ) {
handleMissingPluginsExceptionWithMarketplace( e );
}
}
}
/**
* Check to see if the market plugin is available.
*
* @return true if the market plugin is installed and ready, false if it is not.
*/
private boolean marketPluginIsAvailable() {
PluginInterface marketPlugin = findMarketPlugin();
return marketPlugin != null;
}
private PluginInterface findMarketPlugin() {
return PluginRegistry.getInstance().findPluginWithId( SpoonPluginType.class, "market" );
}
/**
* Shows a dialog listing the missing plugins, asking if you want to go into the marketplace
*
* @param missingPluginsException
* The missing plugins exception
*/
public void handleMissingPluginsExceptionWithMarketplace( KettleMissingPluginsException missingPluginsException ) {
try {
hideSplash();
MessageBox box = new MessageBox( shell, SWT.ICON_QUESTION | SWT.YES | SWT.NO );
box.setText( BaseMessages.getString( PKG, "Spoon.MissingPluginsFoundDialog.Title" ) );
box.setMessage( BaseMessages.getString(
PKG, "Spoon.MissingPluginsFoundDialog.Message", Const.CR, missingPluginsException.getPluginsMessage() ) );
int answer = box.open();
if ( ( answer & SWT.YES ) != 0 ) {
String controllerClassName = "org.pentaho.di.ui.spoon.dialog.MarketplaceController";
PluginInterface marketPlugin = findMarketPlugin();
ClassLoader classLoader = PluginRegistry.getInstance().getClassLoader( marketPlugin );
Class<?> controllerClass = classLoader.loadClass( controllerClassName );
Method method = controllerClass.getMethod( "showMarketPlaceDialog" );
method.invoke( null );
}
} catch ( Exception ex ) {
new ErrorDialog(
shell, BaseMessages.getString( PKG, "Spoon.ErrorShowingMarketplaceDialog.Title" ), BaseMessages
.getString( PKG, "Spoon.ErrorShowingMarketplaceDialog.Message" ), ex );
}
}
public PropsUI getProperties() {
return props;
}
/*
* public void newFileDropDown() { newFileDropDown(toolbar); }
*/
public void newFileDropDown() {
// Drop down a list below the "New" icon (new.png)
// First problem: where is that icon?
XulToolbarbutton button = (XulToolbarbutton) this.mainToolbar.getElementById( "file-new" );
Object object = button.getManagedObject();
if ( object instanceof ToolItem ) {
// OK, let's determine the location of this widget...
//
ToolItem item = (ToolItem) object;
Rectangle bounds = item.getBounds();
org.eclipse.swt.graphics.Point p =
item.getParent().toDisplay( new org.eclipse.swt.graphics.Point( bounds.x, bounds.y ) );
fileMenus.setLocation( p.x, p.y + bounds.height );
fileMenus.setVisible( true );
}
}
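/**
 * Creates a new, empty transformation tab: applies the session variables, attaches the repository
 * and metastore, reads the shared objects, picks a unique default name (STRING_TRANSFORMATION plus
 * a counter) and switches to the design view of the ETL perspective.
 */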
public void newTransFile() {
TransMeta transMeta = new TransMeta();
transMeta.addObserver( this );
// Set the variables that were previously defined in this session on the
// transformation metadata too.
//
setTransMetaVariables( transMeta );
// Pass repository information
//
transMeta.setRepository( rep );
transMeta.setMetaStore( metaStore );
try {
SharedObjects sharedObjects =
rep != null ? rep.readTransSharedObjects( transMeta ) : transMeta.readSharedObjects();
sharedObjectsFileMap.put( sharedObjects.getFilename(), sharedObjects );
transMeta.clearChanged();
} catch ( Exception e ) {
new ErrorDialog(
shell, BaseMessages.getString( PKG, "Spoon.Exception.ErrorReadingSharedObjects.Title" ), BaseMessages
.getString( PKG, "Spoon.Exception.ErrorReadingSharedObjects.Message" ), e );
}
// Set the location of the new transMeta to that of the default location or the last saved location
transMeta.setRepositoryDirectory( getDefaultSaveLocation( transMeta ) );
int nr = 1;
transMeta.setName( STRING_TRANSFORMATION + " " + nr );
// See if a transformation with the same name isn't already loaded...
//
while ( findTransformation( delegates.tabs.makeTabName( transMeta, false ) ) != null ) {
nr++;
transMeta.setName( STRING_TRANSFORMATION + " " + nr ); // rename
}
addTransGraph( transMeta );
applyVariables();
// switch to design mode...
//
if ( setDesignMode() ) {
// No refresh done yet, do so
refreshTree();
}
loadPerspective( MainSpoonPerspective.ID );
}
public void newJobFile() {
try {
JobMeta jobMeta = new JobMeta();
jobMeta.addObserver( this );
// Set the variables that were previously defined in this session on
// the job metadata too.
//
setJobMetaVariables( jobMeta );
// Pass repository information
//
jobMeta.setRepository( rep );
jobMeta.setMetaStore( metaStore );
try {
SharedObjects sharedObjects =
rep != null ? rep.readJobMetaSharedObjects( jobMeta ) : jobMeta.readSharedObjects();
sharedObjectsFileMap.put( sharedObjects.getFilename(), sharedObjects );
} catch ( KettleException e ) {
new ErrorDialog(
shell, BaseMessages.getString( PKG, "Spoon.Dialog.ErrorReadingSharedObjects.Title" ), BaseMessages
.getString( PKG, "Spoon.Dialog.ErrorReadingSharedObjects.Message", delegates.tabs.makeTabName(
jobMeta, true ) ), e );
}
// Set the location of the new jobMeta to that of the default location or the last saved location
jobMeta.setRepositoryDirectory( getDefaultSaveLocation( jobMeta ) );
int nr = 1;
jobMeta.setName( STRING_JOB + " " + nr );
// See if a job with the same name isn't already
// loaded...
while ( findJob( delegates.tabs.makeTabName( jobMeta, false ) ) != null ) {
nr++;
jobMeta.setName( STRING_JOB + " " + nr ); // rename
}
jobMeta.clearChanged();
addJobGraph( jobMeta );
applyVariables();
// switch to design mode...
//
if ( setDesignMode() ) {
// No refresh done yet, do so
refreshTree();
}
loadPerspective( MainSpoonPerspective.ID );
} catch ( Exception e ) {
new ErrorDialog(
shell, BaseMessages.getString( PKG, "Spoon.Exception.ErrorCreatingNewJob.Title" ), BaseMessages
.getString( PKG, "Spoon.Exception.ErrorCreatingNewJob.Message" ), e );
}
}
/**
* Set previously defined variables (set variables dialog) on the specified transformation
*
* @param transMeta transformation's meta
*/
public void setTransMetaVariables( TransMeta transMeta ) {
for ( int i = 0; i < variables.size(); i++ ) {
try {
String name = variables.getValueMeta( i ).getName();
String value = variables.getString( i, "" );
transMeta.setVariable( name, Const.NVL( value, "" ) );
} catch ( Exception e ) {
// Ignore the exception, it should never happen on a getString()
// anyway.
}
}
// Also set the parameters
//
setParametersAsVariablesInUI( transMeta, transMeta );
}
/**
* Set previously defined variables (set variables dialog) on the specified job
*
* @param jobMeta job's meta
*/
public void setJobMetaVariables( JobMeta jobMeta ) {
for ( int i = 0; i < variables.size(); i++ ) {
try {
String name = variables.getValueMeta( i ).getName();
String value = variables.getString( i, "" );
jobMeta.setVariable( name, Const.NVL( value, "" ) );
} catch ( Exception e ) {
// Ignore the exception, it should never happen on a getString()
// anyway.
}
}
// Also set the parameters
//
setParametersAsVariablesInUI( jobMeta, jobMeta );
}
public void loadRepositoryObjects( TransMeta transMeta ) {
// Load common database info from active repository...
if ( rep != null ) {
try {
SharedObjects sharedObjects = rep.readTransSharedObjects( transMeta );
sharedObjectsFileMap.put( sharedObjects.getFilename(), sharedObjects );
} catch ( Exception e ) {
new ErrorDialog(
shell, BaseMessages.getString( PKG, "Spoon.Error.UnableToLoadSharedObjects.Title" ), BaseMessages
.getString( PKG, "Spoon.Error.UnableToLoadSharedObjects.Message" ), e );
}
}
}
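/**
 * Exits the application: saves the settings, optionally shows the exit warning, asks to apply the
 * changes of every tab with unsaved work, stops running transformations and notifies the lifecycle
 * listeners before closing down.
 *
 * @param canCancel true when the user may still cancel the exit
 * @return true when the application is actually exiting, false when the exit was cancelled
 * @throws KettleException in case closing down fails
 */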
public boolean quitFile( boolean canCancel ) throws KettleException {
if ( log.isDetailed() ) {
log.logDetailed( BaseMessages.getString( PKG, "Spoon.Log.QuitApplication" ) ); // "Quit application."
}
boolean exit = true;
saveSettings();
if ( props.showExitWarning() && canCancel ) {
// Display message: are you sure you want to exit?
//
MessageDialogWithToggle md =
new MessageDialogWithToggle( shell,
BaseMessages.getString( PKG, "System.Warning" ), // "Warning!"
null,
BaseMessages.getString( PKG, "Spoon.Message.Warning.PromptExit" ),
MessageDialog.WARNING, new String[] {
// "Yes",
BaseMessages.getString( PKG, "Spoon.Message.Warning.Yes" ),
// "No"
BaseMessages.getString( PKG, "Spoon.Message.Warning.No" )
}, 1,
// "Please, don't show this warning anymore."
BaseMessages.getString( PKG, "Spoon.Message.Warning.NotShowWarning" ),
!props.showExitWarning() );
MessageDialogWithToggle.setDefaultImage( GUIResource.getInstance().getImageSpoon() );
int idx = md.open();
props.setExitWarningShown( !md.getToggleState() );
props.saveProps();
if ( ( idx & 0xFF ) == 1 ) {
return false; // No selected: don't exit!
}
}
// Check all tabs to see if we can close them...
//
List<TabMapEntry> list = delegates.tabs.getTabs();
for ( TabMapEntry mapEntry : list ) {
TabItemInterface itemInterface = mapEntry.getObject();
if ( !itemInterface.canBeClosed() ) {
// Show the tab
tabfolder.setSelected( mapEntry.getTabItem() );
// Unsaved work that needs its changes to be applied?
//
int reply = itemInterface.showChangedWarning();
if ( reply == SWT.YES ) {
exit = itemInterface.applyChanges();
} else {
if ( reply == SWT.CANCEL ) {
return false;
} else { // SWT.NO
exit = true;
}
}
}
}
if ( exit || !canCancel ) {
// we have asked about it all and we're still here. Now close
// all the tabs, stop the running transformations
for ( TabMapEntry mapEntry : list ) {
if ( !mapEntry.getObject().canBeClosed() ) {
// Unsaved transformation?
//
if ( mapEntry.getObject() instanceof TransGraph ) {
TransMeta transMeta = (TransMeta) mapEntry.getObject().getManagedObject();
if ( transMeta.hasChanged() ) {
delegates.tabs.removeTab( mapEntry );
}
}
// A running transformation?
//
if ( mapEntry.getObject() instanceof TransGraph ) {
TransGraph transGraph = (TransGraph) mapEntry.getObject();
if ( transGraph.isRunning() ) {
transGraph.stop();
delegates.tabs.removeTab( mapEntry );
}
}
}
}
}
// and now we call the listeners
try {
lifecycleSupport.onExit( this );
} catch ( LifecycleException e ) {
MessageBox box = new MessageBox( shell, SWT.ICON_ERROR | SWT.OK );
box.setMessage( e.getMessage() );
box.open();
}
if ( exit ) {
close();
}
return exit;
}
public boolean saveFile() {
try {
EngineMetaInterface meta = getActiveMeta();
if ( meta != null ) {
return saveToFile( meta );
}
} catch ( Exception e ) {
new ErrorDialog( shell, BaseMessages.getString( PKG, "Spoon.File.Save.Fail.Title" ), BaseMessages.getString(
PKG, "Spoon.File.Save.Fail.Message" ), e );
}
return false;
}
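/**
 * Saves the given transformation or job, either to the repository (when connected and in the ETL
 * perspective) or to its file, asking for a file name when there is none yet. Shared objects and,
 * when enabled, the database cache are saved as well, after which the tabs and menus are refreshed.
 *
 * @param meta the transformation or job to save
 * @return true when the meta data was saved
 * @throws KettleException in case saving the shared objects fails
 */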
public boolean saveToFile( EngineMetaInterface meta ) throws KettleException {
if ( meta == null ) {
return false;
}
boolean saved = false;
if ( meta instanceof TransMeta ) {
( (TransMeta) meta ).setRepository( rep );
( (TransMeta) meta ).setMetaStore( metaStore );
}
if ( meta instanceof JobMeta ) {
( (JobMeta) meta ).setRepository( rep );
( (JobMeta) meta ).setMetaStore( metaStore );
}
if ( log.isDetailed() ) {
// "Save to file or repository...
log.logDetailed( BaseMessages.getString( PKG, "Spoon.Log.SaveToFileOrRepository" ) );
}
SpoonPerspective activePerspective = SpoonPerspectiveManager.getInstance().getActivePerspective();
// In case the perspective wants to handle open/save itself, let it...
//
if ( activePerspective instanceof SpoonPerspectiveOpenSaveInterface ) {
return ( (SpoonPerspectiveOpenSaveInterface) activePerspective ).save( meta );
}
String activePerspectiveId = activePerspective.getId();
boolean etlPerspective = activePerspectiveId.equals( MainSpoonPerspective.ID );
if ( rep != null && etlPerspective ) {
saved = saveToRepository( meta );
} else {
if ( meta.getFilename() != null ) {
saved = save( meta, meta.getFilename(), false );
} else {
if ( meta.canSave() ) {
saved = saveFileAs( meta );
}
}
}
meta.saveSharedObjects(); // throws Exception in case anything goes wrong
try {
if ( props.useDBCache() && meta instanceof TransMeta ) {
( (TransMeta) meta ).getDbCache().saveCache();
}
} catch ( KettleException e ) {
new ErrorDialog(
shell, BaseMessages.getString( PKG, "Spoon.Dialog.ErrorSavingDatabaseCache.Title" ),
// "An error occurred saving the database cache to disk"
BaseMessages.getString( PKG, "Spoon.Dialog.ErrorSavingDatabaseCache.Message" ), e );
}
delegates.tabs.renameTabs(); // filename or name of transformation might
// have changed.
refreshTree();
// Update menu status for the newly saved object
enableMenus();
return saved;
}
public boolean saveToRepository( EngineMetaInterface meta ) throws KettleException {
return saveToRepository( meta, meta.getObjectId() == null );
}
public boolean saveToRepository( EngineMetaInterface meta, boolean ask_name ) throws KettleException {
// Verify repository security first...
//
if ( meta.getFileType().equals( LastUsedFile.FILE_TYPE_TRANSFORMATION ) ) {
if ( RepositorySecurityUI.verifyOperations( shell, rep, RepositoryOperation.MODIFY_TRANSFORMATION ) ) {
return false;
}
}
if ( meta.getFileType().equals( LastUsedFile.FILE_TYPE_JOB ) ) {
if ( RepositorySecurityUI.verifyOperations( shell, rep, RepositoryOperation.MODIFY_JOB ) ) {
return false;
}
}
if ( log.isDetailed() ) {
// "Save to repository..."
//
log.logDetailed( BaseMessages.getString( PKG, "Spoon.Log.SaveToRepository" ) );
}
if ( rep != null ) {
boolean answer = true;
boolean ask = ask_name;
// If the repository directory is root then get the default save directory
if ( meta.getRepositoryDirectory() == null || meta.getRepositoryDirectory().isRoot() ) {
meta.setRepositoryDirectory( rep.getDefaultSaveDirectory( meta ) );
}
while ( answer && ( ask || Const.isEmpty( meta.getName() ) ) ) {
if ( !ask ) {
MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_WARNING );
// "Please give this transformation a name before saving it in the database."
mb.setMessage( BaseMessages.getString( PKG, "Spoon.Dialog.PromptTransformationName.Message" ) );
// "Transformation has no name."
mb.setText( BaseMessages.getString( PKG, "Spoon.Dialog.PromptTransformationName.Title" ) );
mb.open();
}
ask = false;
if ( meta instanceof TransMeta ) {
answer = TransGraph.editProperties( (TransMeta) meta, this, rep, false );
}
if ( meta instanceof JobMeta ) {
answer = JobGraph.editProperties( (JobMeta) meta, this, rep, false );
}
}
if ( answer && !Const.isEmpty( meta.getName() ) ) {
int response = SWT.YES;
ObjectId existingId = null;
if ( meta instanceof TransMeta ) {
existingId = rep.getTransformationID( meta.getName(), meta.getRepositoryDirectory() );
}
if ( meta instanceof JobMeta ) {
existingId = rep.getJobId( meta.getName(), meta.getRepositoryDirectory() );
}
// If there is no object id (import from XML) and there is an existing object.
//
// or...
//
// If the transformation/job has an object id and it's different from the one in the repository.
//
if ( ( meta.getObjectId() == null && existingId != null )
|| ( existingId != null && !meta.getObjectId().equals( existingId ) ) ) {
// In case we support revisions, we can simply overwrite
// without a problem so we simply don't ask.
// However, if we import from a file we should ask.
//
if ( !rep.getRepositoryMeta().getRepositoryCapabilities().supportsRevisions()
|| meta.getObjectId() == null ) {
MessageBox mb = new MessageBox( shell, SWT.YES | SWT.NO | SWT.ICON_QUESTION );
// There already is a transformation called ... in the repository.
// Do you want to overwrite the transformation?
//
mb.setMessage( BaseMessages.getString( PKG, "Spoon.Dialog.PromptOverwriteTransformation.Message", meta
.getName(), Const.CR ) );
mb.setText( BaseMessages.getString( PKG, "Spoon.Dialog.PromptOverwriteTransformation.Title" ) );
response = mb.open();
}
}
boolean saved = false;
if ( response == SWT.YES ) {
if ( meta.getObjectId() == null ) {
meta.setObjectId( existingId );
}
try {
shell.setCursor( cursor_hourglass );
// Keep info on who & when this transformation was
// created and/or modified...
if ( meta.getCreatedDate() == null ) {
meta.setCreatedDate( new Date() );
if ( capabilities.supportsUsers() ) {
meta.setCreatedUser( rep.getUserInfo().getLogin() );
}
}
// Keep info on who & when this transformation was
// changed...
meta.setModifiedDate( new Date() );
if ( capabilities.supportsUsers() ) {
meta.setModifiedUser( rep.getUserInfo().getLogin() );
}
// Finally before saving, ask for a version comment (if
// applicable)
//
String versionComment = null;
boolean versionOk = false;
while ( !versionOk ) {
versionComment = RepositorySecurityUI.getVersionComment( shell, rep, meta.getName() );
// if the version comment is null, the user hit cancel, exit.
if ( rep != null
&& rep.getSecurityProvider() != null && rep.getSecurityProvider().allowsVersionComments()
&& versionComment == null ) {
return false;
}
if ( Const.isEmpty( versionComment ) && rep.getSecurityProvider().isVersionCommentMandatory() ) {
if ( !RepositorySecurityUI.showVersionCommentMandatoryDialog( shell ) ) {
return false; // no, I don't want to enter a
// version comment and yes,
// it's mandatory.
}
} else {
versionOk = true;
}
}
if ( versionOk ) {
SaveProgressDialog spd = new SaveProgressDialog( shell, rep, meta, versionComment );
if ( spd.open() ) {
saved = true;
if ( !props.getSaveConfirmation() ) {
MessageDialogWithToggle md =
new MessageDialogWithToggle(
shell, BaseMessages.getString( PKG, "Spoon.Message.Warning.SaveOK" ), null, BaseMessages
.getString( PKG, "Spoon.Message.Warning.TransformationWasStored" ),
MessageDialog.QUESTION, new String[] {
BaseMessages.getString( PKG, "Spoon.Message.Warning.OK" ) },
0,
BaseMessages.getString( PKG, "Spoon.Message.Warning.NotShowThisMessage" ),
props.getSaveConfirmation() );
MessageDialogWithToggle.setDefaultImage( GUIResource.getInstance().getImageSpoon() );
md.open();
props.setSaveConfirmation( md.getToggleState() );
}
// Handle last opened files...
props.addLastFile(
meta.getFileType(), meta.getName(), meta.getRepositoryDirectory().getPath(), true,
getRepositoryName() );
saveSettings();
addMenuLast();
setShellText();
}
}
} finally {
shell.setCursor( null );
}
}
return saved;
}
} else {
MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR );
// "There is no repository connection available."
mb.setMessage( BaseMessages.getString( PKG, "Spoon.Dialog.NoRepositoryConnection.Message" ) );
// "No repository available."
mb.setText( BaseMessages.getString( PKG, "Spoon.Dialog.NoRepositoryConnection.Title" ) );
mb.open();
}
return false;
}
public boolean saveJobRepository( JobMeta jobMeta ) throws KettleException {
return saveToRepository( jobMeta, false );
}
public boolean saveJobRepository( JobMeta jobMeta, boolean ask_name ) throws KettleException {
return saveToRepository( jobMeta, ask_name );
}
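/**
 * Save the active transformation or job under a new name or location ("Save as...").
 *
 * @return true if the file was saved successfully
 * @throws KettleException
 * in case something goes wrong while saving
 */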
public boolean saveFileAs() throws KettleException {
try {
EngineMetaInterface meta = getActiveMeta();
if ( meta != null ) {
if ( meta.canSave() ) {
return saveFileAs( meta );
}
}
} catch ( Exception e ) {
new ErrorDialog( shell, BaseMessages.getString( PKG, "Spoon.File.Save.Fail.Title" ), BaseMessages.getString(
PKG, "Spoon.File.Save.Fail.Message" ), e );
}
return false;
}
public boolean saveFileAs( EngineMetaInterface meta ) throws KettleException {
boolean saved;
if ( log.isBasic() ) {
log.logBasic( BaseMessages.getString( PKG, "Spoon.Log.SaveAs" ) ); // "Save as..."
}
String activePerspectiveId = SpoonPerspectiveManager.getInstance().getActivePerspective().getId();
boolean etlPerspective = activePerspectiveId.equals( MainSpoonPerspective.ID );
if ( rep != null && etlPerspective ) {
meta.setObjectId( null );
saved = saveToRepository( meta, true );
} else {
saved = saveXMLFile( meta, false );
}
delegates.tabs.renameTabs(); // filename or name of transformation might
// have changed.
refreshTree();
if ( saved && ( meta instanceof TransMeta || meta instanceof JobMeta ) ) {
TabMapEntry tabEntry = delegates.tabs.findTabMapEntry( meta );
TabItem tabItem = tabEntry.getTabItem();
if ( meta.getFileType().equals( LastUsedFile.FILE_TYPE_TRANSFORMATION ) ) {
tabItem.setImage( GUIResource.getInstance().getImageTransGraph() );
} else if ( meta.getFileType().equals( LastUsedFile.FILE_TYPE_JOB ) ) {
tabItem.setImage( GUIResource.getInstance().getImageJobGraph() );
}
}
// Update menu status for the newly saved object
enableMenus();
return saved;
}
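/**
 * Export the active transformation or job to an XML file.
 *
 * @return true if the file was exported successfully
 */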
public boolean exportXMLFile() {
return saveXMLFile( true );
}
/**
* Export this job or transformation including all depending resources to a single zip file.
*/
public void exportAllXMLFile() {
ResourceExportInterface resourceExportInterface = getActiveTransformation();
if ( resourceExportInterface == null ) {
resourceExportInterface = getActiveJob();
}
if ( resourceExportInterface == null ) {
return; // nothing to do here, prevent an NPE
}
// ((VariableSpace)resourceExportInterface).getVariable("Internal.Transformation.Filename.Directory");
// Ask the user for a zip file to export to:
//
try {
String zipFilename = null;
while ( Const.isEmpty( zipFilename ) ) {
FileDialog dialog = new FileDialog( shell, SWT.SAVE );
dialog.setText( BaseMessages.getString( PKG, "Spoon.ExportResourceSelectZipFile" ) );
dialog.setFilterExtensions( new String[] { "*.zip;*.ZIP", "*" } );
dialog.setFilterNames( new String[] {
BaseMessages.getString( PKG, "System.FileType.ZIPFiles" ),
BaseMessages.getString( PKG, "System.FileType.AllFiles" ), } );
setFilterPath( dialog );
if ( dialog.open() != null ) {
lastDirOpened = dialog.getFilterPath();
zipFilename = dialog.getFilterPath() + Const.FILE_SEPARATOR + dialog.getFileName();
FileObject zipFileObject = KettleVFS.getFileObject( zipFilename );
if ( zipFileObject.exists() ) {
MessageBox box = new MessageBox( shell, SWT.YES | SWT.NO | SWT.CANCEL );
box
.setMessage( BaseMessages
.getString( PKG, "Spoon.ExportResourceZipFileExists.Message", zipFilename ) );
box.setText( BaseMessages.getString( PKG, "Spoon.ExportResourceZipFileExists.Title" ) );
int answer = box.open();
if ( answer == SWT.CANCEL ) {
return;
}
if ( answer == SWT.NO ) {
zipFilename = null;
}
}
} else {
return;
}
}
// Export the resources linked to the currently loaded file...
//
TopLevelResource topLevelResource =
ResourceUtil.serializeResourceExportInterface(
zipFilename, resourceExportInterface, (VariableSpace) resourceExportInterface, rep, metaStore );
String message =
ResourceUtil.getExplanation( zipFilename, topLevelResource.getResourceName(), resourceExportInterface );
/*
* // Add the ZIP file as a repository to the repository list... // RepositoriesMeta repositoriesMeta = new
* RepositoriesMeta(); repositoriesMeta.readData();
*
* KettleFileRepositoryMeta fileRepositoryMeta = new KettleFileRepositoryMeta(
* KettleFileRepositoryMeta.REPOSITORY_TYPE_ID, "Export " + baseFileName, "Export to file : " + zipFilename,
* "zip://" + zipFilename + "!"); fileRepositoryMeta.setReadOnly(true); // A ZIP file is read-only int nr = 2;
* String baseName = fileRepositoryMeta.getName(); while
* (repositoriesMeta.findRepository(fileRepositoryMeta.getName()) != null) { fileRepositoryMeta.setName(baseName +
* " " + nr); nr++; }
*
* repositoriesMeta.addRepository(fileRepositoryMeta); repositoriesMeta.writeData();
*/
// Show some information concerning all this work...
EnterTextDialog enterTextDialog =
new EnterTextDialog(
shell, BaseMessages.getString( PKG, "Spoon.Dialog.ResourceSerialized" ), BaseMessages.getString(
PKG, "Spoon.Dialog.ResourceSerializedSuccesfully" ), message );
enterTextDialog.setReadOnly();
enterTextDialog.open();
} catch ( Exception e ) {
new ErrorDialog( shell, BaseMessages.getString( PKG, "Spoon.Error" ), BaseMessages.getString(
PKG, "Spoon.ErrorExportingFile" ), e );
}
}
/**
* Export this job or transformation including all depending resources to a single ZIP file containing a file
* repository.
*/
public void exportAllFileRepository() {
ResourceExportInterface resourceExportInterface = getActiveTransformation();
if ( resourceExportInterface == null ) {
resourceExportInterface = getActiveJob();
}
if ( resourceExportInterface == null ) {
return; // nothing to do here, prevent an NPE
}
// Ask the user for a zip file to export to:
//
try {
String zipFilename = null;
while ( Const.isEmpty( zipFilename ) ) {
FileDialog dialog = new FileDialog( shell, SWT.SAVE );
dialog.setText( BaseMessages.getString( PKG, "Spoon.ExportResourceSelectZipFile" ) );
dialog.setFilterExtensions( new String[] { "*.zip;*.ZIP", "*" } );
dialog.setFilterNames( new String[] {
BaseMessages.getString( PKG, "System.FileType.ZIPFiles" ),
BaseMessages.getString( PKG, "System.FileType.AllFiles" ), } );
setFilterPath( dialog );
if ( dialog.open() != null ) {
lastDirOpened = dialog.getFilterPath();
zipFilename = dialog.getFilterPath() + Const.FILE_SEPARATOR + dialog.getFileName();
FileObject zipFileObject = KettleVFS.getFileObject( zipFilename );
if ( zipFileObject.exists() ) {
MessageBox box = new MessageBox( shell, SWT.YES | SWT.NO | SWT.CANCEL );
box
.setMessage( BaseMessages
.getString( PKG, "Spoon.ExportResourceZipFileExists.Message", zipFilename ) );
box.setText( BaseMessages.getString( PKG, "Spoon.ExportResourceZipFileExists.Title" ) );
int answer = box.open();
if ( answer == SWT.CANCEL ) {
return;
}
if ( answer == SWT.NO ) {
zipFilename = null;
}
}
} else {
return;
}
}
// Export the resources linked to the currently loaded file...
//
TopLevelResource topLevelResource =
ResourceUtil.serializeResourceExportInterface(
zipFilename, resourceExportInterface, (VariableSpace) resourceExportInterface, rep, metaStore );
String message =
ResourceUtil.getExplanation( zipFilename, topLevelResource.getResourceName(), resourceExportInterface );
/*
* // Add the ZIP file as a repository to the repository list... // RepositoriesMeta repositoriesMeta = new
* RepositoriesMeta(); repositoriesMeta.readData();
*
* KettleFileRepositoryMeta fileRepositoryMeta = new KettleFileRepositoryMeta(
* KettleFileRepositoryMeta.REPOSITORY_TYPE_ID, "Export " + baseFileName, "Export to file : " + zipFilename,
* "zip://" + zipFilename + "!"); fileRepositoryMeta.setReadOnly(true); // A ZIP file is read-only int nr = 2;
* String baseName = fileRepositoryMeta.getName(); while
* (repositoriesMeta.findRepository(fileRepositoryMeta.getName()) != null) { fileRepositoryMeta.setName(baseName +
* " " + nr); nr++; }
*
* repositoriesMeta.addRepository(fileRepositoryMeta); repositoriesMeta.writeData();
*/
// Show some information concerning all this work...
//
EnterTextDialog enterTextDialog =
new EnterTextDialog(
shell, BaseMessages.getString( PKG, "Spoon.Dialog.ResourceSerialized" ), BaseMessages.getString(
PKG, "Spoon.Dialog.ResourceSerializedSuccesfully" ), message );
enterTextDialog.setReadOnly();
enterTextDialog.open();
} catch ( Exception e ) {
new ErrorDialog( shell, BaseMessages.getString( PKG, "Spoon.Error" ), BaseMessages.getString(
PKG, "Spoon.ErrorExportingFile" ), e );
}
}
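/**
 * Export the complete repository to a single XML file.
 */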
public void exportRepositoryAll() {
exportRepositoryDirectory( null );
}
/**
* @param directoryToExport
* set to null to export the complete repository
* @return false if we want to stop processing. true if we need to continue.
*/
public boolean exportRepositoryDirectory( RepositoryDirectory directoryToExport ) {
FileDialog dialog = this.getExportFileDialog();
if ( dialog.open() == null ) {
return false;
}
String filename = dialog.getFilterPath() + Const.FILE_SEPARATOR + dialog.getFileName();
log.logBasic( BaseMessages.getString( PKG, "Spoon.Log.Exporting" ), BaseMessages.getString(
PKG, "Spoon.Log.ExportObjectsToFile", filename ) );
// check if the file already exists
MessageBox box = RepositoryExportProgressDialog.checkIsFileIsAcceptable( shell, log, filename );
int answer = ( box == null ) ? SWT.OK : box.open();
if ( answer != SWT.OK ) {
// seems the user doesn't want to overwrite the file...
return false;
}
// OK, let's show one more modal dialog; users like modal dialogs.
// They feel that their opinions are important to us.
box =
new MessageBox( shell, SWT.ICON_QUESTION
| SWT.APPLICATION_MODAL | SWT.SHEET | SWT.YES | SWT.NO | SWT.CANCEL );
box.setText( BaseMessages.getString( PKG, "Spoon.QuestionApplyImportRulesToExport.Title" ) );
box.setMessage( BaseMessages.getString( PKG, "Spoon.QuestionApplyImportRulesToExport.Message" ) );
answer = box.open();
if ( answer == SWT.CANCEL ) {
return false;
}
// Get the import rules
//
ImportRules importRules = new ImportRules();
if ( answer == SWT.YES ) {
ImportRulesDialog importRulesDialog = new ImportRulesDialog( shell, importRules );
if ( !importRulesDialog.open() ) {
return false;
}
}
RepositoryExportProgressDialog repd =
new RepositoryExportProgressDialog( shell, rep, directoryToExport, filename, importRules );
repd.open();
return true;
}
/**
 * Local method so the export file dialog can use Spoon's localized messages.
 *
 * @return the "Save as" file dialog used for repository exports
 */
public FileDialog getExportFileDialog() {
FileDialog dialog = new FileDialog( shell, SWT.SAVE | SWT.SINGLE );
dialog.setText( BaseMessages.getString( PKG, "Spoon.SelectAnXMLFileToExportTo.Message" ) );
return dialog;
}
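/**
 * Import one or more exported XML files into a directory of the connected repository, optionally applying a set
 * of import rules and asking for a version comment when one is required.
 */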
public void importDirectoryToRepository() {
FileDialog dialog = new FileDialog( shell, SWT.OPEN | SWT.MULTI );
dialog.setText( BaseMessages.getString( PKG, "Spoon.SelectAnXMLFileToImportFrom.Message" ) );
if ( dialog.open() == null ) {
return;
}
// Ask for a set of import rules
//
MessageBox box =
new MessageBox( shell, SWT.ICON_QUESTION
| SWT.APPLICATION_MODAL | SWT.SHEET | SWT.YES | SWT.NO | SWT.CANCEL );
box.setText( BaseMessages.getString( PKG, "Spoon.QuestionApplyImportRules.Title" ) );
box.setMessage( BaseMessages.getString( PKG, "Spoon.QuestionApplyImportRules.Message" ) );
int answer = box.open();
if ( answer == SWT.CANCEL ) {
return;
}
// Get the import rules
//
ImportRules importRules = new ImportRules();
if ( answer == SWT.YES ) {
ImportRulesDialog importRulesDialog = new ImportRulesDialog( shell, importRules );
if ( !importRulesDialog.open() ) {
return;
}
}
// Ask for a destination in the repository...
//
SelectDirectoryDialog sdd = new SelectDirectoryDialog( shell, SWT.NONE, rep );
RepositoryDirectoryInterface baseDirectory = sdd.open();
if ( baseDirectory == null ) {
return;
}
// Finally before importing, ask for a version comment (if applicable)
//
String versionComment = null;
boolean versionOk = false;
while ( !versionOk ) {
versionComment =
RepositorySecurityUI.getVersionComment( shell, rep, "Import of files into ["
+ baseDirectory.getPath() + "]" );
// if the version comment is null, the user hit cancel, exit.
if ( rep != null
&& rep.getSecurityProvider() != null && rep.getSecurityProvider().allowsVersionComments()
&& versionComment == null ) {
return;
}
if ( Const.isEmpty( versionComment ) && rep.getSecurityProvider().isVersionCommentMandatory() ) {
if ( !RepositorySecurityUI.showVersionCommentMandatoryDialog( shell ) ) {
versionOk = true;
}
} else {
versionOk = true;
}
}
String[] filenames = dialog.getFileNames();
if ( filenames.length > 0 ) {
RepositoryImportProgressDialog ripd =
new RepositoryImportProgressDialog(
shell, SWT.NONE, rep, dialog.getFilterPath(), filenames, baseDirectory, versionComment, importRules );
ripd.open();
refreshTree();
}
}
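/**
 * Save the active transformation or job to an XML file, prompting the user for a filename.
 *
 * @param export
 * true if this is an export rather than a regular save
 * @return true if the file was saved successfully
 */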
public boolean saveXMLFile( boolean export ) {
TransMeta transMeta = getActiveTransformation();
if ( transMeta != null ) {
return saveXMLFile( transMeta, export );
}
JobMeta jobMeta = getActiveJob();
if ( jobMeta != null ) {
return saveXMLFile( jobMeta, export );
}
return false;
}
public boolean saveXMLFile( EngineMetaInterface meta, boolean export ) {
if ( log.isBasic() ) {
log.logBasic( "Save file as..." );
}
boolean saved = false;
String beforeFilename = meta.getFilename();
String beforeName = meta.getName();
FileDialog dialog = new FileDialog( shell, SWT.SAVE );
String[] extensions = meta.getFilterExtensions();
dialog.setFilterExtensions( extensions );
dialog.setFilterNames( meta.getFilterNames() );
setFilterPath( dialog );
String filename = dialog.open();
if ( filename != null ) {
lastDirOpened = dialog.getFilterPath();
// Is the filename ending on .ktr, .xml?
boolean ending = false;
for ( int i = 0; i < extensions.length - 1; i++ ) {
String[] parts = extensions[i].split( ";" );
for ( String part : parts ) {
if ( filename.toLowerCase().endsWith( part.substring( 1 ).toLowerCase() ) ) {
ending = true;
}
}
}
if ( filename.endsWith( meta.getDefaultExtension() ) ) {
ending = true;
}
if ( !ending ) {
if ( !meta.getDefaultExtension().startsWith( "." ) && !filename.endsWith( "." ) ) {
filename += ".";
}
filename += meta.getDefaultExtension();
}
// See if the file already exists...
int id = SWT.YES;
try {
FileObject f = KettleVFS.getFileObject( filename );
if ( f.exists() ) {
MessageBox mb = new MessageBox( shell, SWT.NO | SWT.YES | SWT.ICON_WARNING );
// "This file already exists. Do you want to overwrite it?"
mb.setMessage( BaseMessages.getString( PKG, "Spoon.Dialog.PromptOverwriteFile.Message" ) );
// "This file already exists!"
mb.setText( BaseMessages.getString( PKG, "Spoon.Dialog.PromptOverwriteFile.Title" ) );
id = mb.open();
}
} catch ( Exception e ) {
// TODO do we want to show an error dialog here? My first guess
// is not, but we might.
}
if ( id == SWT.YES ) {
if ( !export && !Const.isEmpty( beforeFilename ) && !beforeFilename.equals( filename ) ) {
meta.setName( Const.createName( filename ) );
meta.setFilename( filename );
// If the user hits cancel here, don't save anything
//
if ( !editProperties() ) {
// Revert the changes!
//
meta.setFilename( beforeFilename );
meta.setName( beforeName );
return saved;
}
}
saved = save( meta, filename, export );
if ( !saved ) {
meta.setFilename( beforeFilename );
meta.setName( beforeName );
}
}
}
return saved;
}
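/**
 * Save the active transformation or job to an XML file on a VFS location chosen by the user.
 */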
public boolean saveXMLFileToVfs() {
TransMeta transMeta = getActiveTransformation();
if ( transMeta != null ) {
return saveXMLFileToVfs( transMeta );
}
JobMeta jobMeta = getActiveJob();
if ( jobMeta != null ) {
return saveXMLFileToVfs( jobMeta );
}
return false;
}
public boolean saveXMLFileToVfs( EngineMetaInterface meta ) {
if ( log.isBasic() ) {
log.logBasic( "Save file as..." );
}
FileObject rootFile;
FileObject initialFile;
try {
initialFile = KettleVFS.getFileObject( getLastFileOpened() );
rootFile = KettleVFS.getFileObject( getLastFileOpened() ).getFileSystem().getRoot();
} catch ( Exception e ) {
MessageBox messageDialog = new MessageBox( shell, SWT.ICON_ERROR | SWT.OK );
messageDialog.setText( "Error" );
messageDialog.setMessage( e.getMessage() );
messageDialog.open();
return false;
}
String filename = null;
FileObject selectedFile =
getVfsFileChooserDialog( rootFile, initialFile ).open(
shell, "Untitled", Const.STRING_TRANS_AND_JOB_FILTER_EXT, Const.getTransformationAndJobFilterNames(),
VfsFileChooserDialog.VFS_DIALOG_SAVEAS );
if ( selectedFile != null ) {
filename = selectedFile.getName().getFriendlyURI();
}
String[] extensions = meta.getFilterExtensions();
if ( filename != null ) {
// Is the filename ending on .ktr, .xml?
boolean ending = false;
for ( int i = 0; i < extensions.length - 1; i++ ) {
if ( filename.endsWith( extensions[i].substring( 1 ) ) ) {
ending = true;
}
}
if ( filename.endsWith( meta.getDefaultExtension() ) ) {
ending = true;
}
if ( !ending ) {
filename += '.' + meta.getDefaultExtension();
}
// See if the file already exists...
int id = SWT.YES;
try {
FileObject f = KettleVFS.getFileObject( filename );
if ( f.exists() ) {
MessageBox mb = new MessageBox( shell, SWT.NO | SWT.YES | SWT.ICON_WARNING );
// "This file already exists. Do you want to overwrite it?"
mb.setMessage( BaseMessages.getString( PKG, "Spoon.Dialog.PromptOverwriteFile.Message" ) );
mb.setText( BaseMessages.getString( PKG, "Spoon.Dialog.PromptOverwriteFile.Title" ) );
id = mb.open();
}
} catch ( Exception e ) {
// TODO do we want to show an error dialog here? My first guess
// is not, but we might.
}
if ( id == SWT.YES ) {
save( meta, filename, false );
}
}
return false;
}
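/**
 * Save the given transformation or job to the given filename, delegating the actual work to the FileListener
 * that is registered for the file extension.
 *
 * @param meta
 * the transformation or job to save
 * @param filename
 * the filename to save to
 * @param export
 * true if this is an export rather than a regular save
 * @return true if the file was saved successfully
 */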
public boolean save( EngineMetaInterface meta, String filename, boolean export ) {
boolean saved = false;
// the only file types that are subject to the ASCII-only rule are those that are neither transformations nor jobs
boolean isNotTransOrJob =
!LastUsedFile.FILE_TYPE_TRANSFORMATION.equals( meta.getFileType() )
&& !LastUsedFile.FILE_TYPE_JOB.equals( meta.getFileType() );
if ( isNotTransOrJob ) {
Pattern pattern = Pattern.compile( "\\p{ASCII}+" );
Matcher matcher = pattern.matcher( filename );
if ( !matcher.matches() ) {
/*
* Temporary fix for AGILEBI-405 Don't allow saving of files that contain special characters until AGILEBI-394
* is resolved. AGILEBI-394 Naming an analyzer report with spanish accents gives error when publishing.
*/
MessageBox box = new MessageBox( staticSpoon.shell, SWT.ICON_ERROR | SWT.OK );
box.setMessage( "Special characters are not allowed in the filename. Please use ASCII characters only" );
box.setText( BaseMessages.getString( PKG, "Spoon.Dialog.ErrorSavingConnection.Title" ) );
box.open();
return false;
}
}
FileListener listener = null;
// match by extension first
int idx = filename.lastIndexOf( '.' );
if ( idx != -1 ) {
String extension = filename.substring( idx + 1 );
listener = fileExtensionMap.get( extension );
}
if ( listener == null ) {
String xt = meta.getDefaultExtension();
listener = fileExtensionMap.get( xt );
}
if ( listener != null ) {
String sync = BasePropertyHandler.getProperty( SYNC_TRANS );
if ( Boolean.parseBoolean( sync ) ) {
listener.syncMetaName( meta, Const.createName( filename ) );
delegates.tabs.renameTabs();
}
saved = listener.save( meta, filename, export );
}
return saved;
}
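/**
 * Write the XML representation of the given transformation or job to the given filename and update the last used
 * files, the shell title and the changed flag.
 *
 * @param meta
 * the transformation or job to save
 * @param filename
 * the filename to write the XML to
 * @return true if the file was written successfully
 */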
public boolean saveMeta( EngineMetaInterface meta, String filename ) {
meta.setFilename( filename );
if ( Const.isEmpty( meta.getName() )
|| delegates.jobs.isDefaultJobName( meta.getName() )
|| delegates.trans.isDefaultTransformationName( meta.getName() ) ) {
meta.nameFromFilename();
}
boolean saved = false;
try {
String xml = XMLHandler.getXMLHeader() + meta.getXML();
DataOutputStream dos = new DataOutputStream( KettleVFS.getOutputStream( filename, false ) );
dos.write( xml.getBytes( Const.XML_ENCODING ) );
dos.close();
saved = true;
// Handle last opened files...
props.addLastFile( meta.getFileType(), filename, null, false, null );
saveSettings();
addMenuLast();
if ( log.isDebug() ) {
log.logDebug( BaseMessages.getString( PKG, "Spoon.Log.FileWritten" ) + " [" + filename + "]" ); // "File written to"
}
meta.setFilename( filename );
meta.clearChanged();
setShellText();
} catch ( Exception e ) {
if ( log.isDebug() ) {
// "Error opening file for writing! --> "
log.logDebug( BaseMessages.getString( PKG, "Spoon.Log.ErrorOpeningFileForWriting" ) + e.toString() );
}
new ErrorDialog( shell, BaseMessages.getString( PKG, "Spoon.Dialog.ErrorSavingFile.Title" ),
BaseMessages.getString( PKG, "Spoon.Dialog.ErrorSavingFile.Message" )
+ Const.CR + e.toString(), e );
}
return saved;
}
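/**
 * Show the "About" dialog with product, version, commit id and build date information.
 */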
public void helpAbout() {
MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_INFORMATION | SWT.CENTER | SWT.SHEET );
String releaseText = Const.RELEASE.getMessage();
StringBuilder messageBuilder = new StringBuilder();
BuildVersion buildVersion = BuildVersion.getInstance();
// buildVersionInfo corresponds to
// ${release.major.number}.${release.minor.number}.${release.milestone.number}.${build.id}
String buildVersionInfo = buildVersion.getVersion();
if ( Const.isEmpty( buildVersionInfo ) ) {
buildVersionInfo = "Unknown";
}
// assume buildVersion consists of releaseInfo and a commit id
String releaseInfo = "";
String buildStatus = "";
// build the result message
messageBuilder.append( BaseMessages.getString( PKG, "System.ProductInfo" ) );
messageBuilder.append( releaseText );
messageBuilder.append( " - " );
// Regex that matches a string containing a 40-character git checksum hash
String containingChecksumRegex = ".+\\b([a-f0-9]{40})\\b";
// check if the string contains a VCS checksum
if ( !buildVersionInfo.matches( containingChecksumRegex ) ) {
releaseInfo = buildVersionInfo;
} else {
// The following steps will no longer be needed once the commit id is stored as a separate string in the
// manifest file of the kettle-engine jar
String[] buildVersionInfoElts = buildVersionInfo.split( "\\." );
int elementCount = buildVersionInfoElts.length;
for ( int i = 0; i < elementCount; i++ ) {
String currentElement = buildVersionInfoElts[i];
// check if the current element is the VCS checksum
if ( currentElement.length() != 40 ) {
releaseInfo += currentElement + ".";
} else {
buildStatus = currentElement;
}
}
// remove the trailing dot
releaseInfo = releaseInfo.substring( 0, releaseInfo.length() - 1 );
}
messageBuilder.append( releaseInfo );
messageBuilder.append( Const.CR );
messageBuilder.append( Const.CR );
messageBuilder.append( Const.CR );
messageBuilder.append( BaseMessages.getString( PKG, "System.CompanyInfo", Const.COPYRIGHT_YEAR ) );
messageBuilder.append( Const.CR );
messageBuilder.append( BaseMessages.getString( PKG, "System.ProductWebsiteUrl" ) );
messageBuilder.append( Const.CR );
messageBuilder.append( Const.CR );
messageBuilder.append( Const.CR );
messageBuilder.append( "Build version : " );
messageBuilder.append( releaseInfo );
if ( !buildStatus.isEmpty() ) {
messageBuilder.append( Const.CR );
messageBuilder.append( "Commit ID : " );
messageBuilder.append( buildStatus );
}
messageBuilder.append( Const.CR );
messageBuilder.append( "Build date : " );
String inputStringDate = buildVersion.getBuildDate();
String outputStringDate = "";
SimpleDateFormat inputFormat = null;
SimpleDateFormat outputFormat = null;
if ( inputStringDate.matches( "^\\d{4}/\\d{1,2}/\\d{1,2}\\s\\d{1,2}:\\d{2}:\\d{2}.\\d{3}$" ) ) {
inputFormat = new SimpleDateFormat( "yyyy/MM/dd hh:mm:ss.SSS" );
}
if ( inputStringDate.matches( "^\\d{4}-\\d{1,2}-\\d{1,2}\\_\\d{1,2}-\\d{2}-\\d{2}$" ) ) {
inputFormat = new SimpleDateFormat( "yyyy-MM-dd_hh-mm-ss" );
}
if ( inputStringDate.matches( "^\\d{4}-\\d{1,2}-\\d{1,2}\\s\\d{1,2}.\\d{2}.\\d{2}$" ) ) {
inputFormat = new SimpleDateFormat( "yyyy-MM-dd hh.mm.ss" );
}
outputFormat = new SimpleDateFormat( "MMMM d, yyyy hh:mm:ss" );
try {
if ( inputFormat != null ) {
Date date = inputFormat.parse( inputStringDate );
outputStringDate = outputFormat.format( date );
} else {
// If the date doesn't match any of the formats above, just show it in its original format
outputStringDate = inputStringDate;
}
} catch ( ParseException e ) {
// Just show the date in its original format
outputStringDate = inputStringDate;
}
messageBuilder.append( outputStringDate );
// set the text in the message box
mb.setMessage( messageBuilder.toString() );
mb.setText( APP_NAME );
// now open the message box
mb.open();
}
/**
* Show a plugin browser
*/
public void showPluginInfo() {
try {
// First we collect information concerning all the plugin types...
//
Map<String, RowMetaInterface> metaMap = new HashMap<String, RowMetaInterface>();
Map<String, List<Object[]>> dataMap = new HashMap<String, List<Object[]>>();
PluginRegistry registry = PluginRegistry.getInstance();
List<Class<? extends PluginTypeInterface>> pluginTypeClasses = registry.getPluginTypes();
for ( Class<? extends PluginTypeInterface> pluginTypeClass : pluginTypeClasses ) {
PluginTypeInterface pluginTypeInterface = registry.getPluginType( pluginTypeClass );
String subject = pluginTypeInterface.getName();
RowBuffer pluginInformation = registry.getPluginInformation( pluginTypeClass );
metaMap.put( subject, pluginInformation.getRowMeta() );
dataMap.put( subject, pluginInformation.getBuffer() );
}
// Now push it all to a subject data browser...
//
SubjectDataBrowserDialog dialog =
new SubjectDataBrowserDialog( shell, metaMap, dataMap, "Plugin browser", "Plugin type" );
dialog.open();
} catch ( Exception e ) {
new ErrorDialog( shell, "Error", "Error listing plugins", e );
}
}
public void editUnselectAll() {
TransMeta transMeta = getActiveTransformation();
if ( transMeta != null ) {
transMeta.unselectAll();
getActiveTransGraph().redraw();
}
JobMeta jobMeta = getActiveJob();
if ( jobMeta != null ) {
jobMeta.unselectAll();
getActiveJobGraph().redraw();
}
}
public void editSelectAll() {
TransMeta transMeta = getActiveTransformation();
if ( transMeta != null ) {
transMeta.selectAll();
getActiveTransGraph().redraw();
}
JobMeta jobMeta = getActiveJob();
if ( jobMeta != null ) {
jobMeta.selectAll();
getActiveJobGraph().redraw();
}
}
public void editOptions() {
EnterOptionsDialog eod = new EnterOptionsDialog( shell );
if ( eod.open() != null ) {
props.saveProps();
loadSettings();
changeLooks();
MessageBox mb = new MessageBox( shell, SWT.ICON_INFORMATION );
mb.setMessage( BaseMessages.getString( PKG, "Spoon.Dialog.PleaseRestartApplication.Message" ) );
mb.setText( BaseMessages.getString( PKG, "Spoon.Dialog.PleaseRestartApplication.Title" ) );
mb.open();
}
}
public void editKettlePropertiesFile() {
KettlePropertiesFileDialog dialog = new KettlePropertiesFileDialog( shell, SWT.NONE );
Map<String, String> newProperties = dialog.open();
if ( newProperties != null ) {
for ( String name : newProperties.keySet() ) {
String value = newProperties.get( name );
applyVariableToAllLoadedObjects( name, value );
// Also set as a JVM property
//
System.setProperty( name, value );
}
}
}
/**
 * Checks whether the given string matches the current selection filter. An empty string or an empty filter
 * always matches; otherwise the filter is tried as a regular expression first and, failing that, as a
 * case-insensitive substring match.
 *
 * @param string string to match
 * @return true in case the string matches the filter
 */
private boolean filterMatch( String string ) {
String filter = selectionFilter.getText();
if ( Const.isEmpty( string ) ) {
return true;
}
if ( Const.isEmpty( filter ) ) {
return true;
}
try {
if ( string.matches( filter ) ) {
return true;
}
} catch ( Exception e ) {
log.logError( "Not a valid pattern [" + filter + "] : " + e.getMessage() );
}
return string.toUpperCase().contains( filter.toUpperCase() );
}
/**
* Refresh the object selection tree (on the left of the screen)
*/
public void refreshTree() {
if ( shell.isDisposed() ) {
return;
}
if ( !viewSelected ) {
return; // Nothing to see here, move along...
}
if ( selectionTree == null || selectionTree.isDisposed() ) {
// //////////////////////////////////////////////////////////////////////////////////////////////////
//
// Now set up the transformation/job tree
//
selectionTree = new Tree( variableComposite, SWT.SINGLE );
props.setLook( selectionTree );
selectionTree.setLayout( new FillLayout() );
addDefaultKeyListeners( selectionTree );
/*
* ExpandItem treeItem = new ExpandItem(mainExpandBar, SWT.NONE); treeItem.setControl(selectionTree);
* treeItem.setHeight(shell.getBounds().height); setHeaderImage(treeItem,
* GUIResource.getInstance().getImageLogoSmall(), STRING_SPOON_MAIN_TREE, 0, true);
*/
// Add a tree memory as well...
TreeMemory.addTreeListener( selectionTree, STRING_SPOON_MAIN_TREE );
selectionTree.addMenuDetectListener( new MenuDetectListener() {
public void menuDetected( MenuDetectEvent e ) {
setMenu( selectionTree );
}
} );
selectionTree.addSelectionListener( new SelectionAdapter() {
public void widgetSelected( SelectionEvent e ) {
showSelection();
}
} );
selectionTree.addSelectionListener( new SelectionAdapter() {
public void widgetDefaultSelected( SelectionEvent e ) {
doubleClickedInTree( selectionTree );
}
} );
// Set a listener on the tree
addDragSourceToTree( selectionTree );
}
GUIResource guiResource = GUIResource.getInstance();
TransMeta activeTransMeta = getActiveTransformation();
JobMeta activeJobMeta = getActiveJob();
boolean showAll = activeTransMeta == null && activeJobMeta == null;
// get a list of transformations from the transformation map
//
/*
* List<TransMeta> transformations = delegates.trans.getTransformationList(); Collections.sort(transformations);
* TransMeta[] transMetas = transformations.toArray(new TransMeta[transformations.size()]);
*
* // get a list of jobs from the job map List<JobMeta> jobs = delegates.jobs.getJobList(); Collections.sort(jobs);
* JobMeta[] jobMetas = jobs.toArray(new JobMeta[jobs.size()]);
*/
// Refresh the content of the tree for those transformations
//
// First remove the old ones.
selectionTree.removeAll();
// Now add the data back
//
if ( !props.isOnlyActiveFileShownInTree() || showAll || activeTransMeta != null ) {
TreeItem tiTrans = new TreeItem( selectionTree, SWT.NONE );
tiTrans.setText( STRING_TRANSFORMATIONS );
tiTrans.setImage( GUIResource.getInstance().getImageBol() );
// Set expanded if this is the only transformation shown.
if ( props.isOnlyActiveFileShownInTree() ) {
TreeMemory.getInstance().storeExpanded( STRING_SPOON_MAIN_TREE, tiTrans, true );
}
for ( TabMapEntry entry : delegates.tabs.getTabs() ) {
Object managedObject = entry.getObject().getManagedObject();
if ( managedObject instanceof TransMeta ) {
TransMeta transMeta = (TransMeta) managedObject;
if ( !props.isOnlyActiveFileShownInTree()
|| showAll || ( activeTransMeta != null && activeTransMeta.equals( transMeta ) ) ) {
// Add a tree item with the name of transformation
//
String name = delegates.tabs.makeTabName( transMeta, entry.isShowingLocation() );
if ( Const.isEmpty( name ) ) {
name = STRING_TRANS_NO_NAME;
}
TreeItem tiTransName = new TreeItem( tiTrans, SWT.NONE );
tiTransName.setText( name );
tiTransName.setImage( guiResource.getImageTransGraph() );
// Set expanded if this is the only transformation
// shown.
if ( props.isOnlyActiveFileShownInTree() ) {
TreeMemory.getInstance().storeExpanded( STRING_SPOON_MAIN_TREE, tiTransName, true );
}
// /////////////////////////////////////////////////////
//
// Now add the database connections
//
TreeItem tiDbTitle = new TreeItem( tiTransName, SWT.NONE );
tiDbTitle.setText( STRING_CONNECTIONS );
tiDbTitle.setImage( guiResource.getImageBol() );
String[] dbNames = new String[transMeta.nrDatabases()];
for ( int i = 0; i < dbNames.length; i++ ) {
dbNames[i] = transMeta.getDatabase( i ).getName();
}
Arrays.sort( dbNames, new Comparator<String>() {
public int compare( String o1, String o2 ) {
return o1.compareToIgnoreCase( o2 );
}
} );
// Draw the connections themselves below it.
for ( String dbName : dbNames ) {
DatabaseMeta databaseMeta = transMeta.findDatabase( dbName );
if ( !filterMatch( dbName ) ) {
continue;
}
TreeItem tiDb = new TreeItem( tiDbTitle, SWT.NONE );
tiDb.setText( databaseMeta.getDisplayName() );
if ( databaseMeta.isShared() ) {
tiDb.setFont( guiResource.getFontBold() );
}
tiDb.setImage( guiResource.getImageConnection() );
}
// /////////////////////////////////////////////////////
//
// The steps
//
TreeItem tiStepTitle = new TreeItem( tiTransName, SWT.NONE );
tiStepTitle.setText( STRING_STEPS );
tiStepTitle.setImage( guiResource.getImageBol() );
// Put the steps below it.
for ( int i = 0; i < transMeta.nrSteps(); i++ ) {
StepMeta stepMeta = transMeta.getStep( i );
PluginInterface stepPlugin =
PluginRegistry.getInstance().findPluginWithId( StepPluginType.class, stepMeta.getStepID() );
if ( !filterMatch( stepMeta.getName() ) && !filterMatch( stepMeta.getDescription() ) ) {
continue;
}
TreeItem tiStep = new TreeItem( tiStepTitle, SWT.NONE );
tiStep.setText( stepMeta.getName() );
if ( stepMeta.isShared() ) {
tiStep.setFont( guiResource.getFontBold() );
}
if ( !stepMeta.isDrawn() ) {
tiStep.setForeground( guiResource.getColorDarkGray() );
}
Image stepIcon = guiResource.getImagesStepsSmall().get( stepPlugin.getIds()[0] );
if ( stepIcon == null ) {
stepIcon = guiResource.getImageBol();
}
tiStep.setImage( stepIcon );
}
// /////////////////////////////////////////////////////
//
// The hops
//
TreeItem tiHopTitle = new TreeItem( tiTransName, SWT.NONE );
tiHopTitle.setText( STRING_HOPS );
tiHopTitle.setImage( guiResource.getImageBol() );
// Put the steps below it.
for ( int i = 0; i < transMeta.nrTransHops(); i++ ) {
TransHopMeta hopMeta = transMeta.getTransHop( i );
if ( !filterMatch( hopMeta.toString() ) ) {
continue;
}
TreeItem tiHop = new TreeItem( tiHopTitle, SWT.NONE );
tiHop.setText( hopMeta.toString() );
if ( hopMeta.isEnabled() ) {
tiHop.setImage( guiResource.getImageHop() );
} else {
tiHop.setImage( guiResource.getImageDisabledHop() );
}
}
// /////////////////////////////////////////////////////
//
// The partitions
//
TreeItem tiPartitionTitle = new TreeItem( tiTransName, SWT.NONE );
tiPartitionTitle.setText( STRING_PARTITIONS );
tiPartitionTitle.setImage( guiResource.getImageBol() );
// Put the steps below it.
for ( int i = 0; i < transMeta.getPartitionSchemas().size(); i++ ) {
PartitionSchema partitionSchema = transMeta.getPartitionSchemas().get( i );
if ( !filterMatch( partitionSchema.getName() ) ) {
continue;
}
TreeItem tiPartition = new TreeItem( tiPartitionTitle, SWT.NONE );
tiPartition.setText( partitionSchema.getName() );
tiPartition.setImage( guiResource.getImageFolderConnections() );
if ( partitionSchema.isShared() ) {
tiPartition.setFont( guiResource.getFontBold() );
}
}
// /////////////////////////////////////////////////////
//
// The slaves
//
TreeItem tiSlaveTitle = new TreeItem( tiTransName, SWT.NONE );
tiSlaveTitle.setText( STRING_SLAVES );
tiSlaveTitle.setImage( guiResource.getImageBol() );
// Put the slaves below it.
//
String[] slaveNames = transMeta.getSlaveServerNames();
Arrays.sort( slaveNames, new Comparator<String>() {
public int compare( String o1, String o2 ) {
return o1.compareToIgnoreCase( o2 );
}
} );
for ( String slaveName : slaveNames ) {
SlaveServer slaveServer = transMeta.findSlaveServer( slaveName );
if ( !filterMatch( slaveServer.getName() ) ) {
continue;
}
TreeItem tiSlave = new TreeItem( tiSlaveTitle, SWT.NONE );
tiSlave.setText( slaveServer.getName() );
tiSlave.setImage( guiResource.getImageSlave() );
if ( slaveServer.isShared() ) {
tiSlave.setFont( guiResource.getFontBold() );
}
}
// /////////////////////////////////////////////////////
//
// The clusters
//
TreeItem tiClusterTitle = new TreeItem( tiTransName, SWT.NONE );
tiClusterTitle.setText( STRING_CLUSTERS );
tiClusterTitle.setImage( guiResource.getImageBol() );
// Put the steps below it.
for ( int i = 0; i < transMeta.getClusterSchemas().size(); i++ ) {
ClusterSchema clusterSchema = transMeta.getClusterSchemas().get( i );
if ( !filterMatch( clusterSchema.getName() ) ) {
continue;
}
TreeItem tiCluster = new TreeItem( tiClusterTitle, SWT.NONE );
tiCluster.setText( clusterSchema.toString() );
tiCluster.setImage( guiResource.getImageCluster() );
if ( clusterSchema.isShared() ) {
tiCluster.setFont( guiResource.getFontBold() );
}
}
}
}
}
}
if ( !props.isOnlyActiveFileShownInTree() || showAll || activeJobMeta != null ) {
TreeItem tiJobs = new TreeItem( selectionTree, SWT.NONE );
tiJobs.setText( STRING_JOBS );
tiJobs.setImage( GUIResource.getInstance().getImageBol() );
// Set expanded if this is the only job shown.
if ( props.isOnlyActiveFileShownInTree() ) {
tiJobs.setExpanded( true );
TreeMemory.getInstance().storeExpanded( STRING_SPOON_MAIN_TREE, tiJobs, true );
}
// Now add the jobs
//
for ( TabMapEntry entry : delegates.tabs.getTabs() ) {
Object managedObject = entry.getObject().getManagedObject();
if ( managedObject instanceof JobMeta ) {
JobMeta jobMeta = (JobMeta) managedObject;
if ( !props.isOnlyActiveFileShownInTree()
|| showAll || ( activeJobMeta != null && activeJobMeta.equals( jobMeta ) ) ) {
// Add a tree item with the name of job
//
String name = delegates.tabs.makeTabName( jobMeta, entry.isShowingLocation() );
if ( Const.isEmpty( name ) ) {
name = STRING_JOB_NO_NAME;
}
if ( !filterMatch( name ) ) {
continue;
}
TreeItem tiJobName = new TreeItem( tiJobs, SWT.NONE );
tiJobName.setText( name );
tiJobName.setImage( guiResource.getImageJobGraph() );
// Set expanded if this is the only job shown.
if ( props.isOnlyActiveFileShownInTree() ) {
TreeMemory.getInstance().storeExpanded( STRING_SPOON_MAIN_TREE, tiJobName, true );
}
// /////////////////////////////////////////////////////
//
// Now add the database connections
//
TreeItem tiDbTitle = new TreeItem( tiJobName, SWT.NONE );
tiDbTitle.setText( STRING_CONNECTIONS );
tiDbTitle.setImage( guiResource.getImageBol() );
String[] dbNames = new String[jobMeta.nrDatabases()];
for ( int i = 0; i < dbNames.length; i++ ) {
dbNames[i] = jobMeta.getDatabase( i ).getName();
}
Arrays.sort( dbNames, new Comparator<String>() {
public int compare( String o1, String o2 ) {
return o1.compareToIgnoreCase( o2 );
}
} );
// Draw the connections themselves below it.
for ( String dbName : dbNames ) {
DatabaseMeta databaseMeta = jobMeta.findDatabase( dbName );
if ( !filterMatch( databaseMeta.getName() ) ) {
continue;
}
TreeItem tiDb = new TreeItem( tiDbTitle, SWT.NONE );
tiDb.setText( databaseMeta.getDisplayName() );
if ( databaseMeta.isShared() ) {
tiDb.setFont( guiResource.getFontBold() );
}
tiDb.setImage( guiResource.getImageConnection() );
}
// /////////////////////////////////////////////////////
//
// The job entries
//
TreeItem tiJobEntriesTitle = new TreeItem( tiJobName, SWT.NONE );
tiJobEntriesTitle.setText( STRING_JOB_ENTRIES );
tiJobEntriesTitle.setImage( guiResource.getImageBol() );
// Put the job entries below it.
//
for ( int i = 0; i < jobMeta.nrJobEntries(); i++ ) {
JobEntryCopy jobEntry = jobMeta.getJobEntry( i );
if ( !filterMatch( jobEntry.getName() ) && !filterMatch( jobEntry.getDescription() ) ) {
continue;
}
TreeItem tiJobEntry = ConstUI.findTreeItem( tiJobEntriesTitle, jobEntry.getName() );
if ( tiJobEntry != null ) {
continue; // only show it once
}
tiJobEntry = new TreeItem( tiJobEntriesTitle, SWT.NONE );
tiJobEntry.setText( jobEntry.getName() );
// if (jobEntry.isShared())
// tiStep.setFont(guiResource.getFontBold()); TODO:
// allow job entries to be shared as well...
if ( jobEntry.isStart() ) {
tiJobEntry.setImage( GUIResource.getInstance().getImageStart() );
} else if ( jobEntry.isDummy() ) {
tiJobEntry.setImage( GUIResource.getInstance().getImageDummy() );
} else {
String key = jobEntry.getEntry().getPluginId();
Image image = GUIResource.getInstance().getImagesJobentriesSmall().get( key );
tiJobEntry.setImage( image );
}
}
// /////////////////////////////////////////////////////
//
// The slaves
//
TreeItem tiSlaveTitle = new TreeItem( tiJobName, SWT.NONE );
tiSlaveTitle.setText( STRING_SLAVES );
tiSlaveTitle.setImage( guiResource.getImageBol() );
// Put the slaves below it.
//
String[] slaveNames = jobMeta.getSlaveServerNames();
Arrays.sort( slaveNames, new Comparator<String>() {
public int compare( String o1, String o2 ) {
return o1.compareToIgnoreCase( o2 );
}
} );
for ( String slaveName : slaveNames ) {
SlaveServer slaveServer = jobMeta.findSlaveServer( slaveName );
if ( !filterMatch( slaveServer.getName() ) ) {
continue;
}
TreeItem tiSlave = new TreeItem( tiSlaveTitle, SWT.NONE );
tiSlave.setText( slaveServer.getName() );
tiSlave.setImage( guiResource.getImageSlave() );
if ( slaveServer.isShared() ) {
tiSlave.setFont( guiResource.getFontBold() );
}
}
}
}
}
}
// Set the expanded state of the complete tree.
TreeMemory.setExpandedFromMemory( selectionTree, STRING_SPOON_MAIN_TREE );
// refreshCoreObjectsHistory();
selectionTree.setFocus();
selectionTree.layout();
variableComposite.layout( true, true );
setShellText();
}
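/**
 * @return the text of the currently selected tab, or null if no tab is selected
 */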
public String getActiveTabText() {
if ( tabfolder.getSelected() == null ) {
return null;
}
return tabfolder.getSelected().getText();
}
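/**
 * Redraw the graph of the currently selected transformation or job tab.
 */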
public void refreshGraph() {
if ( shell.isDisposed() ) {
return;
}
TabItem tabItem = tabfolder.getSelected();
if ( tabItem == null ) {
return;
}
TabMapEntry tabMapEntry = delegates.tabs.getTab( tabItem );
if ( tabMapEntry != null ) {
if ( tabMapEntry.getObject() instanceof TransGraph ) {
TransGraph transGraph = (TransGraph) tabMapEntry.getObject();
transGraph.redraw();
}
if ( tabMapEntry.getObject() instanceof JobGraph ) {
JobGraph jobGraph = (JobGraph) tabMapEntry.getObject();
jobGraph.redraw();
}
}
setShellText();
}
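/**
 * Create a new step of the type currently selected in the tree and add it to the given transformation.
 *
 * @param transMeta
 * Transformation to add the step to
 * @return The newly created StepMeta object, or null if nothing was created
 */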
public StepMeta newStep( TransMeta transMeta ) {
return newStep( transMeta, true, true );
}
public StepMeta newStep( TransMeta transMeta, boolean openit, boolean rename ) {
if ( transMeta == null ) {
return null;
}
TreeItem[] ti = selectionTree.getSelection();
StepMeta inf = null;
if ( ti.length == 1 ) {
String stepType = ti[0].getText();
if ( log.isDebug() ) {
log.logDebug( BaseMessages.getString( PKG, "Spoon.Log.NewStep" ) + stepType ); // "New step: "
}
inf = newStep( transMeta, stepType, stepType, openit, rename );
}
return inf;
}
/**
 * Allocate a new step, optionally open its dialog and rename it to avoid name clashes.
 *
 * @param transMeta
 * Transformation to add the step to
 * @param name
 * Name of the new step
 * @param description
 * Description of the type of step
 * @param openit
 * Open the dialog for this step?
 * @param rename
 * Rename this step?
 *
 * @return The newly created StepMeta object, or null if the user cancelled.
 *
 */
public StepMeta newStep( TransMeta transMeta, String name, String description, boolean openit, boolean rename ) {
StepMeta inf = null;
// See if we need to rename the step to avoid doubles!
if ( rename && transMeta.findStep( name ) != null ) {
int i = 2;
String newName = name + " " + i;
while ( transMeta.findStep( newName ) != null ) {
i++;
newName = name + " " + i;
}
name = newName;
}
PluginRegistry registry = PluginRegistry.getInstance();
PluginInterface stepPlugin = null;
try {
stepPlugin = registry.findPluginWithName( StepPluginType.class, description );
if ( stepPlugin != null ) {
StepMetaInterface info = (StepMetaInterface) registry.loadClass( stepPlugin );
info.setDefault();
if ( openit ) {
StepDialogInterface dialog = this.getStepEntryDialog( info, transMeta, name );
if ( dialog != null ) {
name = dialog.open();
}
}
inf = new StepMeta( stepPlugin.getIds()[0], name, info );
if ( name != null ) {
// OK pressed in the dialog: we have a step-name
String newName = name;
StepMeta stepMeta = transMeta.findStep( newName );
int nr = 2;
while ( stepMeta != null ) {
newName = name + " " + nr;
stepMeta = transMeta.findStep( newName );
nr++;
}
if ( nr > 2 ) {
inf.setName( newName );
MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_INFORMATION );
// "This stepName already exists. Spoon changed the stepName to ["+newName+"]"
mb.setMessage( BaseMessages.getString( PKG, "Spoon.Dialog.ChangeStepname.Message", newName ) );
mb.setText( BaseMessages.getString( PKG, "Spoon.Dialog.ChangeStepname.Title" ) );
mb.open();
}
inf.setLocation( 20, 20 ); // default location at (20,20)
transMeta.addStep( inf );
addUndoNew( transMeta, new StepMeta[] { inf }, new int[] { transMeta.indexOfStep( inf ) } );
// Also store it in the pluginHistory list...
props.increasePluginHistory( stepPlugin.getIds()[0] );
// stepHistoryChanged = true;
refreshTree();
} else {
return null; // Cancel pressed in dialog.
}
setShellText();
}
} catch ( KettleException e ) {
String filename = stepPlugin == null ? null : stepPlugin.getErrorHelpFile();
if ( stepPlugin != null && !Const.isEmpty( filename ) ) {
// OK, instead of a normal error message, we give back the
// content of the error help file... (HTML)
FileInputStream fis = null;
try {
StringBuilder content = new StringBuilder();
fis = new FileInputStream( new File( filename ) );
int ch = fis.read();
while ( ch >= 0 ) {
content.append( (char) ch );
ch = fis.read();
}
ShowBrowserDialog sbd =
new ShowBrowserDialog(
// "Error help text"
shell, BaseMessages.getString( PKG, "Spoon.Dialog.ErrorHelpText.Title" ), content.toString() );
sbd.open();
} catch ( Exception ex ) {
new ErrorDialog( shell,
// "Error showing help text"
BaseMessages.getString( PKG, "Spoon.Dialog.ErrorShowingHelpText.Title" ), BaseMessages.getString(
PKG, "Spoon.Dialog.ErrorShowingHelpText.Message" ), ex );
} finally {
if ( fis != null ) {
try {
fis.close();
} catch ( Exception ex ) {
log.logError( "Error closing plugin help file", ex );
}
}
}
} else {
new ErrorDialog( shell,
// "Error creating step"
// "I was unable to create a new step"
BaseMessages.getString( PKG, "Spoon.Dialog.UnableCreateNewStep.Title" ), BaseMessages.getString(
PKG, "Spoon.Dialog.UnableCreateNewStep.Message" ), e );
}
return null;
} catch ( Throwable e ) {
if ( !shell.isDisposed() ) {
new ErrorDialog( shell,
// "Error creating step"
BaseMessages.getString( PKG, "Spoon.Dialog.ErrorCreatingStep.Title" ), BaseMessages.getString(
PKG, "Spoon.Dialog.UnableCreateNewStep.Message" ), e );
}
return null;
}
return inf;
}
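/**
 * Update the title of the Spoon window to reflect the repository, the active file, its revision and its changed
 * state.
 */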
public void setShellText() {
if ( shell.isDisposed() ) {
return;
}
String filename = null;
String name = null;
String version = null;
ChangedFlagInterface changed = null;
TransMeta transMeta = getActiveTransformation();
if ( transMeta != null ) {
changed = transMeta;
filename = transMeta.getFilename();
name = transMeta.getName();
version = transMeta.getObjectRevision() == null ? null : transMeta.getObjectRevision().getName();
}
JobMeta jobMeta = getActiveJob();
if ( jobMeta != null ) {
changed = jobMeta;
filename = jobMeta.getFilename();
name = jobMeta.getName();
version = jobMeta.getObjectRevision() == null ? null : jobMeta.getObjectRevision().getName();
}
String text = "";
if ( rep != null ) {
text += APP_TITLE + " - [" + getRepositoryName() + "] ";
} else {
text += APP_TITLE + " - ";
}
if ( Const.isEmpty( name ) ) {
if ( !Const.isEmpty( filename ) ) {
text += filename;
} else {
String tab = getActiveTabText();
if ( !Const.isEmpty( tab ) ) {
text += tab;
} else {
text += BaseMessages.getString( PKG, "Spoon.Various.NoName" ); // "[no name]"
}
}
} else {
text += name;
}
if ( !Const.isEmpty( version ) ) {
text += " v" + version;
}
if ( changed != null && changed.hasChanged() ) {
text += " " + BaseMessages.getString( PKG, "Spoon.Various.Changed" );
}
shell.setText( text );
markTabsChanged();
}
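/**
 * Enable or disable menu items and toolbar buttons depending on the active file, the active perspective and the
 * repository connection.
 */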
public void enableMenus() {
boolean disableTransMenu = getActiveTransformation() == null;
boolean disableJobMenu = getActiveJob() == null;
boolean disableMetaMenu = getActiveMeta() == null;
boolean isRepositoryRunning = rep != null;
boolean disablePreviewButton = true;
String activePerspectiveId = null;
SpoonPerspectiveManager manager = SpoonPerspectiveManager.getInstance();
if ( manager != null && manager.getActivePerspective() != null ) {
activePerspectiveId = manager.getActivePerspective().getId();
}
boolean etlPerspective = false;
if ( activePerspectiveId != null && activePerspectiveId.length() > 0 ) {
etlPerspective = activePerspectiveId.equals( MainSpoonPerspective.ID );
}
TransGraph transGraph = getActiveTransGraph();
if ( transGraph != null ) {
disablePreviewButton = !( transGraph.isRunning() && !transGraph.isHalting() );
}
boolean disableSave = true;
TabItemInterface currentTab = getActiveTabitem();
if ( currentTab != null && currentTab.canHandleSave() ) {
disableSave = !currentTab.hasContentChanged();
}
EngineMetaInterface meta = getActiveMeta();
if ( meta != null ) {
disableSave = !meta.canSave();
}
org.pentaho.ui.xul.dom.Document doc;
if ( mainSpoonContainer != null ) {
doc = mainSpoonContainer.getDocumentRoot();
if ( doc != null ) {
if ( etlPerspective ) {
doc.getElementById( "file" ).setVisible( etlPerspective );
doc.getElementById( "edit" ).setVisible( etlPerspective );
doc.getElementById( "view" ).setVisible( etlPerspective );
doc.getElementById( "action" ).setVisible( etlPerspective );
doc.getElementById( "tools" ).setVisible( etlPerspective );
doc.getElementById( "help" ).setVisible( etlPerspective );
doc.getElementById( "help-tip" ).setVisible( etlPerspective );
doc.getElementById( "help-welcome" ).setVisible( etlPerspective );
doc.getElementById( "help-plugins" ).setVisible( true );
}
// Only enable certain menu-items if we need to.
disableMenuItem( doc, "file-new-database", disableTransMenu && disableJobMenu || !isRepositoryRunning );
disableMenuItem( doc, "file-save", disableTransMenu && disableJobMenu && disableMetaMenu || disableSave );
disableMenuItem( doc, "toolbar-file-save", disableTransMenu
&& disableJobMenu && disableMetaMenu || disableSave );
disableMenuItem( doc, "file-save-as", disableTransMenu && disableJobMenu && disableMetaMenu || disableSave );
disableMenuItem( doc, "toolbar-file-save-as", disableTransMenu
&& disableJobMenu && disableMetaMenu || disableSave );
disableMenuItem( doc, "file-save-as-vfs", disableTransMenu && disableJobMenu && disableMetaMenu );
disableMenuItem( doc, "file-close", disableTransMenu && disableJobMenu && disableMetaMenu );
disableMenuItem( doc, "file-print", disableTransMenu && disableJobMenu );
disableMenuItem( doc, "file-export-to-xml", disableTransMenu && disableJobMenu );
disableMenuItem( doc, "file-export-all-to-xml", disableTransMenu && disableJobMenu );
// Disable the undo and redo menus if there is no active transformation
// or active job
// DO NOT ENABLE them otherwise ... leave that to the undo/redo settings
//
disableMenuItem( doc, UNDO_MENU_ITEM, disableTransMenu && disableJobMenu );
disableMenuItem( doc, REDO_MENU_ITEM, disableTransMenu && disableJobMenu );
disableMenuItem( doc, "edit-clear-selection", disableTransMenu && disableJobMenu );
disableMenuItem( doc, "edit-select-all", disableTransMenu && disableJobMenu );
updateSettingsMenu( doc, disableTransMenu, disableJobMenu );
disableMenuItem( doc, "edit-settings", disableTransMenu && disableJobMenu && disableMetaMenu );
// View Menu
( (XulMenuitem) doc.getElementById( "view-results" ) ).setSelected( isExecutionResultsPaneVisible() );
disableMenuItem( doc, "view-results", transGraph == null && disableJobMenu );
disableMenuItem( doc, "view-zoom-in", disableTransMenu && disableJobMenu );
disableMenuItem( doc, "view-zoom-out", disableTransMenu && disableJobMenu );
disableMenuItem( doc, "view-zoom-100pct", disableTransMenu && disableJobMenu );
// Transformations
disableMenuItem( doc, "process-run", disableTransMenu && disablePreviewButton && disableJobMenu );
disableMenuItem( doc, "trans-replay", disableTransMenu && disablePreviewButton );
disableMenuItem( doc, "trans-preview", disableTransMenu && disablePreviewButton );
disableMenuItem( doc, "trans-debug", disableTransMenu && disablePreviewButton );
disableMenuItem( doc, "trans-verify", disableTransMenu );
disableMenuItem( doc, "trans-impact", disableTransMenu );
disableMenuItem( doc, "trans-get-sql", disableTransMenu );
disableMenuItem( doc, "trans-last-impact", disableTransMenu );
// Tools
disableMenuItem( doc, "repository-connect", isRepositoryRunning );
disableMenuItem( doc, "repository-disconnect", !isRepositoryRunning );
disableMenuItem( doc, "repository-explore", !isRepositoryRunning );
disableMenuItem( doc, "repository-clear-shared-object-cache", !isRepositoryRunning );
disableMenuItem( doc, "toolbar-expore-repository", !isRepositoryRunning );
disableMenuItem( doc, "repository-export-all", !isRepositoryRunning );
disableMenuItem( doc, "repository-import-directory", !isRepositoryRunning );
disableMenuItem( doc, "trans-last-preview", !isRepositoryRunning || disableTransMenu );
// Wizard
disableMenuItem( doc, "wizard-connection", disableTransMenu && disableJobMenu );
disableMenuItem( doc, "wizard-copy-table", disableTransMenu && disableJobMenu );
disableMenuItem( doc, "wizard-copy-tables", isRepositoryRunning && disableTransMenu && disableJobMenu );
disableMenuItem( doc, "database-inst-dependancy", !isRepositoryRunning );
SpoonPluginManager.getInstance().notifyLifecycleListeners( SpoonLifeCycleEvent.MENUS_REFRESHED );
MenuManager menuManager = getMenuBarManager();
menuManager.updateAll( true );
// What steps & plugins to show?
refreshCoreObjects();
fireMenuControlers();
}
}
}
/**
 * Update the accelerator text of the "Settings" menu item depending on whether a transformation or a job is active.
 *
 * @param doc
 * the XUL document containing the menu
 * @param disableTransMenu
 * true if no transformation is active
 * @param disableJobMenu
 * true if no job is active
 */
private void updateSettingsMenu( org.pentaho.ui.xul.dom.Document doc, boolean disableTransMenu,
boolean disableJobMenu ) {
XulMenuitem settingsItem = (XulMenuitem) doc.getElementById( "edit-settings" );
if ( settingsItem != null ) {
if ( disableTransMenu && !disableJobMenu ) {
settingsItem.setAcceltext( "CTRL-J" );
settingsItem.setAccesskey( "ctrl-j" );
} else if ( !disableTransMenu && disableJobMenu ) {
settingsItem.setAcceltext( "CTRL-T" );
settingsItem.setAccesskey( "ctrl-t" );
} else {
settingsItem.setAcceltext( "" );
settingsItem.setAccesskey( "" );
}
}
}
public void addSpoonMenuController( ISpoonMenuController menuController ) {
if ( menuControllers != null ) {
menuControllers.add( menuController );
}
}
public boolean removeSpoonMenuController( ISpoonMenuController menuController ) {
if ( menuControllers != null ) {
return menuControllers.remove( menuController );
}
return false;
}
public ISpoonMenuController removeSpoonMenuController( String menuControllerName ) {
ISpoonMenuController result = null;
if ( menuControllers != null ) {
for ( ISpoonMenuController menuController : menuControllers ) {
if ( menuController.getName().equals( menuControllerName ) ) {
result = menuController;
menuControllers.remove( result );
break;
}
}
}
return result;
}
private void disableMenuItem( org.pentaho.ui.xul.dom.Document doc, String itemId, boolean disable ) {
XulComponent menuItem = doc.getElementById( itemId );
if ( menuItem != null ) {
menuItem.setDisabled( disable );
} else {
log.logError( "Non-Fatal error : Menu Item with id = " + itemId + " does not exist! Check 'menubar.xul'" );
}
}
private void markTabsChanged() {
boolean anyTabsChanged = false;
for ( TabMapEntry entry : delegates.tabs.getTabs() ) {
if ( entry.getTabItem().isDisposed() ) {
continue;
}
boolean changed = entry.getObject().hasContentChanged();
anyTabsChanged |= changed;
entry.getTabItem().setChanged( changed );
}
}
/**
* Check to see if any jobs or transformations are dirty
* @return true if any of the open jobs or trans are marked dirty
*/
public boolean isTabsChanged() {
for ( TabMapEntry entry : delegates.tabs.getTabs() ) {
if ( entry.getTabItem().isDisposed() ) {
continue;
}
if ( entry.getObject().hasContentChanged() ) {
return true;
}
}
return false;
}
public void printFile() {
TransMeta transMeta = getActiveTransformation();
if ( transMeta != null ) {
printTransFile( transMeta );
}
JobMeta jobMeta = getActiveJob();
if ( jobMeta != null ) {
printJobFile( jobMeta );
}
}
private void printTransFile( TransMeta transMeta ) {
TransGraph transGraph = getActiveTransGraph();
if ( transGraph == null ) {
return;
}
PrintSpool ps = new PrintSpool();
Printer printer = ps.getPrinter( shell );
// Create an image of the screen
Point max = transMeta.getMaximum();
Image img = transGraph.getTransformationImage( printer, max.x, max.y, 1.0f );
ps.printImage( shell, img );
img.dispose();
ps.dispose();
}
private void printJobFile( JobMeta jobMeta ) {
JobGraph jobGraph = getActiveJobGraph();
if ( jobGraph == null ) {
return;
}
PrintSpool ps = new PrintSpool();
Printer printer = ps.getPrinter( shell );
// Create an image of the screen
Point max = jobMeta.getMaximum();
Image img = jobGraph.getJobImage( printer, max.x, max.y, 1.0f );
ps.printImage( shell, img );
img.dispose();
ps.dispose();
}
public TransGraph getActiveTransGraph() {
if ( tabfolder != null ) {
if ( tabfolder.getSelected() == null ) {
return null;
}
} else {
return null;
}
if ( delegates != null && delegates.tabs != null ) {
TabMapEntry mapEntry = delegates.tabs.getTab( tabfolder.getSelected() );
if ( mapEntry != null ) {
if ( mapEntry.getObject() instanceof TransGraph ) {
return (TransGraph) mapEntry.getObject();
}
}
}
return null;
}
public JobGraph getActiveJobGraph() {
if ( delegates != null && delegates.tabs != null && tabfolder != null ) {
TabMapEntry mapEntry = delegates.tabs.getTab( tabfolder.getSelected() );
if ( mapEntry.getObject() instanceof JobGraph ) {
return (JobGraph) mapEntry.getObject();
}
}
return null;
}
public EngineMetaInterface getActiveMeta() {
SpoonPerspectiveManager manager = SpoonPerspectiveManager.getInstance();
if ( manager != null && manager.getActivePerspective() != null ) {
return manager.getActivePerspective().getActiveMeta();
}
return null;
}
public TabItemInterface getActiveTabitem() {
if ( tabfolder == null ) {
return null;
}
TabItem tabItem = tabfolder.getSelected();
if ( tabItem == null ) {
return null;
}
if ( delegates != null && delegates.tabs != null ) {
TabMapEntry mapEntry = delegates.tabs.getTab( tabItem );
if ( mapEntry != null ) {
return mapEntry.getObject();
} else {
return null;
}
}
return null;
}
/**
* @return The active TransMeta object, found by looking at the selected TransGraph, TransLog or TransHist. If nothing
* valuable is selected, we return null.
*/
public TransMeta getActiveTransformation() {
EngineMetaInterface meta = getActiveMeta();
if ( meta instanceof TransMeta ) {
return (TransMeta) meta;
}
return null;
}
/**
* @return The active JobMeta object, found by looking at the selected JobGraph, JobLog or JobHist. If nothing
* valuable is selected, we return null.
*/
public JobMeta getActiveJob() {
EngineMetaInterface meta = getActiveMeta();
if ( meta instanceof JobMeta ) {
return (JobMeta) meta;
}
return null;
}
public UndoInterface getActiveUndoInterface() {
return (UndoInterface) this.getActiveMeta();
}
public TransMeta findTransformation( String tabItemText ) {
if ( delegates != null && delegates.trans != null ) {
return delegates.trans.getTransformation( tabItemText );
} else {
return null;
}
}
public JobMeta findJob( String tabItemText ) {
if ( delegates != null && delegates.jobs != null ) {
return delegates.jobs.getJob( tabItemText );
} else {
return null;
}
}
public TransMeta[] getLoadedTransformations() {
if ( delegates != null && delegates.trans != null ) {
List<TransMeta> list = delegates.trans.getTransformationList();
return list.toArray( new TransMeta[list.size()] );
} else {
return null;
}
}
public JobMeta[] getLoadedJobs() {
if ( delegates != null && delegates.jobs != null ) {
List<JobMeta> list = delegates.jobs.getJobList();
return list.toArray( new JobMeta[list.size()] );
} else {
return null;
}
}
public void saveSettings() {
if ( shell.isDisposed() ) {
// we cannot save the settings, it's too late
return;
}
WindowProperty windowProperty = new WindowProperty( shell );
windowProperty.setName( APP_TITLE );
props.setScreen( windowProperty );
props.setLogLevel( DefaultLogLevel.getLogLevel().getCode() );
props.setSashWeights( sashform.getWeights() );
// Also save the open files...
// Go over the list of tabs, then add the info to the list
// of open tab files in PropsUI
//
props.getOpenTabFiles().clear();
for ( TabMapEntry entry : delegates.tabs.getTabs() ) {
String fileType = null;
String filename = null;
String directory = null;
int openType = 0;
if ( entry.getObjectType() == ObjectType.TRANSFORMATION_GRAPH ) {
fileType = LastUsedFile.FILE_TYPE_TRANSFORMATION;
TransMeta transMeta = (TransMeta) entry.getObject().getManagedObject();
filename = rep != null ? transMeta.getName() : transMeta.getFilename();
directory = transMeta.getRepositoryDirectory().toString();
openType = LastUsedFile.OPENED_ITEM_TYPE_MASK_GRAPH;
} else if ( entry.getObjectType() == ObjectType.JOB_GRAPH ) {
fileType = LastUsedFile.FILE_TYPE_JOB;
JobMeta jobMeta = (JobMeta) entry.getObject().getManagedObject();
filename = rep != null ? jobMeta.getName() : jobMeta.getFilename();
directory = jobMeta.getRepositoryDirectory().toString();
openType = LastUsedFile.OPENED_ITEM_TYPE_MASK_GRAPH;
}
if ( fileType != null ) {
props.addOpenTabFile(
fileType, filename, directory, rep != null, rep != null ? rep.getName() : null, openType );
}
}
props.saveProps();
}
public void loadSettings() {
LogLevel logLevel = LogLevel.getLogLevelForCode( props.getLogLevel() );
DefaultLogLevel.setLogLevel( logLevel );
log.setLogLevel( logLevel );
KettleLogStore.getAppender().setMaxNrLines( props.getMaxNrLinesInLog() );
// transMeta.setMaxUndo(props.getMaxUndo());
DBCache.getInstance().setActive( props.useDBCache() );
}
public void changeLooks() {
if ( !selectionTree.isDisposed() ) {
props.setLook( selectionTree );
}
props.setLook( tabfolder.getSwtTabset(), Props.WIDGET_STYLE_TAB );
refreshTree();
refreshGraph();
}
public void undoAction( UndoInterface undoInterface ) {
if ( undoInterface == null ) {
return;
}
TransAction ta = undoInterface.previousUndo();
if ( ta == null ) {
return;
}
setUndoMenu( undoInterface ); // something changed: change the menu
if ( undoInterface instanceof TransMeta ) {
delegates.trans.undoTransformationAction( (TransMeta) undoInterface, ta );
}
if ( undoInterface instanceof JobMeta ) {
delegates.jobs.undoJobAction( (JobMeta) undoInterface, ta );
}
// Put what we undo in focus
if ( undoInterface instanceof TransMeta ) {
TransGraph transGraph = delegates.trans.findTransGraphOfTransformation( (TransMeta) undoInterface );
transGraph.forceFocus();
}
if ( undoInterface instanceof JobMeta ) {
JobGraph jobGraph = delegates.jobs.findJobGraphOfJob( (JobMeta) undoInterface );
jobGraph.forceFocus();
}
}
public void redoAction( UndoInterface undoInterface ) {
if ( undoInterface == null ) {
return;
}
TransAction ta = undoInterface.nextUndo();
if ( ta == null ) {
return;
}
setUndoMenu( undoInterface ); // something changed: change the menu
if ( undoInterface instanceof TransMeta ) {
delegates.trans.redoTransformationAction( (TransMeta) undoInterface, ta );
}
if ( undoInterface instanceof JobMeta ) {
delegates.jobs.redoJobAction( (JobMeta) undoInterface, ta );
}
// Put what we redo in focus
if ( undoInterface instanceof TransMeta ) {
TransGraph transGraph = delegates.trans.findTransGraphOfTransformation( (TransMeta) undoInterface );
transGraph.forceFocus();
}
if ( undoInterface instanceof JobMeta ) {
JobGraph jobGraph = delegates.jobs.findJobGraphOfJob( (JobMeta) undoInterface );
jobGraph.forceFocus();
}
}
/**
* Sets the text and enabled settings for the undo and redo menu items
*
* @param undoInterface
* the object which holds the undo/redo information
*/
public void setUndoMenu( UndoInterface undoInterface ) {
if ( shell.isDisposed() ) {
return;
}
TransAction prev = undoInterface != null ? undoInterface.viewThisUndo() : null;
TransAction next = undoInterface != null ? undoInterface.viewNextUndo() : null;
// Set the menubar text and enabled flags
XulMenuitem item = (XulMenuitem) mainSpoonContainer.getDocumentRoot().getElementById( UNDO_MENU_ITEM );
item.setLabel( prev == null ? UNDO_UNAVAILABLE : BaseMessages.getString(
PKG, "Spoon.Menu.Undo.Available", prev.toString() ) );
item.setDisabled( prev == null );
item = (XulMenuitem) mainSpoonContainer.getDocumentRoot().getElementById( REDO_MENU_ITEM );
item.setLabel( next == null ? REDO_UNAVAILABLE : BaseMessages.getString(
PKG, "Spoon.Menu.Redo.Available", next.toString() ) );
item.setDisabled( next == null );
}
public void addUndoNew( UndoInterface undoInterface, Object[] obj, int[] position ) {
addUndoNew( undoInterface, obj, position, false );
}
public void addUndoNew( UndoInterface undoInterface, Object[] obj, int[] position, boolean nextAlso ) {
undoInterface.addUndo( obj, null, position, null, null, TransMeta.TYPE_UNDO_NEW, nextAlso );
setUndoMenu( undoInterface );
}
// Undo delete object
public void addUndoDelete( UndoInterface undoInterface, Object[] obj, int[] position ) {
addUndoDelete( undoInterface, obj, position, false );
}
// Undo delete object
public void addUndoDelete( UndoInterface undoInterface, Object[] obj, int[] position, boolean nextAlso ) {
undoInterface.addUndo( obj, null, position, null, null, TransMeta.TYPE_UNDO_DELETE, nextAlso );
setUndoMenu( undoInterface );
}
// Change of step, connection, hop or note...
public void addUndoPosition( UndoInterface undoInterface, Object[] obj, int[] pos, Point[] prev, Point[] curr ) {
// It's better to store the indexes of the objects, not the objects themselves!
undoInterface.addUndo( obj, null, pos, prev, curr, JobMeta.TYPE_UNDO_POSITION, false );
setUndoMenu( undoInterface );
}
// Change of step, connection, hop or note...
public void addUndoChange( UndoInterface undoInterface, Object[] from, Object[] to, int[] pos ) {
addUndoChange( undoInterface, from, to, pos, false );
}
// Change of step, connection, hop or note...
public void addUndoChange( UndoInterface undoInterface, Object[] from, Object[] to, int[] pos, boolean nextAlso ) {
undoInterface.addUndo( from, to, pos, null, null, JobMeta.TYPE_UNDO_CHANGE, nextAlso );
setUndoMenu( undoInterface );
}
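// Usage sketch (illustrative, not part of the original code): a typical caller snapshots the edited object
// before and after the change and registers both copies, as editPartitioning() further below does:
//
// StepMeta before = (StepMeta) stepMeta.clone();
// // ... let the user edit stepMeta ...
// StepMeta after = (StepMeta) stepMeta.clone();
// addUndoChange( transMeta, new StepMeta[] { before }, new StepMeta[] { after },
// new int[] { transMeta.indexOfStep( stepMeta ) } );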
/**
* Checks *all* the steps in the transformation, puts the result in remarks list
*/
public void checkTrans( TransMeta transMeta ) {
checkTrans( transMeta, false );
}
/**
* Check the steps in a transformation
*
* @param only_selected
* True: Check only the selected steps...
*/
public void checkTrans( TransMeta transMeta, boolean only_selected ) {
if ( transMeta == null ) {
return;
}
TransGraph transGraph = delegates.trans.findTransGraphOfTransformation( transMeta );
if ( transGraph == null ) {
return;
}
CheckTransProgressDialog ctpd =
new CheckTransProgressDialog( shell, transMeta, transGraph.getRemarks(), only_selected );
ctpd.open(); // manages the remarks arraylist...
showLastTransCheck();
}
/**
* Show the remarks of the last transformation check that was run.
*
* @see #checkTrans(TransMeta)
*/
public void showLastTransCheck() {
TransMeta transMeta = getActiveTransformation();
if ( transMeta == null ) {
return;
}
TransGraph transGraph = delegates.trans.findTransGraphOfTransformation( transMeta );
if ( transGraph == null ) {
return;
}
CheckResultDialog crd = new CheckResultDialog( transMeta, shell, SWT.NONE, transGraph.getRemarks() );
String stepName = crd.open();
if ( stepName != null ) {
// Go to the indicated step!
StepMeta stepMeta = transMeta.findStep( stepName );
if ( stepMeta != null ) {
delegates.steps.editStep( transMeta, stepMeta );
}
}
}
public void analyseImpact( TransMeta transMeta ) {
if ( transMeta == null ) {
return;
}
TransGraph transGraph = delegates.trans.findTransGraphOfTransformation( transMeta );
if ( transGraph == null ) {
return;
}
AnalyseImpactProgressDialog aipd = new AnalyseImpactProgressDialog( shell, transMeta, transGraph.getImpact() );
transGraph.setImpactFinished( aipd.open() );
if ( transGraph.isImpactFinished() ) {
showLastImpactAnalyses( transMeta );
}
}
public void showLastImpactAnalyses( TransMeta transMeta ) {
if ( transMeta == null ) {
return;
}
TransGraph transGraph = delegates.trans.findTransGraphOfTransformation( transMeta );
if ( transGraph == null ) {
return;
}
List<Object[]> rows = new ArrayList<Object[]>();
RowMetaInterface rowMeta = null;
for ( int i = 0; i < transGraph.getImpact().size(); i++ ) {
DatabaseImpact ii = transGraph.getImpact().get( i );
RowMetaAndData row = ii.getRow();
rowMeta = row.getRowMeta();
rows.add( row.getData() );
}
if ( rows.size() > 0 ) {
// Display all the rows...
PreviewRowsDialog prd =
new PreviewRowsDialog( shell, Variables.getADefaultVariableSpace(), SWT.NONE, "-", rowMeta, rows );
prd.setTitleMessage(
// "Impact analyses"
// "Result of analyses:"
BaseMessages.getString( PKG, "Spoon.Dialog.ImpactAnalyses.Title" ), BaseMessages.getString(
PKG, "Spoon.Dialog.ImpactAnalyses.Message" ) );
prd.open();
} else {
MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_INFORMATION );
if ( transGraph.isImpactFinished() ) {
// "As far as I can tell, this transformation has no impact on any database."
mb.setMessage( BaseMessages.getString( PKG, "Spoon.Dialog.TransformationNoImpactOnDatabase.Message" ) );
} else {
// "Please run the impact analyses first on this transformation."
mb.setMessage( BaseMessages.getString( PKG, "Spoon.Dialog.RunImpactAnalysesFirst.Message" ) );
}
mb.setText( BaseMessages.getString( PKG, "Spoon.Dialog.ImpactAnalyses.Title" ) ); // Impact
mb.open();
}
}
public void toClipboard( String clipText ) {
try {
GUIResource.getInstance().toClipboard( clipText );
} catch ( Throwable e ) {
new ErrorDialog(
shell, BaseMessages.getString( PKG, "Spoon.Dialog.ExceptionCopyToClipboard.Title" ), BaseMessages
.getString( PKG, "Spoon.Dialog.ExceptionCopyToClipboard.Message" ), e );
}
}
public String fromClipboard() {
try {
return GUIResource.getInstance().fromClipboard();
} catch ( Throwable e ) {
new ErrorDialog(
shell, BaseMessages.getString( PKG, "Spoon.Dialog.ExceptionPasteFromClipboard.Title" ), BaseMessages
.getString( PKG, "Spoon.Dialog.ExceptionPasteFromClipboard.Message" ), e );
return null;
}
}
/**
* Paste transformation from the clipboard...
*
*/
public void pasteTransformation() {
if ( RepositorySecurityUI.verifyOperations( shell, rep,
RepositoryOperation.MODIFY_TRANSFORMATION, RepositoryOperation.EXECUTE_TRANSFORMATION ) ) {
return;
}
if ( log.isDetailed() ) {
// "Paste transformation from the clipboard!"
log.logDetailed( BaseMessages.getString( PKG, "Spoon.Log.PasteTransformationFromClipboard" ) );
}
String xml = fromClipboard();
try {
Document doc = XMLHandler.loadXMLString( xml );
TransMeta transMeta = new TransMeta( XMLHandler.getSubNode( doc, TransMeta.XML_TAG ), rep );
setTransMetaVariables( transMeta );
addTransGraph( transMeta ); // create a new tab
sharedObjectsFileMap.put( transMeta.getSharedObjects().getFilename(), transMeta.getSharedObjects() );
refreshGraph();
refreshTree();
} catch ( KettleException e ) {
new ErrorDialog(
shell, BaseMessages.getString( PKG, "Spoon.Dialog.ErrorPastingTransformation.Title" ), BaseMessages
.getString( PKG, "Spoon.Dialog.ErrorPastingTransformation.Message" ), e );
}
}
/**
* Paste job from the clipboard...
*
*/
public void pasteJob() {
if ( RepositorySecurityUI.verifyOperations( shell, rep,
RepositoryOperation.MODIFY_JOB, RepositoryOperation.EXECUTE_JOB ) ) {
return;
}
String xml = fromClipboard();
try {
Document doc = XMLHandler.loadXMLString( xml );
JobMeta jobMeta = new JobMeta( XMLHandler.getSubNode( doc, JobMeta.XML_TAG ), rep, this );
addJobGraph( jobMeta ); // create a new tab
refreshGraph();
refreshTree();
} catch ( KettleException e ) {
new ErrorDialog( shell,
// Error pasting job
// "An error occurred pasting a job from the clipboard"
BaseMessages.getString( PKG, "Spoon.Dialog.ErrorPastingJob.Title" ), BaseMessages.getString(
PKG, "Spoon.Dialog.ErrorPastingJob.Message" ), e );
}
}
public void copyTransformation( TransMeta transMeta ) {
if ( transMeta == null ) {
return;
}
try {
if ( RepositorySecurityUI.verifyOperations( shell, rep, RepositoryOperation.MODIFY_TRANSFORMATION, RepositoryOperation.EXECUTE_TRANSFORMATION ) ) {
return;
}
toClipboard( XMLHandler.getXMLHeader() + transMeta.getXML() );
} catch ( Exception ex ) {
new ErrorDialog( getShell(), "Error", "Error encoding to XML", ex );
}
}
public void copyJob( JobMeta jobMeta ) {
if ( jobMeta == null ) {
return;
}
if ( RepositorySecurityUI.verifyOperations( shell, rep, RepositoryOperation.MODIFY_JOB, RepositoryOperation.EXECUTE_JOB ) ) {
return;
}
toClipboard( XMLHandler.getXMLHeader() + jobMeta.getXML() );
}
public void copyTransformationImage( TransMeta transMeta ) {
TransGraph transGraph = delegates.trans.findTransGraphOfTransformation( transMeta );
if ( transGraph == null ) {
return;
}
Clipboard clipboard = GUIResource.getInstance().getNewClipboard();
Point area = transMeta.getMaximum();
Image image = transGraph.getTransformationImage( Display.getCurrent(), area.x, area.y, 1.0f );
clipboard.setContents(
new Object[] { image.getImageData() }, new Transfer[] { ImageDataTransfer.getInstance() } );
}
/**
* @return Either a TransMeta or JobMeta object
*/
public HasDatabasesInterface getActiveHasDatabasesInterface() {
TransMeta transMeta = getActiveTransformation();
if ( transMeta != null ) {
return transMeta;
}
return getActiveJob();
}
/**
* Shows a wizard that creates a new database connection...
*
*/
public void createDatabaseWizard() {
HasDatabasesInterface hasDatabasesInterface = getActiveHasDatabasesInterface();
if ( hasDatabasesInterface == null ) {
return; // nowhere to put the new database
}
CreateDatabaseWizard cdw = new CreateDatabaseWizard();
DatabaseMeta newDBInfo = cdw.createAndRunDatabaseWizard( shell, props, hasDatabasesInterface.getDatabases() );
if ( newDBInfo != null ) { // finished
hasDatabasesInterface.addDatabase( newDBInfo );
refreshTree();
refreshGraph();
}
}
public List<DatabaseMeta> getActiveDatabases() {
Map<String, DatabaseMeta> map = new Hashtable<String, DatabaseMeta>();
HasDatabasesInterface hasDatabasesInterface = getActiveHasDatabasesInterface();
if ( hasDatabasesInterface != null ) {
for ( int i = 0; i < hasDatabasesInterface.nrDatabases(); i++ ) {
map.put( hasDatabasesInterface.getDatabase( i ).getName(), hasDatabasesInterface.getDatabase( i ) );
}
}
if ( rep != null ) {
try {
List<DatabaseMeta> repDBs = rep.readDatabases();
for ( DatabaseMeta databaseMeta : repDBs ) {
map.put( databaseMeta.getName(), databaseMeta );
}
} catch ( Exception e ) {
log.logError( "Unexpected error reading databases from the repository: " + e.toString() );
log.logError( Const.getStackTracker( e ) );
}
}
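// Note: repository connections are put into the map after the locally defined ones, so a repository
// connection overrides a local connection with the same name.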
List<DatabaseMeta> databases = new ArrayList<DatabaseMeta>();
databases.addAll( map.values() );
return databases;
}
/**
* Create a transformation that extracts tables & data from a database.
* <p>
* <p>
*
* 0) Select the database to rip
* <p>
* 1) Select the table in the database to copy
* <p>
* 2) Select the database to dump to
* <p>
* 3) Select the repository directory in which it will end up
* <p>
* 4) Select a name for the new transformation
* <p>
* 5) Create 1 transformation for the selected table
* <p>
*/
public void copyTableWizard() {
List<DatabaseMeta> databases = getActiveDatabases();
if ( databases.size() == 0 ) {
return; // Nothing to do here
}
final CopyTableWizardPage1 page1 = new CopyTableWizardPage1( "1", databases );
page1.createControl( shell );
final CopyTableWizardPage2 page2 = new CopyTableWizardPage2( "2" );
page2.createControl( shell );
Wizard wizard = new Wizard() {
public boolean performFinish() {
return delegates.db.copyTable( page1.getSourceDatabase(), page1.getTargetDatabase(), page2.getSelection() );
}
/**
* @see org.eclipse.jface.wizard.Wizard#canFinish()
*/
public boolean canFinish() {
return page2.canFinish();
}
};
wizard.addPage( page1 );
wizard.addPage( page2 );
WizardDialog wd = new WizardDialog( shell, wizard );
WizardDialog.setDefaultImage( GUIResource.getInstance().getImageWizard() );
wd.setMinimumPageSize( 700, 400 );
wd.updateSize();
wd.open();
}
public String toString() {
return APP_NAME;
}
public void selectRep( CommandLineOption[] options ) {
RepositoryMeta repositoryMeta;
StringBuffer optionRepname = getCommandLineOption( options, "rep" ).getArgument();
StringBuffer optionFilename = getCommandLineOption( options, "file" ).getArgument();
StringBuffer optionUsername = getCommandLineOption( options, "user" ).getArgument();
StringBuffer optionPassword = getCommandLineOption( options, "pass" ).getArgument();
if ( Const.isEmpty( optionRepname )
&& Const.isEmpty( optionFilename ) && props.showRepositoriesDialogAtStartup() ) {
if ( log.isBasic() ) {
// "Asking for repository"
log.logBasic( BaseMessages.getString( PKG, "Spoon.Log.AskingForRepository" ) );
}
loginDialog = new RepositoriesDialog( shell, null, new ILoginCallback() {
public void onSuccess( Repository repository ) {
setRepository( repository );
SpoonPluginManager.getInstance().notifyLifecycleListeners( SpoonLifeCycleEvent.REPOSITORY_CONNECTED );
}
public void onError( Throwable t ) {
onLoginError( t );
}
public void onCancel() {
// do nothing
}
} );
hideSplash();
loginDialog.show();
showSplash();
} else if ( !Const.isEmpty( optionRepname ) && Const.isEmpty( optionFilename ) ) {
RepositoriesMeta repsInfo = new RepositoriesMeta();
try {
repsInfo.readData();
repositoryMeta = repsInfo.findRepository( optionRepname.toString() );
if ( repositoryMeta != null && !Const.isEmpty( optionUsername ) && !Const.isEmpty( optionPassword ) ) {
// Define and connect to the repository...
Repository repo =
PluginRegistry
.getInstance().loadClass( RepositoryPluginType.class, repositoryMeta, Repository.class );
repo.init( repositoryMeta );
repo.connect( optionUsername != null ? optionUsername.toString() : null, optionPassword != null
? optionPassword.toString() : null );
setRepository( repo );
} else {
if ( !Const.isEmpty( optionUsername ) && !Const.isEmpty( optionPassword ) ) {
String msg = BaseMessages.getString( PKG, "Spoon.Log.NoRepositoriesDefined" );
log.logError( msg ); // "No repositories defined on this system."
MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR );
mb.setMessage( BaseMessages.getString( PKG, "Spoon.Error.Repository.NotFound", optionRepname
.toString() ) );
mb.setText( BaseMessages.getString( PKG, "Spoon.Error.Repository.NotFound.Title" ) );
mb.open();
}
loginDialog = new RepositoriesDialog( shell, null, new ILoginCallback() {
public void onSuccess( Repository repository ) {
setRepository( repository );
SpoonPluginManager.getInstance().notifyLifecycleListeners( SpoonLifeCycleEvent.REPOSITORY_CONNECTED );
}
public void onError( Throwable t ) {
onLoginError( t );
}
public void onCancel() {
// do nothing
}
} );
hideSplash();
loginDialog.show();
showSplash();
}
} catch ( Exception e ) {
hideSplash();
// Eat the exception but log it...
log.logError( "Error reading repositories xml file", e );
}
}
}
public void handleStartOptions( CommandLineOption[] options ) {
// note that at this point the rep object is populated by previous calls
StringBuffer optionRepname = getCommandLineOption( options, "rep" ).getArgument();
StringBuffer optionFilename = getCommandLineOption( options, "file" ).getArgument();
StringBuffer optionDirname = getCommandLineOption( options, "dir" ).getArgument();
StringBuffer optionTransname = getCommandLineOption( options, "trans" ).getArgument();
StringBuffer optionJobname = getCommandLineOption( options, "job" ).getArgument();
// StringBuffer optionUsername = getCommandLineOption(options,
// "user").getArgument();
// StringBuffer optionPassword = getCommandLineOption(options,
// "pass").getArgument();
try {
// Read kettle transformation specified on command-line?
if ( !Const.isEmpty( optionRepname ) || !Const.isEmpty( optionFilename ) ) {
if ( !Const.isEmpty( optionRepname ) ) {
if ( rep != null ) {
if ( Const.isEmpty( optionDirname ) ) {
optionDirname = new StringBuffer( RepositoryDirectory.DIRECTORY_SEPARATOR );
}
// Options /file, /job and /trans are mutually
// exclusive
int t =
( Const.isEmpty( optionFilename ) ? 0 : 1 )
+ ( Const.isEmpty( optionJobname ) ? 0 : 1 ) + ( Const.isEmpty( optionTransname ) ? 0 : 1 );
if ( t > 1 ) {
// "More then one mutually exclusive options /file, /job and /trans are specified."
log.logError( BaseMessages.getString( PKG, "Spoon.Log.MutuallyExcusive" ) );
} else if ( t == 1 ) {
if ( !Const.isEmpty( optionFilename ) ) {
openFile( optionFilename.toString(), false );
} else {
// OK, if we have a specified job or transformation, try to load it...
// If not, keep the repository logged in.
RepositoryDirectoryInterface rdi = rep.findDirectory( optionDirname.toString() );
if ( rdi == null ) {
log.logError( BaseMessages.getString( PKG, "Spoon.Log.UnableFindDirectory", optionDirname
.toString() ) ); // "Can't find directory ["+dirname+"] in the repository."
} else {
if ( !Const.isEmpty( optionTransname ) ) {
TransMeta transMeta =
rep.loadTransformation( optionTransname.toString(), rdi, null, true, null ); // reads last version
transMeta.clearChanged();
transMeta.setInternalKettleVariables();
addTransGraph( transMeta );
} else {
// Try to load a specified job
// if any
JobMeta jobMeta = rep.loadJob( optionJobname.toString(), rdi, null, null ); // reads last version
jobMeta.clearChanged();
jobMeta.setInternalKettleVariables();
addJobGraph( jobMeta );
}
}
}
}
} else {
// "No repositories defined on this system."
log.logError( BaseMessages.getString( PKG, "Spoon.Log.NoRepositoriesDefined" ) );
}
} else if ( !Const.isEmpty( optionFilename ) ) {
openFile( optionFilename.toString(), false );
}
}
} catch ( KettleException ke ) {
hideSplash();
log.logError( BaseMessages.getString( PKG, "Spoon.Log.ErrorOccurred" ) + Const.CR + ke.getMessage() );
log.logError( Const.getStackTracker( ke ) );
// do not just eat the exception
new ErrorDialog( shell, BaseMessages.getString( PKG, "Spoon.Log.ErrorOccurred" ), BaseMessages.getString(
PKG, "Spoon.Log.ErrorOccurred" )
+ Const.CR + ke.getMessage(), ke );
rep = null;
}
}
private void loadLastUsedFiles() {
if ( props.openLastFile() ) {
if ( log.isDetailed() ) {
// "Trying to open the last file used."
log.logDetailed( BaseMessages.getString( PKG, "Spoon.Log.TryingOpenLastUsedFile" ) );
}
List<LastUsedFile> lastUsedFiles = props.getOpenTabFiles();
for ( LastUsedFile lastUsedFile : lastUsedFiles ) {
try {
if ( !lastUsedFile.isSourceRepository()
|| lastUsedFile.isSourceRepository() && rep != null
&& rep.getName().equals( lastUsedFile.getRepositoryName() ) ) {
loadLastUsedFile( lastUsedFile, rep == null ? null : rep.getName(), false );
}
} catch ( Exception e ) {
hideSplash();
new ErrorDialog(
shell, BaseMessages.getString( PKG, "Spoon.LoadLastUsedFile.Exception.Title" ), BaseMessages
.getString( PKG, "Spoon.LoadLastUsedFile.Exception.Message", lastUsedFile.toString() ), e );
}
}
}
}
public void start( CommandLineOption[] options ) throws KettleException {
// Show the repository connection dialog
//
selectRep( options );
// Read the start option parameters
//
handleStartOptions( options );
// Load the last loaded files
//
loadLastUsedFiles();
// Enable menus based on whether user was able to login or not
//
enableMenus();
// enable perspective switching
SpoonPerspectiveManager.getInstance().setForcePerspective( false );
if ( props.showTips() ) {
TipsDialog tip = new TipsDialog( shell );
hideSplash();
tip.open();
}
if ( splash != null ) {
splash.dispose();
splash = null;
}
// If we are running a MILESTONE (development) release
if ( !ValueMeta.convertStringToBoolean( System.getProperty( "KETTLE_HIDE_DEVELOPMENT_VERSION_WARNING", "N" ) )
&& Const.RELEASE.equals( Const.ReleaseType.MILESTONE ) ) {
// display the same warning message
MessageBox dialog = new MessageBox( shell, SWT.ICON_WARNING );
dialog.setText( BaseMessages.getString( PKG, "Spoon.Warning.DevelopmentRelease.Title" ) );
dialog.setMessage( BaseMessages.getString(
PKG, "Spoon.Warning.DevelopmentRelease.Message", Const.CR, BuildVersion.getInstance().getVersion() ) );
dialog.open();
}
}
private void waitForDispose() {
boolean retryAfterError; // Enable the user to retry and continue after a fatal error
do {
retryAfterError = false; // reset to false after an error, otherwise it will loop forever after closing Spoon
try {
while ( getShell() != null && !getShell().isDisposed() ) {
if ( !readAndDispatch() ) {
sleep();
}
}
} catch ( Throwable e ) {
// "An unexpected error occurred in Spoon: probable cause: please close all windows before stopping Spoon! "
log.logError( BaseMessages.getString( PKG, "Spoon.Log.UnexpectedErrorOccurred" )
+ Const.CR + e.getMessage() );
log.logError( Const.getStackTracker( e ) );
try {
new ErrorDialog( shell, BaseMessages.getString( PKG, "Spoon.Log.UnexpectedErrorOccurred" ), BaseMessages
.getString( PKG, "Spoon.Log.UnexpectedErrorOccurred" )
+ Const.CR + e.getMessage(), e );
// Retry dialog
MessageBox mb = new MessageBox( shell, SWT.ICON_QUESTION | SWT.NO | SWT.YES );
mb.setText( BaseMessages.getString( PKG, "Spoon.Log.UnexpectedErrorRetry.Titel" ) );
mb.setMessage( BaseMessages.getString( PKG, "Spoon.Log.UnexpectedErrorRetry.Message" ) );
if ( mb.open() == SWT.YES ) {
retryAfterError = true;
}
} catch ( Throwable e1 ) {
// When the opening of the dialog crashes, we cannot do anything more here
}
}
} while ( retryAfterError );
if ( !display.isDisposed() ) {
display.update();
}
dispose();
if ( log.isBasic() ) {
log.logBasic( APP_NAME + " " + BaseMessages.getString( PKG, "Spoon.Log.AppHasEnded" ) ); // " has ended."
}
// Close the logfile
if ( fileLoggingEventListener != null ) {
try {
fileLoggingEventListener.close();
} catch ( Exception e ) {
LogChannel.GENERAL.logError( "Error closing logging file", e );
}
KettleLogStore.getAppender().removeLoggingEventListener( fileLoggingEventListener );
}
}
// public Splash splash;
// public CommandLineOption options[];
public static CommandLineOption getCommandLineOption( CommandLineOption[] options, String opt ) {
for ( CommandLineOption option : options ) {
if ( option.getOption().equals( opt ) ) {
return option;
}
}
return null;
}
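// Usage sketch (illustrative): look up a parsed option by its name, e.g.
//
// StringBuffer optionRepname = getCommandLineOption( options, "rep" ).getArgument();
//
// Callers assume the requested option is present in the array; for an unknown option name this returns null.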
public static CommandLineOption[] getCommandLineArgs( List<String> args ) {
CommandLineOption[] clOptions =
new CommandLineOption[] {
new CommandLineOption( "rep", "Repository name", new StringBuffer() ),
new CommandLineOption( "user", "Repository username", new StringBuffer() ),
new CommandLineOption( "pass", "Repository password", new StringBuffer() ),
new CommandLineOption( "job", "The name of the job to launch", new StringBuffer() ),
new CommandLineOption( "trans", "The name of the transformation to launch", new StringBuffer() ),
new CommandLineOption( "dir", "The directory (don't forget the leading /)", new StringBuffer() ),
new CommandLineOption( "file", "The filename (Transformation in XML) to launch", new StringBuffer() ),
new CommandLineOption(
"level", "The logging level (Basic, Detailed, Debug, Rowlevel, Error, Nothing)",
new StringBuffer() ),
new CommandLineOption( "logfile", "The logging file to write to", new StringBuffer() ),
new CommandLineOption(
"log", "The logging file to write to (deprecated)", new StringBuffer(), false, true ),
new CommandLineOption( "perspective", "The perspective to start in", new StringBuffer(), false, true ) };
// start with the default logger until we find out otherwise
//
log = new LogChannel( APP_NAME );
// Parse the options...
if ( !CommandLineOption.parseArguments( args, clOptions, log ) ) {
log.logError( "Command line option not understood" );
System.exit( 8 );
}
String kettleRepname = Const.getEnvironmentVariable( "KETTLE_REPOSITORY", null );
String kettleUsername = Const.getEnvironmentVariable( "KETTLE_USER", null );
String kettlePassword = Const.getEnvironmentVariable( "KETTLE_PASSWORD", null );
if ( !Const.isEmpty( kettleRepname ) ) {
clOptions[0].setArgument( new StringBuffer( kettleRepname ) );
}
if ( !Const.isEmpty( kettleUsername ) ) {
clOptions[1].setArgument( new StringBuffer( kettleUsername ) );
}
if ( !Const.isEmpty( kettlePassword ) ) {
clOptions[2].setArgument( new StringBuffer( kettlePassword ) );
}
return clOptions;
}
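// Note: the environment variable handling above relies on the positions of the first three options:
// index 0 is "rep", index 1 is "user" and index 2 is "pass". They are overridden by the KETTLE_REPOSITORY,
// KETTLE_USER and KETTLE_PASSWORD environment variables when those are set.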
private void loadLastUsedFile( LastUsedFile lastUsedFile, String repositoryName ) throws KettleException {
loadLastUsedFile( lastUsedFile, repositoryName, true );
}
private void loadLastUsedFile( LastUsedFile lastUsedFile, String repositoryName, boolean trackIt ) throws KettleException {
boolean useRepository = repositoryName != null;
// Perhaps we need to connect to the repository?
//
if ( lastUsedFile.isSourceRepository() ) {
if ( !Const.isEmpty( lastUsedFile.getRepositoryName() ) ) {
if ( useRepository && !lastUsedFile.getRepositoryName().equalsIgnoreCase( repositoryName ) ) {
// The file came from a different repository than the one we are connected to, so don't load it from there.
useRepository = false;
}
}
}
if ( useRepository && lastUsedFile.isSourceRepository() ) {
if ( rep != null ) { // load from this repository...
if ( rep.getName().equalsIgnoreCase( lastUsedFile.getRepositoryName() ) ) {
RepositoryDirectoryInterface rdi = rep.findDirectory( lastUsedFile.getDirectory() );
if ( rdi != null ) {
// Are we loading a transformation or a job?
if ( lastUsedFile.isTransformation() ) {
if ( log.isDetailed() ) {
// "Auto loading transformation ["+lastfiles[0]+"] from repository directory ["+lastdirs[0]+"]"
log.logDetailed( BaseMessages.getString( PKG, "Spoon.Log.AutoLoadingTransformation", lastUsedFile
.getFilename(), lastUsedFile.getDirectory() ) );
}
TransLoadProgressDialog tlpd =
new TransLoadProgressDialog( shell, rep, lastUsedFile.getFilename(), rdi, null );
TransMeta transMeta = tlpd.open();
if ( transMeta != null ) {
if ( trackIt ) {
props.addLastFile( LastUsedFile.FILE_TYPE_TRANSFORMATION, lastUsedFile.getFilename(), rdi
.getPath(), true, rep.getName() );
}
// transMeta.setFilename(lastUsedFile.getFilename());
transMeta.clearChanged();
addTransGraph( transMeta );
refreshTree();
}
} else if ( lastUsedFile.isJob() ) {
JobLoadProgressDialog progressDialog =
new JobLoadProgressDialog( shell, rep, lastUsedFile.getFilename(), rdi, null );
JobMeta jobMeta = progressDialog.open();
if ( jobMeta != null ) {
if ( trackIt ) {
props.addLastFile(
LastUsedFile.FILE_TYPE_JOB, lastUsedFile.getFilename(), rdi.getPath(), true, rep
.getName() );
}
jobMeta.clearChanged();
addJobGraph( jobMeta );
}
}
refreshTree();
}
}
}
}
if ( !lastUsedFile.isSourceRepository() && !Const.isEmpty( lastUsedFile.getFilename() ) ) {
if ( lastUsedFile.isTransformation() ) {
openFile( lastUsedFile.getFilename(), false );
}
if ( lastUsedFile.isJob() ) {
openFile( lastUsedFile.getFilename(), false );
}
refreshTree();
}
}
/**
* Create a new SelectValues step in between this step and the previous one. If the previous step's fields are not
* available, no mapping can be made; the same goes for the required fields.
*
* @param stepMeta
* The target step to map against.
*/
// retry of required fields acquisition
public void generateFieldMapping( TransMeta transMeta, StepMeta stepMeta ) {
try {
if ( stepMeta != null ) {
StepMetaInterface smi = stepMeta.getStepMetaInterface();
RowMetaInterface targetFields = smi.getRequiredFields( transMeta );
RowMetaInterface sourceFields = transMeta.getPrevStepFields( stepMeta );
// Build the mapping: let the user decide!!
String[] source = sourceFields.getFieldNames();
for ( int i = 0; i < source.length; i++ ) {
ValueMetaInterface v = sourceFields.getValueMeta( i );
source[i] += EnterMappingDialog.STRING_ORIGIN_SEPARATOR + v.getOrigin() + ")";
}
String[] target = targetFields.getFieldNames();
EnterMappingDialog dialog = new EnterMappingDialog( shell, source, target );
List<SourceToTargetMapping> mappings = dialog.open();
if ( mappings != null ) {
// OK, so we now know which field maps where.
// This allows us to generate the mapping using a
// SelectValues Step...
SelectValuesMeta svm = new SelectValuesMeta();
svm.allocate( mappings.size(), 0, 0 );
//CHECKSTYLE:Indentation:OFF
for ( int i = 0; i < mappings.size(); i++ ) {
SourceToTargetMapping mapping = mappings.get( i );
svm.getSelectName()[i] = sourceFields.getValueMeta( mapping.getSourcePosition() ).getName();
svm.getSelectRename()[i] = target[mapping.getTargetPosition()];
svm.getSelectLength()[i] = -1;
svm.getSelectPrecision()[i] = -1;
}
// Now that we have the meta-data, create a new step info object
String stepName = stepMeta.getName() + " Mapping";
stepName = transMeta.getAlternativeStepname( stepName ); // if it's already there, rename it.
StepMeta newStep = new StepMeta( "SelectValues", stepName, svm );
newStep.setLocation( stepMeta.getLocation().x + 20, stepMeta.getLocation().y + 20 );
newStep.setDraw( true );
transMeta.addStep( newStep );
addUndoNew( transMeta, new StepMeta[] { newStep }, new int[] { transMeta.indexOfStep( newStep ) } );
// Redraw stuff...
refreshTree();
refreshGraph();
}
} else {
throw new KettleException( "There is no target to do a field mapping against!" );
}
} catch ( KettleException e ) {
new ErrorDialog(
shell, "Error creating mapping",
"There was an error when Kettle tried to generate a field mapping against the target step", e );
}
}
public void editPartitioning( TransMeta transMeta, StepMeta stepMeta ) {
// Before we start, check if there are any partition schemas defined...
//
String[] schemaNames = transMeta.getPartitionSchemasNames();
if ( schemaNames.length == 0 ) {
MessageBox box = new MessageBox( shell, SWT.ICON_ERROR | SWT.OK );
box.setText( "Create a partition schema" );
box.setMessage( "You first need to create one or more partition schemas in "
+ "the transformation settings dialog before you can select one!" );
box.open();
return;
}
StepPartitioningMeta stepPartitioningMeta = stepMeta.getStepPartitioningMeta();
if ( stepPartitioningMeta == null ) {
stepPartitioningMeta = new StepPartitioningMeta();
}
StepMeta before = (StepMeta) stepMeta.clone();
PluginRegistry registry = PluginRegistry.getInstance();
List<PluginInterface> plugins = registry.getPlugins( PartitionerPluginType.class );
String[] options = new String[StepPartitioningMeta.methodDescriptions.length + plugins.size()];
String[] codes = new String[StepPartitioningMeta.methodDescriptions.length + plugins.size()];
System.arraycopy(
StepPartitioningMeta.methodDescriptions, 0, options, 0, StepPartitioningMeta.methodDescriptions.length );
System.arraycopy( StepPartitioningMeta.methodCodes, 0, codes, 0, StepPartitioningMeta.methodCodes.length );
Iterator<PluginInterface> it = plugins.iterator();
int idx = 0;
while ( it.hasNext() ) {
PluginInterface entry = it.next();
options[StepPartitioningMeta.methodDescriptions.length + idx] = entry.getDescription();
codes[StepPartitioningMeta.methodCodes.length + idx] = entry.getIds()[0];
idx++;
}
for ( int i = 0; i < codes.length; i++ ) {
if ( codes[i].equals( stepPartitioningMeta.getMethod() ) ) {
idx = i;
break;
}
}
EnterSelectionDialog dialog =
new EnterSelectionDialog( shell, options, "Partitioning method", "Select the partitioning method" );
String methodDescription = dialog.open( idx );
if ( methodDescription != null ) {
String method = StepPartitioningMeta.methodCodes[StepPartitioningMeta.PARTITIONING_METHOD_NONE];
for ( int i = 0; i < options.length; i++ ) {
if ( options[i].equals( methodDescription ) ) {
method = codes[i];
}
}
try {
int methodType = StepPartitioningMeta.getMethodType( method );
stepPartitioningMeta.setMethodType( methodType );
stepPartitioningMeta.setMethod( method );
switch ( methodType ) {
case StepPartitioningMeta.PARTITIONING_METHOD_NONE:
break;
case StepPartitioningMeta.PARTITIONING_METHOD_MIRROR:
case StepPartitioningMeta.PARTITIONING_METHOD_SPECIAL:
// Set the partitioning schema too.
PartitionSchema partitionSchema = stepPartitioningMeta.getPartitionSchema();
idx = -1;
if ( partitionSchema != null ) {
idx = Const.indexOfString( partitionSchema.getName(), schemaNames );
}
EnterSelectionDialog askSchema =
new EnterSelectionDialog(
shell, schemaNames, "Select a partition schema", "Select the partition schema to use:" );
String schemaName = askSchema.open( idx );
if ( schemaName != null ) {
idx = Const.indexOfString( schemaName, schemaNames );
stepPartitioningMeta.setPartitionSchema( transMeta.getPartitionSchemas().get( idx ) );
}
if ( methodType == StepPartitioningMeta.PARTITIONING_METHOD_SPECIAL ) {
// ask for a field name
StepDialogInterface partitionerDialog;
try {
partitionerDialog =
delegates.steps.getPartitionerDialog( stepMeta, stepPartitioningMeta, transMeta );
partitionerDialog.open();
} catch ( Exception e ) {
new ErrorDialog(
shell, "Error",
"There was an unexpected error while editing the partitioning method specifics:", e );
}
}
break;
default:
break;
}
StepMeta after = (StepMeta) stepMeta.clone();
addUndoChange( transMeta, new StepMeta[] { before }, new StepMeta[] { after }, new int[] { transMeta
.indexOfStep( stepMeta ) } );
refreshGraph();
} catch ( Exception e ) {
new ErrorDialog(
shell, BaseMessages.getString( PKG, "Spoon.ErrorEditingStepPartitioning.Title" ), BaseMessages
.getString( PKG, "Spoon.ErrorEditingStepPartitioning.Message" ), e );
}
}
}
/**
* Select a clustering schema for this step.
*
* @param stepMeta
* The step to set the clustering schema for.
*/
public void editClustering( TransMeta transMeta, StepMeta stepMeta ) {
List<StepMeta> stepMetas = new ArrayList<StepMeta>();
stepMetas.add( stepMeta );
editClustering( transMeta, stepMetas );
}
/**
* Select a clustering schema for this step.
*
* @param stepMetas
* The steps (at least one!) to set the clustering schema for.
*/
public void editClustering( TransMeta transMeta, List<StepMeta> stepMetas ) {
StepMeta stepMeta = stepMetas.get( 0 );
int idx = -1;
if ( stepMeta.getClusterSchema() != null ) {
idx = transMeta.getClusterSchemas().indexOf( stepMeta.getClusterSchema() );
}
String[] clusterSchemaNames = transMeta.getClusterSchemaNames();
EnterSelectionDialog dialog =
new EnterSelectionDialog(
shell, clusterSchemaNames, "Cluster schema", "Select the cluster schema to use (cancel=clear)" );
String schemaName = dialog.open( idx );
if ( schemaName == null ) {
for ( StepMeta step : stepMetas ) {
step.setClusterSchema( null );
}
} else {
ClusterSchema clusterSchema = transMeta.findClusterSchema( schemaName );
for ( StepMeta step : stepMetas ) {
step.setClusterSchema( clusterSchema );
}
}
refreshTree();
refreshGraph();
}
public void createKettleArchive( TransMeta transMeta ) {
if ( transMeta == null ) {
return;
}
JarfileGenerator.generateJarFile( transMeta );
}
/**
* This creates a new partitioning schema, edits it and adds it to the transformation metadata
*
*/
public void newPartitioningSchema( TransMeta transMeta ) {
PartitionSchema partitionSchema = new PartitionSchema();
PartitionSchemaDialog dialog =
new PartitionSchemaDialog( shell, partitionSchema, transMeta.getDatabases(), transMeta );
if ( dialog.open() ) {
transMeta.getPartitionSchemas().add( partitionSchema );
if ( rep != null ) {
try {
if ( !rep.getSecurityProvider().isReadOnly() ) {
rep.save( partitionSchema, Const.VERSION_COMMENT_INITIAL_VERSION, null );
} else {
throw new KettleException( BaseMessages.getString(
PKG, "Spoon.Dialog.Exception.ReadOnlyRepositoryUser" ) );
}
} catch ( KettleException e ) {
new ErrorDialog(
getShell(), BaseMessages.getString( PKG, "Spoon.Dialog.ErrorSavingPartition.Title" ), BaseMessages
.getString( PKG, "Spoon.Dialog.ErrorSavingPartition.Message", partitionSchema.getName() ), e );
}
}
refreshTree();
}
}
private void editPartitionSchema( TransMeta transMeta, PartitionSchema partitionSchema ) {
PartitionSchemaDialog dialog =
new PartitionSchemaDialog( shell, partitionSchema, transMeta.getDatabases(), transMeta );
if ( dialog.open() ) {
refreshTree();
}
}
private void delPartitionSchema( TransMeta transMeta, PartitionSchema partitionSchema ) {
try {
if ( rep != null && partitionSchema.getObjectId() != null ) {
// remove the partition schema from the repository too...
rep.deletePartitionSchema( partitionSchema.getObjectId() );
}
int idx = transMeta.getPartitionSchemas().indexOf( partitionSchema );
transMeta.getPartitionSchemas().remove( idx );
refreshTree();
} catch ( KettleException e ) {
new ErrorDialog(
shell, BaseMessages.getString( PKG, "Spoon.Dialog.ErrorDeletingClusterSchema.Title" ), BaseMessages
.getString( PKG, "Spoon.Dialog.ErrorDeletingClusterSchema.Message" ), e );
}
}
/**
* This creates a new clustering schema, edits it and adds it to the transformation metadata
*
*/
public void newClusteringSchema( TransMeta transMeta ) {
ClusterSchema clusterSchema = new ClusterSchema();
ClusterSchemaDialog dialog = new ClusterSchemaDialog( shell, clusterSchema, transMeta.getSlaveServers() );
if ( dialog.open() ) {
transMeta.getClusterSchemas().add( clusterSchema );
if ( rep != null ) {
try {
if ( !rep.getSecurityProvider().isReadOnly() ) {
rep.save( clusterSchema, Const.VERSION_COMMENT_INITIAL_VERSION, null );
} else {
throw new KettleException( BaseMessages.getString(
PKG, "Spoon.Dialog.Exception.ReadOnlyRepositoryUser" ) );
}
} catch ( KettleException e ) {
new ErrorDialog(
getShell(), BaseMessages.getString( PKG, "Spoon.Dialog.ErrorSavingCluster.Title" ), BaseMessages
.getString( PKG, "Spoon.Dialog.ErrorSavingCluster.Message", clusterSchema.getName() ), e );
}
}
refreshTree();
}
}
private void editClusterSchema( TransMeta transMeta, ClusterSchema clusterSchema ) {
ClusterSchemaDialog dialog = new ClusterSchemaDialog( shell, clusterSchema, transMeta.getSlaveServers() );
if ( dialog.open() ) {
refreshTree();
}
}
private void delClusterSchema( TransMeta transMeta, ClusterSchema clusterSchema ) {
try {
if ( rep != null && clusterSchema.getObjectId() != null ) {
// remove the cluster schema from the repository too...
rep.deleteClusterSchema( clusterSchema.getObjectId() );
}
int idx = transMeta.getClusterSchemas().indexOf( clusterSchema );
transMeta.getClusterSchemas().remove( idx );
refreshTree();
} catch ( KettleException e ) {
new ErrorDialog(
shell, BaseMessages.getString( PKG, "Spoon.Dialog.ErrorDeletingPartitionSchema.Title" ), BaseMessages
.getString( PKG, "Spoon.Dialog.ErrorDeletingPartitionSchema.Message" ), e );
}
}
/**
* This creates a new slave server, edits it and adds it to the transformation or job metadata
*
*/
public void newSlaveServer( HasSlaveServersInterface hasSlaveServersInterface ) {
delegates.slaves.newSlaveServer( hasSlaveServersInterface );
}
public void delSlaveServer( HasSlaveServersInterface hasSlaveServersInterface, SlaveServer slaveServer ) {
try {
delegates.slaves.delSlaveServer( hasSlaveServersInterface, slaveServer );
} catch ( KettleException e ) {
new ErrorDialog( shell, BaseMessages.getString( PKG, "Spoon.Dialog.ErrorDeletingSlave.Title" ), BaseMessages
.getString( PKG, "Spoon.Dialog.ErrorDeletingSlave.Message" ), e );
}
}
/**
* Sends transformation to slave server
*
* @param executionConfiguration
*/
public void sendTransformationXMLToSlaveServer( TransMeta transMeta,
TransExecutionConfiguration executionConfiguration ) {
try {
Trans.sendToSlaveServer( transMeta, executionConfiguration, rep, metaStore );
} catch ( Exception e ) {
new ErrorDialog( shell, "Error", "Error sending transformation to server", e );
}
}
public void runFile() {
executeFile( true, false, false, false, false, null, false );
}
public void replayTransformation() {
TransExecutionConfiguration tc = this.getTransExecutionConfiguration();
executeFile(
tc.isExecutingLocally(), tc.isExecutingRemotely(), tc.isExecutingClustered(), false, false, new Date(),
false );
}
public void previewFile() {
executeFile( true, false, false, true, false, null, true );
}
public void debugFile() {
executeFile( true, false, false, false, true, null, true );
}
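// The convenience methods above all delegate to executeFile(): runFile() is a plain local run,
// previewFile() and debugFile() run locally in safe mode with the preview or debug flag set, and
// replayTransformation() takes its flags from the transformation execution configuration plus a replay date.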
public void executeFile( boolean local, boolean remote, boolean cluster, boolean preview, boolean debug,
Date replayDate, boolean safe ) {
TransMeta transMeta = getActiveTransformation();
if ( transMeta != null ) {
executeTransformation(
transMeta, local, remote, cluster, preview, debug, replayDate, safe, transExecutionConfiguration
.getLogLevel() );
}
JobMeta jobMeta = getActiveJob();
if ( jobMeta != null ) {
executeJob( jobMeta, local, remote, replayDate, safe, null, 0 );
}
}
public void executeTransformation( final TransMeta transMeta, final boolean local, final boolean remote,
final boolean cluster, final boolean preview, final boolean debug, final Date replayDate,
final boolean safe, final LogLevel logLevel ) {
if ( RepositorySecurityUI.verifyOperations( shell, rep, RepositoryOperation.EXECUTE_TRANSFORMATION ) ) {
return;
}
Thread thread = new Thread() {
public void run() {
getDisplay().asyncExec( new Runnable() {
public void run() {
try {
delegates.trans.executeTransformation(
transMeta, local, remote, cluster, preview, debug, replayDate, safe, logLevel );
} catch ( Exception e ) {
new ErrorDialog(
shell, "Execute transformation", "There was an error during transformation execution", e );
}
}
} );
}
};
thread.start();
}
public void executeJob( JobMeta jobMeta, boolean local, boolean remote, Date replayDate, boolean safe,
String startCopyName, int startCopyNr ) {
if ( RepositorySecurityUI.verifyOperations( shell, rep, RepositoryOperation.EXECUTE_JOB ) ) {
return;
}
try {
delegates.jobs.executeJob( jobMeta, local, remote, replayDate, safe, startCopyName, startCopyNr );
} catch ( Exception e ) {
new ErrorDialog( shell, "Execute job", "There was an error during job execution", e );
}
}
public void addSpoonSlave( SlaveServer slaveServer ) {
delegates.slaves.addSpoonSlave( slaveServer );
}
public void addJobHistory( JobMeta jobMeta, boolean select ) {
JobGraph activeJobGraph = getActiveJobGraph();
if ( activeJobGraph != null ) {
activeJobGraph.jobHistoryDelegate.addJobHistory();
}
// delegates.jobs.addJobHistory(jobMeta, select);
}
public void paste() {
String clipContent = fromClipboard();
if ( clipContent != null ) {
// Load the XML
//
try {
Document document = XMLHandler.loadXMLString( clipContent );
boolean transformation = XMLHandler.getSubNode( document, TransMeta.XML_TAG ) != null;
boolean job = XMLHandler.getSubNode( document, JobMeta.XML_TAG ) != null;
boolean steps = XMLHandler.getSubNode( document, Spoon.XML_TAG_TRANSFORMATION_STEPS ) != null;
boolean jobEntries = XMLHandler.getSubNode( document, Spoon.XML_TAG_JOB_JOB_ENTRIES ) != null;
if ( transformation ) {
pasteTransformation();
} else if ( job ) {
pasteJob();
} else if ( steps ) {
TransGraph transGraph = getActiveTransGraph();
if ( transGraph != null && transGraph.getLastMove() != null ) {
pasteXML( transGraph.getManagedObject(), clipContent, transGraph.screen2real(
transGraph.getLastMove().x, transGraph.getLastMove().y ) );
}
} else if ( jobEntries ) {
JobGraph jobGraph = getActiveJobGraph();
if ( jobGraph != null && jobGraph.getLastMove() != null ) {
pasteXML( jobGraph.getManagedObject(), clipContent, jobGraph.getLastMove() );
}
}
} catch ( KettleXMLException e ) {
log.logError( "Unable to paste", e );
}
}
}
public JobEntryCopy newJobEntry( JobMeta jobMeta, String typeDesc, boolean openit ) {
return delegates.jobs.newJobEntry( jobMeta, typeDesc, openit );
}
public JobEntryDialogInterface getJobEntryDialog( JobEntryInterface jei, JobMeta jobMeta ) {
return delegates.jobs.getJobEntryDialog( jei, jobMeta );
}
public StepDialogInterface getStepEntryDialog( StepMetaInterface stepMeta, TransMeta transMeta, String stepName ) {
try {
return delegates.steps.getStepDialog( stepMeta, transMeta, stepName );
} catch ( Throwable t ) {
log.logError( "Could not create dialog for " + stepMeta.getDialogClassName(), t );
}
return null;
}
public void editJobEntry( JobMeta jobMeta, JobEntryCopy je ) {
delegates.jobs.editJobEntry( jobMeta, je );
}
public void deleteJobEntryCopies( JobMeta jobMeta, JobEntryCopy jobEntry ) {
delegates.jobs.deleteJobEntryCopies( jobMeta, jobEntry );
}
public void pasteXML( JobMeta jobMeta, String clipContent, Point loc ) {
if ( RepositorySecurityUI.verifyOperations( shell, rep,
RepositoryOperation.MODIFY_JOB, RepositoryOperation.EXECUTE_JOB ) ) {
return;
}
delegates.jobs.pasteXML( jobMeta, clipContent, loc );
}
public void newJobHop( JobMeta jobMeta, JobEntryCopy fr, JobEntryCopy to ) {
delegates.jobs.newJobHop( jobMeta, fr, to );
}
/**
* Create a job that extracts tables & data from a database.
* <p>
* <p>
*
* 0) Select the database to rip
* <p>
* 1) Select the tables in the database to rip
* <p>
* 2) Select the database to dump to
* <p>
* 3) Select the repository directory in which it will end up
* <p>
* 4) Select a name for the new job
* <p>
* 5) Create an empty job with the selected name.
* <p>
* 6) Create 1 transformation for every selected table
* <p>
* 7) add every created transformation to the job & evaluate
* <p>
*
*/
public void ripDBWizard() {
delegates.jobs.ripDBWizard();
}
public JobMeta ripDB( final List<DatabaseMeta> databases, final String jobName,
final RepositoryDirectory repdir, final String directory, final DatabaseMeta sourceDbInfo,
final DatabaseMeta targetDbInfo, final String[] tables ) {
return delegates.jobs.ripDB( databases, jobName, repdir, directory, sourceDbInfo, targetDbInfo, tables );
}
/**
* Set the core object state.
*
* @param state state to set
*/
public void setCoreObjectsState( int state ) {
coreObjectsState = state;
}
/**
* Get the core object state.
*
* @return state.
*/
public int getCoreObjectsState() {
return coreObjectsState;
}
public LogChannelInterface getLog() {
return log;
}
public Repository getRepository() {
return rep;
}
public void setRepository( Repository rep ) {
this.rep = rep;
try {
// Keep one metastore here...
//
if ( metaStore.getMetaStoreList().size() > 1 ) {
metaStore.getMetaStoreList().remove( 0 );
metaStore.setActiveMetaStoreName( metaStore.getMetaStoreList().get( 0 ).getName() );
}
if ( rep != null ) {
this.capabilities = rep.getRepositoryMeta().getRepositoryCapabilities();
// add a wrapper metastore to the delegation
//
IMetaStore repositoryMetaStore = rep.getMetaStore();
if ( repositoryMetaStore != null ) {
metaStore.addMetaStore( 0, repositoryMetaStore ); // first priority for explicitly connected repositories.
metaStore.setActiveMetaStoreName( repositoryMetaStore.getName() );
log.logBasic( "Connected to metastore : "
+ repositoryMetaStore.getName() + ", added to delegating metastore" );
} else {
log.logBasic( "No metastore found in the repository : "
+ rep.getName() + ", connected? " + rep.isConnected() );
}
}
} catch ( MetaStoreException e ) {
new ErrorDialog(
shell, BaseMessages.getString( PKG, "Spoon.Dialog.ErrorAddingRepositoryMetaStore.Title" ), BaseMessages
.getString( PKG, "Spoon.Dialog.ErrorReadingSharedObjects.Message" ), e );
}
// Registering the UI Support classes
UISupportRegistery.getInstance().registerUISupport(
RepositorySecurityProvider.class, BaseRepositoryExplorerUISupport.class );
UISupportRegistery
.getInstance().registerUISupport( RepositorySecurityManager.class, ManageUserUISupport.class );
if ( rep != null ) {
SpoonPluginManager.getInstance().notifyLifecycleListeners( SpoonLifeCycleEvent.REPOSITORY_CHANGED );
}
delegates.update( this );
enableMenus();
}
public void addMenuListener( String id, Object listener, String methodName ) {
menuListeners.add( new Object[] { id, listener, methodName } );
}
public void addTransGraph( TransMeta transMeta ) {
delegates.trans.addTransGraph( transMeta );
}
public void addJobGraph( JobMeta jobMeta ) {
delegates.jobs.addJobGraph( jobMeta );
}
public boolean addSpoonBrowser( String name, String urlString, LocationListener locationListener ) {
return delegates.tabs.addSpoonBrowser( name, urlString, locationListener );
}
public boolean addSpoonBrowser( String name, String urlString ) {
return delegates.tabs.addSpoonBrowser( name, urlString, null );
}
public TransExecutionConfiguration getTransExecutionConfiguration() {
return transExecutionConfiguration;
}
public void editStepErrorHandling( TransMeta transMeta, StepMeta stepMeta ) {
delegates.steps.editStepErrorHandling( transMeta, stepMeta );
}
public String editStep( TransMeta transMeta, StepMeta stepMeta ) {
return delegates.steps.editStep( transMeta, stepMeta );
}
public void dupeStep( TransMeta transMeta, StepMeta stepMeta ) {
delegates.steps.dupeStep( transMeta, stepMeta );
}
public void delStep( TransMeta transMeta, StepMeta stepMeta ) {
delegates.steps.delStep( transMeta, stepMeta );
}
public String makeTabName( EngineMetaInterface transMeta, boolean showingLocation ) {
return delegates.tabs.makeTabName( transMeta, showingLocation );
}
public void newConnection() {
delegates.db.newConnection();
}
public void getSQL() {
delegates.db.getSQL();
}
public boolean overwritePrompt( String message, String rememberText, String rememberPropertyName ) {
return new PopupOverwritePrompter( shell, props ).overwritePrompt( message, rememberText, rememberPropertyName );
}
public Object[] messageDialogWithToggle( String dialogTitle, Object image, String message, int dialogImageType,
String[] buttonLabels, int defaultIndex, String toggleMessage, boolean toggleState ) {
return GUIResource.getInstance().messageDialogWithToggle(
shell, dialogTitle, (Image) image, message, dialogImageType, buttonLabels, defaultIndex, toggleMessage,
toggleState );
}
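  /**
   * Show a blocking message box on the UI thread. The icon is derived from the Const.INFO,
   * Const.ERROR or Const.WARNING type and a Cancel button is added when allowCancel is true.
   *
   * @return true if the user pressed OK, false otherwise
   */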
public boolean messageBox( final String message, final String text, final boolean allowCancel, final int type ) {
final StringBuffer answer = new StringBuffer( "N" );
display.syncExec( new Runnable() {
@Override
public void run() {
int flags = SWT.OK;
if ( allowCancel ) {
flags |= SWT.CANCEL;
}
switch ( type ) {
case Const.INFO:
flags |= SWT.ICON_INFORMATION;
break;
case Const.ERROR:
flags |= SWT.ICON_ERROR;
break;
case Const.WARNING:
flags |= SWT.ICON_WARNING;
break;
default:
break;
}
MessageBox mb = new MessageBox( shell, flags );
// Set the Body Message
mb.setMessage( message );
// Set the title Message
mb.setText( text );
if ( mb.open() == SWT.OK ) {
answer.setCharAt( 0, 'Y' );
}
}
} );
return "Y".equalsIgnoreCase( answer.toString() );
}
/**
* @return the previewExecutionConfiguration
*/
public TransExecutionConfiguration getTransPreviewExecutionConfiguration() {
return transPreviewExecutionConfiguration;
}
/**
* @param previewExecutionConfiguration
* the previewExecutionConfiguration to set
*/
public void setTransPreviewExecutionConfiguration( TransExecutionConfiguration previewExecutionConfiguration ) {
this.transPreviewExecutionConfiguration = previewExecutionConfiguration;
}
/**
* @return the debugExecutionConfiguration
*/
public TransExecutionConfiguration getTransDebugExecutionConfiguration() {
return transDebugExecutionConfiguration;
}
/**
* @param debugExecutionConfiguration
* the debugExecutionConfiguration to set
*/
public void setTransDebugExecutionConfiguration( TransExecutionConfiguration debugExecutionConfiguration ) {
this.transDebugExecutionConfiguration = debugExecutionConfiguration;
}
/**
* @param executionConfiguration
* the executionConfiguration to set
*/
public void setTransExecutionConfiguration( TransExecutionConfiguration executionConfiguration ) {
this.transExecutionConfiguration = executionConfiguration;
}
/**
* @return the jobExecutionConfiguration
*/
public JobExecutionConfiguration getJobExecutionConfiguration() {
return jobExecutionConfiguration;
}
/**
* @param jobExecutionConfiguration
* the jobExecutionConfiguration to set
*/
public void setJobExecutionConfiguration( JobExecutionConfiguration jobExecutionConfiguration ) {
this.jobExecutionConfiguration = jobExecutionConfiguration;
}
/*
* public XulToolbar getToolbar() { return toolbar; }
*/
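  /**
   * Observer callback: the string form of the argument is interpreted as the name of a
   * no-argument method on this class, which is then invoked via reflection. For example,
   * an argument of "enableMenus" invokes enableMenus(). Failures are swallowed so that
   * other notifiers still get a chance to react.
   */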
public void update( ChangedFlagInterface o, Object arg ) {
try {
Method m = getClass().getMethod( arg.toString() );
if ( m != null ) {
m.invoke( this );
}
} catch ( Exception e ) {
// ignore... let the other notifiers try to do something
System.out.println( "Unable to update: " + e.getLocalizedMessage() );
}
}
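  /**
   * LifeEventHandler callback: show the event in a Spoon browser tab when the
   * DISPLAY_BROWSER hint is set, otherwise in a simple message box.
   */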
public void consume( final LifeEventInfo info ) {
// if (PropsUI.getInstance().isListenerDisabled(info.getName()))
// return;
if ( info.hasHint( LifeEventInfo.Hint.DISPLAY_BROWSER ) ) {
display.asyncExec( new Runnable() {
public void run() {
delegates.tabs.addSpoonBrowser( info.getName(), info.getMessage(), false, null );
}
} );
} else {
MessageBox box =
new MessageBox( shell, ( info.getState() != LifeEventInfo.State.SUCCESS
? SWT.ICON_ERROR : SWT.ICON_INFORMATION )
| SWT.OK );
box.setText( info.getName() );
box.setMessage( info.getMessage() );
box.open();
}
}
public void setLog() {
LogSettingsDialog lsd = new LogSettingsDialog( shell, SWT.NONE, props );
lsd.open();
log.setLogLevel( DefaultLogLevel.getLogLevel() );
}
/**
* @return the display
*/
public Display getDisplay() {
return display;
}
public void zoomIn() {
TransGraph transGraph = getActiveTransGraph();
if ( transGraph != null ) {
transGraph.zoomIn();
}
JobGraph jobGraph = getActiveJobGraph();
if ( jobGraph != null ) {
jobGraph.zoomIn();
}
}
public void zoomOut() {
TransGraph transGraph = getActiveTransGraph();
if ( transGraph != null ) {
transGraph.zoomOut();
}
JobGraph jobGraph = getActiveJobGraph();
if ( jobGraph != null ) {
jobGraph.zoomOut();
}
}
public void zoom100Percent() {
TransGraph transGraph = getActiveTransGraph();
if ( transGraph != null ) {
transGraph.zoom100Percent();
}
JobGraph jobGraph = getActiveJobGraph();
if ( jobGraph != null ) {
jobGraph.zoom100Percent();
}
}
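  /**
   * Copy the named parameters into the given variable space as variables, falling back from
   * the parameter value to the parameter default, then to any existing variable value and
   * finally to an empty string.
   */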
public void setParametersAsVariablesInUI( NamedParams namedParameters, VariableSpace space ) {
for ( String param : namedParameters.listParameters() ) {
try {
space.setVariable( param, Const.NVL( namedParameters.getParameterValue( param ), Const.NVL(
namedParameters.getParameterDefault( param ), Const.NVL( space.getVariable( param ), "" ) ) ) );
} catch ( Exception e ) {
// ignore this
}
}
}
public void browseVersionHistory() {
if ( rep == null ) {
return;
}
TransGraph transGraph = getActiveTransGraph();
if ( transGraph != null ) {
transGraph.browseVersionHistory();
}
JobGraph jobGraph = getActiveJobGraph();
if ( jobGraph != null ) {
jobGraph.browseVersionHistory();
}
}
public Trans findActiveTrans( Job job, JobEntryCopy jobEntryCopy ) {
JobEntryTrans jobEntryTrans = job.getActiveJobEntryTransformations().get( jobEntryCopy );
if ( jobEntryTrans == null ) {
return null;
}
return jobEntryTrans.getTrans();
}
public Job findActiveJob( Job job, JobEntryCopy jobEntryCopy ) {
JobEntryJob jobEntryJob = job.getActiveJobEntryJobs().get( jobEntryCopy );
if ( jobEntryJob == null ) {
return null;
}
return jobEntryJob.getJob();
}
public Object getSelectionObject() {
return selectionObject;
}
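  /**
   * Determine the default repository directory in which to save the given element. The
   * location is cached per repository and reset when the connected repository changes;
   * without a repository a plain RepositoryDirectory is returned.
   */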
public RepositoryDirectoryInterface getDefaultSaveLocation( RepositoryElementInterface repositoryElement ) {
try {
if ( getRepository() != defaultSaveLocationRepository ) {
// The repository has changed, reset the defaultSaveLocation
defaultSaveLocation = null;
defaultSaveLocationRepository = null;
}
if ( defaultSaveLocation == null ) {
if ( getRepository() != null ) {
defaultSaveLocation = getRepository().getDefaultSaveDirectory( repositoryElement );
defaultSaveLocationRepository = getRepository();
} else {
defaultSaveLocation = new RepositoryDirectory();
}
}
} catch ( Exception e ) {
throw new RuntimeException( e );
}
return defaultSaveLocation;
}
/* ========================= XulEventSource Methods ========================== */
protected PropertyChangeSupport changeSupport = new PropertyChangeSupport( this );
public void addPropertyChangeListener( PropertyChangeListener listener ) {
changeSupport.addPropertyChangeListener( listener );
}
public void addPropertyChangeListener( String propertyName, PropertyChangeListener listener ) {
changeSupport.addPropertyChangeListener( propertyName, listener );
}
public void removePropertyChangeListener( PropertyChangeListener listener ) {
changeSupport.removePropertyChangeListener( listener );
}
protected void firePropertyChange( String attr, Object previousVal, Object newVal ) {
if ( previousVal == null && newVal == null ) {
return;
}
changeSupport.firePropertyChange( attr, previousVal, newVal );
}
/*
* ========================= End XulEventSource Methods ==========================
*/
/*
* ========================= Start XulEventHandler Methods ==========================
*/
public Object getData() {
return null;
}
public String getName() {
return "spoon";
}
public XulDomContainer getXulDomContainer() {
return getMainSpoonContainer();
}
public void setData( Object arg0 ) {
}
public void setName( String arg0 ) {
}
public void setXulDomContainer( XulDomContainer arg0 ) {
}
public RepositorySecurityManager getSecurityManager() {
return rep.getSecurityManager();
}
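  /**
   * Show, underneath the database connection selected in the tree, the jobs and
   * transformations in the repository that use that connection.
   */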
public void displayDbDependancies() {
TreeItem[] selection = selectionTree.getSelection();
if ( selection == null || selection.length != 1 ) {
return;
}
    // Clear all existing dependency entries for the selected connection
TreeItem parent = selection[0];
if ( parent != null ) {
int nrChilds = parent.getItemCount();
if ( nrChilds > 0 ) {
for ( int i = 0; i < nrChilds; i++ ) {
parent.getItem( i ).dispose();
}
}
}
if ( rep == null ) {
return;
}
try {
final DatabaseMeta databaseMeta = (DatabaseMeta) selectionObject;
String[] jobList = rep.getJobsUsingDatabase( databaseMeta.getObjectId() );
String[] transList = rep.getTransformationsUsingDatabase( databaseMeta.getObjectId() );
if ( jobList.length == 0 && transList.length == 0 ) {
MessageBox box = new MessageBox( shell, SWT.ICON_INFORMATION | SWT.OK );
box.setText( "Connection dependencies" );
box.setMessage( "This connection is not used by a job nor a transformation." );
box.open();
} else {
for ( String aJobList : jobList ) {
if ( aJobList != null ) {
TreeItem tidep = new TreeItem( parent, SWT.NONE );
tidep.setImage( GUIResource.getInstance().getImageJobGraph() );
tidep.setText( aJobList );
}
}
for ( String aTransList : transList ) {
if ( aTransList != null ) {
TreeItem tidep = new TreeItem( parent, SWT.NONE );
tidep.setImage( GUIResource.getInstance().getImageTransGraph() );
tidep.setText( aTransList );
}
}
parent.setExpanded( true );
}
} catch ( Exception e ) {
new ErrorDialog( shell, "Error", "Error getting dependencies! :", e );
}
}
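  /**
   * Ask all registered menu controllers to update the main menu. When called from a
   * non-UI thread, the call is re-dispatched synchronously onto the display thread.
   */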
public void fireMenuControlers() {
if ( !Display.getDefault().getThread().equals( Thread.currentThread() ) ) {
display.syncExec( new Runnable() {
public void run() {
fireMenuControlers();
}
} );
return;
}
org.pentaho.ui.xul.dom.Document doc;
if ( mainSpoonContainer != null ) {
doc = mainSpoonContainer.getDocumentRoot();
for ( ISpoonMenuController menuController : menuControllers ) {
menuController.updateMenu( doc );
}
}
}
public void hideSplash() {
if ( splash != null ) {
splash.hide();
}
}
private void showSplash() {
if ( splash != null ) {
splash.show();
}
}
/**
* Hides or shows the main toolbar
*
* @param visible
*/
public void setMainToolbarVisible( boolean visible ) {
mainToolbar.setVisible( visible );
}
public void setMenuBarVisible( boolean visible ) {
mainSpoonContainer.getDocumentRoot().getElementById( "edit" ).setVisible( visible );
mainSpoonContainer.getDocumentRoot().getElementById( "file" ).setVisible( visible );
mainSpoonContainer.getDocumentRoot().getElementById( "view" ).setVisible( visible );
mainSpoonContainer.getDocumentRoot().getElementById( "action" ).setVisible( visible );
mainSpoonContainer.getDocumentRoot().getElementById( "tools" ).setVisible( visible );
mainSpoonContainer.getDocumentRoot().getElementById( "help" ).setVisible( visible );
MenuManager menuManager = getMenuBarManager();
menuManager.getMenu().setVisible( visible );
menuManager.updateAll( true );
}
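  /**
   * JFace entry point: build the Spoon window contents, notify the lifecycle listeners
   * and process the command line options captured in main().
   */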
@Override
protected Control createContents( Composite parent ) {
shell = getShell();
init( null );
openSpoon();
// listeners
//
try {
lifecycleSupport.onStart( this );
} catch ( LifecycleException e ) {
// if severe, we have to quit
MessageBox box = new MessageBox( shell, ( e.isSevere() ? SWT.ICON_ERROR : SWT.ICON_WARNING ) | SWT.OK );
box.setMessage( e.getMessage() );
box.open();
}
try {
start( commandLineOptions );
} catch ( KettleException e ) {
MessageBox box = new MessageBox( shell, SWT.ICON_ERROR | SWT.OK );
box.setMessage( e.getMessage() );
box.open();
}
getMenuBarManager().updateAll( true );
return parent;
}
public void start() {
// We store the UI thread for the getDisplay() method
setBlockOnOpen( false );
try {
open();
waitForDispose();
// runEventLoop2(getShell());
} catch ( Throwable e ) {
LogChannel.GENERAL.logError( "Error starting Spoon shell", e );
}
System.out.println( "stopping" );
}
public String getStartupPerspective() {
return startupPerspective;
}
public DelegatingMetaStore getMetaStore() {
return metaStore;
}
public void setMetaStore( DelegatingMetaStore metaStore ) {
this.metaStore = metaStore;
}
private void onLoginError( Throwable t ) {
if ( t instanceof KettleAuthException ) {
ShowMessageDialog dialog =
new ShowMessageDialog( loginDialog.getShell(), SWT.OK | SWT.ICON_ERROR, BaseMessages.getString(
PKG, "Spoon.Dialog.LoginFailed.Title" ), t.getLocalizedMessage() );
dialog.open();
} else {
new ErrorDialog(
loginDialog.getShell(), BaseMessages.getString( PKG, "Spoon.Dialog.LoginFailed.Title" ), BaseMessages
.getString( PKG, "Spoon.Dialog.LoginFailed.Message", t ), t );
}
}
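  /**
   * Intercept the shell close request: only shut down (and fire the SHUTDOWN lifecycle
   * event) when quitFile() confirms that all open files may be closed.
   */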
@Override
protected void handleShellCloseEvent() {
try {
if ( quitFile( true ) ) {
SpoonPluginManager.getInstance().notifyLifecycleListeners( SpoonLifeCycleEvent.SHUTDOWN );
super.handleShellCloseEvent();
}
} catch ( Exception e ) {
LogChannel.GENERAL.logError( "Error closing Spoon", e );
}
}
public void showAuthenticationOptions() {
AuthProviderDialog authProviderDialog = new AuthProviderDialog( shell );
authProviderDialog.show();
}
}
// ==== ui/src/org/pentaho/di/ui/spoon/Spoon.java ====
//CHECKSTYLE:FileLength:OFF
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.ui.spoon;
import java.beans.PropertyChangeListener;
import java.beans.PropertyChangeSupport;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.lang.reflect.Method;
import java.net.MalformedURLException;
import java.net.URL;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.swing.UIManager;
import javax.swing.plaf.metal.MetalLookAndFeel;
import org.apache.commons.vfs.FileObject;
import org.eclipse.jface.action.Action;
import org.eclipse.jface.action.MenuManager;
import org.eclipse.jface.dialogs.MessageDialog;
import org.eclipse.jface.dialogs.MessageDialogWithToggle;
import org.eclipse.jface.window.ApplicationWindow;
import org.eclipse.jface.window.DefaultToolTip;
import org.eclipse.jface.window.ToolTip;
import org.eclipse.jface.wizard.Wizard;
import org.eclipse.jface.wizard.WizardDialog;
import org.eclipse.swt.SWT;
import org.eclipse.swt.SWTException;
import org.eclipse.swt.browser.LocationEvent;
import org.eclipse.swt.browser.LocationListener;
import org.eclipse.swt.custom.CTabFolder;
import org.eclipse.swt.custom.CTabItem;
import org.eclipse.swt.custom.SashForm;
import org.eclipse.swt.dnd.Clipboard;
import org.eclipse.swt.dnd.DND;
import org.eclipse.swt.dnd.DropTarget;
import org.eclipse.swt.dnd.DropTargetEvent;
import org.eclipse.swt.dnd.DropTargetListener;
import org.eclipse.swt.dnd.FileTransfer;
import org.eclipse.swt.dnd.Transfer;
import org.eclipse.swt.events.KeyAdapter;
import org.eclipse.swt.events.KeyEvent;
import org.eclipse.swt.events.MenuDetectEvent;
import org.eclipse.swt.events.MenuDetectListener;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.MouseAdapter;
import org.eclipse.swt.events.MouseEvent;
import org.eclipse.swt.events.MouseMoveListener;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.TreeAdapter;
import org.eclipse.swt.events.TreeEvent;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.graphics.Cursor;
import org.eclipse.swt.graphics.DeviceData;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.graphics.Rectangle;
import org.eclipse.swt.layout.FillLayout;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.printing.Printer;
import org.eclipse.swt.program.Program;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.FileDialog;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.Menu;
import org.eclipse.swt.widgets.MenuItem;
import org.eclipse.swt.widgets.MessageBox;
import org.eclipse.swt.widgets.Sash;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Text;
import org.eclipse.swt.widgets.ToolBar;
import org.eclipse.swt.widgets.ToolItem;
import org.eclipse.swt.widgets.Tree;
import org.eclipse.swt.widgets.TreeItem;
import org.pentaho.di.cluster.ClusterSchema;
import org.pentaho.di.cluster.SlaveServer;
import org.pentaho.di.core.AddUndoPositionInterface;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.DBCache;
import org.pentaho.di.core.EngineMetaInterface;
import org.pentaho.di.core.JndiUtil;
import org.pentaho.di.core.KettleClientEnvironment;
import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.core.LastUsedFile;
import org.pentaho.di.core.NotePadMeta;
import org.pentaho.di.core.ObjectUsageCount;
import org.pentaho.di.core.Props;
import org.pentaho.di.core.RowMetaAndData;
import org.pentaho.di.core.SourceToTargetMapping;
import org.pentaho.di.core.changed.ChangedFlagInterface;
import org.pentaho.di.core.changed.PDIObserver;
import org.pentaho.di.core.clipboard.ImageDataTransfer;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.encryption.Encr;
import org.pentaho.di.core.exception.KettleAuthException;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleMissingPluginsException;
import org.pentaho.di.core.exception.KettleRowException;
import org.pentaho.di.core.exception.KettleValueException;
import org.pentaho.di.core.exception.KettleXMLException;
import org.pentaho.di.core.gui.GUIFactory;
import org.pentaho.di.core.gui.OverwritePrompter;
import org.pentaho.di.core.gui.Point;
import org.pentaho.di.core.gui.SpoonFactory;
import org.pentaho.di.core.gui.SpoonInterface;
import org.pentaho.di.core.gui.UndoInterface;
import org.pentaho.di.core.lifecycle.LifeEventHandler;
import org.pentaho.di.core.lifecycle.LifeEventInfo;
import org.pentaho.di.core.lifecycle.LifecycleException;
import org.pentaho.di.core.lifecycle.LifecycleSupport;
import org.pentaho.di.core.logging.DefaultLogLevel;
import org.pentaho.di.core.logging.FileLoggingEventListener;
import org.pentaho.di.core.logging.KettleLogStore;
import org.pentaho.di.core.logging.LogChannel;
import org.pentaho.di.core.logging.LogChannelInterface;
import org.pentaho.di.core.logging.LogLevel;
import org.pentaho.di.core.logging.LoggingObjectInterface;
import org.pentaho.di.core.logging.LoggingObjectType;
import org.pentaho.di.core.logging.SimpleLoggingObject;
import org.pentaho.di.core.parameters.NamedParams;
import org.pentaho.di.core.plugins.JobEntryPluginType;
import org.pentaho.di.core.plugins.LifecyclePluginType;
import org.pentaho.di.core.plugins.PartitionerPluginType;
import org.pentaho.di.core.plugins.PluginFolder;
import org.pentaho.di.core.plugins.PluginInterface;
import org.pentaho.di.core.plugins.PluginRegistry;
import org.pentaho.di.core.plugins.PluginTypeInterface;
import org.pentaho.di.core.plugins.PluginTypeListener;
import org.pentaho.di.core.plugins.RepositoryPluginType;
import org.pentaho.di.core.plugins.StepPluginType;
import org.pentaho.di.core.reflection.StringSearchResult;
import org.pentaho.di.core.row.RowBuffer;
import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMeta;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.undo.TransAction;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.core.variables.Variables;
import org.pentaho.di.core.vfs.KettleVFS;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.imp.ImportRules;
import org.pentaho.di.job.Job;
import org.pentaho.di.job.JobExecutionConfiguration;
import org.pentaho.di.job.JobMeta;
import org.pentaho.di.job.entries.job.JobEntryJob;
import org.pentaho.di.job.entries.trans.JobEntryTrans;
import org.pentaho.di.job.entry.JobEntryCopy;
import org.pentaho.di.job.entry.JobEntryDialogInterface;
import org.pentaho.di.job.entry.JobEntryInterface;
import org.pentaho.di.laf.BasePropertyHandler;
import org.pentaho.di.metastore.MetaStoreConst;
import org.pentaho.di.pan.CommandLineOption;
import org.pentaho.di.partition.PartitionSchema;
import org.pentaho.di.pkg.JarfileGenerator;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.RepositoriesMeta;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.repository.RepositoryCapabilities;
import org.pentaho.di.repository.RepositoryDirectory;
import org.pentaho.di.repository.RepositoryDirectoryInterface;
import org.pentaho.di.repository.RepositoryElementInterface;
import org.pentaho.di.repository.RepositoryMeta;
import org.pentaho.di.repository.RepositoryObjectType;
import org.pentaho.di.repository.RepositoryOperation;
import org.pentaho.di.repository.RepositorySecurityManager;
import org.pentaho.di.repository.RepositorySecurityProvider;
import org.pentaho.di.resource.ResourceExportInterface;
import org.pentaho.di.resource.ResourceUtil;
import org.pentaho.di.resource.TopLevelResource;
import org.pentaho.di.shared.SharedObjectInterface;
import org.pentaho.di.shared.SharedObjects;
import org.pentaho.di.trans.DatabaseImpact;
import org.pentaho.di.trans.HasDatabasesInterface;
import org.pentaho.di.trans.HasSlaveServersInterface;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransExecutionConfiguration;
import org.pentaho.di.trans.TransHopMeta;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.RowDistributionInterface;
import org.pentaho.di.trans.step.RowDistributionPluginType;
import org.pentaho.di.trans.step.StepDialogInterface;
import org.pentaho.di.trans.step.StepErrorMeta;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInterface;
import org.pentaho.di.trans.step.StepPartitioningMeta;
import org.pentaho.di.trans.steps.selectvalues.SelectValuesMeta;
import org.pentaho.di.ui.cluster.dialog.ClusterSchemaDialog;
import org.pentaho.di.ui.cluster.dialog.SlaveServerDialog;
import org.pentaho.di.ui.core.ConstUI;
import org.pentaho.di.ui.core.PrintSpool;
import org.pentaho.di.ui.core.PropsUI;
import org.pentaho.di.ui.core.auth.AuthProviderDialog;
import org.pentaho.di.ui.core.database.wizard.CreateDatabaseWizard;
import org.pentaho.di.ui.core.dialog.CheckResultDialog;
import org.pentaho.di.ui.core.dialog.EnterMappingDialog;
import org.pentaho.di.ui.core.dialog.EnterOptionsDialog;
import org.pentaho.di.ui.core.dialog.EnterSearchDialog;
import org.pentaho.di.ui.core.dialog.EnterSelectionDialog;
import org.pentaho.di.ui.core.dialog.EnterStringsDialog;
import org.pentaho.di.ui.core.dialog.EnterTextDialog;
import org.pentaho.di.ui.core.dialog.ErrorDialog;
import org.pentaho.di.ui.core.dialog.KettlePropertiesFileDialog;
import org.pentaho.di.ui.core.dialog.PopupOverwritePrompter;
import org.pentaho.di.ui.core.dialog.PreviewRowsDialog;
import org.pentaho.di.ui.core.dialog.ShowBrowserDialog;
import org.pentaho.di.ui.core.dialog.ShowMessageDialog;
import org.pentaho.di.ui.core.dialog.Splash;
import org.pentaho.di.ui.core.dialog.SubjectDataBrowserDialog;
import org.pentaho.di.ui.core.gui.GUIResource;
import org.pentaho.di.ui.core.gui.WindowProperty;
import org.pentaho.di.ui.core.widget.OsHelper;
import org.pentaho.di.ui.core.widget.TreeMemory;
import org.pentaho.di.ui.imp.ImportRulesDialog;
import org.pentaho.di.ui.job.dialog.JobDialogPluginType;
import org.pentaho.di.ui.job.dialog.JobLoadProgressDialog;
import org.pentaho.di.ui.partition.dialog.PartitionSchemaDialog;
import org.pentaho.di.ui.repository.ILoginCallback;
import org.pentaho.di.ui.repository.RepositoriesDialog;
import org.pentaho.di.ui.repository.RepositorySecurityUI;
import org.pentaho.di.ui.repository.dialog.RepositoryDialogInterface;
import org.pentaho.di.ui.repository.dialog.RepositoryExportProgressDialog;
import org.pentaho.di.ui.repository.dialog.RepositoryImportProgressDialog;
import org.pentaho.di.ui.repository.dialog.RepositoryRevisionBrowserDialogInterface;
import org.pentaho.di.ui.repository.dialog.SelectDirectoryDialog;
import org.pentaho.di.ui.repository.dialog.SelectObjectDialog;
import org.pentaho.di.ui.repository.repositoryexplorer.RepositoryExplorer;
import org.pentaho.di.ui.repository.repositoryexplorer.RepositoryExplorerCallback;
import org.pentaho.di.ui.repository.repositoryexplorer.UISupportRegistery;
import org.pentaho.di.ui.repository.repositoryexplorer.model.UIRepositoryContent;
import org.pentaho.di.ui.repository.repositoryexplorer.uisupport.BaseRepositoryExplorerUISupport;
import org.pentaho.di.ui.repository.repositoryexplorer.uisupport.ManageUserUISupport;
import org.pentaho.di.ui.spoon.SpoonLifecycleListener.SpoonLifeCycleEvent;
import org.pentaho.di.ui.spoon.TabMapEntry.ObjectType;
import org.pentaho.di.ui.spoon.delegates.SpoonDelegates;
import org.pentaho.di.ui.spoon.dialog.AnalyseImpactProgressDialog;
import org.pentaho.di.ui.spoon.dialog.CheckTransProgressDialog;
import org.pentaho.di.ui.spoon.dialog.LogSettingsDialog;
import org.pentaho.di.ui.spoon.dialog.MetaStoreExplorerDialog;
import org.pentaho.di.ui.spoon.dialog.SaveProgressDialog;
import org.pentaho.di.ui.spoon.dialog.TipsDialog;
import org.pentaho.di.ui.spoon.job.JobGraph;
import org.pentaho.di.ui.spoon.trans.TransGraph;
import org.pentaho.di.ui.spoon.wizards.CopyTableWizardPage1;
import org.pentaho.di.ui.spoon.wizards.CopyTableWizardPage2;
import org.pentaho.di.ui.trans.dialog.TransDialogPluginType;
import org.pentaho.di.ui.trans.dialog.TransHopDialog;
import org.pentaho.di.ui.trans.dialog.TransLoadProgressDialog;
import org.pentaho.di.ui.util.HelpUtils;
import org.pentaho.di.ui.util.ThreadGuiResources;
import org.pentaho.di.ui.xul.KettleXulLoader;
import org.pentaho.di.version.BuildVersion;
import org.pentaho.metastore.api.IMetaStore;
import org.pentaho.metastore.api.exceptions.MetaStoreException;
import org.pentaho.metastore.stores.delegate.DelegatingMetaStore;
import org.pentaho.ui.xul.XulComponent;
import org.pentaho.ui.xul.XulDomContainer;
import org.pentaho.ui.xul.XulEventSource;
import org.pentaho.ui.xul.binding.BindingFactory;
import org.pentaho.ui.xul.binding.DefaultBindingFactory;
import org.pentaho.ui.xul.components.WaitBoxRunnable;
import org.pentaho.ui.xul.components.XulMenuitem;
import org.pentaho.ui.xul.components.XulMenuseparator;
import org.pentaho.ui.xul.components.XulToolbarbutton;
import org.pentaho.ui.xul.components.XulWaitBox;
import org.pentaho.ui.xul.containers.XulMenupopup;
import org.pentaho.ui.xul.containers.XulToolbar;
import org.pentaho.ui.xul.impl.XulEventHandler;
import org.pentaho.ui.xul.jface.tags.ApplicationWindowLocal;
import org.pentaho.ui.xul.jface.tags.JfaceMenuitem;
import org.pentaho.ui.xul.jface.tags.JfaceMenupopup;
import org.pentaho.ui.xul.swt.SwtXulLoader;
import org.pentaho.ui.xul.swt.tags.SwtDeck;
import org.pentaho.vfs.ui.VfsFileChooserDialog;
import org.pentaho.xul.swt.tab.TabItem;
import org.pentaho.xul.swt.tab.TabListener;
import org.pentaho.xul.swt.tab.TabSet;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
/**
* This class handles the main window of the Spoon graphical transformation editor.
*
* @author Matt
* @since 16-may-2003, i18n at 07-Feb-2006, redesign 01-Dec-2006
*/
public class Spoon extends ApplicationWindow implements AddUndoPositionInterface, TabListener, SpoonInterface,
OverwritePrompter, PDIObserver, LifeEventHandler, XulEventSource, XulEventHandler {
private static Class<?> PKG = Spoon.class;
public static final LoggingObjectInterface loggingObject = new SimpleLoggingObject( "Spoon", LoggingObjectType.SPOON,
null );
public static final String STRING_TRANSFORMATIONS = BaseMessages.getString( PKG, "Spoon.STRING_TRANSFORMATIONS" );
public static final String STRING_JOBS = BaseMessages.getString( PKG, "Spoon.STRING_JOBS" );
public static final String STRING_BUILDING_BLOCKS = BaseMessages.getString( PKG, "Spoon.STRING_BUILDING_BLOCKS" );
public static final String STRING_ELEMENTS = BaseMessages.getString( PKG, "Spoon.STRING_ELEMENTS" );
public static final String STRING_CONNECTIONS = BaseMessages.getString( PKG, "Spoon.STRING_CONNECTIONS" );
public static final String STRING_STEPS = BaseMessages.getString( PKG, "Spoon.STRING_STEPS" );
public static final String STRING_JOB_ENTRIES = BaseMessages.getString( PKG, "Spoon.STRING_JOB_ENTRIES" );
public static final String STRING_HOPS = BaseMessages.getString( PKG, "Spoon.STRING_HOPS" );
public static final String STRING_PARTITIONS = BaseMessages.getString( PKG, "Spoon.STRING_PARTITIONS" );
public static final String STRING_SLAVES = BaseMessages.getString( PKG, "Spoon.STRING_SLAVES" );
public static final String STRING_CLUSTERS = BaseMessages.getString( PKG, "Spoon.STRING_CLUSTERS" );
public static final String STRING_TRANS_BASE = BaseMessages.getString( PKG, "Spoon.STRING_BASE" );
public static final String STRING_HISTORY = BaseMessages.getString( PKG, "Spoon.STRING_HISTORY" );
public static final String STRING_TRANS_NO_NAME = BaseMessages.getString( PKG, "Spoon.STRING_TRANS_NO_NAME" );
public static final String STRING_JOB_NO_NAME = BaseMessages.getString( PKG, "Spoon.STRING_JOB_NO_NAME" );
public static final String STRING_TRANSFORMATION = BaseMessages.getString( PKG, "Spoon.STRING_TRANSFORMATION" );
public static final String STRING_JOB = BaseMessages.getString( PKG, "Spoon.STRING_JOB" );
private static final String SYNC_TRANS = "sync_trans_name_to_file_name";
public static final String APP_NAME = BaseMessages.getString( PKG, "Spoon.Application.Name" );
private static final String STRING_SPOON_MAIN_TREE = BaseMessages.getString( PKG, "Spoon.MainTree.Label" );
private static final String STRING_SPOON_CORE_OBJECTS_TREE = BaseMessages
.getString( PKG, "Spoon.CoreObjectsTree.Label" );
public static final String XML_TAG_TRANSFORMATION_STEPS = "transformation-steps";
public static final String XML_TAG_JOB_JOB_ENTRIES = "job-jobentries";
private static final String XML_TAG_STEPS = "steps";
public static final int MESSAGE_DIALOG_WITH_TOGGLE_YES_BUTTON_ID = 256;
public static final int MESSAGE_DIALOG_WITH_TOGGLE_NO_BUTTON_ID = 257;
public static final int MESSAGE_DIALOG_WITH_TOGGLE_CUSTOM_DISTRIBUTION_BUTTON_ID = 258;
private static Spoon staticSpoon;
private static LogChannelInterface log;
private Display display;
private Shell shell;
private static Splash splash;
private static FileLoggingEventListener fileLoggingEventListener;
private boolean destroy;
private SashForm sashform;
public TabSet tabfolder;
// THE HANDLERS
public SpoonDelegates delegates = new SpoonDelegates( this );
public RowMetaAndData variables = new RowMetaAndData( new RowMeta() );
/**
* These are the arguments that were given at Spoon launch time...
*/
private String[] arguments;
private boolean stopped;
private Cursor cursor_hourglass, cursor_hand;
public PropsUI props;
public Repository rep;
// private RepositorySecurityManager securityManager;
public RepositoryCapabilities capabilities;
  // Remember the last directory that new files were saved to
// TODO: Save the last saved position to the defaultSaveLocation
private RepositoryDirectoryInterface defaultSaveLocation = null;
// Associate the defaultSaveLocation with a given repository; We should clear this out on a repo change
private Repository defaultSaveLocationRepository = null;
private CTabItem view, design;
private Label selectionLabel;
public Text selectionFilter;
private org.eclipse.swt.widgets.Menu fileMenus;
private static final String APP_TITLE = APP_NAME;
private static final String STRING_WELCOME_TAB_NAME = BaseMessages.getString( PKG, "Spoon.Title.STRING_WELCOME" );
private static final String STRING_DOCUMENT_TAB_NAME = BaseMessages.getString( PKG, "Spoon.Documentation" );
// "docs/English/welcome/index.html";
private static final String FILE_WELCOME_PAGE = Const
.safeAppendDirectory( BasePropertyHandler.getProperty( "documentationDirBase", "docs/" ),
BaseMessages.getString( PKG, "Spoon.Title.STRING_DOCUMENT_WELCOME" ) );
// "docs/English/welcome/index.html";
private static final String FILE_DOCUMENT_MAP = Const
.safeAppendDirectory( BasePropertyHandler.getProperty( "documentationDirBase", "docs/" ),
BaseMessages.getString( PKG, "Spoon.Title.STRING_DOCUMENT_MAP" ) );
private static final String UNDO_MENU_ITEM = "edit-undo";
private static final String REDO_MENU_ITEM = "edit-redo";
// "Undo : not available \tCTRL-Z"
private static final String UNDO_UNAVAILABLE = BaseMessages.getString( PKG, "Spoon.Menu.Undo.NotAvailable" );
// "Redo : not available \tCTRL-Y"
private static final String REDO_UNAVAILABLE = BaseMessages.getString( PKG, "Spoon.Menu.Redo.NotAvailable" );
private Composite tabComp;
private Tree selectionTree;
private Tree coreObjectsTree;
private TransExecutionConfiguration transExecutionConfiguration;
private TransExecutionConfiguration transPreviewExecutionConfiguration;
private TransExecutionConfiguration transDebugExecutionConfiguration;
private JobExecutionConfiguration jobExecutionConfiguration;
// private Menu spoonMenu; // Connections,
private int coreObjectsState = STATE_CORE_OBJECTS_NONE;
protected Map<String, FileListener> fileExtensionMap = new HashMap<String, FileListener>();
private List<Object[]> menuListeners = new ArrayList<Object[]>();
// loads the lifecycle listeners
private LifecycleSupport lifecycleSupport = new LifecycleSupport();
private Composite mainComposite;
private boolean viewSelected;
private boolean designSelected;
private Composite variableComposite;
private Map<String, String> coreStepToolTipMap;
private Map<String, String> coreJobToolTipMap;
private DefaultToolTip toolTip;
public Map<String, SharedObjects> sharedObjectsFileMap;
/**
* We can use this to set a default filter path in the open and save dialogs
*/
public String lastDirOpened;
private List<FileListener> fileListeners = new ArrayList<FileListener>();
private XulDomContainer mainSpoonContainer;
// Menu controllers to modify the main spoon menu
private List<ISpoonMenuController> menuControllers = new ArrayList<ISpoonMenuController>();
private XulToolbar mainToolbar;
private SwtDeck deck;
public static final String XUL_FILE_MAIN = "ui/spoon.xul";
private Map<String, XulComponent> menuMap = new HashMap<String, XulComponent>();
private RepositoriesDialog loginDialog;
private VfsFileChooserDialog vfsFileChooserDialog;
// the id of the perspective to start in, if any
protected String startupPerspective = null;
private CommandLineOption[] commandLineOptions;
public DelegatingMetaStore metaStore;
/**
* This is the main procedure for Spoon.
*
   * @param a
   *          Command line arguments; these are also available in the "Get System Info" step.
*/
public static void main( String[] a ) throws KettleException {
ExecutorService executor = Executors.newCachedThreadPool();
Future<KettleException> pluginRegistryFuture = executor.submit( new Callable<KettleException>() {
@Override
public KettleException call() throws Exception {
registerUIPluginObjectTypes();
try {
KettleEnvironment.init();
} catch ( KettleException e ) {
return e;
}
KettleClientEnvironment.getInstance().setClient( KettleClientEnvironment.ClientType.SPOON );
return null;
}
} );
try {
OsHelper.setAppName();
// Bootstrap Kettle
//
Display display;
if ( System.getProperties().containsKey( "SLEAK" ) ) {
DeviceData data = new DeviceData();
data.tracking = true;
display = new Display( data );
Sleak sleak = new Sleak();
Shell sleakShell = new Shell( display );
sleakShell.setText( "S-Leak" );
org.eclipse.swt.graphics.Point size = sleakShell.getSize();
sleakShell.setSize( size.x / 2, size.y / 2 );
sleak.create( sleakShell );
sleakShell.open();
} else {
display = new Display();
}
// Note: this needs to be done before the look and feel is set
OsHelper.initOsHandlers( display );
UIManager.setLookAndFeel( new MetalLookAndFeel() );
// The core plugin types don't know about UI classes. Add them in now
// before the PluginRegistry is inited.
splash = new Splash( display );
List<String> args = new ArrayList<String>( Arrays.asList( a ) );
CommandLineOption[] commandLineOptions = getCommandLineArgs( args );
KettleException registryException = pluginRegistryFuture.get();
if ( registryException != null ) {
throw registryException;
}
PropsUI.init( display, Props.TYPE_PROPERTIES_SPOON );
KettleLogStore
.init( PropsUI.getInstance().getMaxNrLinesInLog(), PropsUI.getInstance().getMaxLogLineTimeoutMinutes() );
initLogging( commandLineOptions );
      // Remember this instance for later access through Spoon.getInstance()
staticSpoon = new Spoon();
staticSpoon.commandLineOptions = commandLineOptions;
// pull the startup perspective id from the command line options and hand it to Spoon
String pId;
StringBuffer perspectiveIdBuff = Spoon.getCommandLineOption( commandLineOptions, "perspective" ).getArgument();
pId = perspectiveIdBuff.toString();
if ( !Const.isEmpty( pId ) ) {
Spoon.staticSpoon.startupPerspective = pId;
}
SpoonFactory.setSpoonInstance( staticSpoon );
staticSpoon.setDestroy( true );
GUIFactory.setThreadDialogs( new ThreadGuiResources() );
staticSpoon.setArguments( args.toArray( new String[ args.size() ] ) );
staticSpoon.start();
} catch ( Throwable t ) {
// avoid calls to Messages i18n method getString() in this block
// We do this to (hopefully) also catch Out of Memory Exceptions
//
t.printStackTrace();
if ( staticSpoon != null ) {
log.logError( "Fatal error : " + Const.NVL( t.toString(), Const.NVL( t.getMessage(), "Unknown error" ) ) );
log.logError( Const.getStackTracker( t ) );
}
}
// Kill all remaining things in this VM!
System.exit( 0 );
}
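  /**
   * Configure logging from the "logfile" and "level" command line options: optionally
   * attach a file logging listener and set the requested log level.
   */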
private static void initLogging( CommandLineOption[] options ) throws KettleException {
StringBuffer optionLogFile = getCommandLineOption( options, "logfile" ).getArgument();
StringBuffer optionLogLevel = getCommandLineOption( options, "level" ).getArgument();
// Set default Locale:
Locale.setDefault( Const.DEFAULT_LOCALE );
if ( !Const.isEmpty( optionLogFile ) ) {
fileLoggingEventListener = new FileLoggingEventListener( optionLogFile.toString(), true );
if ( log.isBasic() ) {
String filename = fileLoggingEventListener.getFilename();
log.logBasic( BaseMessages.getString( PKG, "Spoon.Log.LoggingToFile" ) + filename );
}
KettleLogStore.getAppender().addLoggingEventListener( fileLoggingEventListener );
} else {
fileLoggingEventListener = null;
}
if ( !Const.isEmpty( optionLogLevel ) ) {
log.setLogLevel( LogLevel.getLogLevelForCode( optionLogLevel.toString() ) );
if ( log.isBasic() ) {
// "Logging is at level : "
log.logBasic( BaseMessages.getString( PKG, "Spoon.Log.LoggingAtLevel" ) + log.getLogLevel().getDescription() );
}
}
}
public Spoon() {
this( null );
}
public Spoon( Repository rep ) {
super( null );
this.addMenuBar();
log = new LogChannel( APP_NAME );
SpoonFactory.setSpoonInstance( this );
// Load at least one local Pentaho metastore and add it to the delegating metastore
//
metaStore = new DelegatingMetaStore();
try {
IMetaStore localMetaStore = MetaStoreConst.openLocalPentahoMetaStore();
metaStore.addMetaStore( localMetaStore );
metaStore.setActiveMetaStoreName( localMetaStore.getName() );
if ( rep != null ) {
metaStore.addMetaStore( 0, rep.getMetaStore() );
metaStore.setActiveMetaStoreName( rep.getMetaStore().getName() );
}
} catch ( MetaStoreException e ) {
new ErrorDialog( shell, "Error opening Pentaho Metastore", "Unable to open local Pentaho Metastore", e );
}
setRepository( rep );
props = PropsUI.getInstance();
sharedObjectsFileMap = new Hashtable<String, SharedObjects>();
Thread uiThread = Thread.currentThread();
display = Display.findDisplay( uiThread );
staticSpoon = this;
try {
JndiUtil.initJNDI();
} catch ( Exception e ) {
new ErrorDialog( shell, "Unable to init simple JNDI", "Unable to init simple JNDI", e );
}
}
/**
* The core plugin types don't know about UI classes. This method adds those in before initialization.
*
* TODO: create a SpoonLifecycle listener that can notify interested parties of a pre-initialization state so this can
* happen in those listeners.
*/
private static void registerUIPluginObjectTypes() {
RepositoryPluginType.getInstance()
.addObjectType( RepositoryRevisionBrowserDialogInterface.class, "version-browser-classname" );
RepositoryPluginType.getInstance().addObjectType( RepositoryDialogInterface.class, "dialog-classname" );
PluginRegistry.addPluginType( SpoonPluginType.getInstance() );
SpoonPluginType.getInstance().getPluginFolders().add( new PluginFolder( "plugins/repositories", false, true ) );
LifecyclePluginType.getInstance().getPluginFolders().add( new PluginFolder( "plugins/spoon", false, true ) );
LifecyclePluginType.getInstance().getPluginFolders().add( new PluginFolder( "plugins/repositories", false, true ) );
PluginRegistry.addPluginType( JobDialogPluginType.getInstance() );
PluginRegistry.addPluginType( TransDialogPluginType.getInstance() );
}
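  /**
   * Initialize the Spoon UI: register the file listeners, load the saved settings and
   * execution configurations, load the main XUL document, set up perspectives, the tree,
   * the tabs, drag-and-drop support and the step plugin listener. An optional
   * transformation is added as the first open tab.
   */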
public void init( TransMeta ti ) {
FormLayout layout = new FormLayout();
layout.marginWidth = 0;
layout.marginHeight = 0;
shell.setLayout( layout );
addFileListener( new TransFileListener() );
addFileListener( new JobFileListener() );
// INIT Data structure
if ( ti != null ) {
delegates.trans.addTransformation( ti );
}
// Load settings in the props
loadSettings();
transExecutionConfiguration = new TransExecutionConfiguration();
transExecutionConfiguration.setGatheringMetrics( true );
transPreviewExecutionConfiguration = new TransExecutionConfiguration();
transPreviewExecutionConfiguration.setGatheringMetrics( true );
transDebugExecutionConfiguration = new TransExecutionConfiguration();
transDebugExecutionConfiguration.setGatheringMetrics( true );
jobExecutionConfiguration = new JobExecutionConfiguration();
    // Clear the variables every time we start; auto-loading them is not a good idea.
    // If they are needed that often, set them in the kettle.properties file instead.
//
variables = new RowMetaAndData( new RowMeta() );
// props.setLook(shell);
Image[] images = { GUIResource.getInstance().getImageSpoonHigh(), GUIResource.getInstance().getImageSpoon() };
shell.setImages( images );
// shell.setImage(GUIResource.getInstance().getImageSpoon());
cursor_hourglass = new Cursor( display, SWT.CURSOR_WAIT );
cursor_hand = new Cursor( display, SWT.CURSOR_HAND );
Composite sashComposite = null;
MainSpoonPerspective mainPerspective = null;
try {
SwtXulLoader xulLoader = new KettleXulLoader();
xulLoader.setOuterContext( shell );
xulLoader.setSettingsManager( XulSpoonSettingsManager.getInstance() );
ApplicationWindowLocal.setApplicationWindow( this );
mainSpoonContainer = xulLoader.loadXul( XUL_FILE_MAIN, new XulSpoonResourceBundle() );
BindingFactory bf = new DefaultBindingFactory();
bf.setDocument( mainSpoonContainer.getDocumentRoot() );
mainSpoonContainer.addEventHandler( this );
/* menuBar = (XulMenubar) */
mainSpoonContainer.getDocumentRoot().getElementById( "spoon-menubar" );
mainToolbar = (XulToolbar) mainSpoonContainer.getDocumentRoot().getElementById( "main-toolbar" );
/* canvas = (XulVbox) */
mainSpoonContainer.getDocumentRoot().getElementById( "trans-job-canvas" );
deck = (SwtDeck) mainSpoonContainer.getDocumentRoot().getElementById( "canvas-deck" );
final Composite tempSashComposite = new Composite( shell, SWT.None );
sashComposite = tempSashComposite;
mainPerspective = new MainSpoonPerspective( tempSashComposite, tabfolder );
if ( startupPerspective == null ) {
startupPerspective = mainPerspective.getId();
}
SpoonPerspectiveManager.getInstance().setStartupPerspective( startupPerspective );
SpoonPerspectiveManager.getInstance().addPerspective( mainPerspective );
SpoonPluginManager.getInstance().applyPluginsForContainer( "spoon", mainSpoonContainer );
SpoonPerspectiveManager.getInstance().setDeck( deck );
SpoonPerspectiveManager.getInstance().setXulDoc( mainSpoonContainer );
SpoonPerspectiveManager.getInstance().initialize();
} catch ( Exception e ) {
LogChannel.GENERAL.logError( "Error initializing transformation", e );
}
// addBar();
// Set the shell size, based upon previous time...
WindowProperty windowProperty = props.getScreen( APP_TITLE );
if ( windowProperty != null ) {
windowProperty.setShell( shell );
} else {
shell.pack();
shell.setMaximized( true ); // Default = maximized!
}
layout = new FormLayout();
layout.marginWidth = 0;
layout.marginHeight = 0;
GridData data = new GridData();
data.grabExcessHorizontalSpace = true;
data.grabExcessVerticalSpace = true;
data.verticalAlignment = SWT.FILL;
data.horizontalAlignment = SWT.FILL;
sashComposite.setLayoutData( data );
sashComposite.setLayout( layout );
sashform = new SashForm( sashComposite, SWT.HORIZONTAL );
FormData fdSash = new FormData();
fdSash.left = new FormAttachment( 0, 0 );
// fdSash.top = new FormAttachment((org.eclipse.swt.widgets.ToolBar)
// toolbar.getNativeObject(), 0);
fdSash.top = new FormAttachment( 0, 0 );
fdSash.bottom = new FormAttachment( 100, 0 );
fdSash.right = new FormAttachment( 100, 0 );
sashform.setLayoutData( fdSash );
createPopupMenus();
addTree();
addTabs();
mainPerspective.setTabset( this.tabfolder );
( (Composite) deck.getManagedObject() ).layout( true, true );
SpoonPluginManager.getInstance().notifyLifecycleListeners( SpoonLifeCycleEvent.STARTUP );
// Add a browser widget
if ( props.showWelcomePageOnStartup() ) {
showWelcomePage();
}
// Allow data to be copied or moved to the drop target
int operations = DND.DROP_COPY | DND.DROP_DEFAULT;
DropTarget target = new DropTarget( shell, operations );
// Receive data in File format
final FileTransfer fileTransfer = FileTransfer.getInstance();
Transfer[] types = new Transfer[] { fileTransfer };
target.setTransfer( types );
target.addDropListener( new DropTargetListener() {
public void dragEnter( DropTargetEvent event ) {
if ( event.detail == DND.DROP_DEFAULT ) {
if ( ( event.operations & DND.DROP_COPY ) != 0 ) {
event.detail = DND.DROP_COPY;
} else {
event.detail = DND.DROP_NONE;
}
}
}
public void dragOver( DropTargetEvent event ) {
event.feedback = DND.FEEDBACK_SELECT | DND.FEEDBACK_SCROLL;
}
public void dragOperationChanged( DropTargetEvent event ) {
if ( event.detail == DND.DROP_DEFAULT ) {
if ( ( event.operations & DND.DROP_COPY ) != 0 ) {
event.detail = DND.DROP_COPY;
} else {
event.detail = DND.DROP_NONE;
}
}
}
public void dragLeave( DropTargetEvent event ) {
}
public void dropAccept( DropTargetEvent event ) {
}
public void drop( DropTargetEvent event ) {
if ( fileTransfer.isSupportedType( event.currentDataType ) ) {
String[] files = (String[]) event.data;
for ( String file : files ) {
openFile( file, false );
}
}
}
} );
// listen for steps being added or removed
PluginRegistry.getInstance().addPluginListener( StepPluginType.class, new PluginTypeListener() {
@Override
public void pluginAdded( Object serviceObject ) {
previousShowTrans = false; // hack to get the tree to reload
Display.getDefault().asyncExec( new Runnable() {
@Override
public void run() {
refreshCoreObjects();
}
} );
}
@Override
public void pluginRemoved( Object serviceObject ) {
previousShowTrans = false; // hack to get the tree to reload
Display.getDefault().asyncExec( new Runnable() {
@Override
public void run() {
refreshCoreObjects();
}
} );
}
@Override
public void pluginChanged( Object serviceObject ) {
}
} );
}
public XulDomContainer getMainSpoonContainer() {
return mainSpoonContainer;
}
public void loadPerspective( String id ) {
List<SpoonPerspective> perspectives = SpoonPerspectiveManager.getInstance().getPerspectives();
for ( int pos = 0; pos < perspectives.size(); pos++ ) {
SpoonPerspective perspective = perspectives.get( pos );
if ( perspective.getId().equals( id ) ) {
loadPerspective( pos );
return;
}
}
}
public void loadPerspective( int pos ) {
try {
SpoonPerspectiveManager.getInstance().activatePerspective(
SpoonPerspectiveManager.getInstance().getPerspectives().get( pos ).getClass() );
} catch ( KettleException e ) {
log.logError( "Error loading perspective", e );
}
}
public static Spoon getInstance() {
return staticSpoon;
}
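  /**
   * Lazily create the shared VFS file chooser dialog and point it at the given root and
   * initial files.
   */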
public VfsFileChooserDialog getVfsFileChooserDialog( FileObject rootFile, FileObject initialFile ) {
if ( vfsFileChooserDialog == null ) {
vfsFileChooserDialog = new VfsFileChooserDialog( shell, KettleVFS.getInstance().getFileSystemManager(), rootFile,
initialFile );
}
vfsFileChooserDialog.setRootFile( rootFile );
vfsFileChooserDialog.setInitialFile( initialFile );
return vfsFileChooserDialog;
}
public boolean closeFile() {
boolean closed = true;
EngineMetaInterface meta = getActiveMeta();
if ( meta != null ) {
// If a transformation is the current active tab, close it
closed = tabCloseSelected();
}
return closed;
}
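  /**
   * Close all open tabs, starting with the last one. A single cancel aborts the operation.
   *
   * @return true if every file was closed, false if the user cancelled
   */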
public boolean closeAllFiles() {
int numTabs = delegates.tabs.getTabs().size();
for ( int i = numTabs - 1; i >= 0; i-- ) {
tabfolder.setSelected( i );
if ( !closeFile() ) {
return false; // A single cancel aborts the rest of the operation
}
}
return true;
}
  /**
   * Prompt the user to close all open jobs and transformations if they have execute permissions.
   * If they don't have execute permission, warn them and ask whether they really want to
   * disconnect from the repository; if so, close all tabs.
   *
   * @return true if the user agrees to close the tabs, so the caller can disconnect from the repository.
   */
public boolean closeAllJobsAndTransformations() {
// Check to see if there are any open jobs/trans
if ( getActiveMeta() == null ) {
return true;
}
boolean createPerms = !RepositorySecurityUI
.verifyOperations( shell, rep, false, RepositoryOperation.MODIFY_TRANSFORMATION,
RepositoryOperation.MODIFY_JOB );
boolean executePerms = !RepositorySecurityUI
.verifyOperations( shell, rep, false, RepositoryOperation.EXECUTE_TRANSFORMATION,
RepositoryOperation.EXECUTE_JOB );
boolean readPerms = !RepositorySecurityUI
.verifyOperations( shell, rep, false, RepositoryOperation.READ_TRANSFORMATION, RepositoryOperation.READ_JOB );
// Check to see if display of warning dialog has been disabled
String warningTitle = BaseMessages.getString( PKG, "Spoon.Dialog.WarnToCloseAllForce.Disconnect.Title" );
String warningText = BaseMessages.getString( PKG, "Spoon.Dialog.WarnToCloseAllForce.Disconnect.Message" );
int buttons = SWT.OK;
if ( readPerms && createPerms && executePerms ) {
warningTitle = BaseMessages.getString( PKG, "Spoon.Dialog.WarnToCloseAllOption.Disconnect.Title" );
warningText = BaseMessages.getString( PKG, "Spoon.Dialog.WarnToCloseAllOption.Disconnect.Message" );
buttons = SWT.YES | SWT.NO;
}
MessageBox mb = new MessageBox( Spoon.getInstance().getShell(), buttons | SWT.ICON_WARNING );
mb.setMessage( warningText );
mb.setText( warningTitle );
final int isCloseAllFiles = mb.open();
if ( ( isCloseAllFiles == SWT.YES ) || ( isCloseAllFiles == SWT.OK ) ) {
// Yes - User specified that they want to close all.
return Spoon.getInstance().closeAllFiles();
} else if ( ( isCloseAllFiles == SWT.NO ) && ( executePerms ) ) {
      // No - keep the tabs open; this option is only offered when the user has execute permissions.
      // Return true so we can still disconnect from the repository.
return true;
} else {
// Cancel - don't close tabs and don't disconnect from repo
return false;
}
}
public void closeSpoonBrowser() {
TabMapEntry browserTab = delegates.tabs.findTabMapEntry( STRING_WELCOME_TAB_NAME, ObjectType.BROWSER );
if ( browserTab != null ) {
delegates.tabs.removeTab( browserTab );
}
}
/**
* Search the transformation meta-data.
*
*/
public void searchMetaData() {
TransMeta[] transMetas = getLoadedTransformations();
JobMeta[] jobMetas = getLoadedJobs();
if ( ( transMetas == null || transMetas.length == 0 ) && ( jobMetas == null || jobMetas.length == 0 ) ) {
return;
}
EnterSearchDialog esd = new EnterSearchDialog( shell );
if ( !esd.open() ) {
return;
}
List<Object[]> rows = new ArrayList<Object[]>();
for ( TransMeta transMeta : transMetas ) {
String filter = esd.getFilterString();
if ( filter != null ) {
filter = filter.toUpperCase();
}
List<StringSearchResult> stringList =
transMeta.getStringList( esd.isSearchingSteps(), esd.isSearchingDatabases(), esd.isSearchingNotes() );
for ( StringSearchResult result : stringList ) {
boolean add = Const.isEmpty( filter );
if ( filter != null && result.getString().toUpperCase().contains( filter ) ) {
add = true;
}
if ( filter != null && result.getFieldName().toUpperCase().contains( filter ) ) {
add = true;
}
if ( filter != null && result.getParentObject().toString().toUpperCase().contains( filter ) ) {
add = true;
}
if ( filter != null && result.getGrandParentObject().toString().toUpperCase().contains( filter ) ) {
add = true;
}
if ( add ) {
rows.add( result.toRow() );
}
}
}
for ( JobMeta jobMeta : jobMetas ) {
String filter = esd.getFilterString();
if ( filter != null ) {
filter = filter.toUpperCase();
}
List<StringSearchResult> stringList =
jobMeta.getStringList( esd.isSearchingSteps(), esd.isSearchingDatabases(), esd.isSearchingNotes() );
for ( StringSearchResult result : stringList ) {
boolean add = Const.isEmpty( filter );
if ( filter != null && result.getString().toUpperCase().contains( filter ) ) {
add = true;
}
if ( filter != null && result.getFieldName().toUpperCase().contains( filter ) ) {
add = true;
}
if ( filter != null && result.getParentObject().toString().toUpperCase().contains( filter ) ) {
add = true;
}
if ( filter != null && result.getGrandParentObject().toString().toUpperCase().contains( filter ) ) {
add = true;
}
if ( add ) {
rows.add( result.toRow() );
}
}
}
if ( rows.size() != 0 ) {
PreviewRowsDialog prd =
new PreviewRowsDialog( shell, Variables.getADefaultVariableSpace(), SWT.NONE, BaseMessages.getString(
PKG, "Spoon.StringSearchResult.Subtitle" ), StringSearchResult.getResultRowMeta(), rows );
String title = BaseMessages.getString( PKG, "Spoon.StringSearchResult.Title" );
String message = BaseMessages.getString( PKG, "Spoon.StringSearchResult.Message" );
prd.setTitleMessage( title, message );
prd.open();
} else {
MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_INFORMATION );
mb.setMessage( BaseMessages.getString( PKG, "Spoon.Dialog.NothingFound.Message" ) );
mb.setText( BaseMessages.getString( PKG, "Spoon.Dialog.NothingFound.Title" ) ); // Sorry!
mb.open();
}
}
public void showArguments() {
RowMetaAndData allArgs = new RowMetaAndData();
for ( int ii = 0; ii < arguments.length; ++ii ) {
allArgs.addValue( new ValueMeta(
Props.STRING_ARGUMENT_NAME_PREFIX + ( 1 + ii ), ValueMetaInterface.TYPE_STRING ), arguments[ii] );
}
// Now ask the user for more info on these!
EnterStringsDialog esd = new EnterStringsDialog( shell, SWT.NONE, allArgs );
esd.setTitle( BaseMessages.getString( PKG, "Spoon.Dialog.ShowArguments.Title" ) );
esd.setMessage( BaseMessages.getString( PKG, "Spoon.Dialog.ShowArguments.Message" ) );
esd.setReadOnly( true );
esd.setShellImage( GUIResource.getInstance().getImageLogoSmall() );
esd.open();
}
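/**
* Collect the variables used by all loaded transformations and jobs and add any that are missing
* (and that are not internal Kettle variables) to the given row, using the current system and
* variable space values.
*/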
private void fillVariables( RowMetaAndData vars ) {
TransMeta[] transMetas = getLoadedTransformations();
JobMeta[] jobMetas = getLoadedJobs();
if ( ( transMetas == null || transMetas.length == 0 ) && ( jobMetas == null || jobMetas.length == 0 ) ) {
return;
}
Properties sp = new Properties();
sp.putAll( System.getProperties() );
VariableSpace space = Variables.getADefaultVariableSpace();
String[] keys = space.listVariables();
for ( String key : keys ) {
sp.put( key, space.getVariable( key ) );
}
for ( TransMeta transMeta : transMetas ) {
List<String> list = transMeta.getUsedVariables();
for ( String varName : list ) {
String varValue = sp.getProperty( varName, "" );
if ( vars.getRowMeta().indexOfValue( varName ) < 0 && !varName.startsWith( Const.INTERNAL_VARIABLE_PREFIX ) ) {
vars.addValue( new ValueMeta( varName, ValueMetaInterface.TYPE_STRING ), varValue );
}
}
}
for ( JobMeta jobMeta : jobMetas ) {
List<String> list = jobMeta.getUsedVariables();
for ( String varName : list ) {
String varValue = sp.getProperty( varName, "" );
if ( vars.getRowMeta().indexOfValue( varName ) < 0 && !varName.startsWith( Const.INTERNAL_VARIABLE_PREFIX ) ) {
vars.addValue( new ValueMeta( varName, ValueMetaInterface.TYPE_STRING ), varValue );
}
}
}
}
public void setVariables() {
fillVariables( variables );
// Now ask the user for more info on these!
EnterStringsDialog esd = new EnterStringsDialog( shell, SWT.NONE, variables );
esd.setTitle( BaseMessages.getString( PKG, "Spoon.Dialog.SetVariables.Title" ) );
esd.setMessage( BaseMessages.getString( PKG, "Spoon.Dialog.SetVariables.Message" ) );
esd.setReadOnly( false );
esd.setShellImage( GUIResource.getInstance().getImageVariable() );
if ( esd.open() != null ) {
applyVariables();
}
}
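/**
* Apply the variables entered in the variables dialog to all loaded transformations and jobs,
* as well as to the transformation and job execution configurations.
*/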
public void applyVariables() {
for ( int i = 0; i < variables.size(); i++ ) {
try {
String name = variables.getValueMeta( i ).getName();
String value = variables.getString( i, "" );
applyVariableToAllLoadedObjects( name, value );
} catch ( KettleValueException e ) {
// Just eat the exception. getString() should never give an
// exception.
log.logDebug( "Unexpected exception occurred : " + e.getMessage() );
}
}
}
public void applyVariableToAllLoadedObjects( String name, String value ) {
// We want to insert the variables into all loaded jobs and
// transformations
//
for ( TransMeta transMeta : getLoadedTransformations() ) {
transMeta.setVariable( name, Const.NVL( value, "" ) );
}
for ( JobMeta jobMeta : getLoadedJobs() ) {
jobMeta.setVariable( name, Const.NVL( value, "" ) );
}
// Not only that, we also want to set the variables in the
// execution configurations...
//
transExecutionConfiguration.getVariables().put( name, value );
jobExecutionConfiguration.getVariables().put( name, value );
transDebugExecutionConfiguration.getVariables().put( name, value );
}
public void showVariables() {
fillVariables( variables );
// Now ask the user for more info on these!
EnterStringsDialog esd = new EnterStringsDialog( shell, SWT.NONE, variables );
esd.setTitle( BaseMessages.getString( PKG, "Spoon.Dialog.ShowVariables.Title" ) );
esd.setMessage( BaseMessages.getString( PKG, "Spoon.Dialog.ShowVariables.Message" ) );
esd.setReadOnly( true );
esd.setShellImage( GUIResource.getInstance().getImageVariable() );
esd.open();
}
public void openSpoon() {
shell = getShell();
shell.setText( APP_TITLE );
mainComposite.setRedraw( true );
mainComposite.setVisible( false );
mainComposite.setVisible( true );
mainComposite.redraw();
// Perhaps the transformation contains elements at startup?
refreshTree(); // Do a complete refresh then...
setShellText();
}
public boolean readAndDispatch() {
return display.readAndDispatch();
}
/**
* @return whether or not the application was stopped.
*/
public boolean isStopped() {
return stopped;
}
/**
* @param stopped
* True to stop this application.
*/
public void setStopped( boolean stopped ) {
this.stopped = stopped;
}
/**
* @param destroy
* Whether or not to destroy the display.
*/
public void setDestroy( boolean destroy ) {
this.destroy = destroy;
}
/**
* @return Returns whether or not we should destroy the display.
*/
public boolean doDestroy() {
return destroy;
}
/**
* @param arguments
* The arguments to set.
*/
public void setArguments( String[] arguments ) {
this.arguments = arguments;
}
/**
* @return Returns the arguments.
*/
public String[] getArguments() {
return arguments;
}
public synchronized void dispose() {
setStopped( true );
cursor_hand.dispose();
cursor_hourglass.dispose();
if ( destroy && ( display != null ) && !display.isDisposed() ) {
try {
display.dispose();
} catch ( SWTException e ) {
// ignore errors that occur while disposing the display
}
}
}
public boolean isDisposed() {
return display.isDisposed();
}
public void sleep() {
display.sleep();
}
public void undoAction() {
undoAction( getActiveUndoInterface() );
}
public void redoAction() {
redoAction( getActiveUndoInterface() );
}
/**
* Copy the current selection. Despite the name, selected job entries are also handled here.
*/
public void copySteps() {
TransMeta transMeta = getActiveTransformation();
if ( transMeta != null ) {
copySelected( transMeta, transMeta.getSelectedSteps(), transMeta.getSelectedNotes() );
}
JobMeta jobMeta = getActiveJob();
if ( jobMeta != null ) {
copyJobentries();
}
}
public void copyJobentries() {
JobMeta jobMeta = getActiveJob();
if ( jobMeta != null ) {
if ( RepositorySecurityUI.verifyOperations( shell, rep,
RepositoryOperation.MODIFY_JOB, RepositoryOperation.EXECUTE_JOB ) ) {
return;
}
delegates.jobs.copyJobEntries( jobMeta, jobMeta.getSelectedEntries() );
}
}
public void copy() {
TransMeta transMeta = getActiveTransformation();
JobMeta jobMeta = getActiveJob();
boolean transActive = transMeta != null;
boolean jobActive = jobMeta != null;
if ( transActive ) {
if ( transMeta.getSelectedSteps().size() > 0 ) {
copySteps();
} else {
copyTransformation();
}
} else if ( jobActive ) {
if ( jobMeta.getSelectedEntries().size() > 0 ) {
copyJobentries();
} else {
copyJob();
}
}
}
public void copyFile() {
TransMeta transMeta = getActiveTransformation();
JobMeta jobMeta = getActiveJob();
boolean transActive = transMeta != null;
boolean jobActive = jobMeta != null;
if ( transActive ) {
copyTransformation();
} else if ( jobActive ) {
copyJob();
}
}
public void cut() {
TransMeta transMeta = getActiveTransformation();
JobMeta jobMeta = getActiveJob();
boolean transActive = transMeta != null;
boolean jobActive = jobMeta != null;
if ( transActive ) {
List<StepMeta> stepMetas = transMeta.getSelectedSteps();
if ( stepMetas != null && stepMetas.size() > 0 ) {
copySteps();
for ( StepMeta stepMeta : stepMetas ) {
delStep( transMeta, stepMeta );
}
}
} else if ( jobActive ) {
List<JobEntryCopy> jobEntryCopies = jobMeta.getSelectedEntries();
if ( jobEntryCopies != null && jobEntryCopies.size() > 0 ) {
copyJobentries();
for ( JobEntryCopy jobEntryCopy : jobEntryCopies ) {
deleteJobEntryCopies( jobMeta, jobEntryCopy );
}
}
}
}
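/**
* Remove the menu item with the given id from the main Spoon menu, optionally also removing a
* trailing separator that would otherwise be left at the end of its parent menu.
*/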
public void removeMenuItem( String itemid, boolean removeTrailingSeparators ) {
XulMenuitem item = (XulMenuitem) mainSpoonContainer.getDocumentRoot().getElementById( itemid );
if ( item != null ) {
XulComponent menu = item.getParent();
item.getParent().removeChild( item );
if ( removeTrailingSeparators ) {
List<XulComponent> children = menu.getChildNodes();
if ( children.size() > 0 ) {
XulComponent lastMenuItem = children.get( children.size() - 1 );
if ( lastMenuItem instanceof XulMenuseparator ) {
menu.removeChild( lastMenuItem );
// The above call should work, but doesn't for some reason, so remove the separator by force.
// The menu separators don't seem to be modeled as individual objects in XUL.
try {
Menu swtm = (Menu) menu.getManagedObject();
swtm.getItems()[swtm.getItemCount() - 1].dispose();
} catch ( Throwable t ) {
LogChannel.GENERAL.logError( "Error removing XUL menu item", t );
}
}
}
}
} else {
log.logError( "Could not find menu item with id " + itemid + " to remove from Spoon menu" );
}
}
public void createPopupMenus() {
try {
menuMap.put( "trans-class", mainSpoonContainer.getDocumentRoot().getElementById( "trans-class" ) );
menuMap.put( "trans-class-new", mainSpoonContainer.getDocumentRoot().getElementById( "trans-class-new" ) );
menuMap.put( "job-class", mainSpoonContainer.getDocumentRoot().getElementById( "job-class" ) );
menuMap.put( "trans-hop-class", mainSpoonContainer.getDocumentRoot().getElementById( "trans-hop-class" ) );
menuMap.put( "database-class", mainSpoonContainer.getDocumentRoot().getElementById( "database-class" ) );
menuMap.put( "partition-schema-class", mainSpoonContainer.getDocumentRoot().getElementById(
"partition-schema-class" ) );
menuMap.put( "cluster-schema-class", mainSpoonContainer.getDocumentRoot().getElementById(
"cluster-schema-class" ) );
menuMap.put( "slave-cluster-class", mainSpoonContainer.getDocumentRoot().getElementById(
"slave-cluster-class" ) );
menuMap.put( "trans-inst", mainSpoonContainer.getDocumentRoot().getElementById( "trans-inst" ) );
menuMap.put( "job-inst", mainSpoonContainer.getDocumentRoot().getElementById( "job-inst" ) );
menuMap.put( "step-plugin", mainSpoonContainer.getDocumentRoot().getElementById( "step-plugin" ) );
menuMap.put( "database-inst", mainSpoonContainer.getDocumentRoot().getElementById( "database-inst" ) );
menuMap.put( "step-inst", mainSpoonContainer.getDocumentRoot().getElementById( "step-inst" ) );
menuMap.put( "job-entry-copy-inst", mainSpoonContainer.getDocumentRoot().getElementById(
"job-entry-copy-inst" ) );
menuMap.put( "trans-hop-inst", mainSpoonContainer.getDocumentRoot().getElementById( "trans-hop-inst" ) );
menuMap.put( "partition-schema-inst", mainSpoonContainer.getDocumentRoot().getElementById(
"partition-schema-inst" ) );
menuMap.put( "cluster-schema-inst", mainSpoonContainer.getDocumentRoot().getElementById(
"cluster-schema-inst" ) );
menuMap
.put( "slave-server-inst", mainSpoonContainer.getDocumentRoot().getElementById( "slave-server-inst" ) );
} catch ( Throwable t ) {
new ErrorDialog(
shell, BaseMessages.getString( PKG, "Spoon.Exception.ErrorReadingXULFile.Title" ), BaseMessages
.getString( PKG, "Spoon.Exception.ErrorReadingXULFile.Message", XUL_FILE_MAIN ), new Exception( t ) );
}
addMenuLast();
}
public void executeTransformation() {
executeTransformation(
getActiveTransformation(), true, false, false, false, false, transExecutionConfiguration.getReplayDate(),
false, transExecutionConfiguration.getLogLevel() );
}
public void previewTransformation() {
executeTransformation(
getActiveTransformation(), true, false, false, true, false, transDebugExecutionConfiguration
.getReplayDate(), true, transDebugExecutionConfiguration.getLogLevel() );
}
public void debugTransformation() {
executeTransformation(
getActiveTransformation(), true, false, false, false, true, transPreviewExecutionConfiguration
.getReplayDate(), true, transPreviewExecutionConfiguration.getLogLevel() );
}
public void checkTrans() {
checkTrans( getActiveTransformation() );
}
public void analyseImpact() {
analyseImpact( getActiveTransformation() );
}
public void showLastImpactAnalyses() {
showLastImpactAnalyses( getActiveTransformation() );
}
public void showLastTransPreview() {
TransGraph transGraph = getActiveTransGraph();
if ( transGraph != null ) {
transGraph.showLastPreviewResults();
}
}
public void showExecutionResults() {
TransGraph transGraph = getActiveTransGraph();
if ( transGraph != null ) {
transGraph.showExecutionResults();
enableMenus();
} else {
JobGraph jobGraph = getActiveJobGraph();
if ( jobGraph != null ) {
jobGraph.showExecutionResults();
enableMenus();
}
}
}
public boolean isExecutionResultsPaneVisible() {
TransGraph transGraph = getActiveTransGraph();
return ( transGraph != null ) && ( transGraph.isExecutionResultsPaneVisible() );
}
public void copyTransformation() {
copyTransformation( getActiveTransformation() );
}
public void copyTransformationImage() {
copyTransformationImage( getActiveTransformation() );
}
public boolean editTransformationProperties() {
return TransGraph.editProperties( getActiveTransformation(), this, rep, true );
}
public boolean editProperties() {
if ( getActiveTransformation() != null ) {
return editTransformationProperties();
} else if ( getActiveJob() != null ) {
return editJobProperties( "job-settings" );
}
// no properties were edited, so no cancel was clicked
return true;
}
public void executeJob() {
executeJob( getActiveJob(), true, false, null, false, null, 0 );
}
public void copyJob() {
copyJob( getActiveJob() );
}
public void showTips() {
new TipsDialog( shell ).open();
}
public void showWelcomePage() {
try {
LocationListener listener = new LocationListener() {
public void changing( LocationEvent event ) {
if ( event.location.endsWith( ".pdf" ) ) {
Program.launch( event.location );
event.doit = false;
} else if ( event.location.contains( "samples/transformations" )
|| event.location.contains( "samples/jobs" ) || event.location.contains( "samples/mapping" ) ) {
try {
FileObject fileObject = KettleVFS.getFileObject( event.location );
if ( fileObject.exists() ) {
if ( event.location.endsWith( ".ktr" ) || event.location.endsWith( ".kjb" ) ) {
openFile( event.location, false );
} else {
lastDirOpened = KettleVFS.getFilename( fileObject );
openFile( true );
}
event.doit = false;
}
} catch ( Exception e ) {
log.logError( "Error handling samples location: " + event.location, e );
}
}
}
public void changed( LocationEvent event ) {
// System.out.println("Changed to: " + event.location);
}
};
// see if we are in webstart mode
String webstartRoot = System.getProperty( "spoon.webstartroot" );
if ( webstartRoot != null ) {
URL url = new URL( webstartRoot + '/' + FILE_WELCOME_PAGE );
addSpoonBrowser( STRING_WELCOME_TAB_NAME, url.toString(), listener ); // ./docs/English/tips/index.htm
} else {
// see if we can find the welcome file on the file system
File file = new File( FILE_WELCOME_PAGE );
if ( file.exists() ) {
// ./docs/English/tips/index.htm
addSpoonBrowser( STRING_WELCOME_TAB_NAME, file.toURI().toURL().toString(), listener );
}
}
} catch ( MalformedURLException e1 ) {
log.logError( Const.getStackTracker( e1 ) );
}
}
public void showDocumentMap() {
try {
LocationListener listener = new LocationListener() {
public void changing( LocationEvent event ) {
if ( event.location.endsWith( ".pdf" ) ) {
Program.launch( event.location );
event.doit = false;
}
}
public void changed( LocationEvent event ) {
System.out.println( "Changed to: " + event.location );
}
};
// see if we are in webstart mode
String webstartRoot = System.getProperty( "spoon.webstartroot" );
if ( webstartRoot != null ) {
URL url = new URL( webstartRoot + '/' + FILE_DOCUMENT_MAP );
addSpoonBrowser( STRING_DOCUMENT_TAB_NAME, url.toString(), listener ); // ./docs/English/tips/index.htm
} else {
// see if we can find the welcome file on the file system
File file = new File( FILE_DOCUMENT_MAP );
if ( file.exists() ) {
// ./docs/English/tips/index.htm
addSpoonBrowser( STRING_DOCUMENT_TAB_NAME, file.toURI().toURL().toString(), listener );
}
}
} catch ( MalformedURLException e1 ) {
log.logError( Const.getStackTracker( e1 ) );
}
}
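/**
* Rebuild the "open recent" menu from the last used files, shortening long file names and adding
* keyboard accelerators for the first nine entries.
*/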
public void addMenuLast() {
org.pentaho.ui.xul.dom.Document doc = mainSpoonContainer.getDocumentRoot();
JfaceMenupopup recentFilesPopup = (JfaceMenupopup) doc.getElementById( "file-open-recent-popup" );
recentFilesPopup.removeChildren();
// Previously loaded files...
List<LastUsedFile> lastUsedFiles = props.getLastUsedFiles();
for ( int i = 0; i < lastUsedFiles.size(); i++ ) {
final LastUsedFile lastUsedFile = lastUsedFiles.get( i );
char chr = (char) ( '1' + i );
String accessKey = "ctrl-" + chr;
String accessText = "CTRL-" + chr;
String text = lastUsedFile.toString();
String id = "last-file-" + i;
if ( i > 8 ) {
accessKey = null;
accessText = null;
}
final String lastFileId = Integer.toString( i );
Action action = new Action( "open-last-file-" + ( i + 1 ), Action.AS_DROP_DOWN_MENU ) {
public void run() {
lastFileSelect( lastFileId );
}
};
// shorten the filename if necessary
int targetLength = 40;
if ( text.length() > targetLength ) {
int lastSep = text.replace( '\\', '/' ).lastIndexOf( '/' );
if ( lastSep != -1 ) {
String fileName = "..." + text.substring( lastSep );
if ( fileName.length() < targetLength ) {
// add the start of the file path
int leadSize = targetLength - fileName.length();
text = text.substring( 0, leadSize ) + fileName;
} else {
text = fileName;
}
}
}
JfaceMenuitem miFileLast = new JfaceMenuitem( null, recentFilesPopup, mainSpoonContainer, text, 0, action );
miFileLast.setLabel( text );
miFileLast.setId( id );
if ( accessText != null && accessKey != null ) {
miFileLast.setAcceltext( accessText );
miFileLast.setAccesskey( accessKey );
}
if ( lastUsedFile.isTransformation() ) {
miFileLast.setImage( GUIResource.getInstance().getImageTransGraph() );
} else if ( lastUsedFile.isJob() ) {
miFileLast.setImage( GUIResource.getInstance().getImageJobGraph() );
}
miFileLast.setCommand( "spoon.lastFileSelect('" + i + "')" );
}
}
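/**
* Open one of the last used files. If the file lives in a repository we are not connected to,
* the user is first asked to log in to that repository before the file is loaded.
*/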
public void lastFileSelect( String id ) {
int idx = Integer.parseInt( id );
List<LastUsedFile> lastUsedFiles = props.getLastUsedFiles();
final LastUsedFile lastUsedFile = lastUsedFiles.get( idx );
// If the file comes from a repository and it's not the same as
// the one we're connected to, ask for a username/password!
//
if ( lastUsedFile.isSourceRepository()
&& ( rep == null || !rep.getName().equalsIgnoreCase( lastUsedFile.getRepositoryName() ) ) ) {
// Ask for a username password to get the required repository access
//
loginDialog = new RepositoriesDialog( shell, lastUsedFile.getRepositoryName(), new ILoginCallback() {
public void onSuccess( Repository repository ) {
// Close the previous connection...
if ( rep != null ) {
rep.disconnect();
SpoonPluginManager
.getInstance().notifyLifecycleListeners( SpoonLifeCycleEvent.REPOSITORY_DISCONNECTED );
}
setRepository( repository );
try {
loadLastUsedFile( lastUsedFile, rep == null ? null : rep.getName() );
addMenuLast();
} catch ( KettleException ke ) {
// "Error loading transformation", "I was unable to load this
// transformation from the
// XML file because of an error"
new ErrorDialog( loginDialog.getShell(),
BaseMessages.getString( PKG, "Spoon.Dialog.LoadTransformationError.Title" ),
BaseMessages.getString( PKG, "Spoon.Dialog.LoadTransformationError.Message" ), ke );
}
}
public void onError( Throwable t ) {
onLoginError( t );
}
public void onCancel() {
}
} );
loginDialog.show();
} else if ( !lastUsedFile.isSourceRepository() ) {
// This file must have been on the file system.
openFile( lastUsedFile.getFilename(), false );
} else {
// read from a repository...
//
try {
loadLastUsedFile( lastUsedFile, rep == null ? null : rep.getName() );
addMenuLast();
} catch ( KettleException ke ) {
// "Error loading transformation", "I was unable to load this
// transformation from the
// XML file because of an error"
new ErrorDialog( loginDialog.getShell(),
BaseMessages.getString( PKG, "Spoon.Dialog.LoadTransformationError.Title" ),
BaseMessages.getString( PKG, "Spoon.Dialog.LoadTransformationError.Message" ), ke );
}
}
}
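/**
* Build the left-hand panel: the View/Design tab folder, the selection filter box, the
* expand/collapse toolbar and the variable composite that holds the object trees.
*/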
private void addTree() {
mainComposite = new Composite( sashform, SWT.BORDER );
mainComposite.setLayout( new FormLayout() );
// int mainMargin = 4;
// TODO: add i18n keys
//
Label sep0 = new Label( mainComposite, SWT.SEPARATOR | SWT.HORIZONTAL );
sep0.setBackground( GUIResource.getInstance().getColorWhite() );
FormData fdSep0 = new FormData();
fdSep0.left = new FormAttachment( 0, 0 );
fdSep0.right = new FormAttachment( 100, 0 );
fdSep0.top = new FormAttachment( 0, 0 );
sep0.setLayoutData( fdSep0 );
// empty panel to correct background color.
Composite tabWrapper = new Composite( mainComposite, SWT.NONE );
tabWrapper.setLayout( new FormLayout() );
tabWrapper.setBackground( GUIResource.getInstance().getColorWhite() );
FormData fdTabWrapper = new FormData();
fdTabWrapper.left = new FormAttachment( 0, 0 );
fdTabWrapper.top = new FormAttachment( sep0, 0 );
fdTabWrapper.right = new FormAttachment( 100, 0 );
tabWrapper.setLayoutData( fdTabWrapper );
CTabFolder tabFolder = new CTabFolder( tabWrapper, SWT.HORIZONTAL | SWT.FLAT );
tabFolder.setSimple( false ); // use the "stylish" (curved border) tab rendering instead of the simple style
tabFolder.setBackground( GUIResource.getInstance().getColorWhite() );
tabFolder.setBorderVisible( false );
tabFolder.setSelectionBackground( new Color[] {
display.getSystemColor( SWT.COLOR_WIDGET_NORMAL_SHADOW ),
display.getSystemColor( SWT.COLOR_WIDGET_LIGHT_SHADOW ), }, new int[] { 55, }, true );
FormData fdTab = new FormData();
fdTab.left = new FormAttachment( 0, 0 );
fdTab.top = new FormAttachment( sep0, 0 );
fdTab.right = new FormAttachment( 100, 0 );
fdTab.height = 0;
tabFolder.setLayoutData( fdTab );
view = new CTabItem( tabFolder, SWT.NONE );
view.setControl( new Composite( tabFolder, SWT.NONE ) );
view.setText( STRING_SPOON_MAIN_TREE );
view.setImage( GUIResource.getInstance().getImageExploreSolutionSmall() );
design = new CTabItem( tabFolder, SWT.NONE );
design.setText( STRING_SPOON_CORE_OBJECTS_TREE );
design.setControl( new Composite( tabFolder, SWT.NONE ) );
design.setImage( GUIResource.getInstance().getImageEditSmall() );
Label sep3 = new Label( mainComposite, SWT.SEPARATOR | SWT.HORIZONTAL );
sep3.setBackground( GUIResource.getInstance().getColorWhite() );
FormData fdSep3 = new FormData();
fdSep3.left = new FormAttachment( 0, 0 );
fdSep3.right = new FormAttachment( 100, 0 );
fdSep3.top = new FormAttachment( tabWrapper, 0 );
sep3.setLayoutData( fdSep3 );
selectionLabel = new Label( mainComposite, SWT.HORIZONTAL );
FormData fdsLabel = new FormData();
fdsLabel.left = new FormAttachment( 0, 0 );
fdsLabel.top = new FormAttachment( sep3, 5 );
selectionLabel.setLayoutData( fdsLabel );
ToolBar treeTb = new ToolBar( mainComposite, SWT.HORIZONTAL | SWT.FLAT | SWT.BORDER );
ToolItem expandAll = new ToolItem( treeTb, SWT.PUSH );
expandAll.setImage( GUIResource.getInstance().getImageExpandAll() );
ToolItem collapseAll = new ToolItem( treeTb, SWT.PUSH );
collapseAll.setImage( GUIResource.getInstance().getImageCollapseAll() );
FormData fdTreeToolbar = new FormData();
fdTreeToolbar.top = new FormAttachment( sep3, 0 );
fdTreeToolbar.right = new FormAttachment( 95, 5 );
treeTb.setLayoutData( fdTreeToolbar );
selectionFilter =
new Text( mainComposite, SWT.SINGLE
| SWT.BORDER | SWT.LEFT | SWT.SEARCH | SWT.ICON_SEARCH | SWT.ICON_CANCEL );
selectionFilter.setToolTipText( BaseMessages.getString( PKG, "Spoon.SelectionFilter.Tooltip" ) );
FormData fdSelectionFilter = new FormData();
fdSelectionFilter.top =
new FormAttachment( treeTb, -( GUIResource.getInstance().getImageExpandAll().getBounds().height + 5 ) );
fdSelectionFilter.right = new FormAttachment( 95, -55 );
fdSelectionFilter.left = new FormAttachment( selectionLabel, 10 );
selectionFilter.setLayoutData( fdSelectionFilter );
selectionFilter.addModifyListener( new ModifyListener() {
public void modifyText( ModifyEvent arg0 ) {
if ( coreObjectsTree != null && !coreObjectsTree.isDisposed() ) {
previousShowTrans = false;
previousShowJob = false;
refreshCoreObjects();
if ( !Const.isEmpty( selectionFilter.getText() ) ) {
tidyBranches( coreObjectsTree.getItems(), true ); // expand all
} else { // no filter: collapse all
tidyBranches( coreObjectsTree.getItems(), false );
}
}
if ( selectionTree != null && !selectionTree.isDisposed() ) {
refreshTree();
if ( !Const.isEmpty( selectionFilter.getText() ) ) {
tidyBranches( selectionTree.getItems(), true ); // expand all
} else { // no filter: collapse all
tidyBranches( selectionTree.getItems(), false );
}
selectionFilter.setFocus();
}
}
} );
expandAll.addSelectionListener( new SelectionAdapter() {
public void widgetSelected( SelectionEvent event ) {
if ( designSelected ) {
tidyBranches( coreObjectsTree.getItems(), true );
}
if ( viewSelected ) {
tidyBranches( selectionTree.getItems(), true );
}
}
} );
collapseAll.addSelectionListener( new SelectionAdapter() {
public void widgetSelected( SelectionEvent event ) {
if ( designSelected ) {
tidyBranches( coreObjectsTree.getItems(), false );
}
if ( viewSelected ) {
tidyBranches( selectionTree.getItems(), false );
}
}
} );
tabFolder.addSelectionListener( new SelectionAdapter() {
@Override
public void widgetSelected( SelectionEvent arg0 ) {
if ( arg0.item == view ) {
setViewMode();
} else {
setDesignMode();
}
}
} );
Label sep4 = new Label( mainComposite, SWT.SEPARATOR | SWT.HORIZONTAL );
sep4.setBackground( GUIResource.getInstance().getColorWhite() );
FormData fdSep4 = new FormData();
fdSep4.left = new FormAttachment( 0, 0 );
fdSep4.right = new FormAttachment( 100, 0 );
fdSep4.top = new FormAttachment( treeTb, 5 );
sep4.setLayoutData( fdSep4 );
variableComposite = new Composite( mainComposite, SWT.NONE );
variableComposite.setBackground( GUIResource.getInstance().getColorBackground() );
variableComposite.setLayout( new FillLayout() );
FormData fdVariableComposite = new FormData();
fdVariableComposite.left = new FormAttachment( 0, 0 );
fdVariableComposite.right = new FormAttachment( 100, 0 );
fdVariableComposite.top = new FormAttachment( sep4, 0 );
fdVariableComposite.bottom = new FormAttachment( 100, 0 );
variableComposite.setLayoutData( fdVariableComposite );
disposeVariableComposite( true, false, false, false );
coreStepToolTipMap = new Hashtable<String, String>();
coreJobToolTipMap = new Hashtable<String, String>();
addDefaultKeyListeners( tabFolder );
addDefaultKeyListeners( mainComposite );
}
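/**
* Attach the default key bindings to the given control: CTRL-W or CTRL-F4 closes the active file,
* CTRL-F5 opens the metastore explorer.
*/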
public void addDefaultKeyListeners( Control control ) {
control.addKeyListener( new KeyAdapter() {
@Override
public void keyPressed( KeyEvent e ) {
// CTRL-W or CTRL-F4 : close tab
//
if ( ( e.keyCode == 'w' && ( e.stateMask & SWT.CONTROL ) != 0 )
|| ( e.keyCode == SWT.F4 && ( e.stateMask & SWT.CONTROL ) != 0 ) ) {
closeFile();
}
// CTRL-F5 : metastore explorer
//
if ( e.keyCode == SWT.F5 && ( e.stateMask & SWT.CONTROL ) != 0 ) {
new MetaStoreExplorerDialog( shell, metaStore ).open();
}
}
} );
}
public boolean setViewMode() {
if ( viewSelected ) {
return true;
}
selectionFilter.setText( "" ); // reset filter when switched to view
disposeVariableComposite( true, false, false, false );
refreshTree();
return false;
}
public boolean setDesignMode() {
if ( designSelected ) {
return true;
}
selectionFilter.setText( "" ); // reset filter when switched to design
disposeVariableComposite( false, false, true, false );
refreshCoreObjects();
return false;
}
private void tidyBranches( TreeItem[] items, boolean expand ) {
for ( TreeItem item : items ) {
item.setExpanded( expand );
tidyBranches( item.getItems(), expand );
}
}
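/**
* Dispose the children of the variable composite and switch the left-hand panel between the
* "View" (explorer) tree and the "Design" (core objects) tree.
*/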
public void disposeVariableComposite( boolean tree, boolean shared, boolean core, boolean history ) {
viewSelected = tree;
view.getParent().setSelection( viewSelected ? view : design );
designSelected = core;
// historySelected = history;
// sharedSelected = shared;
for ( Control control : variableComposite.getChildren() ) {
// PDI-1247 - these menus are coded for reuse, so make sure
// they don't get disposed of here (alert: dirty design)
if ( control instanceof Tree ) {
( control ).setMenu( null );
}
control.dispose();
}
previousShowTrans = false;
previousShowJob = false;
// stepHistoryChanged=true;
selectionLabel.setText( tree ? BaseMessages.getString( PKG, "Spoon.Explorer" ) : BaseMessages.getString(
PKG, "Spoon.Steps" ) );
}
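/**
* Create the core objects tree (the "Design" palette), including its selection, expand,
* tooltip and drag-and-drop behavior.
*/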
public void addCoreObjectsTree() {
// Now create a new expand bar inside that item
// We're going to put the core object in there
//
coreObjectsTree = new Tree( variableComposite, SWT.V_SCROLL | SWT.SINGLE );
props.setLook( coreObjectsTree );
coreObjectsTree.addSelectionListener( new SelectionAdapter() {
public void widgetSelected( SelectionEvent event ) {
// expand the selected tree item, collapse the rest
//
if ( props.getAutoCollapseCoreObjectsTree() ) {
TreeItem[] selection = coreObjectsTree.getSelection();
if ( selection.length == 1 ) {
// expand only if the click was on the top level entry...
//
TreeItem top = selection[0];
while ( top.getParentItem() != null ) {
top = top.getParentItem();
}
if ( top == selection[0] ) {
boolean expanded = top.getExpanded();
for ( TreeItem item : coreObjectsTree.getItems() ) {
item.setExpanded( false );
}
top.setExpanded( !expanded );
}
}
}
}
} );
coreObjectsTree.addTreeListener( new TreeAdapter() {
public void treeExpanded( TreeEvent treeEvent ) {
if ( props.getAutoCollapseCoreObjectsTree() ) {
TreeItem treeItem = (TreeItem) treeEvent.item;
/*
* Trick for SWT on Windows systems: a SelectionEvent is fired after the TreeEvent if setSelection() is not
* used here. Otherwise the first item in the list is selected by default and incorrectly collapsed again, see
* PDI-1480
*/
coreObjectsTree.setSelection( treeItem );
// expand the selected tree item, collapse the rest
//
for ( TreeItem item : coreObjectsTree.getItems() ) {
if ( item != treeItem ) {
item.setExpanded( false );
} else {
treeItem.setExpanded( true );
}
}
}
}
} );
coreObjectsTree.addMouseMoveListener( new MouseMoveListener() {
public void mouseMove( MouseEvent move ) {
// don't show tooltips in the tree if the option is not set
if ( !getProperties().showToolTips() ) {
return;
}
toolTip.hide();
TreeItem item = searchMouseOverTreeItem( coreObjectsTree.getItems(), move.x, move.y );
if ( item != null ) {
String name = item.getText();
String tip = coreStepToolTipMap.get( name );
if ( tip != null ) {
PluginInterface plugin = PluginRegistry.getInstance().findPluginWithName( StepPluginType.class, name );
if ( plugin != null ) {
Image image = GUIResource.getInstance().getImagesSteps().get( plugin.getIds()[0] );
if ( image == null ) {
toolTip.hide();
}
toolTip.setImage( image );
toolTip.setText( name + Const.CR + Const.CR + tip );
toolTip.show( new org.eclipse.swt.graphics.Point( move.x + 10, move.y + 10 ) );
}
}
tip = coreJobToolTipMap.get( name );
if ( tip != null ) {
PluginInterface plugin =
PluginRegistry.getInstance().findPluginWithName( JobEntryPluginType.class, name );
if ( plugin != null ) {
Image image = GUIResource.getInstance().getImagesJobentries().get( plugin.getIds()[0] );
toolTip.setImage( image );
toolTip.setText( name + Const.CR + Const.CR + tip );
toolTip.show( new org.eclipse.swt.graphics.Point( move.x + 10, move.y + 10 ) );
}
}
}
}
} );
addDragSourceToTree( coreObjectsTree );
addDefaultKeyListeners( coreObjectsTree );
coreObjectsTree.addMouseListener( new MouseAdapter() {
@Override
public void mouseDoubleClick( MouseEvent event ) {
boolean shift = ( event.stateMask & SWT.SHIFT ) != 0;
doubleClickedInTree( coreObjectsTree, shift );
}
} );
toolTip = new DefaultToolTip( variableComposite, ToolTip.RECREATE, true );
toolTip.setRespectMonitorBounds( true );
toolTip.setRespectDisplayBounds( true );
toolTip.setPopupDelay( 350 );
toolTip.setHideDelay( 5000 );
toolTip.setShift( new org.eclipse.swt.graphics.Point( ConstUI.TOOLTIP_OFFSET, ConstUI.TOOLTIP_OFFSET ) );
}
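/**
* Recursively search the given tree items for the item whose bounds contain the given coordinates.
*
* @return the matching tree item, or null if none was found
*/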
protected TreeItem searchMouseOverTreeItem( TreeItem[] treeItems, int x, int y ) {
for ( TreeItem treeItem : treeItems ) {
if ( treeItem.getBounds().contains( x, y ) ) {
return treeItem;
}
if ( treeItem.getItemCount() > 0 ) {
treeItem = searchMouseOverTreeItem( treeItem.getItems(), x, y );
if ( treeItem != null ) {
return treeItem;
}
}
}
return null;
}
private boolean previousShowTrans;
private boolean previousShowJob;
public boolean showTrans;
public boolean showJob;
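/**
* Refresh the core objects tree: show the available step plugins (plus the most used steps) when a
* transformation is active, or the job entry plugins when a job is active.
*/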
public void refreshCoreObjects() {
if ( shell.isDisposed() ) {
return;
}
if ( !designSelected ) {
return;
}
if ( coreObjectsTree == null || coreObjectsTree.isDisposed() ) {
addCoreObjectsTree();
}
showTrans = getActiveTransformation() != null;
showJob = getActiveJob() != null;
if ( showTrans == previousShowTrans && showJob == previousShowJob ) {
return;
}
// First remove all the entries that where present...
//
TreeItem[] expandItems = coreObjectsTree.getItems();
for ( TreeItem item : expandItems ) {
item.dispose();
}
if ( showTrans ) {
selectionLabel.setText( BaseMessages.getString( PKG, "Spoon.Steps" ) );
// Fill the base components...
//
// ////////////////////////////////////////////////////////////////////////////////////////////////
// TRANSFORMATIONS
// ////////////////////////////////////////////////////////////////////////////////////////////////
PluginRegistry registry = PluginRegistry.getInstance();
final List<PluginInterface> baseSteps = registry.getPlugins( StepPluginType.class );
final List<String> baseCategories = registry.getCategories( StepPluginType.class );
for ( String baseCategory : baseCategories ) {
TreeItem item = new TreeItem( coreObjectsTree, SWT.NONE );
item.setText( baseCategory );
item.setImage( GUIResource.getInstance().getImageArrow() );
for ( PluginInterface baseStep : baseSteps ) {
if ( baseStep.getCategory().equalsIgnoreCase( baseCategory ) ) {
final Image stepImage =
GUIResource.getInstance().getImagesStepsSmall().get( baseStep.getIds()[ 0 ] );
String pluginName = baseStep.getName();
String pluginDescription = baseStep.getDescription();
if ( !filterMatch( pluginName ) && !filterMatch( pluginDescription ) ) {
continue;
}
TreeItem stepItem = new TreeItem( item, SWT.NONE );
stepItem.setImage( stepImage );
stepItem.setText( pluginName );
stepItem.addListener( SWT.Selection, new Listener() {
public void handleEvent( Event event ) {
System.out.println( "Tree item Listener fired" );
}
} );
coreStepToolTipMap.put( pluginName, pluginDescription );
}
}
}
// Add History Items...
TreeItem item = new TreeItem( coreObjectsTree, SWT.NONE );
item.setText( BaseMessages.getString( PKG, "Spoon.History" ) );
item.setImage( GUIResource.getInstance().getImageArrow() );
List<ObjectUsageCount> pluginHistory = props.getPluginHistory();
// The top 10 at most, the rest is not interesting anyway
//
for ( int i = 0; i < pluginHistory.size() && i < 10; i++ ) {
ObjectUsageCount usage = pluginHistory.get( i );
PluginInterface stepPlugin =
PluginRegistry.getInstance().findPluginWithId( StepPluginType.class, usage.getObjectName() );
if ( stepPlugin != null ) {
final Image stepImage = GUIResource.getInstance().getImagesSteps().get( stepPlugin.getIds()[0] );
String pluginName = Const.NVL( stepPlugin.getName(), "" );
String pluginDescription = Const.NVL( stepPlugin.getDescription(), "" );
if ( !filterMatch( pluginName ) && !filterMatch( pluginDescription ) ) {
continue;
}
TreeItem stepItem = new TreeItem( item, SWT.NONE );
stepItem.setImage( stepImage );
stepItem.setText( pluginName );
stepItem.addListener( SWT.Selection, new Listener() {
public void handleEvent( Event event ) {
System.out.println( "Tree item Listener fired" );
}
} );
coreStepToolTipMap.put( stepPlugin.getDescription(), pluginDescription + " (" + usage.getNrUses() + ")" );
}
}
}
if ( showJob ) {
// Fill the base components...
//
// ////////////////////////////////////////////////////////////////////////////////////////////////
// JOBS
// ////////////////////////////////////////////////////////////////////////////////////////////////
selectionLabel.setText( BaseMessages.getString( PKG, "Spoon.Entries" ) );
PluginRegistry registry = PluginRegistry.getInstance();
List<PluginInterface> baseJobEntries = registry.getPlugins( JobEntryPluginType.class );
List<String> baseCategories = registry.getCategories( JobEntryPluginType.class );
TreeItem generalItem = null;
for ( String baseCategory : baseCategories ) {
TreeItem item = new TreeItem( coreObjectsTree, SWT.NONE );
item.setText( baseCategory );
item.setImage( GUIResource.getInstance().getImageArrow() );
if ( baseCategory.equalsIgnoreCase( JobEntryPluginType.GENERAL_CATEGORY ) ) {
generalItem = item;
}
for ( int j = 0; j < baseJobEntries.size(); j++ ) {
if ( !baseJobEntries.get( j ).getIds()[ 0 ].equals( "SPECIAL" ) ) {
if ( baseJobEntries.get( j ).getCategory().equalsIgnoreCase( baseCategory ) ) {
final Image jobEntryImage =
GUIResource.getInstance().getImagesJobentriesSmall().get( baseJobEntries.get( j ).getIds()[ 0 ] );
String pluginName = Const.NVL( baseJobEntries.get( j ).getName(), "" );
String pluginDescription = Const.NVL( baseJobEntries.get( j ).getDescription(), "" );
if ( !filterMatch( pluginName ) && !filterMatch( pluginDescription ) ) {
continue;
}
TreeItem stepItem = new TreeItem( item, SWT.NONE );
stepItem.setImage( jobEntryImage );
stepItem.setText( pluginName );
stepItem.addListener( SWT.Selection, new Listener() {
public void handleEvent( Event arg0 ) {
System.out.println( "Tree item Listener fired" );
}
} );
// if (isPlugin)
// stepItem.setFont(GUIResource.getInstance().getFontBold());
coreJobToolTipMap.put( pluginName, pluginDescription );
}
}
}
}
// First add a few special entries: Start, Dummy, OK, ERROR.
// We add these to the top of the general category; we don't care about
// the sort order here.
//
JobEntryCopy startEntry = JobMeta.createStartEntry();
JobEntryCopy dummyEntry = JobMeta.createDummyEntry();
String[] specialText = new String[] { startEntry.getName(), dummyEntry.getName(), };
String[] specialTooltip = new String[] { startEntry.getDescription(), dummyEntry.getDescription(), };
Image[] specialImage =
new Image[] {
GUIResource.getInstance().getImageStartSmall(), GUIResource.getInstance().getImageDummySmall() };
for ( int i = 0; i < specialText.length; i++ ) {
TreeItem specialItem = new TreeItem( generalItem, SWT.NONE, i );
specialItem.setImage( specialImage[i] );
specialItem.setText( specialText[i] );
specialItem.addListener( SWT.Selection, new Listener() {
public void handleEvent( Event arg0 ) {
System.out.println( "Tree item Listener fired" );
}
} );
coreJobToolTipMap.put( specialText[i], specialTooltip[i] );
}
}
variableComposite.layout( true, true );
previousShowTrans = showTrans;
previousShowJob = showJob;
}
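/**
* Mark the given object as shared and store it in the shared objects file of the active
* transformation or job.
*/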
protected void shareObject( SharedObjectInterface sharedObject ) {
sharedObject.setShared( true );
EngineMetaInterface meta = getActiveMeta();
try {
if ( meta != null ) {
SharedObjects sharedObjects = null;
if ( meta instanceof TransMeta ) {
sharedObjects = ( (TransMeta) meta ).getSharedObjects();
}
if ( meta instanceof JobMeta ) {
sharedObjects = ( (JobMeta) meta ).getSharedObjects();
}
if ( sharedObjects != null ) {
sharedObjects.storeObject( sharedObject );
sharedObjects.saveToFile();
}
}
} catch ( Exception e ) {
new ErrorDialog(
shell, BaseMessages.getString( PKG, "Spoon.Dialog.ErrorWritingSharedObjects.Title" ), BaseMessages
.getString( PKG, "Spoon.Dialog.ErrorWritingSharedObjects.Message" ), e );
}
refreshTree();
}
protected void unShareObject( SharedObjectInterface sharedObject ) {
MessageBox mb = new MessageBox( shell, SWT.YES | SWT.NO | SWT.ICON_WARNING );
// "Are you sure you want to stop sharing?"
mb.setMessage( BaseMessages.getString( PKG, "Spoon.Dialog.StopSharing.Message" ) );
mb.setText( BaseMessages.getString( PKG, "Spoon.Dialog.StopSharing.Title" ) ); // Warning!
int answer = mb.open();
if ( answer == SWT.YES ) {
sharedObject.setShared( false );
EngineMetaInterface meta = getActiveMeta();
try {
if ( meta != null ) {
SharedObjects sharedObjects = null;
if ( meta instanceof TransMeta ) {
sharedObjects = ( (TransMeta) meta ).getSharedObjects();
}
if ( meta instanceof JobMeta ) {
sharedObjects = ( (JobMeta) meta ).getSharedObjects();
}
if ( sharedObjects != null ) {
sharedObjects.removeObject( sharedObject );
sharedObjects.saveToFile();
}
}
} catch ( Exception e ) {
new ErrorDialog(
shell, BaseMessages.getString( PKG, "Spoon.Dialog.ErrorWritingSharedObjects.Title" ), BaseMessages
.getString( PKG, "Spoon.Dialog.ErrorWritingSharedObjects.Message" ), e );
}
refreshTree();
}
}
/**
* @return The object that is selected in the tree, or null if it could not be determined (titles etc. return null)
*/
public TreeSelection[] getTreeObjects( final Tree tree ) {
return delegates.tree.getTreeObjects( tree, selectionTree, coreObjectsTree );
}
private void addDragSourceToTree( final Tree tree ) {
delegates.tree.addDragSourceToTree( tree, selectionTree, coreObjectsTree );
}
public void hideToolTips() {
if ( toolTip != null ) {
toolTip.hide();
}
}
/**
* If you click in the tree, you might want to show the corresponding window.
*/
public void showSelection() {
TreeSelection[] objects = getTreeObjects( selectionTree );
if ( objects.length != 1 ) {
return; // not yet supported, we can do this later when the OSX bug
// goes away
}
TreeSelection object = objects[0];
final Object selection = object.getSelection();
final Object parent = object.getParent();
TransMeta transMeta = null;
if ( selection instanceof TransMeta ) {
transMeta = (TransMeta) selection;
}
if ( parent instanceof TransMeta ) {
transMeta = (TransMeta) parent;
}
if ( transMeta != null ) {
TabMapEntry entry = delegates.tabs.findTabMapEntry( transMeta );
if ( entry != null ) {
int current = tabfolder.getSelectedIndex();
int desired = tabfolder.indexOf( entry.getTabItem() );
if ( current != desired ) {
tabfolder.setSelected( desired );
}
transMeta.setInternalKettleVariables();
if ( getCoreObjectsState() != STATE_CORE_OBJECTS_SPOON ) {
// Switch the core objects in the lower left corner to the
// spoon trans types
refreshCoreObjects();
}
}
}
JobMeta jobMeta = null;
if ( selection instanceof JobMeta ) {
jobMeta = (JobMeta) selection;
}
if ( parent instanceof JobMeta ) {
jobMeta = (JobMeta) parent;
}
if ( jobMeta != null ) {
TabMapEntry entry = delegates.tabs.findTabMapEntry( jobMeta );
if ( entry != null ) {
int current = tabfolder.getSelectedIndex();
int desired = tabfolder.indexOf( entry.getTabItem() );
if ( current != desired ) {
tabfolder.setSelected( desired );
}
jobMeta.setInternalKettleVariables();
if ( getCoreObjectsState() != STATE_CORE_OBJECTS_CHEF ) {
// Switch the core objects in the lower left corner to the
// spoon job types
//
refreshCoreObjects();
}
}
}
}
private Object selectionObjectParent = null;
private Object selectionObject = null;
public void newHop() {
newHop( (TransMeta) selectionObjectParent );
}
public void sortHops() {
( (TransMeta) selectionObjectParent ).sortHops();
refreshTree();
}
public void newDatabasePartitioningSchema() {
TransMeta transMeta = getActiveTransformation();
if ( transMeta != null ) {
newPartitioningSchema( transMeta );
}
}
public void newClusteringSchema() {
TransMeta transMeta = getActiveTransformation();
if ( transMeta != null ) {
newClusteringSchema( transMeta );
}
}
public void newSlaveServer() {
newSlaveServer( (HasSlaveServersInterface) selectionObjectParent );
}
public void editTransformationPropertiesPopup() {
TransGraph.editProperties( (TransMeta) selectionObject, this, rep, true );
}
public void addTransLog() {
TransGraph activeTransGraph = getActiveTransGraph();
if ( activeTransGraph != null ) {
activeTransGraph.transLogDelegate.addTransLog();
activeTransGraph.transGridDelegate.addTransGrid();
}
}
public void addTransHistory() {
TransGraph activeTransGraph = getActiveTransGraph();
if ( activeTransGraph != null ) {
activeTransGraph.transHistoryDelegate.addTransHistory();
}
}
public boolean editJobProperties( String id ) {
if ( "job-settings".equals( id ) ) {
return JobGraph.editProperties( getActiveJob(), this, rep, true );
} else if ( "job-inst-settings".equals( id ) ) {
return JobGraph.editProperties( (JobMeta) selectionObject, this, rep, true );
}
return false;
}
public void editJobPropertiesPopup() {
JobGraph.editProperties( (JobMeta) selectionObject, this, rep, true );
}
public void addJobLog() {
JobGraph activeJobGraph = getActiveJobGraph();
if ( activeJobGraph != null ) {
activeJobGraph.jobLogDelegate.addJobLog();
activeJobGraph.jobGridDelegate.addJobGrid();
}
}
public void addJobHistory() {
addJobHistory( (JobMeta) selectionObject, true );
}
public void newStep() {
newStep( getActiveTransformation() );
}
public void editConnection() {
if ( RepositorySecurityUI.verifyOperations( shell, rep, RepositoryOperation.MODIFY_DATABASE ) ) {
return;
}
final DatabaseMeta databaseMeta = (DatabaseMeta) selectionObject;
delegates.db.editConnection( databaseMeta );
}
public void dupeConnection() {
final DatabaseMeta databaseMeta = (DatabaseMeta) selectionObject;
final HasDatabasesInterface hasDatabasesInterface = (HasDatabasesInterface) selectionObjectParent;
delegates.db.dupeConnection( hasDatabasesInterface, databaseMeta );
}
public void clipConnection() {
final DatabaseMeta databaseMeta = (DatabaseMeta) selectionObject;
delegates.db.clipConnection( databaseMeta );
}
public void delConnection() {
if ( RepositorySecurityUI.verifyOperations( shell, rep, RepositoryOperation.DELETE_DATABASE ) ) {
return;
}
final DatabaseMeta databaseMeta = (DatabaseMeta) selectionObject;
MessageBox mb = new MessageBox( shell, SWT.YES | SWT.NO | SWT.ICON_QUESTION );
mb.setMessage( BaseMessages.getString(
PKG, "Spoon.ExploreDB.DeleteConnectionAsk.Message", databaseMeta.getName() ) );
mb.setText( BaseMessages.getString( PKG, "Spoon.ExploreDB.DeleteConnectionAsk.Title" ) );
int response = mb.open();
if ( response != SWT.YES ) {
return;
}
final HasDatabasesInterface hasDatabasesInterface = (HasDatabasesInterface) selectionObjectParent;
delegates.db.delConnection( hasDatabasesInterface, databaseMeta );
}
public void sqlConnection() {
final DatabaseMeta databaseMeta = (DatabaseMeta) selectionObject;
delegates.db.sqlConnection( databaseMeta );
}
public void clearDBCache( String id ) {
if ( "database-class-clear-cache".equals( id ) ) {
delegates.db.clearDBCache( null );
}
if ( "database-inst-clear-cache".equals( id ) ) {
final DatabaseMeta databaseMeta = (DatabaseMeta) selectionObject;
delegates.db.clearDBCache( databaseMeta );
}
}
public void exploreDatabase() {
if ( RepositorySecurityUI.verifyOperations( shell, rep, RepositoryOperation.EXPLORE_DATABASE ) ) {
return;
}
// Show a minimal window to allow you to quickly select the database
// connection to explore
//
List<DatabaseMeta> databases = new ArrayList<DatabaseMeta>();
// First load the connections from the loaded file
//
HasDatabasesInterface databasesInterface = getActiveHasDatabasesInterface();
if ( databasesInterface != null ) {
databases.addAll( databasesInterface.getDatabases() );
}
// Overwrite the information with the connections from the repository
//
if ( rep != null ) {
try {
List<DatabaseMeta> list = rep.readDatabases();
for ( DatabaseMeta databaseMeta : list ) {
int index = databases.indexOf( databaseMeta );
if ( index < 0 ) {
databases.add( databaseMeta );
} else {
databases.set( index, databaseMeta );
}
}
} catch ( KettleException e ) {
log.logError( "Unexpected repository error", e.getMessage() );
}
}
if ( databases.size() == 0 ) {
return;
}
// OK, get a list of all the database names...
//
String[] databaseNames = new String[databases.size()];
for ( int i = 0; i < databases.size(); i++ ) {
databaseNames[i] = databases.get( i ).getName();
}
// show the shell...
//
EnterSelectionDialog dialog = new EnterSelectionDialog( shell, databaseNames,
BaseMessages.getString( PKG, "Spoon.ExploreDB.SelectDB.Title" ),
BaseMessages.getString( PKG, "Spoon.ExploreDB.SelectDB.Message" ) );
String name = dialog.open();
if ( name != null ) {
selectionObject = DatabaseMeta.findDatabase( databases, name );
exploreDB();
}
}
public void exploreDB() {
final DatabaseMeta databaseMeta = (DatabaseMeta) selectionObject;
delegates.db.exploreDB( databaseMeta, true );
}
public void editStep() {
final TransMeta transMeta = (TransMeta) selectionObjectParent;
final StepMeta stepMeta = (StepMeta) selectionObject;
delegates.steps.editStep( transMeta, stepMeta );
}
public void dupeStep() {
final TransMeta transMeta = (TransMeta) selectionObjectParent;
final StepMeta stepMeta = (StepMeta) selectionObject;
delegates.steps.dupeStep( transMeta, stepMeta );
}
public void delStep() {
final TransMeta transMeta = (TransMeta) selectionObjectParent;
final StepMeta stepMeta = (StepMeta) selectionObject;
delegates.steps.delStep( transMeta, stepMeta );
}
public void helpStep() {
final StepMeta stepMeta = (StepMeta) selectionObject;
PluginInterface stepPlugin =
PluginRegistry.getInstance().findPluginWithId( StepPluginType.class, stepMeta.getStepID() );
HelpUtils.openHelpDialog( shell, stepPlugin );
}
public void shareObject( String id ) {
if ( "database-inst-share".equals( id ) ) {
final DatabaseMeta databaseMeta = (DatabaseMeta) selectionObject;
if ( databaseMeta.isShared() ) {
unShareObject( databaseMeta );
} else {
shareObject( databaseMeta );
}
}
if ( "step-inst-share".equals( id ) ) {
final StepMeta stepMeta = (StepMeta) selectionObject;
shareObject( stepMeta );
}
if ( "partition-schema-inst-share".equals( id ) ) {
final PartitionSchema partitionSchema = (PartitionSchema) selectionObject;
shareObject( partitionSchema );
}
if ( "cluster-schema-inst-share".equals( id ) ) {
final ClusterSchema clusterSchema = (ClusterSchema) selectionObject;
shareObject( clusterSchema );
}
if ( "slave-server-inst-share".equals( id ) ) {
final SlaveServer slaveServer = (SlaveServer) selectionObject;
shareObject( slaveServer );
}
}
public void editJobEntry() {
final JobMeta jobMeta = (JobMeta) selectionObjectParent;
final JobEntryCopy jobEntry = (JobEntryCopy) selectionObject;
editJobEntry( jobMeta, jobEntry );
}
public void dupeJobEntry() {
final JobMeta jobMeta = (JobMeta) selectionObjectParent;
final JobEntryCopy jobEntry = (JobEntryCopy) selectionObject;
delegates.jobs.dupeJobEntry( jobMeta, jobEntry );
}
public void deleteJobEntryCopies() {
final JobMeta jobMeta = (JobMeta) selectionObjectParent;
final JobEntryCopy jobEntry = (JobEntryCopy) selectionObject;
deleteJobEntryCopies( jobMeta, jobEntry );
}
public void helpJobEntry() {
final JobEntryCopy jobEntry = (JobEntryCopy) selectionObject;
String jobName = jobEntry.getName();
PluginInterface jobEntryPlugin =
PluginRegistry.getInstance().findPluginWithName( JobEntryPluginType.class, jobName );
HelpUtils.openHelpDialog( shell, jobEntryPlugin );
}
public void editHop() {
final TransMeta transMeta = (TransMeta) selectionObjectParent;
final TransHopMeta transHopMeta = (TransHopMeta) selectionObject;
editHop( transMeta, transHopMeta );
}
public void delHop() {
final TransMeta transMeta = (TransMeta) selectionObjectParent;
final TransHopMeta transHopMeta = (TransHopMeta) selectionObject;
delHop( transMeta, transHopMeta );
}
public void editPartitionSchema() {
final TransMeta transMeta = (TransMeta) selectionObjectParent;
final PartitionSchema partitionSchema = (PartitionSchema) selectionObject;
editPartitionSchema( transMeta, partitionSchema );
}
public void delPartitionSchema() {
final TransMeta transMeta = (TransMeta) selectionObjectParent;
final PartitionSchema partitionSchema = (PartitionSchema) selectionObject;
delPartitionSchema( transMeta, partitionSchema );
}
public void editClusterSchema() {
final TransMeta transMeta = (TransMeta) selectionObjectParent;
final ClusterSchema clusterSchema = (ClusterSchema) selectionObject;
editClusterSchema( transMeta, clusterSchema );
}
public void delClusterSchema() {
final TransMeta transMeta = (TransMeta) selectionObjectParent;
final ClusterSchema clusterSchema = (ClusterSchema) selectionObject;
delClusterSchema( transMeta, clusterSchema );
}
public void monitorClusterSchema() throws KettleException {
final ClusterSchema clusterSchema = (ClusterSchema) selectionObject;
monitorClusterSchema( clusterSchema );
}
public void editSlaveServer() {
final SlaveServer slaveServer = (SlaveServer) selectionObject;
editSlaveServer( slaveServer );
}
public void delSlaveServer() {
final HasSlaveServersInterface hasSlaveServersInterface = (HasSlaveServersInterface) selectionObjectParent;
final SlaveServer slaveServer = (SlaveServer) selectionObject;
delSlaveServer( hasSlaveServersInterface, slaveServer );
}
public void addSpoonSlave() {
final SlaveServer slaveServer = (SlaveServer) selectionObject;
addSpoonSlave( slaveServer );
}
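/**
* Show the context menu that matches the current selection in the given tree, using the popup
* menus registered in createPopupMenus().
*/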
private synchronized void setMenu( Tree tree ) {
TreeSelection[] objects = getTreeObjects( tree );
if ( objects.length != 1 ) {
return; // not yet supported, we can do this later when the OSX bug
// goes away
}
TreeSelection object = objects[0];
selectionObject = object.getSelection();
Object selection = selectionObject;
selectionObjectParent = object.getParent();
// Not clicked on a real object: returns a class
XulMenupopup spoonMenu = null;
if ( selection instanceof Class<?> ) {
if ( selection.equals( TransMeta.class ) ) {
// New
spoonMenu = (XulMenupopup) menuMap.get( "trans-class" );
} else if ( selection.equals( JobMeta.class ) ) {
// New
spoonMenu = (XulMenupopup) menuMap.get( "job-class" );
} else if ( selection.equals( TransHopMeta.class ) ) {
// New
spoonMenu = (XulMenupopup) menuMap.get( "trans-hop-class" );
} else if ( selection.equals( DatabaseMeta.class ) ) {
spoonMenu = (XulMenupopup) menuMap.get( "database-class" );
} else if ( selection.equals( PartitionSchema.class ) ) {
// New
spoonMenu = (XulMenupopup) menuMap.get( "partition-schema-class" );
} else if ( selection.equals( ClusterSchema.class ) ) {
spoonMenu = (XulMenupopup) menuMap.get( "cluster-schema-class" );
} else if ( selection.equals( SlaveServer.class ) ) {
spoonMenu = (XulMenupopup) menuMap.get( "slave-cluster-class" );
} else {
spoonMenu = null;
}
} else {
if ( selection instanceof TransMeta ) {
spoonMenu = (XulMenupopup) menuMap.get( "trans-inst" );
} else if ( selection instanceof JobMeta ) {
spoonMenu = (XulMenupopup) menuMap.get( "job-inst" );
} else if ( selection instanceof PluginInterface ) {
spoonMenu = (XulMenupopup) menuMap.get( "step-plugin" );
} else if ( selection instanceof DatabaseMeta ) {
spoonMenu = (XulMenupopup) menuMap.get( "database-inst" );
// disable for now if the connection is an SAP ERP type of database...
//
XulMenuitem item =
(XulMenuitem) mainSpoonContainer.getDocumentRoot().getElementById( "database-inst-explore" );
if ( item != null ) {
final DatabaseMeta databaseMeta = (DatabaseMeta) selection;
item.setDisabled( !databaseMeta.isExplorable() );
}
item = (XulMenuitem) mainSpoonContainer.getDocumentRoot().getElementById( "database-inst-clear-cache" );
if ( item != null ) {
final DatabaseMeta databaseMeta = (DatabaseMeta) selectionObject;
item.setLabel( BaseMessages.getString( PKG, "Spoon.Menu.Popup.CONNECTIONS.ClearDBCache" )
+ databaseMeta.getName() ); // Clear
}
item = (XulMenuitem) mainSpoonContainer.getDocumentRoot().getElementById( "database-inst-share" );
if ( item != null ) {
final DatabaseMeta databaseMeta = (DatabaseMeta) selection;
if ( databaseMeta.isShared() ) {
item.setLabel( BaseMessages.getString( PKG, "Spoon.Menu.Popup.CONNECTIONS.UnShare" ) );
} else {
item.setLabel( BaseMessages.getString( PKG, "Spoon.Menu.Popup.CONNECTIONS.Share" ) );
}
}
} else if ( selection instanceof StepMeta ) {
spoonMenu = (XulMenupopup) menuMap.get( "step-inst" );
} else if ( selection instanceof JobEntryCopy ) {
spoonMenu = (XulMenupopup) menuMap.get( "job-entry-copy-inst" );
} else if ( selection instanceof TransHopMeta ) {
spoonMenu = (XulMenupopup) menuMap.get( "trans-hop-inst" );
} else if ( selection instanceof PartitionSchema ) {
spoonMenu = (XulMenupopup) menuMap.get( "partition-schema-inst" );
} else if ( selection instanceof ClusterSchema ) {
spoonMenu = (XulMenupopup) menuMap.get( "cluster-schema-inst" );
} else if ( selection instanceof SlaveServer ) {
spoonMenu = (XulMenupopup) menuMap.get( "slave-server-inst" );
}
}
if ( spoonMenu != null ) {
ConstUI.displayMenu( spoonMenu, tree );
} else {
tree.setMenu( null );
}
}
/**
* Reaction to double click
*
*/
private void doubleClickedInTree( Tree tree ) {
doubleClickedInTree( tree, false );
}
/**
* Reaction to double click
*
*/
private void doubleClickedInTree( Tree tree, boolean shift ) {
TreeSelection[] objects = getTreeObjects( tree );
if ( objects.length != 1 ) {
return; // not yet supported, we can do this later when the OSX bug
// goes away
}
TreeSelection object = objects[0];
final Object selection = object.getSelection();
final Object parent = object.getParent();
if ( selection instanceof Class<?> ) {
if ( selection.equals( TransMeta.class ) ) {
newTransFile();
}
if ( selection.equals( JobMeta.class ) ) {
newJobFile();
}
if ( selection.equals( TransHopMeta.class ) ) {
newHop( (TransMeta) parent );
}
if ( selection.equals( DatabaseMeta.class ) ) {
delegates.db.newConnection();
}
if ( selection.equals( PartitionSchema.class ) ) {
newPartitioningSchema( (TransMeta) parent );
}
if ( selection.equals( ClusterSchema.class ) ) {
newClusteringSchema( (TransMeta) parent );
}
if ( selection.equals( SlaveServer.class ) ) {
newSlaveServer( (HasSlaveServersInterface) parent );
}
} else {
if ( selection instanceof TransMeta ) {
TransGraph.editProperties( (TransMeta) selection, this, rep, true );
}
if ( selection instanceof JobMeta ) {
JobGraph.editProperties( (JobMeta) selection, this, rep, true );
}
if ( selection instanceof PluginInterface ) {
PluginInterface plugin = (PluginInterface) selection;
if ( plugin.getPluginType().equals( StepPluginType.class ) ) {
TransGraph transGraph = getActiveTransGraph();
if ( transGraph != null ) {
transGraph.addStepToChain( plugin, shift );
}
}
if ( plugin.getPluginType().equals( JobEntryPluginType.class ) ) {
JobGraph jobGraph = getActiveJobGraph();
if ( jobGraph != null ) {
jobGraph.addJobEntryToChain( object.getItemText(), shift );
}
}
// newStep( getActiveTransformation() );
}
if ( selection instanceof DatabaseMeta ) {
delegates.db.editConnection( (DatabaseMeta) selection );
}
if ( selection instanceof StepMeta ) {
delegates.steps.editStep( (TransMeta) parent, (StepMeta) selection );
}
if ( selection instanceof JobEntryCopy ) {
editJobEntry( (JobMeta) parent, (JobEntryCopy) selection );
}
if ( selection instanceof TransHopMeta ) {
editHop( (TransMeta) parent, (TransHopMeta) selection );
}
if ( selection instanceof PartitionSchema ) {
editPartitionSchema( (TransMeta) parent, (PartitionSchema) selection );
}
if ( selection instanceof ClusterSchema ) {
editClusterSchema( (TransMeta) parent, (ClusterSchema) selection );
}
if ( selection instanceof SlaveServer ) {
editSlaveServer( (SlaveServer) selection );
}
}
}
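/**
* Opens a Spoon slave monitoring tab for every slave server defined in the given cluster schema.
*
* @param clusterSchema the cluster schema whose slave servers should be monitored
* @throws KettleException in case a slave server tab cannot be added
*/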
protected void monitorClusterSchema( ClusterSchema clusterSchema ) throws KettleException {
for ( int i = 0; i < clusterSchema.getSlaveServers().size(); i++ ) {
SlaveServer slaveServer = clusterSchema.getSlaveServers().get( i );
addSpoonSlave( slaveServer );
}
}
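/**
* Opens the slave server dialog for the given slave server and refreshes the tree and graph when the dialog is confirmed.
*
* @param slaveServer the slave server to edit
*/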
protected void editSlaveServer( SlaveServer slaveServer ) {
// slaveServer.getVariable("MASTER_HOST")
SlaveServerDialog dialog = new SlaveServerDialog( shell, slaveServer );
if ( dialog.open() ) {
refreshTree();
refreshGraph();
}
}
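/**
* (Re-)creates the tab folder on the right-hand side of the sash form, including the tab context menu (close,
* close all, close others) and the sash width limit, and registers this class as the tab listener.
*/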
private void addTabs() {
if ( tabComp != null ) {
tabComp.dispose();
}
tabComp = new Composite( sashform, SWT.BORDER );
props.setLook( tabComp );
tabComp.setLayout( new FillLayout() );
tabfolder = new TabSet( tabComp );
tabfolder.setChangedFont( GUIResource.getInstance().getFontBold() );
props.setLook( tabfolder.getSwtTabset(), Props.WIDGET_STYLE_TAB );
final CTabFolder cTabFolder = tabfolder.getSwtTabset();
cTabFolder.addMenuDetectListener( new MenuDetectListener() {
@Override
public void menuDetected( MenuDetectEvent event ) {
org.eclipse.swt.graphics.Point real = new org.eclipse.swt.graphics.Point( event.x, event.y );
org.eclipse.swt.graphics.Point point = display.map( null, cTabFolder, real );
final CTabItem item = cTabFolder.getItem( point );
if ( item != null ) {
Menu menu = new Menu( cTabFolder );
MenuItem closeItem = new MenuItem( menu, SWT.NONE );
closeItem.setText( BaseMessages.getString( PKG, "Spoon.Tab.Close" ) );
closeItem.addSelectionListener( new SelectionAdapter() {
@Override
public void widgetSelected( SelectionEvent event ) {
int index = tabfolder.getSwtTabset().indexOf( item );
if ( index >= 0 ) {
TabMapEntry entry = delegates.tabs.getTabs().get( index );
tabClose( entry.getTabItem() );
}
}
} );
MenuItem closeAllItems = new MenuItem( menu, SWT.NONE );
closeAllItems.setText( BaseMessages.getString( PKG, "Spoon.Tab.CloseAll" ) );
closeAllItems.addSelectionListener( new SelectionAdapter() {
@Override
public void widgetSelected( SelectionEvent event ) {
for ( TabMapEntry entry : delegates.tabs.getTabs() ) {
tabClose( entry.getTabItem() );
}
}
} );
MenuItem closeOtherItems = new MenuItem( menu, SWT.NONE );
closeOtherItems.setText( BaseMessages.getString( PKG, "Spoon.Tab.CloseOthers" ) );
closeOtherItems.addSelectionListener( new SelectionAdapter() {
@Override
public void widgetSelected( SelectionEvent event ) {
int index = tabfolder.getSwtTabset().indexOf( item );
if ( index >= 0 ) {
TabMapEntry entry = delegates.tabs.getTabs().get( index );
for ( TabMapEntry closeEntry : delegates.tabs.getTabs() ) {
if ( !closeEntry.equals( entry ) ) {
tabClose( closeEntry.getTabItem() );
}
}
}
}
} );
menu.setLocation( real );
menu.setVisible( true );
}
}
} );
int[] weights = props.getSashWeights();
sashform.setWeights( weights );
sashform.setVisible( true );
// Set a minimum width on the sash so that the view and design buttons
// on the left panel are always visible.
//
Control[] comps = sashform.getChildren();
for ( Control comp : comps ) {
if ( comp instanceof Sash ) {
int limit = 10;
final int SASH_LIMIT = Const.isOSX() ? 150 : limit;
final Sash sash = (Sash) comp;
sash.addSelectionListener( new SelectionAdapter() {
public void widgetSelected( SelectionEvent event ) {
Rectangle rect = sash.getParent().getClientArea();
event.x = Math.min( Math.max( event.x, SASH_LIMIT ), rect.width - SASH_LIMIT );
if ( event.detail != SWT.DRAG ) {
sash.setBounds( event.x, event.y, event.width, event.height );
sashform.layout();
}
}
} );
}
}
tabfolder.addListener( this ); // methods: tabDeselected, tabClose,
// tabSelected
}
public void tabDeselected( TabItem item ) {
}
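/**
* Called by the file-close menu item. Closes the selected tab in the ETL perspective or, for other perspectives,
* tries to delegate the close to the active perspective.
*
* @return true if a tab was closed, false otherwise
*/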
public boolean tabCloseSelected() {
// this gets called on by the file-close menu item
String activePerspectiveId = SpoonPerspectiveManager.getInstance().getActivePerspective().getId();
boolean etlPerspective = activePerspectiveId.equals( MainSpoonPerspective.ID );
if ( etlPerspective ) {
return tabClose( tabfolder.getSelected() );
}
// hack to make the plugins see file-close commands
// this should be resolved properly when resolving PDI-6054
// maybe by extending the SpoonPerspectiveInterface to register event handlers from Spoon?
try {
SpoonPerspective activePerspective = SpoonPerspectiveManager.getInstance().getActivePerspective();
Class<? extends SpoonPerspective> cls = activePerspective.getClass();
Method m = cls.getMethod( "onFileClose" );
return (Boolean) m.invoke( activePerspective );
} catch ( Exception e ) {
// ignore any errors resulting from the hack
// e.printStackTrace();
}
return false;
}
public boolean tabClose( TabItem item ) {
try {
return delegates.tabs.tabClose( item );
} catch ( Exception e ) {
new ErrorDialog( shell, "Error", "Unexpected error closing tab!", e );
return false;
}
}
public TabSet getTabSet() {
return tabfolder;
}
public void tabSelected( TabItem item ) {
delegates.tabs.tabSelected( item );
enableMenus();
}
public String getRepositoryName() {
if ( rep == null ) {
return null;
}
return rep.getName();
}
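/**
* Pastes steps, hops, notepads and step error handling from the given XML clipboard content into the
* transformation, offsetting the pasted objects to the given location and registering undo information.
*
* @param transMeta the transformation to paste into
* @param clipcontent the XML clipboard content
* @param loc the location at which to paste
*/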
public void pasteXML( TransMeta transMeta, String clipcontent, Point loc ) {
if ( RepositorySecurityUI.verifyOperations( shell, rep,
RepositoryOperation.MODIFY_TRANSFORMATION, RepositoryOperation.EXECUTE_TRANSFORMATION ) ) {
return;
}
try {
Document doc = XMLHandler.loadXMLString( clipcontent );
Node transNode = XMLHandler.getSubNode( doc, Spoon.XML_TAG_TRANSFORMATION_STEPS );
// De-select all, re-select pasted steps...
transMeta.unselectAll();
Node stepsNode = XMLHandler.getSubNode( transNode, "steps" );
int nr = XMLHandler.countNodes( stepsNode, "step" );
if ( getLog().isDebug() ) {
// "I found "+nr+" steps to paste on location: "
getLog().logDebug( BaseMessages.getString( PKG, "Spoon.Log.FoundSteps", "" + nr ) + loc );
}
StepMeta[] steps = new StepMeta[nr];
ArrayList<String> stepOldNames = new ArrayList<String>( nr );
// Point min = new Point(loc.x, loc.y);
Point min = new Point( 99999999, 99999999 );
// Load the steps...
for ( int i = 0; i < nr; i++ ) {
Node stepNode = XMLHandler.getSubNodeByNr( stepsNode, "step", i );
steps[i] = new StepMeta( stepNode, transMeta.getDatabases(), metaStore );
if ( loc != null ) {
Point p = steps[i].getLocation();
if ( min.x > p.x ) {
min.x = p.x;
}
if ( min.y > p.y ) {
min.y = p.y;
}
}
}
// Load the hops...
Node hopsNode = XMLHandler.getSubNode( transNode, "order" );
nr = XMLHandler.countNodes( hopsNode, "hop" );
if ( getLog().isDebug() ) {
// "I found "+nr+" hops to paste."
getLog().logDebug( BaseMessages.getString( PKG, "Spoon.Log.FoundHops", "" + nr ) );
}
TransHopMeta[] hops = new TransHopMeta[nr];
ArrayList<StepMeta> alSteps = new ArrayList<StepMeta>();
Collections.addAll( alSteps, steps );
for ( int i = 0; i < nr; i++ ) {
Node hopNode = XMLHandler.getSubNodeByNr( hopsNode, "hop", i );
hops[i] = new TransHopMeta( hopNode, alSteps );
}
// This is the offset:
//
Point offset = new Point( loc.x - min.x, loc.y - min.y );
// Undo/redo object positions...
int[] position = new int[steps.length];
for ( int i = 0; i < steps.length; i++ ) {
Point p = steps[i].getLocation();
String name = steps[i].getName();
steps[i].setLocation( p.x + offset.x, p.y + offset.y );
steps[i].setDraw( true );
// Check the name, find alternative...
stepOldNames.add( name );
steps[i].setName( transMeta.getAlternativeStepname( name ) );
transMeta.addStep( steps[i] );
position[i] = transMeta.indexOfStep( steps[i] );
steps[i].setSelected( true );
}
// Add the hops too...
for ( TransHopMeta hop : hops ) {
transMeta.addTransHop( hop );
}
// Load the notes...
Node notesNode = XMLHandler.getSubNode( transNode, "notepads" );
nr = XMLHandler.countNodes( notesNode, "notepad" );
if ( getLog().isDebug() ) {
// "I found "+nr+" notepads to paste."
getLog().logDebug( BaseMessages.getString( PKG, "Spoon.Log.FoundNotepads", "" + nr ) );
}
NotePadMeta[] notes = new NotePadMeta[nr];
for ( int i = 0; i < notes.length; i++ ) {
Node noteNode = XMLHandler.getSubNodeByNr( notesNode, "notepad", i );
notes[i] = new NotePadMeta( noteNode );
Point p = notes[i].getLocation();
notes[i].setLocation( p.x + offset.x, p.y + offset.y );
transMeta.addNote( notes[i] );
notes[i].setSelected( true );
}
// Set the source and target steps ...
for ( StepMeta step : steps ) {
StepMetaInterface smi = step.getStepMetaInterface();
smi.searchInfoAndTargetSteps( transMeta.getSteps() );
}
// Set the error handling hops
Node errorHandlingNode = XMLHandler.getSubNode( transNode, TransMeta.XML_TAG_STEP_ERROR_HANDLING );
int nrErrorHandlers = XMLHandler.countNodes( errorHandlingNode, StepErrorMeta.XML_TAG );
for ( int i = 0; i < nrErrorHandlers; i++ ) {
Node stepErrorMetaNode = XMLHandler.getSubNodeByNr( errorHandlingNode, StepErrorMeta.XML_TAG, i );
StepErrorMeta stepErrorMeta =
new StepErrorMeta( transMeta.getParentVariableSpace(), stepErrorMetaNode, transMeta.getSteps() );
// Handle pasting multiple times, need to update source and target step names
int srcStepPos = stepOldNames.indexOf( stepErrorMeta.getSourceStep().getName() );
int tgtStepPos = stepOldNames.indexOf( stepErrorMeta.getTargetStep().getName() );
StepMeta sourceStep = transMeta.findStep( steps[srcStepPos].getName() );
if ( sourceStep != null ) {
sourceStep.setStepErrorMeta( stepErrorMeta );
}
StepMeta targetStep = transMeta.findStep( steps[tgtStepPos].getName() );
stepErrorMeta.setSourceStep( sourceStep );
stepErrorMeta.setTargetStep( targetStep );
}
// Save undo information too...
addUndoNew( transMeta, steps, position, false );
int[] hopPos = new int[hops.length];
for ( int i = 0; i < hops.length; i++ ) {
hopPos[i] = transMeta.indexOfTransHop( hops[i] );
}
addUndoNew( transMeta, hops, hopPos, true );
int[] notePos = new int[notes.length];
for ( int i = 0; i < notes.length; i++ ) {
notePos[i] = transMeta.indexOfNote( notes[i] );
}
addUndoNew( transMeta, notes, notePos, true );
if ( transMeta.haveStepsChanged() ) {
refreshTree();
refreshGraph();
}
} catch ( KettleException e ) {
// "Error pasting steps...",
// "I was unable to paste steps to this transformation"
new ErrorDialog( shell, BaseMessages.getString( PKG, "Spoon.Dialog.UnablePasteSteps.Title" ), BaseMessages
.getString( PKG, "Spoon.Dialog.UnablePasteSteps.Message" ), e );
}
}
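/**
* Serializes the selected steps, the hops between them, the given notepads and any step error handling to XML
* and puts the result on the clipboard.
*
* @param transMeta the transformation containing the selection
* @param steps the selected steps
* @param notes the selected notepads (may be null)
*/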
public void copySelected( TransMeta transMeta, List<StepMeta> steps, List<NotePadMeta> notes ) {
if ( steps == null || steps.size() == 0 ) {
return;
}
if ( RepositorySecurityUI.verifyOperations( shell, rep,
RepositoryOperation.MODIFY_TRANSFORMATION, RepositoryOperation.EXECUTE_TRANSFORMATION ) ) {
return;
}
StringBuilder xml = new StringBuilder( 5000 ).append( XMLHandler.getXMLHeader() );
try {
xml.append( XMLHandler.openTag( Spoon.XML_TAG_TRANSFORMATION_STEPS ) ).append( Const.CR );
xml.append( XMLHandler.openTag( Spoon.XML_TAG_STEPS ) ).append( Const.CR );
for ( StepMeta step : steps ) {
xml.append( step.getXML() );
}
xml.append( XMLHandler.closeTag( Spoon.XML_TAG_STEPS ) ).append( Const.CR );
// Also check for the hops in between the selected steps...
xml.append( XMLHandler.openTag( TransMeta.XML_TAG_ORDER ) ).append( Const.CR );
for ( StepMeta step1 : steps ) {
for ( StepMeta step2 : steps ) {
if ( step1 != step2 ) {
TransHopMeta hop = transMeta.findTransHop( step1, step2, true );
if ( hop != null ) {
// Ok, we found one...
xml.append( hop.getXML() ).append( Const.CR );
}
}
}
}
xml.append( XMLHandler.closeTag( TransMeta.XML_TAG_ORDER ) ).append( Const.CR );
xml.append( XMLHandler.openTag( TransMeta.XML_TAG_NOTEPADS ) ).append( Const.CR );
if ( notes != null ) {
for ( NotePadMeta note : notes ) {
xml.append( note.getXML() );
}
}
xml.append( XMLHandler.closeTag( TransMeta.XML_TAG_NOTEPADS ) ).append( Const.CR );
xml.append( XMLHandler.openTag( TransMeta.XML_TAG_STEP_ERROR_HANDLING ) ).append( Const.CR );
for ( StepMeta step : steps ) {
if ( step.getStepErrorMeta() != null ) {
xml.append( step.getStepErrorMeta().getXML() ).append( Const.CR );
}
}
xml.append( XMLHandler.closeTag( TransMeta.XML_TAG_STEP_ERROR_HANDLING ) ).append( Const.CR );
xml.append( XMLHandler.closeTag( Spoon.XML_TAG_TRANSFORMATION_STEPS ) ).append( Const.CR );
toClipboard( xml.toString() );
} catch ( Exception ex ) {
new ErrorDialog( getShell(), "Error", "Error encoding to XML", ex );
}
}
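/**
* Opens the hop dialog for the given transformation hop and registers undo information when the hop was changed.
*
* @param transMeta the transformation containing the hop
* @param transHopMeta the hop to edit
*/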
public void editHop( TransMeta transMeta, TransHopMeta transHopMeta ) {
// Backup situation BEFORE edit:
String name = transHopMeta.toString();
TransHopMeta before = (TransHopMeta) transHopMeta.clone();
TransHopDialog hd = new TransHopDialog( shell, SWT.NONE, transHopMeta, transMeta );
if ( hd.open() != null ) {
// Backup situation for redo/undo:
TransHopMeta after = (TransHopMeta) transHopMeta.clone();
addUndoChange( transMeta, new TransHopMeta[] { before }, new TransHopMeta[] { after }, new int[] { transMeta
.indexOfTransHop( transHopMeta ) } );
String newName = transHopMeta.toString();
if ( !name.equalsIgnoreCase( newName ) ) {
refreshTree();
refreshGraph(); // color, nr of copies...
}
}
setShellText();
}
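/**
* Removes the given hop from the transformation, registers undo information and, if the hop carried error
* handling rows, disables the error handling on the source step.
*
* @param transMeta the transformation containing the hop
* @param transHopMeta the hop to delete
*/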
public void delHop( TransMeta transMeta, TransHopMeta transHopMeta ) {
int index = transMeta.indexOfTransHop( transHopMeta );
addUndoDelete( transMeta, new Object[] { (TransHopMeta) transHopMeta.clone() }, new int[] { index } );
transMeta.removeTransHop( index );
// If this is an error handling hop, disable it
//
if ( transHopMeta.getFromStep().isDoingErrorHandling() ) {
StepErrorMeta stepErrorMeta = transHopMeta.getFromStep().getStepErrorMeta();
// We can only disable error handling if the target of the hop is the same as the target of the error handling.
//
if ( stepErrorMeta.getTargetStep() != null
&& stepErrorMeta.getTargetStep().equals( transHopMeta.getToStep() ) ) {
StepMeta stepMeta = transHopMeta.getFromStep();
// Only if the target step is where the error handling is going to...
//
StepMeta before = (StepMeta) stepMeta.clone();
stepErrorMeta.setEnabled( false );
index = transMeta.indexOfStep( stepMeta );
addUndoChange( transMeta, new Object[] { before }, new Object[] { stepMeta }, new int[] { index } );
}
}
refreshTree();
refreshGraph();
}
public void newHop( TransMeta transMeta, StepMeta fr, StepMeta to ) {
TransHopMeta hi = new TransHopMeta( fr, to );
TransHopDialog hd = new TransHopDialog( shell, SWT.NONE, hi, transMeta );
if ( hd.open() != null ) {
newHop( transMeta, hi );
}
}
public void newHop( TransMeta transMeta, TransHopMeta transHopMeta ) {
if ( checkIfHopAlreadyExists( transMeta, transHopMeta ) ) {
transMeta.addTransHop( transHopMeta );
int idx = transMeta.indexOfTransHop( transHopMeta );
if ( !performNewTransHopChecks( transMeta, transHopMeta ) ) {
// Some error occurred: loops, existing hop, etc.
// Remove it again...
//
transMeta.removeTransHop( idx );
} else {
addUndoNew( transMeta, new TransHopMeta[] { transHopMeta }, new int[] { transMeta
.indexOfTransHop( transHopMeta ) } );
}
// Just to make sure
transHopMeta.getFromStep().drawStep();
transHopMeta.getToStep().drawStep();
refreshTree();
refreshGraph();
}
}
/**
* @param transMeta transformation's meta
* @param newHop hop to be checked
* @return true if no hop between the same two steps exists yet (so the new hop can be added), false otherwise
*/
public boolean checkIfHopAlreadyExists( TransMeta transMeta, TransHopMeta newHop ) {
boolean ok = true;
if ( transMeta.findTransHop( newHop.getFromStep(), newHop.getToStep() ) != null ) {
MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR );
mb.setMessage( BaseMessages.getString( PKG, "Spoon.Dialog.HopExists.Message" ) ); // "This hop already exists!"
mb.setText( BaseMessages.getString( PKG, "Spoon.Dialog.HopExists.Title" ) ); // Error!
mb.open();
ok = false;
}
return ok;
}
/**
* @param transMeta transformation's meta
* @param newHop hop to be checked
* @return true if the new hop passes the checks (it does not introduce a loop), false otherwise
*/
public boolean performNewTransHopChecks( TransMeta transMeta, TransHopMeta newHop ) {
boolean ok = true;
if ( transMeta.hasLoop( newHop.getFromStep() ) || transMeta.hasLoop( newHop.getToStep() ) ) {
MessageBox mb = new MessageBox( shell, SWT.YES | SWT.ICON_WARNING );
mb.setMessage( BaseMessages.getString( PKG, "TransGraph.Dialog.HopCausesLoop.Message" ) );
mb.setText( BaseMessages.getString( PKG, "TransGraph.Dialog.HopCausesLoop.Title" ) );
mb.open();
ok = false;
}
if ( ok ) { // only do the following checks, e.g. checkRowMixingStatically
// when not looping, otherwise we get a loop with
// StackOverflow there ;-)
try {
if ( !newHop.getToStep().getStepMetaInterface().excludeFromRowLayoutVerification() ) {
transMeta.checkRowMixingStatically( newHop.getToStep(), null );
}
} catch ( KettleRowException re ) {
// Show warning about mixing rows with conflicting layouts...
new ErrorDialog(
shell, BaseMessages.getString( PKG, "TransGraph.Dialog.HopCausesRowMixing.Title" ), BaseMessages
.getString( PKG, "TransGraph.Dialog.HopCausesRowMixing.Message" ), re );
}
verifyCopyDistribute( transMeta, newHop.getFromStep() );
}
return ok;
}
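/**
* When a step gets its second outgoing hop, optionally asks the user whether the step should copy or distribute
* its rows (or use a custom row distribution) and applies the answer to the step.
*
* @param transMeta the transformation containing the step
* @param fr the step to verify
*/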
public void verifyCopyDistribute( TransMeta transMeta, StepMeta fr ) {
List<StepMeta> nextSteps = transMeta.findNextSteps( fr );
int nrNextSteps = nextSteps.size();
// don't show it for 3 or more hops, by then you should have had the
// message
if ( nrNextSteps == 2 ) {
boolean distributes = fr.getStepMetaInterface().excludeFromCopyDistributeVerification();
boolean customDistribution = false;
if ( props.showCopyOrDistributeWarning()
&& !fr.getStepMetaInterface().excludeFromCopyDistributeVerification() ) {
MessageDialogWithToggle md =
new MessageDialogWithToggle(
shell, BaseMessages.getString( PKG, "System.Warning" ), null, BaseMessages.getString(
PKG, "Spoon.Dialog.CopyOrDistribute.Message", fr.getName(), Integer.toString( nrNextSteps ) ),
MessageDialog.WARNING, getRowDistributionLabels(), 0, BaseMessages.getString(
PKG, "Spoon.Message.Warning.NotShowWarning" ), !props.showCopyOrDistributeWarning() );
MessageDialogWithToggle.setDefaultImage( GUIResource.getInstance().getImageSpoon() );
int idx = md.open();
props.setShowCopyOrDistributeWarning( !md.getToggleState() );
props.saveProps();
distributes = idx == Spoon.MESSAGE_DIALOG_WITH_TOGGLE_YES_BUTTON_ID;
customDistribution = idx == Spoon.MESSAGE_DIALOG_WITH_TOGGLE_CUSTOM_DISTRIBUTION_BUTTON_ID;
}
if ( distributes ) {
fr.setDistributes( true );
fr.setRowDistribution( null );
} else if ( customDistribution ) {
RowDistributionInterface rowDistribution = getActiveTransGraph().askUserForCustomDistributionMethod();
fr.setDistributes( true );
fr.setRowDistribution( rowDistribution );
} else {
fr.setDistributes( false );
fr.setRowDistribution( null );
}
refreshTree();
refreshGraph();
}
}
private String[] getRowDistributionLabels() {
ArrayList<String> labels = new ArrayList<String>();
labels.add( BaseMessages.getString( PKG, "Spoon.Dialog.CopyOrDistribute.Distribute" ) );
labels.add( BaseMessages.getString( PKG, "Spoon.Dialog.CopyOrDistribute.Copy" ) );
if ( PluginRegistry.getInstance().getPlugins( RowDistributionPluginType.class ).size() > 0 ) {
labels.add( BaseMessages.getString( PKG, "Spoon.Dialog.CopyOrDistribute.CustomRowDistribution" ) );
}
return labels.toArray( new String[labels.size()] );
}
public void newHop( TransMeta transMeta ) {
newHop( transMeta, null, null );
}
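/**
* Shows the repositories dialog and, on a successful login, disconnects from the current repository (if any),
* switches to the newly selected repository and reloads the session information. Dirty tabs must be saved first.
*/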
public void openRepository() {
// Check whether any tabs are dirty and warn the user that they must save all tabs prior to connecting. Don't connect!
if ( Spoon.getInstance().isTabsChanged() ) {
MessageBox mb = new MessageBox( Spoon.getInstance().getShell(), SWT.OK );
mb.setMessage( BaseMessages.getString( PKG, "Spoon.Dialog.WarnToSaveAllPriorToConnect.Message" ) );
mb.setText( BaseMessages.getString( PKG, "Spoon.Dialog.WarnToCloseAllForce.Disconnect.Title" ) );
mb.open();
// Don't connect, user will need to save all their dirty tabs.
return;
}
loginDialog = new RepositoriesDialog( shell, null, new ILoginCallback() {
public void onSuccess( Repository repository ) {
// Close previous repository...
if ( rep != null ) {
rep.disconnect();
SpoonPluginManager.getInstance().notifyLifecycleListeners( SpoonLifeCycleEvent.REPOSITORY_DISCONNECTED );
}
setRepository( repository );
loadSessionInformation( repository, true );
refreshTree();
setShellText();
SpoonPluginManager.getInstance().notifyLifecycleListeners( SpoonLifeCycleEvent.REPOSITORY_CONNECTED );
}
public void onError( Throwable t ) {
closeRepository();
onLoginError( t );
}
public void onCancel() {
}
} );
loginDialog.show();
}
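/**
* Re-matches the shared objects (databases, slave servers, partition and cluster schemas) and repository
* directories of all loaded jobs and transformations against the given repository, for example after connecting
* to or disconnecting from a repository.
*
* @param repository the repository to read the shared objects from, or null when disconnecting
* @param saveOldDatabases true to keep old database connections that are not present in the new repository
*/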
private void loadSessionInformation( Repository repository, boolean saveOldDatabases ) {
JobMeta[] jobMetas = getLoadedJobs();
for ( JobMeta jobMeta : jobMetas ) {
for ( int i = 0; i < jobMeta.nrDatabases(); i++ ) {
jobMeta.getDatabase( i ).setObjectId( null );
}
// Set for the existing job the ID at -1!
jobMeta.setObjectId( null );
// Keep track of the old databases for now.
List<DatabaseMeta> oldDatabases = jobMeta.getDatabases();
// In order to re-match the databases on name (not content), we
// need to load the databases from the new repository.
// NOTE: for purposes such as DEVELOP - TEST - PRODUCTION
// cycles.
// first clear the list of databases and slave servers
jobMeta.setDatabases( new ArrayList<DatabaseMeta>() );
jobMeta.setSlaveServers( new ArrayList<SlaveServer>() );
// Read them from the new repository.
try {
SharedObjects sharedObjects =
repository != null ? repository.readJobMetaSharedObjects( jobMeta ) : jobMeta.readSharedObjects();
sharedObjectsFileMap.put( sharedObjects.getFilename(), sharedObjects );
} catch ( KettleException e ) {
new ErrorDialog(
shell, BaseMessages.getString( PKG, "Spoon.Dialog.ErrorReadingSharedObjects.Title" ), BaseMessages
.getString( PKG, "Spoon.Dialog.ErrorReadingSharedObjects.Message", makeTabName( jobMeta, true ) ),
e
);
}
// Then we need to re-match the databases at save time...
for ( DatabaseMeta oldDatabase : oldDatabases ) {
DatabaseMeta newDatabase = DatabaseMeta.findDatabase( jobMeta.getDatabases(), oldDatabase.getName() );
// If it exists, change the settings...
if ( newDatabase != null ) {
//
// A database connection with the same name exists in
// the new repository.
// Change the old connections to reflect the settings in
// the new repository
//
oldDatabase.setDatabaseInterface( newDatabase.getDatabaseInterface() );
} else {
if ( saveOldDatabases ) {
//
// The old database is not present in the new
// repository: simply add it to the list.
// When the job gets saved, it will be added
// to the repository.
//
jobMeta.addDatabase( oldDatabase );
}
}
}
if ( repository != null ) {
try {
// For the existing job, change the directory too:
// Try to find the same directory in the new repository...
RepositoryDirectoryInterface rdi =
repository.findDirectory( jobMeta.getRepositoryDirectory().getPath() );
if ( rdi != null ) {
jobMeta.setRepositoryDirectory( rdi );
} else {
// the root is the default!
jobMeta.setRepositoryDirectory( repository.loadRepositoryDirectoryTree() );
}
} catch ( KettleException ke ) {
rep = null;
new ErrorDialog(
shell, BaseMessages.getString( PKG, "Spoon.Dialog.ErrorConnectingRepository.Title" ), BaseMessages
.getString( PKG, "Spoon.Dialog.ErrorConnectingRepository.Message", Const.CR ), ke
);
}
}
}
TransMeta[] transMetas = getLoadedTransformations();
for ( TransMeta transMeta : transMetas ) {
for ( int i = 0; i < transMeta.nrDatabases(); i++ ) {
transMeta.getDatabase( i ).setObjectId( null );
}
// Set for the existing transformation the ID at -1!
transMeta.setObjectId( null );
// Keep track of the old databases for now.
List<DatabaseMeta> oldDatabases = transMeta.getDatabases();
// In order to re-match the databases on name (not content), we
// need to load the databases from the new repository.
// NOTE: for purposes such as DEVELOP - TEST - PRODUCTION
// cycles.
// first clear the list of databases, partition schemas, slave
// servers, clusters
transMeta.setDatabases( new ArrayList<DatabaseMeta>() );
transMeta.setPartitionSchemas( new ArrayList<PartitionSchema>() );
transMeta.setSlaveServers( new ArrayList<SlaveServer>() );
transMeta.setClusterSchemas( new ArrayList<ClusterSchema>() );
// Read them from the new repository.
try {
SharedObjects sharedObjects =
repository != null ? repository.readTransSharedObjects( transMeta ) : transMeta.readSharedObjects();
sharedObjectsFileMap.put( sharedObjects.getFilename(), sharedObjects );
} catch ( KettleException e ) {
new ErrorDialog(
shell, BaseMessages.getString( PKG, "Spoon.Dialog.ErrorReadingSharedObjects.Title" ),
BaseMessages.getString( PKG, "Spoon.Dialog.ErrorReadingSharedObjects.Message", makeTabName(
transMeta, true ) ), e
);
}
// Then we need to re-match the databases at save time...
for ( DatabaseMeta oldDatabase : oldDatabases ) {
DatabaseMeta newDatabase = DatabaseMeta.findDatabase( transMeta.getDatabases(), oldDatabase.getName() );
// If it exists, change the settings...
if ( newDatabase != null ) {
//
// A database connection with the same name exists in
// the new repository.
// Change the old connections to reflect the settings in
// the new repository
//
oldDatabase.setDatabaseInterface( newDatabase.getDatabaseInterface() );
} else {
if ( saveOldDatabases ) {
//
// The old database is not present in the new
// repository: simply add it to the list.
// When the transformation gets saved, it will be added
// to the repository.
//
transMeta.addDatabase( oldDatabase );
}
}
}
if ( repository != null ) {
try {
// For the existing transformation, change the directory too:
// Try to find the same directory in the new repository...
RepositoryDirectoryInterface rdi =
repository.findDirectory( transMeta.getRepositoryDirectory().getPath() );
if ( rdi != null ) {
transMeta.setRepositoryDirectory( rdi );
} else {
// the root is the default!
transMeta.setRepositoryDirectory( repository.loadRepositoryDirectoryTree() );
}
} catch ( KettleException ke ) {
rep = null;
new ErrorDialog(
shell, BaseMessages.getString( PKG, "Spoon.Dialog.ErrorConnectingRepository.Title" ), BaseMessages
.getString( PKG, "Spoon.Dialog.ErrorConnectingRepository.Message", Const.CR ), ke
);
}
}
}
}
public void clearSharedObjectCache() throws KettleException {
if ( rep != null ) {
rep.clearSharedObjectCache();
TransMeta transMeta = getActiveTransformation();
if ( transMeta != null ) {
rep.readTransSharedObjects( transMeta );
}
JobMeta jobMeta = getActiveJob();
if ( jobMeta != null ) {
rep.readJobMetaSharedObjects( jobMeta );
}
}
}
public void exploreRepository() {
if ( rep != null ) {
final RepositoryExplorerCallback cb = new RepositoryExplorerCallback() {
public boolean open( UIRepositoryContent element, String revision ) {
String objName = element.getName();
if ( objName != null ) {
RepositoryObjectType objectType = element.getRepositoryElementType();
RepositoryDirectory repDir = element.getRepositoryDirectory();
if ( element.getObjectId() != null ) { // new way
loadObjectFromRepository( element.getObjectId(), objectType, revision );
} else { // old way
loadObjectFromRepository( objName, objectType, repDir, revision );
}
}
return false; // do not close explorer
}
};
try {
final XulWaitBox box = (XulWaitBox) this.mainSpoonContainer.getDocumentRoot().createElement( "waitbox" );
box.setIndeterminate( true );
box.setCanCancel( false );
box.setTitle( BaseMessages.getString(
RepositoryDialogInterface.class, "RepositoryExplorerDialog.Connection.Wait.Title" ) );
box.setMessage( BaseMessages.getString(
RepositoryDialogInterface.class, "RepositoryExplorerDialog.Explorer.Wait.Message" ) );
box.setDialogParent( shell );
box.setRunnable( new WaitBoxRunnable( box ) {
@Override
public void run() {
shell.getDisplay().syncExec( new Runnable() {
public void run() {
try {
RepositoryExplorer explorer =
new RepositoryExplorer( shell, rep, cb, Variables.getADefaultVariableSpace() );
box.stop();
explorer.show();
explorer.dispose();
} catch ( final Throwable e ) {
shell.getDisplay().asyncExec( new Runnable() {
public void run() {
new ErrorDialog( shell, BaseMessages.getString( PKG, "Spoon.Error" ), e.getMessage(), e );
}
} );
}
}
} );
}
@Override
public void cancel() {
}
} );
box.start();
} catch ( Throwable e ) {
new ErrorDialog( shell, BaseMessages.getString( PKG, "Spoon.Error" ), e.getMessage(), e );
}
}
}
private void loadObjectFromRepository( ObjectId objectId, RepositoryObjectType objectType, String revision ) {
// Try to open the selected transformation.
if ( objectType.equals( RepositoryObjectType.TRANSFORMATION ) ) {
try {
TransLoadProgressDialog progressDialog = new TransLoadProgressDialog( shell, rep, objectId, revision );
TransMeta transMeta = progressDialog.open();
if ( transMeta != null ) {
transMeta.clearChanged();
if ( log.isDetailed() ) {
log.logDetailed( BaseMessages.getString(
PKG, "Spoon.Log.LoadToTransformation", transMeta.getName(), transMeta
.getRepositoryDirectory().getName() ) );
}
props.addLastFile( LastUsedFile.FILE_TYPE_TRANSFORMATION, transMeta.getName(), transMeta
.getRepositoryDirectory().getPath(), true, rep.getName() );
addMenuLast();
addTransGraph( transMeta );
}
refreshTree();
refreshGraph();
} catch ( Exception e ) {
new ErrorDialog( ( (Spoon) SpoonFactory.getInstance() ).getShell(), BaseMessages.getString(
Spoon.class, "Spoon.Dialog.ErrorOpeningById.Message", objectId ), e.getMessage(), e );
}
} else if ( objectType.equals( RepositoryObjectType.JOB ) ) {
try {
JobLoadProgressDialog progressDialog = new JobLoadProgressDialog( shell, rep, objectId, revision );
JobMeta jobMeta = progressDialog.open();
if ( jobMeta != null ) {
jobMeta.clearChanged();
props.addLastFile( LastUsedFile.FILE_TYPE_JOB, jobMeta.getName(), jobMeta
.getRepositoryDirectory().getPath(), true, rep.getName() );
saveSettings();
addMenuLast();
addJobGraph( jobMeta );
}
refreshTree();
refreshGraph();
} catch ( Exception e ) {
new ErrorDialog( ( (Spoon) SpoonFactory.getInstance() ).getShell(), BaseMessages.getString(
Spoon.class, "Spoon.Dialog.ErrorOpeningById.Message", objectId ), e.getMessage(), e );
}
}
}
public void loadObjectFromRepository( String objName, RepositoryObjectType objectType,
RepositoryDirectoryInterface repDir, String versionLabel ) {
// Try to open the selected transformation.
if ( objectType.equals( RepositoryObjectType.TRANSFORMATION ) ) {
try {
TransLoadProgressDialog progressDialog =
new TransLoadProgressDialog( shell, rep, objName, repDir, versionLabel );
TransMeta transMeta = progressDialog.open();
if ( transMeta != null ) {
transMeta.clearChanged();
if ( log.isDetailed() ) {
log.logDetailed( BaseMessages.getString( PKG, "Spoon.Log.LoadToTransformation", objName, repDir
.getName() ) );
}
props
.addLastFile( LastUsedFile.FILE_TYPE_TRANSFORMATION, objName, repDir.getPath(), true, rep.getName() );
addMenuLast();
addTransGraph( transMeta );
}
refreshTree();
refreshGraph();
} catch ( Exception e ) {
MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR );
mb.setMessage( BaseMessages.getString( PKG, "Spoon.Dialog.ErrorOpening.Message" )
+ objName + Const.CR + e.getMessage() ); // "Error opening : "
mb.setText( BaseMessages.getString( PKG, "Spoon.Dialog.ErrorOpening.Title" ) );
mb.open();
}
} else if ( objectType.equals( RepositoryObjectType.JOB ) ) {
// Try to open the selected job.
try {
JobLoadProgressDialog progressDialog =
new JobLoadProgressDialog( shell, rep, objName, repDir, versionLabel );
JobMeta jobMeta = progressDialog.open();
if ( jobMeta != null ) {
jobMeta.clearChanged();
props.addLastFile( LastUsedFile.FILE_TYPE_JOB, objName, repDir.getPath(), true, rep.getName() );
saveSettings();
addMenuLast();
addJobGraph( jobMeta );
}
refreshTree();
refreshGraph();
} catch ( Exception e ) {
MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR );
mb.setMessage( BaseMessages.getString( PKG, "Spoon.Dialog.ErrorOpening.Message" )
+ objName + Const.CR + e.getMessage() ); // "Error opening : "
mb.setText( BaseMessages.getString( PKG, "Spoon.Dialog.ErrorOpening.Title" ) );
mb.open();
}
}
}
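/**
* Disconnects from the current repository after prompting the user to close all open jobs and transformations,
* removes the repository meta store from the delegating meta store and notifies the lifecycle listeners.
*/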
public void closeRepository() {
if ( rep != null ) {
// Prompt and close all tabs as user disconnected from the repo
boolean shouldDisconnect = Spoon.getInstance().closeAllJobsAndTransformations();
if ( shouldDisconnect ) {
loadSessionInformation( null, false );
rep.disconnect();
if ( metaStore.getMetaStoreList().size() > 1 ) {
try {
metaStore.getMetaStoreList().remove( 0 );
metaStore.setActiveMetaStoreName( metaStore.getMetaStoreList().get( 0 ).getName() );
} catch ( MetaStoreException e ) {
new ErrorDialog( shell, BaseMessages.getString( PKG, "Spoon.ErrorRemovingMetaStore.Title" ),
BaseMessages.getString( PKG, "Spoon.ErrorRemovingMetaStore.Message" ), e );
}
}
setRepository( null );
setShellText();
SpoonPluginManager.getInstance().notifyLifecycleListeners( SpoonLifeCycleEvent.REPOSITORY_DISCONNECTED );
enableMenus();
}
}
}
public void openFile() {
openFile( false );
}
public void importFile() {
openFile( true );
}
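/**
* Opens a file, either from the file system (file dialog) or from the connected repository (object selection
* dialog), depending on the active perspective and the import flag.
*
* @param importfile true to import the file from XML instead of opening it from the repository
*/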
public void openFile( boolean importfile ) {
SpoonPerspective activePerspective = SpoonPerspectiveManager.getInstance().getActivePerspective();
// In case the perspective wants to handle open/save itself, let it...
//
if ( !importfile ) {
if ( activePerspective instanceof SpoonPerspectiveOpenSaveInterface ) {
( (SpoonPerspectiveOpenSaveInterface) activePerspective ).open();
return;
}
}
String activePerspectiveId = activePerspective.getId();
boolean etlPerspective = activePerspectiveId.equals( MainSpoonPerspective.ID );
if ( rep == null || importfile || !etlPerspective ) { // Load from XML
FileDialog dialog = new FileDialog( shell, SWT.OPEN );
LinkedHashSet<String> extensions = new LinkedHashSet<String>();
LinkedHashSet<String> extensionNames = new LinkedHashSet<String>();
StringBuilder allExtensions = new StringBuilder();
for ( FileListener l : fileListeners ) {
for ( String ext : l.getSupportedExtensions() ) {
extensions.add( "*." + ext );
allExtensions.append( "*." ).append( ext ).append( ";" );
}
Collections.addAll( extensionNames, l.getFileTypeDisplayNames( Locale.getDefault() ) );
}
extensions.add( "*" );
extensionNames.add( BaseMessages.getString( PKG, "Spoon.Dialog.OpenFile.AllFiles" ) );
String[] exts = new String[extensions.size() + 1];
exts[0] = allExtensions.toString();
System.arraycopy( extensions.toArray( new String[extensions.size()] ), 0, exts, 1, extensions.size() );
String[] extNames = new String[extensionNames.size() + 1];
extNames[0] = BaseMessages.getString( PKG, "Spoon.Dialog.OpenFile.AllTypes" );
System.arraycopy(
extensionNames.toArray( new String[extensionNames.size()] ), 0, extNames, 1, extensionNames.size() );
dialog.setFilterExtensions( exts );
setFilterPath( dialog );
String filename = dialog.open();
if ( filename != null ) {
if ( importfile ) {
if ( activePerspective instanceof SpoonPerspectiveOpenSaveInterface ) {
( (SpoonPerspectiveOpenSaveInterface) activePerspective ).importFile( filename );
return;
}
}
lastDirOpened = dialog.getFilterPath();
openFile( filename, importfile );
}
} else {
SelectObjectDialog sod = new SelectObjectDialog( shell, rep );
if ( sod.open() != null ) {
RepositoryObjectType type = sod.getObjectType();
String name = sod.getObjectName();
RepositoryDirectoryInterface repDir = sod.getDirectory();
// Load a transformation
if ( RepositoryObjectType.TRANSFORMATION.equals( type ) ) {
TransLoadProgressDialog tlpd = new TransLoadProgressDialog( shell, rep, name, repDir, null ); // Loads
// the
// last
// version
TransMeta transMeta = tlpd.open();
if ( transMeta != null ) {
sharedObjectsFileMap.put( transMeta.getSharedObjects().getFilename(), transMeta.getSharedObjects() );
setTransMetaVariables( transMeta );
if ( log.isDetailed() ) {
log.logDetailed( BaseMessages.getString( PKG, "Spoon.Log.LoadToTransformation", name, repDir
.getName() ) );
}
props.addLastFile( LastUsedFile.FILE_TYPE_TRANSFORMATION, name, repDir.getPath(), true, rep.getName() );
addMenuLast();
transMeta.clearChanged();
// transMeta.setFilename(name); // Don't do it, it's a bad idea!
addTransGraph( transMeta );
}
refreshGraph();
refreshTree();
} else if ( RepositoryObjectType.JOB.equals( type ) ) {
// Load a job
JobLoadProgressDialog jlpd = new JobLoadProgressDialog( shell, rep, name, repDir, null ); // Loads
// the last version
JobMeta jobMeta = jlpd.open();
if ( jobMeta != null ) {
sharedObjectsFileMap.put( jobMeta.getSharedObjects().getFilename(), jobMeta.getSharedObjects() );
setJobMetaVariables( jobMeta );
props.addLastFile( LastUsedFile.FILE_TYPE_JOB, name, repDir.getPath(), true, rep.getName() );
saveSettings();
addMenuLast();
addJobGraph( jobMeta );
}
refreshGraph();
refreshTree();
}
}
}
}
private void setFilterPath( FileDialog dialog ) {
if ( !Const.isEmpty( lastDirOpened ) ) {
if ( new File( lastDirOpened ).exists() ) {
dialog.setFilterPath( lastDirOpened );
}
}
}
private String lastFileOpened = null;
public String getLastFileOpened() {
if ( lastFileOpened == null ) {
lastFileOpened = System.getProperty( "org.pentaho.di.defaultVFSPath", "" );
}
return lastFileOpened;
}
public void setLastFileOpened( String inLastFileOpened ) {
lastFileOpened = inLastFileOpened;
}
public void displayCmdLine() {
String cmdFile = getCmdLine();
if ( Const.isEmpty( cmdFile ) ) {
MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_INFORMATION );
mb.setMessage( BaseMessages.getString( PKG, "ExportCmdLine.JobOrTransformationMissing.Message" ) );
mb.setText( BaseMessages.getString( PKG, "ExportCmdLine.JobOrTransformationMissing.Title" ) );
mb.open();
} else {
ShowBrowserDialog sbd =
new ShowBrowserDialog( shell, BaseMessages.getString( PKG, "ExportCmdLine.CommandLine.Title" ), cmdFile );
sbd.open();
}
}
public void createCmdLineFile() {
String cmdFile = getCmdLine();
if ( Const.isEmpty( cmdFile ) ) {
MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_INFORMATION );
mb.setMessage( BaseMessages.getString( PKG, "ExportCmdLine.JobOrTransformationMissing.Message" ) );
mb.setText( BaseMessages.getString( PKG, "ExportCmdLine.JobOrTransformationMissing.Title" ) );
mb.open();
} else {
boolean export = true;
FileDialog dialog = new FileDialog( shell, SWT.SAVE );
dialog.setFilterExtensions( new String[] { "*.bat", "*.sh", "*.*" } );
dialog.setFilterNames( new String[] {
BaseMessages.getString( PKG, "ExportCmdLine.BatFiles" ),
BaseMessages.getString( PKG, "ExportCmdLineShFiles" ),
BaseMessages.getString( PKG, "ExportCmdLine.AllFiles" ) } );
String filename = dialog.open();
if ( filename != null ) {
// See if the file already exists...
int id = SWT.YES;
try {
FileObject f = KettleVFS.getFileObject( filename );
if ( f.exists() ) {
MessageBox mb = new MessageBox( shell, SWT.NO | SWT.YES | SWT.ICON_WARNING );
mb.setMessage( BaseMessages.getString( PKG, "ExportCmdLineShFiles.FileExistsReplace", filename ) );
mb.setText( BaseMessages.getString( PKG, "ExportCmdLineShFiles.ConfirmOverwrite" ) );
id = mb.open();
}
} catch ( Exception e ) {
// Ignore errors
}
if ( id == SWT.NO ) {
export = false;
}
if ( export ) {
java.io.FileWriter out = null;
try {
out = new java.io.FileWriter( filename );
out.write( cmdFile );
out.flush();
} catch ( Exception e ) {
new ErrorDialog(
shell, BaseMessages.getString( PKG, "ExportCmdLineShFiles.ErrorWritingFile.Title" ), BaseMessages
.getString( PKG, "ExportCmdLineShFiles.ErrorWritingFile.Message", filename ), e );
} finally {
if ( out != null ) {
try {
out.close();
} catch ( Exception e ) {
// Ignore errors
}
}
}
MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_INFORMATION );
mb.setMessage( BaseMessages.getString( PKG, "ExportCmdLineShFiles.CmdExported.Message", filename ) );
mb.setText( BaseMessages.getString( PKG, "ExportCmdLineShFiles.CmdExported.Title" ) );
mb.open();
}
}
}
}
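/**
* Builds the Pan or Kitchen command line for the active transformation or job, using repository arguments when
* connected to a repository and file arguments otherwise.
*
* @return the command line, or an empty string if there is no active transformation or job
*/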
private String getCmdLine() {
TransMeta transMeta = getActiveTransformation();
JobMeta jobMeta = getActiveJob();
String cmdFile = "";
if ( rep != null && ( jobMeta != null || transMeta != null ) ) {
if ( jobMeta != null ) {
if ( jobMeta.getName() != null ) {
if ( Const.isWindows() ) {
cmdFile =
"kitchen "
+ "/rep:\"" + rep.getName() + "\"" + " /user:\""
+ ( rep.getUserInfo() != null ? rep.getUserInfo().getLogin() : "" ) + "\"" + " /pass:\""
+ Encr.encryptPasswordIfNotUsingVariables( rep.getUserInfo().getPassword() ) + "\""
+ " /job:\"" + jobMeta.getName() + '"' + " /dir:\""
+ jobMeta.getRepositoryDirectory().getPath() + "\"" + " /level:Basic";
} else {
cmdFile =
"sh kitchen.sh "
+ "-rep='"
+ rep.getName()
+ "'"
+ " -user='"
+ ( rep.getUserInfo() != null ? rep.getUserInfo().getLogin() : "" )
+ "'"
+ " -pass='"
+ Encr.encryptPasswordIfNotUsingVariables( rep.getUserInfo() != null ? rep
.getUserInfo().getPassword() : "" ) + "'" + " -job='" + jobMeta.getName() + "'"
+ " -dir='" + jobMeta.getRepositoryDirectory().getPath() + "'" + " -level=Basic";
}
}
} else {
if ( transMeta.getName() != null ) {
if ( Const.isWindows() ) {
cmdFile =
"pan "
+ "/rep:\""
+ rep.getName()
+ "\""
+ " /user:\""
+ ( rep.getUserInfo() != null ? rep.getUserInfo().getLogin() : "" )
+ "\""
+ " /pass:\""
+ Encr.encryptPasswordIfNotUsingVariables( rep.getUserInfo() != null ? rep
.getUserInfo().getPassword() : "" ) + "\"" + " /trans:\"" + transMeta.getName() + "\""
+ " /dir:\"" + transMeta.getRepositoryDirectory().getPath() + "\"" + " /level:Basic";
} else {
cmdFile =
"sh pan.sh "
+ "-rep='"
+ rep.getName()
+ "'"
+ " -user='"
+ ( rep.getUserInfo() != null ? rep.getUserInfo().getLogin() : "" )
+ "'"
+ " -pass='"
+ Encr.encryptPasswordIfNotUsingVariables( rep.getUserInfo() != null ? rep
.getUserInfo().getPassword() : "" ) + "'" + " -trans='" + transMeta.getName() + "'"
+ " -dir='" + transMeta.getRepositoryDirectory().getPath() + "'" + " -level=Basic";
}
}
}
} else if ( rep == null && ( jobMeta != null || transMeta != null ) ) {
if ( jobMeta != null ) {
if ( jobMeta.getFilename() != null ) {
if ( Const.isWindows() ) {
cmdFile = "kitchen " + "/file:\"" + jobMeta.getFilename() + "\"" + " /level:Basic";
} else {
cmdFile = "sh kitchen.sh " + "-file='" + jobMeta.getFilename() + "'" + " -level=Basic";
}
}
} else {
if ( transMeta.getFilename() != null ) {
if ( Const.isWindows() ) {
cmdFile = "pan " + "/file:\"" + transMeta.getFilename() + "\"" + " /level:Basic";
} else {
cmdFile = "sh pan.sh " + "-file:'" + transMeta.getFilename() + "'" + " -level=Basic";
}
}
}
}
return cmdFile;
}
// private String lastVfsUsername="";
// private String lastVfsPassword="";
public void openFileVFSFile() {
FileObject initialFile;
FileObject rootFile;
try {
initialFile = KettleVFS.getFileObject( getLastFileOpened() );
rootFile = initialFile.getFileSystem().getRoot();
} catch ( Exception e ) {
String message = Const.getStackTracker( e );
new ErrorDialog( shell, BaseMessages.getString( PKG, "Spoon.Error" ), message, e );
return;
}
FileObject selectedFile =
getVfsFileChooserDialog( rootFile, initialFile ).open(
shell, null, Const.STRING_TRANS_AND_JOB_FILTER_EXT, Const.getTransformationAndJobFilterNames(),
VfsFileChooserDialog.VFS_DIALOG_OPEN_FILE );
if ( selectedFile != null ) {
setLastFileOpened( selectedFile.getName().getFriendlyURI() );
openFile( selectedFile.getName().getFriendlyURI(), false );
}
}
public void addFileListener( FileListener listener ) {
this.fileListeners.add( listener );
for ( String s : listener.getSupportedExtensions() ) {
if ( !fileExtensionMap.containsKey( s ) ) {
fileExtensionMap.put( s, listener );
}
}
}
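/**
* Opens the given file by delegating to the first file listener that accepts it, matching first on the file
* extension and then on the root XML node. Missing plugins are handled through the marketplace when available.
*
* @param filename the name of the file to open
* @param importfile true when the file should be imported instead of opened
*/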
public void openFile( String filename, boolean importfile ) {
try {
// Open the XML and see what's in there.
// We expect a single <transformation> or <job> root at this time...
boolean loaded = false;
FileListener listener = null;
Node root = null;
// match by extension first
int idx = filename.lastIndexOf( '.' );
if ( idx != -1 ) {
for ( FileListener li : fileListeners ) {
if ( li.accepts( filename ) ) {
listener = li;
break;
}
}
}
// Attempt to find a root XML node name. Fails gracefully for non-XML file
// types.
try {
Document document = XMLHandler.loadXMLFile( filename );
root = document.getDocumentElement();
} catch ( KettleXMLException e ) {
if ( log.isDetailed() ) {
log.logDetailed( BaseMessages.getString( PKG, "Spoon.File.Xml.Parse.Error" ) );
}
}
// otherwise try by looking at the root node if we were able to parse file
// as XML
if ( listener == null && root != null ) {
for ( FileListener li : fileListeners ) {
if ( li.acceptsXml( root.getNodeName() ) ) {
listener = li;
break;
}
}
}
// You got to have a file name!
//
if ( !Const.isEmpty( filename ) ) {
if ( listener != null ) {
loaded = listener.open( root, filename, importfile );
}
if ( !loaded ) {
// Give error back
hideSplash();
MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR );
mb.setMessage( BaseMessages.getString( PKG, "Spoon.UnknownFileType.Message", filename ) );
mb.setText( BaseMessages.getString( PKG, "Spoon.UnknownFileType.Title" ) );
mb.open();
} else {
applyVariables(); // set variables in the newly loaded
// transformation(s) and job(s).
}
}
} catch ( KettleMissingPluginsException e ) {
// There are missing plugins, let's try to handle them in the marketplace...
//
if ( marketPluginIsAvailable() ) {
handleMissingPluginsExceptionWithMarketplace( e );
}
}
}
/**
* Check to see if the market plugin is available.
*
* @return true if the market plugin is installed and ready, false if it is not.
*/
private boolean marketPluginIsAvailable() {
PluginInterface marketPlugin = findMarketPlugin();
return marketPlugin != null;
}
private PluginInterface findMarketPlugin() {
return PluginRegistry.getInstance().findPluginWithId( SpoonPluginType.class, "market" );
}
/**
* Shows a dialog listing the missing plugins, asking if you want to go into the marketplace
*
* @param missingPluginsException
* The missing plugins exception
*/
public void handleMissingPluginsExceptionWithMarketplace( KettleMissingPluginsException missingPluginsException ) {
try {
hideSplash();
MessageBox box = new MessageBox( shell, SWT.ICON_QUESTION | SWT.YES | SWT.NO );
box.setText( BaseMessages.getString( PKG, "Spoon.MissingPluginsFoundDialog.Title" ) );
box.setMessage( BaseMessages.getString(
PKG, "Spoon.MissingPluginsFoundDialog.Message", Const.CR, missingPluginsException.getPluginsMessage() ) );
int answer = box.open();
if ( ( answer & SWT.YES ) != 0 ) {
String controllerClassName = "org.pentaho.di.ui.spoon.dialog.MarketplaceController";
PluginInterface marketPlugin = findMarketPlugin();
ClassLoader classLoader = PluginRegistry.getInstance().getClassLoader( marketPlugin );
Class<?> controllerClass = classLoader.loadClass( controllerClassName );
Method method = controllerClass.getMethod( "showMarketPlaceDialog" );
method.invoke( null );
}
} catch ( Exception ex ) {
new ErrorDialog(
shell, BaseMessages.getString( PKG, "Spoon.ErrorShowingMarketplaceDialog.Title" ), BaseMessages
.getString( PKG, "Spoon.ErrorShowingMarketplaceDialog.Message" ), ex );
}
}
public PropsUI getProperties() {
return props;
}
/*
* public void newFileDropDown() { newFileDropDown(toolbar); }
*/
public void newFileDropDown() {
// Drop down a list below the "New" icon (new.png)
// First problem: where is that icon?
XulToolbarbutton button = (XulToolbarbutton) this.mainToolbar.getElementById( "file-new" );
Object object = button.getManagedObject();
if ( object instanceof ToolItem ) {
// OK, let's determine the location of this widget...
//
ToolItem item = (ToolItem) object;
Rectangle bounds = item.getBounds();
org.eclipse.swt.graphics.Point p =
item.getParent().toDisplay( new org.eclipse.swt.graphics.Point( bounds.x, bounds.y ) );
fileMenus.setLocation( p.x, p.y + bounds.height );
fileMenus.setVisible( true );
}
}
public void newTransFile() {
TransMeta transMeta = new TransMeta();
transMeta.addObserver( this );
// Set the variables that were previously defined in this session on the
// transformation metadata too.
//
setTransMetaVariables( transMeta );
// Pass repository information
//
transMeta.setRepository( rep );
transMeta.setMetaStore( metaStore );
try {
SharedObjects sharedObjects =
rep != null ? rep.readTransSharedObjects( transMeta ) : transMeta.readSharedObjects();
sharedObjectsFileMap.put( sharedObjects.getFilename(), sharedObjects );
transMeta.clearChanged();
} catch ( Exception e ) {
new ErrorDialog(
shell, BaseMessages.getString( PKG, "Spoon.Exception.ErrorReadingSharedObjects.Title" ), BaseMessages
.getString( PKG, "Spoon.Exception.ErrorReadingSharedObjects.Message" ), e );
}
// Set the location of the new transMeta to that of the default location or the last saved location
transMeta.setRepositoryDirectory( getDefaultSaveLocation( transMeta ) );
int nr = 1;
transMeta.setName( STRING_TRANSFORMATION + " " + nr );
// See if a transformation with the same name isn't already loaded...
//
while ( findTransformation( delegates.tabs.makeTabName( transMeta, false ) ) != null ) {
nr++;
transMeta.setName( STRING_TRANSFORMATION + " " + nr ); // rename
}
addTransGraph( transMeta );
applyVariables();
// switch to design mode...
//
if ( setDesignMode() ) {
// No refresh done yet, do so
refreshTree();
}
loadPerspective( MainSpoonPerspective.ID );
}
public void newJobFile() {
try {
JobMeta jobMeta = new JobMeta();
jobMeta.addObserver( this );
// Set the variables that were previously defined in this session on
// the job metadata too.
//
setJobMetaVariables( jobMeta );
// Pass repository information
//
jobMeta.setRepository( rep );
jobMeta.setMetaStore( metaStore );
try {
SharedObjects sharedObjects =
rep != null ? rep.readJobMetaSharedObjects( jobMeta ) : jobMeta.readSharedObjects();
sharedObjectsFileMap.put( sharedObjects.getFilename(), sharedObjects );
} catch ( KettleException e ) {
new ErrorDialog(
shell, BaseMessages.getString( PKG, "Spoon.Dialog.ErrorReadingSharedObjects.Title" ), BaseMessages
.getString( PKG, "Spoon.Dialog.ErrorReadingSharedObjects.Message", delegates.tabs.makeTabName(
jobMeta, true ) ), e );
}
// Set the location of the new jobMeta to that of the default location or the last saved location
jobMeta.setRepositoryDirectory( getDefaultSaveLocation( jobMeta ) );
int nr = 1;
jobMeta.setName( STRING_JOB + " " + nr );
// See if a job with the same name isn't already
// loaded...
while ( findJob( delegates.tabs.makeTabName( jobMeta, false ) ) != null ) {
nr++;
jobMeta.setName( STRING_JOB + " " + nr ); // rename
}
jobMeta.clearChanged();
addJobGraph( jobMeta );
applyVariables();
// switch to design mode...
//
if ( setDesignMode() ) {
// No refresh done yet, do so
refreshTree();
}
loadPerspective( MainSpoonPerspective.ID );
} catch ( Exception e ) {
new ErrorDialog(
shell, BaseMessages.getString( PKG, "Spoon.Exception.ErrorCreatingNewJob.Title" ), BaseMessages
.getString( PKG, "Spoon.Exception.ErrorCreatingNewJob.Message" ), e );
}
}
/**
* Set previously defined variables (set variables dialog) on the specified transformation
*
* @param transMeta transformation's meta
*/
public void setTransMetaVariables( TransMeta transMeta ) {
for ( int i = 0; i < variables.size(); i++ ) {
try {
String name = variables.getValueMeta( i ).getName();
String value = variables.getString( i, "" );
transMeta.setVariable( name, Const.NVL( value, "" ) );
} catch ( Exception e ) {
// Ignore the exception, it should never happen on a getString()
// anyway.
}
}
// Also set the parameters
//
setParametersAsVariablesInUI( transMeta, transMeta );
}
/**
* Set previously defined variables (set variables dialog) on the specified job
*
* @param jobMeta job's meta
*/
public void setJobMetaVariables( JobMeta jobMeta ) {
for ( int i = 0; i < variables.size(); i++ ) {
try {
String name = variables.getValueMeta( i ).getName();
String value = variables.getString( i, "" );
jobMeta.setVariable( name, Const.NVL( value, "" ) );
} catch ( Exception e ) {
// Ignore the exception, it should never happen on a getString()
// anyway.
}
}
// Also set the parameters
//
setParametersAsVariablesInUI( jobMeta, jobMeta );
}
public void loadRepositoryObjects( TransMeta transMeta ) {
// Load common database info from active repository...
if ( rep != null ) {
try {
SharedObjects sharedObjects = rep.readTransSharedObjects( transMeta );
sharedObjectsFileMap.put( sharedObjects.getFilename(), sharedObjects );
} catch ( Exception e ) {
new ErrorDialog(
shell, BaseMessages.getString( PKG, "Spoon.Error.UnableToLoadSharedObjects.Title" ), BaseMessages
.getString( PKG, "Spoon.Error.UnableToLoadSharedObjects.Message" ), e );
}
}
}
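/**
* Asks to save unsaved work, closes all tabs, stops running transformations, notifies the lifecycle listeners
* and exits the application.
*
* @param canCancel true if the user may cancel the exit
* @return true if the application is exiting, false if the exit was cancelled
* @throws KettleException in case saving or closing fails
*/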
public boolean quitFile( boolean canCancel ) throws KettleException {
if ( log.isDetailed() ) {
log.logDetailed( BaseMessages.getString( PKG, "Spoon.Log.QuitApplication" ) ); // "Quit application."
}
boolean exit = true;
saveSettings();
if ( props.showExitWarning() && canCancel ) {
// Display message: are you sure you want to exit?
//
MessageDialogWithToggle md =
new MessageDialogWithToggle( shell,
BaseMessages.getString( PKG, "System.Warning" ), // "Warning!"
null,
BaseMessages.getString( PKG, "Spoon.Message.Warning.PromptExit" ),
MessageDialog.WARNING, new String[] {
// "Yes",
BaseMessages.getString( PKG, "Spoon.Message.Warning.Yes" ),
// "No"
BaseMessages.getString( PKG, "Spoon.Message.Warning.No" )
}, 1,
// "Please, don't show this warning anymore."
BaseMessages.getString( PKG, "Spoon.Message.Warning.NotShowWarning" ),
!props.showExitWarning() );
MessageDialogWithToggle.setDefaultImage( GUIResource.getInstance().getImageSpoon() );
int idx = md.open();
props.setExitWarningShown( !md.getToggleState() );
props.saveProps();
if ( ( idx & 0xFF ) == 1 ) {
return false; // "No" was selected: don't exit!
}
}
// Check all tabs to see if we can close them...
//
List<TabMapEntry> list = delegates.tabs.getTabs();
for ( TabMapEntry mapEntry : list ) {
TabItemInterface itemInterface = mapEntry.getObject();
if ( !itemInterface.canBeClosed() ) {
// Show the tab
tabfolder.setSelected( mapEntry.getTabItem() );
// Unsaved work that needs to changes to be applied?
//
int reply = itemInterface.showChangedWarning();
if ( reply == SWT.YES ) {
exit = itemInterface.applyChanges();
} else {
if ( reply == SWT.CANCEL ) {
return false;
} else { // SWT.NO
exit = true;
}
}
}
}
if ( exit || !canCancel ) {
// we have asked about it all and we're still here. Now close
// all the tabs, stop the running transformations
for ( TabMapEntry mapEntry : list ) {
if ( !mapEntry.getObject().canBeClosed() ) {
// Unsaved transformation?
//
if ( mapEntry.getObject() instanceof TransGraph ) {
TransMeta transMeta = (TransMeta) mapEntry.getObject().getManagedObject();
if ( transMeta.hasChanged() ) {
delegates.tabs.removeTab( mapEntry );
}
}
// A running transformation?
//
if ( mapEntry.getObject() instanceof TransGraph ) {
TransGraph transGraph = (TransGraph) mapEntry.getObject();
if ( transGraph.isRunning() ) {
transGraph.stop();
delegates.tabs.removeTab( mapEntry );
}
}
}
}
}
// and now we call the listeners
try {
lifecycleSupport.onExit( this );
} catch ( LifecycleException e ) {
MessageBox box = new MessageBox( shell, SWT.ICON_ERROR | SWT.OK );
box.setMessage( e.getMessage() );
box.open();
}
if ( exit ) {
close();
}
return exit;
}
public boolean saveFile() {
try {
EngineMetaInterface meta = getActiveMeta();
if ( meta != null ) {
return saveToFile( meta );
}
} catch ( Exception e ) {
new ErrorDialog( shell, BaseMessages.getString( PKG, "Spoon.File.Save.Fail.Title" ), BaseMessages.getString(
PKG, "Spoon.File.Save.Fail.Message" ), e );
}
return false;
}
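/**
* Saves the given transformation or job, either to the connected repository or to a file, letting the active
* perspective handle the save when it implements its own open/save behaviour.
*
* @param meta the transformation or job to save
* @return true if the save succeeded, false otherwise
* @throws KettleException in case something goes wrong saving the shared objects
*/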
public boolean saveToFile( EngineMetaInterface meta ) throws KettleException {
if ( meta == null ) {
return false;
}
boolean saved = false;
if ( meta instanceof TransMeta ) {
( (TransMeta) meta ).setRepository( rep );
( (TransMeta) meta ).setMetaStore( metaStore );
}
if ( meta instanceof JobMeta ) {
( (JobMeta) meta ).setRepository( rep );
( (JobMeta) meta ).setMetaStore( metaStore );
}
if ( log.isDetailed() ) {
// "Save to file or repository...
log.logDetailed( BaseMessages.getString( PKG, "Spoon.Log.SaveToFileOrRepository" ) );
}
SpoonPerspective activePerspective = SpoonPerspectiveManager.getInstance().getActivePerspective();
// In case the perspective wants to handle open/save itself, let it...
//
if ( activePerspective instanceof SpoonPerspectiveOpenSaveInterface ) {
return ( (SpoonPerspectiveOpenSaveInterface) activePerspective ).save( meta );
}
String activePerspectiveId = activePerspective.getId();
boolean etlPerspective = activePerspectiveId.equals( MainSpoonPerspective.ID );
if ( rep != null && etlPerspective ) {
saved = saveToRepository( meta );
} else {
if ( meta.getFilename() != null ) {
saved = save( meta, meta.getFilename(), false );
} else {
if ( meta.canSave() ) {
saved = saveFileAs( meta );
}
}
}
meta.saveSharedObjects(); // throws Exception in case anything goes wrong
try {
if ( props.useDBCache() && meta instanceof TransMeta ) {
( (TransMeta) meta ).getDbCache().saveCache();
}
} catch ( KettleException e ) {
new ErrorDialog(
shell, BaseMessages.getString( PKG, "Spoon.Dialog.ErrorSavingDatabaseCache.Title" ),
// "An error occurred saving the database cache to disk"
BaseMessages.getString( PKG, "Spoon.Dialog.ErrorSavingDatabaseCache.Message" ), e );
}
delegates.tabs.renameTabs(); // filename or name of transformation might
// have changed.
refreshTree();
// Update menu status for the newly saved object
enableMenus();
return saved;
}
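  /**
   * Saves the given transformation or job to the repository, prompting for a name when it has not been saved to the
   * repository before.
   *
   * @param meta the transformation or job to save
   * @return true if the meta was saved successfully
   */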
public boolean saveToRepository( EngineMetaInterface meta ) throws KettleException {
return saveToRepository( meta, meta.getObjectId() == null );
}
  public boolean saveToRepository( EngineMetaInterface meta, boolean askName ) throws KettleException {
// Verify repository security first...
//
if ( meta.getFileType().equals( LastUsedFile.FILE_TYPE_TRANSFORMATION ) ) {
if ( RepositorySecurityUI.verifyOperations( shell, rep, RepositoryOperation.MODIFY_TRANSFORMATION ) ) {
return false;
}
}
if ( meta.getFileType().equals( LastUsedFile.FILE_TYPE_JOB ) ) {
if ( RepositorySecurityUI.verifyOperations( shell, rep, RepositoryOperation.MODIFY_JOB ) ) {
return false;
}
}
if ( log.isDetailed() ) {
// "Save to repository..."
//
log.logDetailed( BaseMessages.getString( PKG, "Spoon.Log.SaveToRepository" ) );
}
if ( rep != null ) {
boolean answer = true;
      boolean ask = askName;
// If the repository directory is root then get the default save directory
if ( meta.getRepositoryDirectory() == null || meta.getRepositoryDirectory().isRoot() ) {
meta.setRepositoryDirectory( rep.getDefaultSaveDirectory( meta ) );
}
while ( answer && ( ask || Const.isEmpty( meta.getName() ) ) ) {
if ( !ask ) {
MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_WARNING );
// "Please give this transformation a name before saving it in the database."
mb.setMessage( BaseMessages.getString( PKG, "Spoon.Dialog.PromptTransformationName.Message" ) );
// "Transformation has no name."
mb.setText( BaseMessages.getString( PKG, "Spoon.Dialog.PromptTransformationName.Title" ) );
mb.open();
}
ask = false;
if ( meta instanceof TransMeta ) {
answer = TransGraph.editProperties( (TransMeta) meta, this, rep, false );
}
if ( meta instanceof JobMeta ) {
answer = JobGraph.editProperties( (JobMeta) meta, this, rep, false );
}
}
if ( answer && !Const.isEmpty( meta.getName() ) ) {
int response = SWT.YES;
ObjectId existingId = null;
if ( meta instanceof TransMeta ) {
existingId = rep.getTransformationID( meta.getName(), meta.getRepositoryDirectory() );
}
if ( meta instanceof JobMeta ) {
existingId = rep.getJobId( meta.getName(), meta.getRepositoryDirectory() );
}
// If there is no object id (import from XML) and there is an existing object.
//
// or...
//
// If the transformation/job has an object id and it's different from the one in the repository.
//
if ( ( meta.getObjectId() == null && existingId != null )
|| existingId != null && !meta.getObjectId().equals( existingId ) ) {
// In case we support revisions, we can simply overwrite
// without a problem so we simply don't ask.
// However, if we import from a file we should ask.
//
if ( !rep.getRepositoryMeta().getRepositoryCapabilities().supportsRevisions()
|| meta.getObjectId() == null ) {
MessageBox mb = new MessageBox( shell, SWT.YES | SWT.NO | SWT.ICON_QUESTION );
// There already is a transformation called ... in the repository.
// Do you want to overwrite the transformation?
//
mb.setMessage( BaseMessages.getString( PKG, "Spoon.Dialog.PromptOverwriteTransformation.Message", meta
.getName(), Const.CR ) );
mb.setText( BaseMessages.getString( PKG, "Spoon.Dialog.PromptOverwriteTransformation.Title" ) );
response = mb.open();
}
}
boolean saved = false;
if ( response == SWT.YES ) {
if ( meta.getObjectId() == null ) {
meta.setObjectId( existingId );
}
try {
shell.setCursor( cursor_hourglass );
// Keep info on who & when this transformation was
// created and or modified...
if ( meta.getCreatedDate() == null ) {
meta.setCreatedDate( new Date() );
if ( capabilities.supportsUsers() ) {
meta.setCreatedUser( rep.getUserInfo().getLogin() );
}
}
// Keep info on who & when this transformation was
// changed...
meta.setModifiedDate( new Date() );
if ( capabilities.supportsUsers() ) {
meta.setModifiedUser( rep.getUserInfo().getLogin() );
}
// Finally before saving, ask for a version comment (if
// applicable)
//
String versionComment = null;
boolean versionOk = false;
while ( !versionOk ) {
versionComment = RepositorySecurityUI.getVersionComment( shell, rep, meta.getName() );
// if the version comment is null, the user hit cancel, exit.
if ( rep != null
&& rep.getSecurityProvider() != null && rep.getSecurityProvider().allowsVersionComments()
&& versionComment == null ) {
return false;
}
if ( Const.isEmpty( versionComment ) && rep.getSecurityProvider().isVersionCommentMandatory() ) {
if ( !RepositorySecurityUI.showVersionCommentMandatoryDialog( shell ) ) {
return false; // no, I don't want to enter a
// version comment and yes,
// it's mandatory.
}
} else {
versionOk = true;
}
}
if ( versionOk ) {
SaveProgressDialog spd = new SaveProgressDialog( shell, rep, meta, versionComment );
if ( spd.open() ) {
saved = true;
if ( !props.getSaveConfirmation() ) {
MessageDialogWithToggle md =
new MessageDialogWithToggle(
shell, BaseMessages.getString( PKG, "Spoon.Message.Warning.SaveOK" ), null, BaseMessages
.getString( PKG, "Spoon.Message.Warning.TransformationWasStored" ),
MessageDialog.QUESTION, new String[] {
BaseMessages.getString( PKG, "Spoon.Message.Warning.OK" ) },
0,
BaseMessages.getString( PKG, "Spoon.Message.Warning.NotShowThisMessage" ),
props.getSaveConfirmation() );
MessageDialogWithToggle.setDefaultImage( GUIResource.getInstance().getImageSpoon() );
md.open();
props.setSaveConfirmation( md.getToggleState() );
}
// Handle last opened files...
props.addLastFile(
meta.getFileType(), meta.getName(), meta.getRepositoryDirectory().getPath(), true,
getRepositoryName() );
saveSettings();
addMenuLast();
setShellText();
}
}
} finally {
shell.setCursor( null );
}
}
return saved;
}
} else {
MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR );
// "There is no repository connection available."
mb.setMessage( BaseMessages.getString( PKG, "Spoon.Dialog.NoRepositoryConnection.Message" ) );
// "No repository available."
mb.setText( BaseMessages.getString( PKG, "Spoon.Dialog.NoRepositoryConnection.Title" ) );
mb.open();
}
return false;
}
public boolean saveJobRepository( JobMeta jobMeta ) throws KettleException {
return saveToRepository( jobMeta, false );
}
  public boolean saveJobRepository( JobMeta jobMeta, boolean askName ) throws KettleException {
    return saveToRepository( jobMeta, askName );
}
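  /**
   * Saves the currently active transformation or job under a new name (or repository location), showing an error
   * dialog when saving fails.
   *
   * @return true if the file was saved successfully
   */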
public boolean saveFileAs() throws KettleException {
try {
EngineMetaInterface meta = getActiveMeta();
if ( meta != null ) {
if ( meta.canSave() ) {
return saveFileAs( meta );
}
}
} catch ( Exception e ) {
new ErrorDialog( shell, BaseMessages.getString( PKG, "Spoon.File.Save.Fail.Title" ), BaseMessages.getString(
PKG, "Spoon.File.Save.Fail.Message" ), e );
}
return false;
}
public boolean saveFileAs( EngineMetaInterface meta ) throws KettleException {
boolean saved;
if ( log.isBasic() ) {
log.logBasic( BaseMessages.getString( PKG, "Spoon.Log.SaveAs" ) ); // "Save as..."
}
String activePerspectiveId = SpoonPerspectiveManager.getInstance().getActivePerspective().getId();
boolean etlPerspective = activePerspectiveId.equals( MainSpoonPerspective.ID );
if ( rep != null && etlPerspective ) {
meta.setObjectId( null );
saved = saveToRepository( meta, true );
} else {
saved = saveXMLFile( meta, false );
}
delegates.tabs.renameTabs(); // filename or name of transformation might
// have changed.
refreshTree();
if ( saved && ( meta instanceof TransMeta || meta instanceof JobMeta ) ) {
TabMapEntry tabEntry = delegates.tabs.findTabMapEntry( meta );
TabItem tabItem = tabEntry.getTabItem();
if ( meta.getFileType().equals( LastUsedFile.FILE_TYPE_TRANSFORMATION ) ) {
tabItem.setImage( GUIResource.getInstance().getImageTransGraph() );
} else if ( meta.getFileType().equals( LastUsedFile.FILE_TYPE_JOB ) ) {
tabItem.setImage( GUIResource.getInstance().getImageJobGraph() );
}
}
// Update menu status for the newly saved object
enableMenus();
return saved;
}
public boolean exportXMLFile() {
return saveXMLFile( true );
}
/**
* Export this job or transformation including all depending resources to a single zip file.
*/
public void exportAllXMLFile() {
ResourceExportInterface resourceExportInterface = getActiveTransformation();
if ( resourceExportInterface == null ) {
resourceExportInterface = getActiveJob();
}
if ( resourceExportInterface == null ) {
return; // nothing to do here, prevent an NPE
}
// ((VariableSpace)resourceExportInterface).getVariable("Internal.Transformation.Filename.Directory");
// Ask the user for a zip file to export to:
//
try {
String zipFilename = null;
while ( Const.isEmpty( zipFilename ) ) {
FileDialog dialog = new FileDialog( shell, SWT.SAVE );
dialog.setText( BaseMessages.getString( PKG, "Spoon.ExportResourceSelectZipFile" ) );
dialog.setFilterExtensions( new String[] { "*.zip;*.ZIP", "*" } );
dialog.setFilterNames( new String[] {
BaseMessages.getString( PKG, "System.FileType.ZIPFiles" ),
BaseMessages.getString( PKG, "System.FileType.AllFiles" ), } );
setFilterPath( dialog );
if ( dialog.open() != null ) {
lastDirOpened = dialog.getFilterPath();
zipFilename = dialog.getFilterPath() + Const.FILE_SEPARATOR + dialog.getFileName();
FileObject zipFileObject = KettleVFS.getFileObject( zipFilename );
if ( zipFileObject.exists() ) {
MessageBox box = new MessageBox( shell, SWT.YES | SWT.NO | SWT.CANCEL );
box
.setMessage( BaseMessages
.getString( PKG, "Spoon.ExportResourceZipFileExists.Message", zipFilename ) );
box.setText( BaseMessages.getString( PKG, "Spoon.ExportResourceZipFileExists.Title" ) );
int answer = box.open();
if ( answer == SWT.CANCEL ) {
return;
}
if ( answer == SWT.NO ) {
zipFilename = null;
}
}
} else {
return;
}
}
// Export the resources linked to the currently loaded file...
//
TopLevelResource topLevelResource =
ResourceUtil.serializeResourceExportInterface(
zipFilename, resourceExportInterface, (VariableSpace) resourceExportInterface, rep, metaStore );
String message =
ResourceUtil.getExplanation( zipFilename, topLevelResource.getResourceName(), resourceExportInterface );
/*
* // Add the ZIP file as a repository to the repository list... // RepositoriesMeta repositoriesMeta = new
* RepositoriesMeta(); repositoriesMeta.readData();
*
* KettleFileRepositoryMeta fileRepositoryMeta = new KettleFileRepositoryMeta(
* KettleFileRepositoryMeta.REPOSITORY_TYPE_ID, "Export " + baseFileName, "Export to file : " + zipFilename,
* "zip://" + zipFilename + "!"); fileRepositoryMeta.setReadOnly(true); // A ZIP file is read-only int nr = 2;
* String baseName = fileRepositoryMeta.getName(); while
* (repositoriesMeta.findRepository(fileRepositoryMeta.getName()) != null) { fileRepositoryMeta.setName(baseName +
* " " + nr); nr++; }
*
* repositoriesMeta.addRepository(fileRepositoryMeta); repositoriesMeta.writeData();
*/
// Show some information concerning all this work...
EnterTextDialog enterTextDialog =
new EnterTextDialog(
shell, BaseMessages.getString( PKG, "Spoon.Dialog.ResourceSerialized" ), BaseMessages.getString(
PKG, "Spoon.Dialog.ResourceSerializedSuccesfully" ), message );
enterTextDialog.setReadOnly();
enterTextDialog.open();
} catch ( Exception e ) {
new ErrorDialog( shell, BaseMessages.getString( PKG, "Spoon.Error" ), BaseMessages.getString(
PKG, "Spoon.ErrorExportingFile" ), e );
}
}
/**
* Export this job or transformation including all depending resources to a single ZIP file containing a file
* repository.
*/
public void exportAllFileRepository() {
ResourceExportInterface resourceExportInterface = getActiveTransformation();
if ( resourceExportInterface == null ) {
resourceExportInterface = getActiveJob();
}
if ( resourceExportInterface == null ) {
return; // nothing to do here, prevent an NPE
}
// Ask the user for a zip file to export to:
//
try {
String zipFilename = null;
while ( Const.isEmpty( zipFilename ) ) {
FileDialog dialog = new FileDialog( shell, SWT.SAVE );
dialog.setText( BaseMessages.getString( PKG, "Spoon.ExportResourceSelectZipFile" ) );
dialog.setFilterExtensions( new String[] { "*.zip;*.ZIP", "*" } );
dialog.setFilterNames( new String[] {
BaseMessages.getString( PKG, "System.FileType.ZIPFiles" ),
BaseMessages.getString( PKG, "System.FileType.AllFiles" ), } );
setFilterPath( dialog );
if ( dialog.open() != null ) {
lastDirOpened = dialog.getFilterPath();
zipFilename = dialog.getFilterPath() + Const.FILE_SEPARATOR + dialog.getFileName();
FileObject zipFileObject = KettleVFS.getFileObject( zipFilename );
if ( zipFileObject.exists() ) {
MessageBox box = new MessageBox( shell, SWT.YES | SWT.NO | SWT.CANCEL );
box
.setMessage( BaseMessages
.getString( PKG, "Spoon.ExportResourceZipFileExists.Message", zipFilename ) );
box.setText( BaseMessages.getString( PKG, "Spoon.ExportResourceZipFileExists.Title" ) );
int answer = box.open();
if ( answer == SWT.CANCEL ) {
return;
}
if ( answer == SWT.NO ) {
zipFilename = null;
}
}
} else {
return;
}
}
// Export the resources linked to the currently loaded file...
//
TopLevelResource topLevelResource =
ResourceUtil.serializeResourceExportInterface(
zipFilename, resourceExportInterface, (VariableSpace) resourceExportInterface, rep, metaStore );
String message =
ResourceUtil.getExplanation( zipFilename, topLevelResource.getResourceName(), resourceExportInterface );
/*
* // Add the ZIP file as a repository to the repository list... // RepositoriesMeta repositoriesMeta = new
* RepositoriesMeta(); repositoriesMeta.readData();
*
* KettleFileRepositoryMeta fileRepositoryMeta = new KettleFileRepositoryMeta(
* KettleFileRepositoryMeta.REPOSITORY_TYPE_ID, "Export " + baseFileName, "Export to file : " + zipFilename,
* "zip://" + zipFilename + "!"); fileRepositoryMeta.setReadOnly(true); // A ZIP file is read-only int nr = 2;
* String baseName = fileRepositoryMeta.getName(); while
* (repositoriesMeta.findRepository(fileRepositoryMeta.getName()) != null) { fileRepositoryMeta.setName(baseName +
* " " + nr); nr++; }
*
* repositoriesMeta.addRepository(fileRepositoryMeta); repositoriesMeta.writeData();
*/
// Show some information concerning all this work...
//
EnterTextDialog enterTextDialog =
new EnterTextDialog(
shell, BaseMessages.getString( PKG, "Spoon.Dialog.ResourceSerialized" ), BaseMessages.getString(
PKG, "Spoon.Dialog.ResourceSerializedSuccesfully" ), message );
enterTextDialog.setReadOnly();
enterTextDialog.open();
} catch ( Exception e ) {
new ErrorDialog( shell, BaseMessages.getString( PKG, "Spoon.Error" ), BaseMessages.getString(
PKG, "Spoon.ErrorExportingFile" ), e );
}
}
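  /**
   * Exports the complete repository to an XML file chosen by the user.
   */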
public void exportRepositoryAll() {
exportRepositoryDirectory( null );
}
/**
* @param directoryToExport
* set to null to export the complete repository
* @return false if we want to stop processing. true if we need to continue.
*/
public boolean exportRepositoryDirectory( RepositoryDirectory directoryToExport ) {
FileDialog dialog = this.getExportFileDialog();
if ( dialog.open() == null ) {
return false;
}
String filename = dialog.getFilterPath() + Const.FILE_SEPARATOR + dialog.getFileName();
log.logBasic( BaseMessages.getString( PKG, "Spoon.Log.Exporting" ), BaseMessages.getString(
PKG, "Spoon.Log.ExportObjectsToFile", filename ) );
    // check if the file already exists
MessageBox box = RepositoryExportProgressDialog.checkIsFileIsAcceptable( shell, log, filename );
int answer = ( box == null ) ? SWT.OK : box.open();
if ( answer != SWT.OK ) {
      // it seems the user doesn't want to overwrite the file...
return false;
}
    // OK, let's show one more modal dialog; users like modal dialogs.
    // They feel that their opinions are important to us.
box =
new MessageBox( shell, SWT.ICON_QUESTION
| SWT.APPLICATION_MODAL | SWT.SHEET | SWT.YES | SWT.NO | SWT.CANCEL );
box.setText( BaseMessages.getString( PKG, "Spoon.QuestionApplyImportRulesToExport.Title" ) );
box.setMessage( BaseMessages.getString( PKG, "Spoon.QuestionApplyImportRulesToExport.Message" ) );
answer = box.open();
if ( answer == SWT.CANCEL ) {
return false;
}
// Get the import rules
//
ImportRules importRules = new ImportRules();
if ( answer == SWT.YES ) {
ImportRulesDialog importRulesDialog = new ImportRulesDialog( shell, importRules );
if ( !importRulesDialog.open() ) {
return false;
}
}
RepositoryExportProgressDialog repd =
new RepositoryExportProgressDialog( shell, rep, directoryToExport, filename, importRules );
repd.open();
return true;
}
  /**
   * Local method to be able to use Spoon localization messages.
   *
   * @return the file dialog used to select an XML file to export to
   */
public FileDialog getExportFileDialog() {
FileDialog dialog = new FileDialog( shell, SWT.SAVE | SWT.SINGLE );
dialog.setText( BaseMessages.getString( PKG, "Spoon.SelectAnXMLFileToExportTo.Message" ) );
return dialog;
}
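  /**
   * Imports one or more exported XML files into the connected repository, optionally applying a set of import rules
   * and asking for a destination directory and a version comment.
   */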
public void importDirectoryToRepository() {
FileDialog dialog = new FileDialog( shell, SWT.OPEN | SWT.MULTI );
dialog.setText( BaseMessages.getString( PKG, "Spoon.SelectAnXMLFileToImportFrom.Message" ) );
if ( dialog.open() == null ) {
return;
}
// Ask for a set of import rules
//
MessageBox box =
new MessageBox( shell, SWT.ICON_QUESTION
| SWT.APPLICATION_MODAL | SWT.SHEET | SWT.YES | SWT.NO | SWT.CANCEL );
box.setText( BaseMessages.getString( PKG, "Spoon.QuestionApplyImportRules.Title" ) );
box.setMessage( BaseMessages.getString( PKG, "Spoon.QuestionApplyImportRules.Message" ) );
int answer = box.open();
if ( answer == SWT.CANCEL ) {
return;
}
// Get the import rules
//
ImportRules importRules = new ImportRules();
if ( answer == SWT.YES ) {
ImportRulesDialog importRulesDialog = new ImportRulesDialog( shell, importRules );
if ( !importRulesDialog.open() ) {
return;
}
}
// Ask for a destination in the repository...
//
SelectDirectoryDialog sdd = new SelectDirectoryDialog( shell, SWT.NONE, rep );
RepositoryDirectoryInterface baseDirectory = sdd.open();
if ( baseDirectory == null ) {
return;
}
// Finally before importing, ask for a version comment (if applicable)
//
String versionComment = null;
boolean versionOk = false;
while ( !versionOk ) {
versionComment =
RepositorySecurityUI.getVersionComment( shell, rep, "Import of files into ["
+ baseDirectory.getPath() + "]" );
// if the version comment is null, the user hit cancel, exit.
if ( rep != null
&& rep.getSecurityProvider() != null && rep.getSecurityProvider().allowsVersionComments()
&& versionComment == null ) {
return;
}
if ( Const.isEmpty( versionComment ) && rep.getSecurityProvider().isVersionCommentMandatory() ) {
if ( !RepositorySecurityUI.showVersionCommentMandatoryDialog( shell ) ) {
versionOk = true;
}
} else {
versionOk = true;
}
}
String[] filenames = dialog.getFileNames();
if ( filenames.length > 0 ) {
RepositoryImportProgressDialog ripd =
new RepositoryImportProgressDialog(
shell, SWT.NONE, rep, dialog.getFilterPath(), filenames, baseDirectory, versionComment, importRules );
ripd.open();
refreshTree();
}
}
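  /**
   * Saves or exports the currently active transformation or job to an XML file.
   *
   * @param export true when the file is being exported rather than saved
   * @return true if the file was saved successfully
   */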
public boolean saveXMLFile( boolean export ) {
TransMeta transMeta = getActiveTransformation();
if ( transMeta != null ) {
return saveXMLFile( transMeta, export );
}
JobMeta jobMeta = getActiveJob();
if ( jobMeta != null ) {
return saveXMLFile( jobMeta, export );
}
return false;
}
public boolean saveXMLFile( EngineMetaInterface meta, boolean export ) {
if ( log.isBasic() ) {
log.logBasic( "Save file as..." );
}
boolean saved = false;
String beforeFilename = meta.getFilename();
String beforeName = meta.getName();
FileDialog dialog = new FileDialog( shell, SWT.SAVE );
String[] extensions = meta.getFilterExtensions();
dialog.setFilterExtensions( extensions );
dialog.setFilterNames( meta.getFilterNames() );
setFilterPath( dialog );
String filename = dialog.open();
if ( filename != null ) {
lastDirOpened = dialog.getFilterPath();
      // Does the filename end in .ktr or .xml?
boolean ending = false;
for ( int i = 0; i < extensions.length - 1; i++ ) {
String[] parts = extensions[i].split( ";" );
for ( String part : parts ) {
if ( filename.toLowerCase().endsWith( part.substring( 1 ).toLowerCase() ) ) {
ending = true;
}
}
}
if ( filename.endsWith( meta.getDefaultExtension() ) ) {
ending = true;
}
if ( !ending ) {
if ( !meta.getDefaultExtension().startsWith( "." ) && !filename.endsWith( "." ) ) {
filename += ".";
}
filename += meta.getDefaultExtension();
}
// See if the file already exists...
int id = SWT.YES;
try {
FileObject f = KettleVFS.getFileObject( filename );
if ( f.exists() ) {
MessageBox mb = new MessageBox( shell, SWT.NO | SWT.YES | SWT.ICON_WARNING );
// "This file already exists. Do you want to overwrite it?"
mb.setMessage( BaseMessages.getString( PKG, "Spoon.Dialog.PromptOverwriteFile.Message" ) );
// "This file already exists!"
mb.setText( BaseMessages.getString( PKG, "Spoon.Dialog.PromptOverwriteFile.Title" ) );
id = mb.open();
}
} catch ( Exception e ) {
// TODO do we want to show an error dialog here? My first guess
// is not, but we might.
}
if ( id == SWT.YES ) {
if ( !export && !Const.isEmpty( beforeFilename ) && !beforeFilename.equals( filename ) ) {
meta.setName( Const.createName( filename ) );
meta.setFilename( filename );
// If the user hits cancel here, don't save anything
//
if ( !editProperties() ) {
// Revert the changes!
//
meta.setFilename( beforeFilename );
meta.setName( beforeName );
return saved;
}
}
saved = save( meta, filename, export );
if ( !saved ) {
meta.setFilename( beforeFilename );
meta.setName( beforeName );
}
}
}
return saved;
}
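  /**
   * Saves the currently active transformation or job to a location selected through the VFS file chooser dialog.
   *
   * @return true if the file was saved successfully
   */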
public boolean saveXMLFileToVfs() {
TransMeta transMeta = getActiveTransformation();
if ( transMeta != null ) {
return saveXMLFileToVfs( transMeta );
}
JobMeta jobMeta = getActiveJob();
if ( jobMeta != null ) {
return saveXMLFileToVfs( jobMeta );
}
return false;
}
public boolean saveXMLFileToVfs( EngineMetaInterface meta ) {
if ( log.isBasic() ) {
log.logBasic( "Save file as..." );
}
FileObject rootFile;
FileObject initialFile;
try {
initialFile = KettleVFS.getFileObject( getLastFileOpened() );
rootFile = KettleVFS.getFileObject( getLastFileOpened() ).getFileSystem().getRoot();
} catch ( Exception e ) {
MessageBox messageDialog = new MessageBox( shell, SWT.ICON_ERROR | SWT.OK );
messageDialog.setText( "Error" );
messageDialog.setMessage( e.getMessage() );
messageDialog.open();
return false;
}
String filename = null;
FileObject selectedFile =
getVfsFileChooserDialog( rootFile, initialFile ).open(
shell, "Untitled", Const.STRING_TRANS_AND_JOB_FILTER_EXT, Const.getTransformationAndJobFilterNames(),
VfsFileChooserDialog.VFS_DIALOG_SAVEAS );
if ( selectedFile != null ) {
filename = selectedFile.getName().getFriendlyURI();
}
String[] extensions = meta.getFilterExtensions();
if ( filename != null ) {
      // Does the filename end in .ktr or .xml?
boolean ending = false;
for ( int i = 0; i < extensions.length - 1; i++ ) {
if ( filename.endsWith( extensions[i].substring( 1 ) ) ) {
ending = true;
}
}
if ( filename.endsWith( meta.getDefaultExtension() ) ) {
ending = true;
}
if ( !ending ) {
filename += '.' + meta.getDefaultExtension();
}
// See if the file already exists...
int id = SWT.YES;
try {
FileObject f = KettleVFS.getFileObject( filename );
if ( f.exists() ) {
MessageBox mb = new MessageBox( shell, SWT.NO | SWT.YES | SWT.ICON_WARNING );
// "This file already exists. Do you want to overwrite it?"
mb.setMessage( BaseMessages.getString( PKG, "Spoon.Dialog.PromptOverwriteFile.Message" ) );
mb.setText( BaseMessages.getString( PKG, "Spoon.Dialog.PromptOverwriteFile.Title" ) );
id = mb.open();
}
} catch ( Exception e ) {
// TODO do we want to show an error dialog here? My first guess
// is not, but we might.
}
if ( id == SWT.YES ) {
        return save( meta, filename, false );
}
}
return false;
}
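  /**
   * Saves the given transformation or job to the specified filename, delegating the actual work to the FileListener
   * that is registered for the file extension.
   *
   * @param meta the transformation or job to save
   * @param filename the target filename
   * @param export true when the file is being exported rather than saved
   * @return true if the file was saved successfully
   */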
public boolean save( EngineMetaInterface meta, String filename, boolean export ) {
boolean saved = false;
    // Only file types other than transformations and jobs are subject to the ASCII-only filename rule
boolean isNotTransOrJob =
!LastUsedFile.FILE_TYPE_TRANSFORMATION.equals( meta.getFileType() )
&& !LastUsedFile.FILE_TYPE_JOB.equals( meta.getFileType() );
if ( isNotTransOrJob ) {
Pattern pattern = Pattern.compile( "\\p{ASCII}+" );
Matcher matcher = pattern.matcher( filename );
if ( !matcher.matches() ) {
/*
* Temporary fix for AGILEBI-405 Don't allow saving of files that contain special characters until AGILEBI-394
* is resolved. AGILEBI-394 Naming an analyzer report with spanish accents gives error when publishing.
*/
MessageBox box = new MessageBox( staticSpoon.shell, SWT.ICON_ERROR | SWT.OK );
box.setMessage( "Special characters are not allowed in the filename. Please use ASCII characters only" );
box.setText( BaseMessages.getString( PKG, "Spoon.Dialog.ErrorSavingConnection.Title" ) );
box.open();
return false;
}
}
FileListener listener = null;
// match by extension first
int idx = filename.lastIndexOf( '.' );
if ( idx != -1 ) {
String extension = filename.substring( idx + 1 );
listener = fileExtensionMap.get( extension );
}
if ( listener == null ) {
String xt = meta.getDefaultExtension();
listener = fileExtensionMap.get( xt );
}
if ( listener != null ) {
String sync = BasePropertyHandler.getProperty( SYNC_TRANS );
if ( Boolean.parseBoolean( sync ) ) {
listener.syncMetaName( meta, Const.createName( filename ) );
delegates.tabs.renameTabs();
}
saved = listener.save( meta, filename, export );
}
return saved;
}
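  /**
   * Writes the XML representation of the given transformation or job directly to the specified filename and updates
   * the last opened file list, the shell title and the changed flag.
   *
   * @param meta the transformation or job to save
   * @param filename the target filename
   * @return true if the file was written successfully
   */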
public boolean saveMeta( EngineMetaInterface meta, String filename ) {
meta.setFilename( filename );
if ( Const.isEmpty( meta.getName() )
|| delegates.jobs.isDefaultJobName( meta.getName() )
|| delegates.trans.isDefaultTransformationName( meta.getName() ) ) {
meta.nameFromFilename();
}
boolean saved = false;
try {
String xml = XMLHandler.getXMLHeader() + meta.getXML();
DataOutputStream dos = new DataOutputStream( KettleVFS.getOutputStream( filename, false ) );
dos.write( xml.getBytes( Const.XML_ENCODING ) );
dos.close();
saved = true;
// Handle last opened files...
props.addLastFile( meta.getFileType(), filename, null, false, null );
saveSettings();
addMenuLast();
if ( log.isDebug() ) {
        // "File written to [filename]"
        log.logDebug( BaseMessages.getString( PKG, "Spoon.Log.FileWritten" ) + " [" + filename + "]" );
      }
meta.setFilename( filename );
meta.clearChanged();
setShellText();
} catch ( Exception e ) {
if ( log.isDebug() ) {
// "Error opening file for writing! --> "
log.logDebug( BaseMessages.getString( PKG, "Spoon.Log.ErrorOpeningFileForWriting" ) + e.toString() );
}
new ErrorDialog( shell, BaseMessages.getString( PKG, "Spoon.Dialog.ErrorSavingFile.Title" ),
BaseMessages.getString( PKG, "Spoon.Dialog.ErrorSavingFile.Message" )
+ Const.CR + e.toString(), e );
}
return saved;
}
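  /**
   * Shows the "About" message box with product, version, build and copyright information.
   */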
public void helpAbout() {
MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_INFORMATION | SWT.CENTER | SWT.SHEET );
String releaseText = Const.RELEASE.getMessage();
StringBuilder messageBuilder = new StringBuilder();
BuildVersion buildVersion = BuildVersion.getInstance();
    // buildVersionInfo corresponds to
// ${release.major.number}.${release.minor.number}.${release.milestone.number}.${build.id}
String buildVersionInfo = buildVersion.getVersion();
if ( Const.isEmpty( buildVersionInfo ) ) {
buildVersionInfo = "Unknown";
}
// suppose buildVersion consists of releaseInfo and commit id
String releaseInfo = "";
String buildStatus = "";
// build the result message
messageBuilder.append( BaseMessages.getString( PKG, "System.ProductInfo" ) );
messageBuilder.append( releaseText );
messageBuilder.append( " - " );
    // Regex matching a string that contains a 40-character git checksum hash
    String containingChecksumRegex = ".+\\b([a-f0-9]{40})\\b";
    // check if the string contains a VCS checksum
if ( !buildVersionInfo.matches( containingChecksumRegex ) ) {
releaseInfo = buildVersionInfo;
} else {
      // These steps will no longer be needed once the commit ID is stored as a separate string in the manifest file
      // of the kettle-engine jar
      String[] buildVersionInfoElts = buildVersionInfo.split( "\\." );
      int elementCount = buildVersionInfoElts.length;
      for ( int i = 0; i < elementCount; i++ ) {
        String currentElement = buildVersionInfoElts[i];
        // check if the current element is the VCS checksum
        if ( currentElement.length() != 40 ) {
          releaseInfo += currentElement + ".";
        } else {
          buildStatus = currentElement;
        }
      }
      // delete the trailing dot
      releaseInfo = releaseInfo.substring( 0, releaseInfo.length() - 1 );
}
messageBuilder.append( releaseInfo );
messageBuilder.append( Const.CR );
messageBuilder.append( Const.CR );
messageBuilder.append( Const.CR );
messageBuilder.append( BaseMessages.getString( PKG, "System.CompanyInfo", Const.COPYRIGHT_YEAR ) );
messageBuilder.append( Const.CR );
messageBuilder.append( BaseMessages.getString( PKG, "System.ProductWebsiteUrl" ) );
messageBuilder.append( Const.CR );
messageBuilder.append( Const.CR );
messageBuilder.append( Const.CR );
messageBuilder.append( "Build version : " );
messageBuilder.append( releaseInfo );
if ( !buildStatus.isEmpty() ) {
messageBuilder.append( Const.CR );
messageBuilder.append( "Commit ID : " );
messageBuilder.append( buildStatus );
}
messageBuilder.append( Const.CR );
messageBuilder.append( "Build date : " );
String inputStringDate = buildVersion.getBuildDate();
String outputStringDate = "";
SimpleDateFormat inputFormat = null;
    SimpleDateFormat outputFormat = null;
    if ( inputStringDate.matches( "^\\d{4}/\\d{1,2}/\\d{1,2}\\s\\d{1,2}:\\d{2}:\\d{2}.\\d{3}$" ) ) {
      inputFormat = new SimpleDateFormat( "yyyy/MM/dd hh:mm:ss.SSS" );
    }
    if ( inputStringDate.matches( "^\\d{4}-\\d{1,2}-\\d{1,2}\\_\\d{1,2}-\\d{2}-\\d{2}$" ) ) {
      inputFormat = new SimpleDateFormat( "yyyy-MM-dd_hh-mm-ss" );
    }
    if ( inputStringDate.matches( "^\\d{4}-\\d{1,2}-\\d{1,2}\\s\\d{1,2}.\\d{2}.\\d{2}$" ) ) {
      inputFormat = new SimpleDateFormat( "yyyy-MM-dd hh.mm.ss" );
    }
    outputFormat = new SimpleDateFormat( "MMMM d, yyyy hh:mm:ss" );
    try {
      if ( inputFormat != null ) {
        Date date = inputFormat.parse( inputStringDate );
        outputStringDate = outputFormat.format( date );
      } else {
        // If the date doesn't match any of the formats above, just show it in its original format
        outputStringDate = inputStringDate;
      }
    } catch ( ParseException e ) {
      // Just show the date in its original format
      outputStringDate = inputStringDate;
}
messageBuilder.append( outputStringDate );
// set the text in the message box
mb.setMessage( messageBuilder.toString() );
mb.setText( APP_NAME );
// now open the message box
mb.open();
}
/**
* Show a plugin browser
*/
public void showPluginInfo() {
try {
// First we collect information concerning all the plugin types...
//
Map<String, RowMetaInterface> metaMap = new HashMap<String, RowMetaInterface>();
Map<String, List<Object[]>> dataMap = new HashMap<String, List<Object[]>>();
PluginRegistry registry = PluginRegistry.getInstance();
List<Class<? extends PluginTypeInterface>> pluginTypeClasses = registry.getPluginTypes();
for ( Class<? extends PluginTypeInterface> pluginTypeClass : pluginTypeClasses ) {
PluginTypeInterface pluginTypeInterface = registry.getPluginType( pluginTypeClass );
String subject = pluginTypeInterface.getName();
RowBuffer pluginInformation = registry.getPluginInformation( pluginTypeClass );
metaMap.put( subject, pluginInformation.getRowMeta() );
dataMap.put( subject, pluginInformation.getBuffer() );
}
// Now push it all to a subject data browser...
//
SubjectDataBrowserDialog dialog =
new SubjectDataBrowserDialog( shell, metaMap, dataMap, "Plugin browser", "Plugin type" );
dialog.open();
} catch ( Exception e ) {
new ErrorDialog( shell, "Error", "Error listing plugins", e );
}
}
public void editUnselectAll() {
TransMeta transMeta = getActiveTransformation();
if ( transMeta != null ) {
transMeta.unselectAll();
getActiveTransGraph().redraw();
}
JobMeta jobMeta = getActiveJob();
if ( jobMeta != null ) {
jobMeta.unselectAll();
getActiveJobGraph().redraw();
}
}
public void editSelectAll() {
TransMeta transMeta = getActiveTransformation();
if ( transMeta != null ) {
transMeta.selectAll();
getActiveTransGraph().redraw();
}
JobMeta jobMeta = getActiveJob();
if ( jobMeta != null ) {
jobMeta.selectAll();
getActiveJobGraph().redraw();
}
}
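  /**
   * Opens the options dialog and, when the options were changed, saves the properties, reloads the settings and asks
   * the user to restart the application.
   */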
public void editOptions() {
EnterOptionsDialog eod = new EnterOptionsDialog( shell );
if ( eod.open() != null ) {
props.saveProps();
loadSettings();
changeLooks();
MessageBox mb = new MessageBox( shell, SWT.ICON_INFORMATION );
mb.setMessage( BaseMessages.getString( PKG, "Spoon.Dialog.PleaseRestartApplication.Message" ) );
mb.setText( BaseMessages.getString( PKG, "Spoon.Dialog.PleaseRestartApplication.Title" ) );
mb.open();
}
}
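  /**
   * Opens the kettle.properties editor and applies any changed variables to all loaded transformations and jobs as
   * well as to the JVM system properties.
   */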
public void editKettlePropertiesFile() {
KettlePropertiesFileDialog dialog = new KettlePropertiesFileDialog( shell, SWT.NONE );
Map<String, String> newProperties = dialog.open();
if ( newProperties != null ) {
for ( String name : newProperties.keySet() ) {
String value = newProperties.get( name );
applyVariableToAllLoadedObjects( name, value );
// Also set as a JVM property
//
System.setProperty( name, value );
}
}
}
  /**
   * Checks whether the given string matches the current selection filter. An empty string or an empty filter always
   * matches.
   *
   * @param string string to match
   * @return true if the string matches the filter
   */
private boolean filterMatch( String string ) {
String filter = selectionFilter.getText();
if ( Const.isEmpty( string ) ) {
return true;
}
if ( Const.isEmpty( filter ) ) {
return true;
}
try {
if ( string.matches( filter ) ) {
return true;
}
} catch ( Exception e ) {
log.logError( "Not a valid pattern [" + filter + "] : " + e.getMessage() );
}
return string.toUpperCase().contains( filter.toUpperCase() );
}
/**
* Refresh the object selection tree (on the left of the screen)
*/
public void refreshTree() {
if ( shell.isDisposed() ) {
return;
}
if ( !viewSelected ) {
return; // Nothing to see here, move along...
}
if ( selectionTree == null || selectionTree.isDisposed() ) {
// //////////////////////////////////////////////////////////////////////////////////////////////////
//
// Now set up the transformation/job tree
//
selectionTree = new Tree( variableComposite, SWT.SINGLE );
props.setLook( selectionTree );
selectionTree.setLayout( new FillLayout() );
addDefaultKeyListeners( selectionTree );
/*
* ExpandItem treeItem = new ExpandItem(mainExpandBar, SWT.NONE); treeItem.setControl(selectionTree);
* treeItem.setHeight(shell.getBounds().height); setHeaderImage(treeItem,
* GUIResource.getInstance().getImageLogoSmall(), STRING_SPOON_MAIN_TREE, 0, true);
*/
// Add a tree memory as well...
TreeMemory.addTreeListener( selectionTree, STRING_SPOON_MAIN_TREE );
selectionTree.addMenuDetectListener( new MenuDetectListener() {
public void menuDetected( MenuDetectEvent e ) {
setMenu( selectionTree );
}
} );
selectionTree.addSelectionListener( new SelectionAdapter() {
public void widgetSelected( SelectionEvent e ) {
showSelection();
}
} );
selectionTree.addSelectionListener( new SelectionAdapter() {
public void widgetDefaultSelected( SelectionEvent e ) {
doubleClickedInTree( selectionTree );
}
} );
// Set a listener on the tree
addDragSourceToTree( selectionTree );
}
GUIResource guiResource = GUIResource.getInstance();
TransMeta activeTransMeta = getActiveTransformation();
JobMeta activeJobMeta = getActiveJob();
boolean showAll = activeTransMeta == null && activeJobMeta == null;
// get a list of transformations from the transformation map
//
/*
* List<TransMeta> transformations = delegates.trans.getTransformationList(); Collections.sort(transformations);
* TransMeta[] transMetas = transformations.toArray(new TransMeta[transformations.size()]);
*
* // get a list of jobs from the job map List<JobMeta> jobs = delegates.jobs.getJobList(); Collections.sort(jobs);
* JobMeta[] jobMetas = jobs.toArray(new JobMeta[jobs.size()]);
*/
// Refresh the content of the tree for those transformations
//
// First remove the old ones.
selectionTree.removeAll();
// Now add the data back
//
if ( !props.isOnlyActiveFileShownInTree() || showAll || activeTransMeta != null ) {
TreeItem tiTrans = new TreeItem( selectionTree, SWT.NONE );
tiTrans.setText( STRING_TRANSFORMATIONS );
tiTrans.setImage( GUIResource.getInstance().getImageBol() );
// Set expanded if this is the only transformation shown.
if ( props.isOnlyActiveFileShownInTree() ) {
TreeMemory.getInstance().storeExpanded( STRING_SPOON_MAIN_TREE, tiTrans, true );
}
for ( TabMapEntry entry : delegates.tabs.getTabs() ) {
Object managedObject = entry.getObject().getManagedObject();
if ( managedObject instanceof TransMeta ) {
TransMeta transMeta = (TransMeta) managedObject;
if ( !props.isOnlyActiveFileShownInTree()
|| showAll || ( activeTransMeta != null && activeTransMeta.equals( transMeta ) ) ) {
// Add a tree item with the name of transformation
//
String name = delegates.tabs.makeTabName( transMeta, entry.isShowingLocation() );
if ( Const.isEmpty( name ) ) {
name = STRING_TRANS_NO_NAME;
}
TreeItem tiTransName = new TreeItem( tiTrans, SWT.NONE );
tiTransName.setText( name );
tiTransName.setImage( guiResource.getImageTransGraph() );
// Set expanded if this is the only transformation
// shown.
if ( props.isOnlyActiveFileShownInTree() ) {
TreeMemory.getInstance().storeExpanded( STRING_SPOON_MAIN_TREE, tiTransName, true );
}
// /////////////////////////////////////////////////////
//
// Now add the database connections
//
TreeItem tiDbTitle = new TreeItem( tiTransName, SWT.NONE );
tiDbTitle.setText( STRING_CONNECTIONS );
tiDbTitle.setImage( guiResource.getImageBol() );
String[] dbNames = new String[transMeta.nrDatabases()];
for ( int i = 0; i < dbNames.length; i++ ) {
dbNames[i] = transMeta.getDatabase( i ).getName();
}
Arrays.sort( dbNames, new Comparator<String>() {
public int compare( String o1, String o2 ) {
return o1.compareToIgnoreCase( o2 );
}
} );
// Draw the connections themselves below it.
for ( String dbName : dbNames ) {
DatabaseMeta databaseMeta = transMeta.findDatabase( dbName );
if ( !filterMatch( dbName ) ) {
continue;
}
TreeItem tiDb = new TreeItem( tiDbTitle, SWT.NONE );
tiDb.setText( databaseMeta.getDisplayName() );
if ( databaseMeta.isShared() ) {
tiDb.setFont( guiResource.getFontBold() );
}
tiDb.setImage( guiResource.getImageConnection() );
}
// /////////////////////////////////////////////////////
//
// The steps
//
TreeItem tiStepTitle = new TreeItem( tiTransName, SWT.NONE );
tiStepTitle.setText( STRING_STEPS );
tiStepTitle.setImage( guiResource.getImageBol() );
// Put the steps below it.
for ( int i = 0; i < transMeta.nrSteps(); i++ ) {
StepMeta stepMeta = transMeta.getStep( i );
PluginInterface stepPlugin =
PluginRegistry.getInstance().findPluginWithId( StepPluginType.class, stepMeta.getStepID() );
              if ( !filterMatch( stepMeta.getName() ) && !filterMatch( stepMeta.getDescription() ) ) {
continue;
}
TreeItem tiStep = new TreeItem( tiStepTitle, SWT.NONE );
tiStep.setText( stepMeta.getName() );
if ( stepMeta.isShared() ) {
tiStep.setFont( guiResource.getFontBold() );
}
if ( !stepMeta.isDrawn() ) {
tiStep.setForeground( guiResource.getColorDarkGray() );
}
Image stepIcon = guiResource.getImagesStepsSmall().get( stepPlugin.getIds()[0] );
if ( stepIcon == null ) {
stepIcon = guiResource.getImageBol();
}
tiStep.setImage( stepIcon );
}
// /////////////////////////////////////////////////////
//
// The hops
//
TreeItem tiHopTitle = new TreeItem( tiTransName, SWT.NONE );
tiHopTitle.setText( STRING_HOPS );
tiHopTitle.setImage( guiResource.getImageBol() );
            // Put the hops below it.
for ( int i = 0; i < transMeta.nrTransHops(); i++ ) {
TransHopMeta hopMeta = transMeta.getTransHop( i );
if ( !filterMatch( hopMeta.toString() ) ) {
continue;
}
TreeItem tiHop = new TreeItem( tiHopTitle, SWT.NONE );
tiHop.setText( hopMeta.toString() );
if ( hopMeta.isEnabled() ) {
tiHop.setImage( guiResource.getImageHop() );
} else {
tiHop.setImage( guiResource.getImageDisabledHop() );
}
}
// /////////////////////////////////////////////////////
//
// The partitions
//
TreeItem tiPartitionTitle = new TreeItem( tiTransName, SWT.NONE );
tiPartitionTitle.setText( STRING_PARTITIONS );
tiPartitionTitle.setImage( guiResource.getImageBol() );
            // Put the partition schemas below it.
for ( int i = 0; i < transMeta.getPartitionSchemas().size(); i++ ) {
PartitionSchema partitionSchema = transMeta.getPartitionSchemas().get( i );
if ( !filterMatch( partitionSchema.getName() ) ) {
continue;
}
TreeItem tiPartition = new TreeItem( tiPartitionTitle, SWT.NONE );
tiPartition.setText( partitionSchema.getName() );
tiPartition.setImage( guiResource.getImageFolderConnections() );
if ( partitionSchema.isShared() ) {
tiPartition.setFont( guiResource.getFontBold() );
}
}
// /////////////////////////////////////////////////////
//
// The slaves
//
TreeItem tiSlaveTitle = new TreeItem( tiTransName, SWT.NONE );
tiSlaveTitle.setText( STRING_SLAVES );
tiSlaveTitle.setImage( guiResource.getImageBol() );
// Put the slaves below it.
//
String[] slaveNames = transMeta.getSlaveServerNames();
Arrays.sort( slaveNames, new Comparator<String>() {
public int compare( String o1, String o2 ) {
return o1.compareToIgnoreCase( o2 );
}
} );
for ( String slaveName : slaveNames ) {
SlaveServer slaveServer = transMeta.findSlaveServer( slaveName );
if ( !filterMatch( slaveServer.getName() ) ) {
continue;
}
TreeItem tiSlave = new TreeItem( tiSlaveTitle, SWT.NONE );
tiSlave.setText( slaveServer.getName() );
tiSlave.setImage( guiResource.getImageSlave() );
if ( slaveServer.isShared() ) {
tiSlave.setFont( guiResource.getFontBold() );
}
}
// /////////////////////////////////////////////////////
//
// The clusters
//
TreeItem tiClusterTitle = new TreeItem( tiTransName, SWT.NONE );
tiClusterTitle.setText( STRING_CLUSTERS );
tiClusterTitle.setImage( guiResource.getImageBol() );
            // Put the cluster schemas below it.
for ( int i = 0; i < transMeta.getClusterSchemas().size(); i++ ) {
ClusterSchema clusterSchema = transMeta.getClusterSchemas().get( i );
if ( !filterMatch( clusterSchema.getName() ) ) {
continue;
}
TreeItem tiCluster = new TreeItem( tiClusterTitle, SWT.NONE );
tiCluster.setText( clusterSchema.toString() );
tiCluster.setImage( guiResource.getImageCluster() );
if ( clusterSchema.isShared() ) {
tiCluster.setFont( guiResource.getFontBold() );
}
}
}
}
}
}
if ( !props.isOnlyActiveFileShownInTree() || showAll || activeJobMeta != null ) {
TreeItem tiJobs = new TreeItem( selectionTree, SWT.NONE );
tiJobs.setText( STRING_JOBS );
tiJobs.setImage( GUIResource.getInstance().getImageBol() );
// Set expanded if this is the only job shown.
if ( props.isOnlyActiveFileShownInTree() ) {
tiJobs.setExpanded( true );
TreeMemory.getInstance().storeExpanded( STRING_SPOON_MAIN_TREE, tiJobs, true );
}
// Now add the jobs
//
for ( TabMapEntry entry : delegates.tabs.getTabs() ) {
Object managedObject = entry.getObject().getManagedObject();
if ( managedObject instanceof JobMeta ) {
JobMeta jobMeta = (JobMeta) managedObject;
if ( !props.isOnlyActiveFileShownInTree()
|| showAll || ( activeJobMeta != null && activeJobMeta.equals( jobMeta ) ) ) {
// Add a tree item with the name of job
//
String name = delegates.tabs.makeTabName( jobMeta, entry.isShowingLocation() );
if ( Const.isEmpty( name ) ) {
name = STRING_JOB_NO_NAME;
}
if ( !filterMatch( name ) ) {
continue;
}
TreeItem tiJobName = new TreeItem( tiJobs, SWT.NONE );
tiJobName.setText( name );
tiJobName.setImage( guiResource.getImageJobGraph() );
// Set expanded if this is the only job shown.
if ( props.isOnlyActiveFileShownInTree() ) {
TreeMemory.getInstance().storeExpanded( STRING_SPOON_MAIN_TREE, tiJobName, true );
}
// /////////////////////////////////////////////////////
//
// Now add the database connections
//
TreeItem tiDbTitle = new TreeItem( tiJobName, SWT.NONE );
tiDbTitle.setText( STRING_CONNECTIONS );
tiDbTitle.setImage( guiResource.getImageBol() );
String[] dbNames = new String[jobMeta.nrDatabases()];
for ( int i = 0; i < dbNames.length; i++ ) {
dbNames[i] = jobMeta.getDatabase( i ).getName();
}
Arrays.sort( dbNames, new Comparator<String>() {
public int compare( String o1, String o2 ) {
return o1.compareToIgnoreCase( o2 );
}
} );
// Draw the connections themselves below it.
for ( String dbName : dbNames ) {
DatabaseMeta databaseMeta = jobMeta.findDatabase( dbName );
if ( !filterMatch( databaseMeta.getName() ) ) {
continue;
}
TreeItem tiDb = new TreeItem( tiDbTitle, SWT.NONE );
tiDb.setText( databaseMeta.getDisplayName() );
if ( databaseMeta.isShared() ) {
tiDb.setFont( guiResource.getFontBold() );
}
tiDb.setImage( guiResource.getImageConnection() );
}
// /////////////////////////////////////////////////////
//
// The job entries
//
TreeItem tiJobEntriesTitle = new TreeItem( tiJobName, SWT.NONE );
tiJobEntriesTitle.setText( STRING_JOB_ENTRIES );
tiJobEntriesTitle.setImage( guiResource.getImageBol() );
// Put the job entries below it.
//
for ( int i = 0; i < jobMeta.nrJobEntries(); i++ ) {
JobEntryCopy jobEntry = jobMeta.getJobEntry( i );
if ( !filterMatch( jobEntry.getName() ) && !filterMatch( jobEntry.getDescription() ) ) {
continue;
}
TreeItem tiJobEntry = ConstUI.findTreeItem( tiJobEntriesTitle, jobEntry.getName() );
if ( tiJobEntry != null ) {
continue; // only show it once
}
tiJobEntry = new TreeItem( tiJobEntriesTitle, SWT.NONE );
tiJobEntry.setText( jobEntry.getName() );
// if (jobEntry.isShared())
// tiStep.setFont(guiResource.getFontBold()); TODO:
// allow job entries to be shared as well...
if ( jobEntry.isStart() ) {
tiJobEntry.setImage( GUIResource.getInstance().getImageStart() );
} else if ( jobEntry.isDummy() ) {
tiJobEntry.setImage( GUIResource.getInstance().getImageDummy() );
} else {
String key = jobEntry.getEntry().getPluginId();
Image image = GUIResource.getInstance().getImagesJobentriesSmall().get( key );
tiJobEntry.setImage( image );
}
}
// /////////////////////////////////////////////////////
//
// The slaves
//
TreeItem tiSlaveTitle = new TreeItem( tiJobName, SWT.NONE );
tiSlaveTitle.setText( STRING_SLAVES );
tiSlaveTitle.setImage( guiResource.getImageBol() );
// Put the slaves below it.
//
String[] slaveNames = jobMeta.getSlaveServerNames();
Arrays.sort( slaveNames, new Comparator<String>() {
public int compare( String o1, String o2 ) {
return o1.compareToIgnoreCase( o2 );
}
} );
for ( String slaveName : slaveNames ) {
SlaveServer slaveServer = jobMeta.findSlaveServer( slaveName );
if ( !filterMatch( slaveServer.getName() ) ) {
continue;
}
TreeItem tiSlave = new TreeItem( tiSlaveTitle, SWT.NONE );
tiSlave.setText( slaveServer.getName() );
tiSlave.setImage( guiResource.getImageSlave() );
if ( slaveServer.isShared() ) {
tiSlave.setFont( guiResource.getFontBold() );
}
}
}
}
}
}
// Set the expanded state of the complete tree.
TreeMemory.setExpandedFromMemory( selectionTree, STRING_SPOON_MAIN_TREE );
// refreshCoreObjectsHistory();
selectionTree.setFocus();
selectionTree.layout();
variableComposite.layout( true, true );
setShellText();
}
public String getActiveTabText() {
if ( tabfolder.getSelected() == null ) {
return null;
}
return tabfolder.getSelected().getText();
}
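  /**
   * Redraws the graph of the currently selected transformation or job tab and updates the shell title.
   */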
public void refreshGraph() {
if ( shell.isDisposed() ) {
return;
}
TabItem tabItem = tabfolder.getSelected();
if ( tabItem == null ) {
return;
}
TabMapEntry tabMapEntry = delegates.tabs.getTab( tabItem );
if ( tabMapEntry != null ) {
if ( tabMapEntry.getObject() instanceof TransGraph ) {
TransGraph transGraph = (TransGraph) tabMapEntry.getObject();
transGraph.redraw();
}
if ( tabMapEntry.getObject() instanceof JobGraph ) {
JobGraph jobGraph = (JobGraph) tabMapEntry.getObject();
jobGraph.redraw();
}
}
setShellText();
}
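  /**
   * Creates a new step in the given transformation, using the step type that is currently selected in the tree.
   *
   * @param transMeta the transformation to add the step to
   * @return the newly created StepMeta, or null when nothing was created
   */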
public StepMeta newStep( TransMeta transMeta ) {
return newStep( transMeta, true, true );
}
public StepMeta newStep( TransMeta transMeta, boolean openit, boolean rename ) {
if ( transMeta == null ) {
return null;
}
TreeItem[] ti = selectionTree.getSelection();
StepMeta inf = null;
if ( ti.length == 1 ) {
String stepType = ti[0].getText();
if ( log.isDebug() ) {
log.logDebug( BaseMessages.getString( PKG, "Spoon.Log.NewStep" ) + stepType ); // "New step: "
}
inf = newStep( transMeta, stepType, stepType, openit, rename );
}
return inf;
}
/**
* Allocate new step, optionally open and rename it.
*
   * @param transMeta
   *          The transformation to add the new step to
   * @param name
   *          Name of the new step
* @param description
* Description of the type of step
* @param openit
* Open the dialog for this step?
* @param rename
* Rename this step?
*
* @return The newly created StepMeta object.
*
*/
public StepMeta newStep( TransMeta transMeta, String name, String description, boolean openit, boolean rename ) {
StepMeta inf = null;
// See if we need to rename the step to avoid doubles!
if ( rename && transMeta.findStep( name ) != null ) {
int i = 2;
String newName = name + " " + i;
while ( transMeta.findStep( newName ) != null ) {
i++;
newName = name + " " + i;
}
name = newName;
}
PluginRegistry registry = PluginRegistry.getInstance();
PluginInterface stepPlugin = null;
try {
stepPlugin = registry.findPluginWithName( StepPluginType.class, description );
if ( stepPlugin != null ) {
StepMetaInterface info = (StepMetaInterface) registry.loadClass( stepPlugin );
info.setDefault();
if ( openit ) {
StepDialogInterface dialog = this.getStepEntryDialog( info, transMeta, name );
if ( dialog != null ) {
name = dialog.open();
}
}
inf = new StepMeta( stepPlugin.getIds()[0], name, info );
if ( name != null ) {
// OK pressed in the dialog: we have a step-name
String newName = name;
StepMeta stepMeta = transMeta.findStep( newName );
int nr = 2;
while ( stepMeta != null ) {
newName = name + " " + nr;
stepMeta = transMeta.findStep( newName );
nr++;
}
if ( nr > 2 ) {
inf.setName( newName );
MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_INFORMATION );
// "This stepName already exists. Spoon changed the stepName to ["+newName+"]"
mb.setMessage( BaseMessages.getString( PKG, "Spoon.Dialog.ChangeStepname.Message", newName ) );
mb.setText( BaseMessages.getString( PKG, "Spoon.Dialog.ChangeStepname.Title" ) );
mb.open();
}
inf.setLocation( 20, 20 ); // default location at (20,20)
transMeta.addStep( inf );
addUndoNew( transMeta, new StepMeta[] { inf }, new int[] { transMeta.indexOfStep( inf ) } );
// Also store it in the pluginHistory list...
props.increasePluginHistory( stepPlugin.getIds()[0] );
// stepHistoryChanged = true;
refreshTree();
} else {
return null; // Cancel pressed in dialog.
}
setShellText();
}
} catch ( KettleException e ) {
      String filename = stepPlugin == null ? null : stepPlugin.getErrorHelpFile();
      if ( stepPlugin != null && !Const.isEmpty( filename ) ) {
        // OK, instead of a normal error message, we give back the
// content of the error help file... (HTML)
FileInputStream fis = null;
try {
StringBuilder content = new StringBuilder();
fis = new FileInputStream( new File( filename ) );
int ch = fis.read();
while ( ch >= 0 ) {
content.append( (char) ch );
ch = fis.read();
}
ShowBrowserDialog sbd =
new ShowBrowserDialog(
// "Error help text"
shell, BaseMessages.getString( PKG, "Spoon.Dialog.ErrorHelpText.Title" ), content.toString() );
sbd.open();
} catch ( Exception ex ) {
new ErrorDialog( shell,
// "Error showing help text"
BaseMessages.getString( PKG, "Spoon.Dialog.ErrorShowingHelpText.Title" ), BaseMessages.getString(
PKG, "Spoon.Dialog.ErrorShowingHelpText.Message" ), ex );
} finally {
if ( fis != null ) {
try {
fis.close();
} catch ( Exception ex ) {
log.logError( "Error closing plugin help file", ex );
}
}
}
} else {
new ErrorDialog( shell,
// "Error creating step"
// "I was unable to create a new step"
BaseMessages.getString( PKG, "Spoon.Dialog.UnableCreateNewStep.Title" ), BaseMessages.getString(
PKG, "Spoon.Dialog.UnableCreateNewStep.Message" ), e );
}
return null;
} catch ( Throwable e ) {
if ( !shell.isDisposed() ) {
new ErrorDialog( shell,
// "Error creating step"
BaseMessages.getString( PKG, "Spoon.Dialog.ErrorCreatingStep.Title" ), BaseMessages.getString(
PKG, "Spoon.Dialog.UnableCreateNewStep.Message" ), e );
}
return null;
}
return inf;
}
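  /**
   * Updates the shell title with the repository name, the name or filename of the active transformation or job, its
   * revision and its changed state.
   */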
public void setShellText() {
if ( shell.isDisposed() ) {
return;
}
String filename = null;
String name = null;
String version = null;
ChangedFlagInterface changed = null;
TransMeta transMeta = getActiveTransformation();
if ( transMeta != null ) {
changed = transMeta;
filename = transMeta.getFilename();
name = transMeta.getName();
version = transMeta.getObjectRevision() == null ? null : transMeta.getObjectRevision().getName();
}
JobMeta jobMeta = getActiveJob();
if ( jobMeta != null ) {
changed = jobMeta;
filename = jobMeta.getFilename();
name = jobMeta.getName();
version = jobMeta.getObjectRevision() == null ? null : jobMeta.getObjectRevision().getName();
}
String text = "";
if ( rep != null ) {
text += APP_TITLE + " - [" + getRepositoryName() + "] ";
} else {
text += APP_TITLE + " - ";
}
if ( Const.isEmpty( name ) ) {
if ( !Const.isEmpty( filename ) ) {
text += filename;
} else {
String tab = getActiveTabText();
if ( !Const.isEmpty( tab ) ) {
text += tab;
} else {
text += BaseMessages.getString( PKG, "Spoon.Various.NoName" ); // "[no name]"
}
}
} else {
text += name;
}
if ( !Const.isEmpty( version ) ) {
text += " v" + version;
}
if ( changed != null && changed.hasChanged() ) {
text += " " + BaseMessages.getString( PKG, "Spoon.Various.Changed" );
}
shell.setText( text );
markTabsChanged();
}
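  /**
   * Enables or disables menu items and toolbar buttons based on the active perspective, the repository connection and
   * the state of the active transformation or job.
   */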
public void enableMenus() {
boolean disableTransMenu = getActiveTransformation() == null;
boolean disableJobMenu = getActiveJob() == null;
boolean disableMetaMenu = getActiveMeta() == null;
boolean isRepositoryRunning = rep != null;
boolean disablePreviewButton = true;
String activePerspectiveId = null;
SpoonPerspectiveManager manager = SpoonPerspectiveManager.getInstance();
if ( manager != null && manager.getActivePerspective() != null ) {
activePerspectiveId = manager.getActivePerspective().getId();
}
boolean etlPerspective = false;
if ( activePerspectiveId != null && activePerspectiveId.length() > 0 ) {
etlPerspective = activePerspectiveId.equals( MainSpoonPerspective.ID );
}
TransGraph transGraph = getActiveTransGraph();
if ( transGraph != null ) {
disablePreviewButton = !( transGraph.isRunning() && !transGraph.isHalting() );
}
boolean disableSave = true;
TabItemInterface currentTab = getActiveTabitem();
if ( currentTab != null && currentTab.canHandleSave() ) {
disableSave = !currentTab.hasContentChanged();
}
EngineMetaInterface meta = getActiveMeta();
if ( meta != null ) {
disableSave = !meta.canSave();
}
org.pentaho.ui.xul.dom.Document doc;
if ( mainSpoonContainer != null ) {
doc = mainSpoonContainer.getDocumentRoot();
if ( doc != null ) {
if ( etlPerspective ) {
doc.getElementById( "file" ).setVisible( etlPerspective );
doc.getElementById( "edit" ).setVisible( etlPerspective );
doc.getElementById( "view" ).setVisible( etlPerspective );
doc.getElementById( "action" ).setVisible( etlPerspective );
doc.getElementById( "tools" ).setVisible( etlPerspective );
doc.getElementById( "help" ).setVisible( etlPerspective );
doc.getElementById( "help-tip" ).setVisible( etlPerspective );
doc.getElementById( "help-welcome" ).setVisible( etlPerspective );
doc.getElementById( "help-plugins" ).setVisible( true );
}
// Only enable certain menu-items if we need to.
disableMenuItem( doc, "file-new-database", disableTransMenu && disableJobMenu || !isRepositoryRunning );
disableMenuItem( doc, "file-save", disableTransMenu && disableJobMenu && disableMetaMenu || disableSave );
disableMenuItem( doc, "toolbar-file-save", disableTransMenu
&& disableJobMenu && disableMetaMenu || disableSave );
disableMenuItem( doc, "file-save-as", disableTransMenu && disableJobMenu && disableMetaMenu || disableSave );
disableMenuItem( doc, "toolbar-file-save-as", disableTransMenu
&& disableJobMenu && disableMetaMenu || disableSave );
disableMenuItem( doc, "file-save-as-vfs", disableTransMenu && disableJobMenu && disableMetaMenu );
disableMenuItem( doc, "file-close", disableTransMenu && disableJobMenu && disableMetaMenu );
disableMenuItem( doc, "file-print", disableTransMenu && disableJobMenu );
disableMenuItem( doc, "file-export-to-xml", disableTransMenu && disableJobMenu );
disableMenuItem( doc, "file-export-all-to-xml", disableTransMenu && disableJobMenu );
// Disable the undo and redo menus if there is no active transformation
// or active job
// DO NOT ENABLE them otherwise ... leave that to the undo/redo settings
//
disableMenuItem( doc, UNDO_MENU_ITEM, disableTransMenu && disableJobMenu );
disableMenuItem( doc, REDO_MENU_ITEM, disableTransMenu && disableJobMenu );
disableMenuItem( doc, "edit-clear-selection", disableTransMenu && disableJobMenu );
disableMenuItem( doc, "edit-select-all", disableTransMenu && disableJobMenu );
updateSettingsMenu( doc, disableTransMenu, disableJobMenu );
disableMenuItem( doc, "edit-settings", disableTransMenu && disableJobMenu && disableMetaMenu );
// View Menu
( (XulMenuitem) doc.getElementById( "view-results" ) ).setSelected( isExecutionResultsPaneVisible() );
disableMenuItem( doc, "view-results", transGraph == null && disableJobMenu );
disableMenuItem( doc, "view-zoom-in", disableTransMenu && disableJobMenu );
disableMenuItem( doc, "view-zoom-out", disableTransMenu && disableJobMenu );
disableMenuItem( doc, "view-zoom-100pct", disableTransMenu && disableJobMenu );
// Transformations
disableMenuItem( doc, "process-run", disableTransMenu && disablePreviewButton && disableJobMenu );
disableMenuItem( doc, "trans-replay", disableTransMenu && disablePreviewButton );
disableMenuItem( doc, "trans-preview", disableTransMenu && disablePreviewButton );
disableMenuItem( doc, "trans-debug", disableTransMenu && disablePreviewButton );
disableMenuItem( doc, "trans-verify", disableTransMenu );
disableMenuItem( doc, "trans-impact", disableTransMenu );
disableMenuItem( doc, "trans-get-sql", disableTransMenu );
disableMenuItem( doc, "trans-last-impact", disableTransMenu );
// Tools
disableMenuItem( doc, "repository-connect", isRepositoryRunning );
disableMenuItem( doc, "repository-disconnect", !isRepositoryRunning );
disableMenuItem( doc, "repository-explore", !isRepositoryRunning );
disableMenuItem( doc, "repository-clear-shared-object-cache", !isRepositoryRunning );
disableMenuItem( doc, "toolbar-expore-repository", !isRepositoryRunning );
disableMenuItem( doc, "repository-export-all", !isRepositoryRunning );
disableMenuItem( doc, "repository-import-directory", !isRepositoryRunning );
disableMenuItem( doc, "trans-last-preview", !isRepositoryRunning || disableTransMenu );
// Wizard
disableMenuItem( doc, "wizard-connection", disableTransMenu && disableJobMenu );
disableMenuItem( doc, "wizard-copy-table", disableTransMenu && disableJobMenu );
disableMenuItem( doc, "wizard-copy-tables", isRepositoryRunning && disableTransMenu && disableJobMenu );
disableMenuItem( doc, "database-inst-dependancy", !isRepositoryRunning );
SpoonPluginManager.getInstance().notifyLifecycleListeners( SpoonLifeCycleEvent.MENUS_REFRESHED );
MenuManager menuManager = getMenuBarManager();
menuManager.updateAll( true );
// What steps & plugins to show?
refreshCoreObjects();
fireMenuControlers();
}
}
}
  /**
   * Updates the accelerator text of the "edit-settings" menu item depending on whether a
   * transformation or a job is currently active.
   *
   * @param doc the XUL document containing the menu
   * @param disableTransMenu true if there is no active transformation
   * @param disableJobMenu true if there is no active job
   */
private void updateSettingsMenu( org.pentaho.ui.xul.dom.Document doc, boolean disableTransMenu,
boolean disableJobMenu ) {
XulMenuitem settingsItem = (XulMenuitem) doc.getElementById( "edit-settings" );
if ( settingsItem != null ) {
if ( disableTransMenu && !disableJobMenu ) {
settingsItem.setAcceltext( "CTRL-J" );
settingsItem.setAccesskey( "ctrl-j" );
} else if ( !disableTransMenu && disableJobMenu ) {
settingsItem.setAcceltext( "CTRL-T" );
settingsItem.setAccesskey( "ctrl-t" );
} else {
settingsItem.setAcceltext( "" );
settingsItem.setAccesskey( "" );
}
}
}
public void addSpoonMenuController( ISpoonMenuController menuController ) {
if ( menuControllers != null ) {
menuControllers.add( menuController );
}
}
public boolean removeSpoonMenuController( ISpoonMenuController menuController ) {
if ( menuControllers != null ) {
return menuControllers.remove( menuController );
}
return false;
}
public ISpoonMenuController removeSpoonMenuController( String menuControllerName ) {
ISpoonMenuController result = null;
if ( menuControllers != null ) {
for ( ISpoonMenuController menuController : menuControllers ) {
if ( menuController.getName().equals( menuControllerName ) ) {
result = menuController;
menuControllers.remove( result );
break;
}
}
}
return result;
}
private void disableMenuItem( org.pentaho.ui.xul.dom.Document doc, String itemId, boolean disable ) {
XulComponent menuItem = doc.getElementById( itemId );
if ( menuItem != null ) {
menuItem.setDisabled( disable );
} else {
log.logError( "Non-Fatal error : Menu Item with id = " + itemId + " does not exist! Check 'menubar.xul'" );
}
}
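  /**
   * Pushes the "changed" flag of every open tab's content onto the corresponding tab item, so that
   * modified tabs are marked as such in the tab bar.
   */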
private void markTabsChanged() {
boolean anyTabsChanged = false;
for ( TabMapEntry entry : delegates.tabs.getTabs() ) {
if ( entry.getTabItem().isDisposed() ) {
continue;
}
boolean changed = entry.getObject().hasContentChanged();
anyTabsChanged |= changed;
entry.getTabItem().setChanged( changed );
}
}
/**
* Check to see if any jobs or transformations are dirty
* @return true if any of the open jobs or trans are marked dirty
*/
public boolean isTabsChanged() {
for ( TabMapEntry entry : delegates.tabs.getTabs() ) {
if ( entry.getTabItem().isDisposed() ) {
continue;
}
if ( entry.getObject().hasContentChanged() ) {
return true;
}
}
return false;
}
public void printFile() {
TransMeta transMeta = getActiveTransformation();
if ( transMeta != null ) {
printTransFile( transMeta );
}
JobMeta jobMeta = getActiveJob();
if ( jobMeta != null ) {
printJobFile( jobMeta );
}
}
private void printTransFile( TransMeta transMeta ) {
TransGraph transGraph = getActiveTransGraph();
if ( transGraph == null ) {
return;
}
PrintSpool ps = new PrintSpool();
Printer printer = ps.getPrinter( shell );
// Create an image of the screen
Point max = transMeta.getMaximum();
Image img = transGraph.getTransformationImage( printer, max.x, max.y, 1.0f );
ps.printImage( shell, img );
img.dispose();
ps.dispose();
}
private void printJobFile( JobMeta jobMeta ) {
JobGraph jobGraph = getActiveJobGraph();
if ( jobGraph == null ) {
return;
}
PrintSpool ps = new PrintSpool();
Printer printer = ps.getPrinter( shell );
// Create an image of the screen
Point max = jobMeta.getMaximum();
Image img = jobGraph.getJobImage( printer, max.x, max.y, 1.0f );
ps.printImage( shell, img );
img.dispose();
ps.dispose();
}
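  /**
   * @return the TransGraph of the currently selected tab, or null if the selected tab does not hold a
   *         transformation graph
   */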
public TransGraph getActiveTransGraph() {
if ( tabfolder != null ) {
if ( tabfolder.getSelected() == null ) {
return null;
}
} else {
return null;
}
if ( delegates != null && delegates.tabs != null ) {
TabMapEntry mapEntry = delegates.tabs.getTab( tabfolder.getSelected() );
if ( mapEntry != null ) {
if ( mapEntry.getObject() instanceof TransGraph ) {
return (TransGraph) mapEntry.getObject();
}
}
}
return null;
}
public JobGraph getActiveJobGraph() {
if ( delegates != null && delegates.tabs != null && tabfolder != null ) {
TabMapEntry mapEntry = delegates.tabs.getTab( tabfolder.getSelected() );
if ( mapEntry.getObject() instanceof JobGraph ) {
return (JobGraph) mapEntry.getObject();
}
}
return null;
}
public EngineMetaInterface getActiveMeta() {
SpoonPerspectiveManager manager = SpoonPerspectiveManager.getInstance();
if ( manager != null && manager.getActivePerspective() != null ) {
return manager.getActivePerspective().getActiveMeta();
}
return null;
}
public TabItemInterface getActiveTabitem() {
if ( tabfolder == null ) {
return null;
}
TabItem tabItem = tabfolder.getSelected();
if ( tabItem == null ) {
return null;
}
if ( delegates != null && delegates.tabs != null ) {
TabMapEntry mapEntry = delegates.tabs.getTab( tabItem );
if ( mapEntry != null ) {
return mapEntry.getObject();
} else {
return null;
}
}
return null;
}
  /**
   * @return the active TransMeta object, determined by looking at the selected TransGraph, TransLog or TransHist.
   *         If nothing valuable is selected, null is returned.
   */
public TransMeta getActiveTransformation() {
EngineMetaInterface meta = getActiveMeta();
if ( meta instanceof TransMeta ) {
return (TransMeta) meta;
}
return null;
}
  /**
   * @return the active JobMeta object, determined by looking at the selected JobGraph, JobLog or JobHist.
   *         If nothing valuable is selected, null is returned.
   */
public JobMeta getActiveJob() {
EngineMetaInterface meta = getActiveMeta();
if ( meta instanceof JobMeta ) {
return (JobMeta) meta;
}
return null;
}
public UndoInterface getActiveUndoInterface() {
return (UndoInterface) this.getActiveMeta();
}
public TransMeta findTransformation( String tabItemText ) {
if ( delegates != null && delegates.trans != null ) {
return delegates.trans.getTransformation( tabItemText );
} else {
return null;
}
}
public JobMeta findJob( String tabItemText ) {
if ( delegates != null && delegates.jobs != null ) {
return delegates.jobs.getJob( tabItemText );
} else {
return null;
}
}
public TransMeta[] getLoadedTransformations() {
if ( delegates != null && delegates.trans != null ) {
List<TransMeta> list = delegates.trans.getTransformationList();
return list.toArray( new TransMeta[list.size()] );
} else {
return null;
}
}
public JobMeta[] getLoadedJobs() {
if ( delegates != null && delegates.jobs != null ) {
List<JobMeta> list = delegates.jobs.getJobList();
return list.toArray( new JobMeta[list.size()] );
} else {
return null;
}
}
public void saveSettings() {
if ( shell.isDisposed() ) {
// we cannot save the settings, it's too late
return;
}
WindowProperty windowProperty = new WindowProperty( shell );
windowProperty.setName( APP_TITLE );
props.setScreen( windowProperty );
props.setLogLevel( DefaultLogLevel.getLogLevel().getCode() );
props.setSashWeights( sashform.getWeights() );
// Also save the open files...
// Go over the list of tabs, then add the info to the list
// of open tab files in PropsUI
//
props.getOpenTabFiles().clear();
for ( TabMapEntry entry : delegates.tabs.getTabs() ) {
String fileType = null;
String filename = null;
String directory = null;
int openType = 0;
if ( entry.getObjectType() == ObjectType.TRANSFORMATION_GRAPH ) {
fileType = LastUsedFile.FILE_TYPE_TRANSFORMATION;
TransMeta transMeta = (TransMeta) entry.getObject().getManagedObject();
filename = rep != null ? transMeta.getName() : transMeta.getFilename();
directory = transMeta.getRepositoryDirectory().toString();
openType = LastUsedFile.OPENED_ITEM_TYPE_MASK_GRAPH;
} else if ( entry.getObjectType() == ObjectType.JOB_GRAPH ) {
fileType = LastUsedFile.FILE_TYPE_JOB;
JobMeta jobMeta = (JobMeta) entry.getObject().getManagedObject();
filename = rep != null ? jobMeta.getName() : jobMeta.getFilename();
directory = jobMeta.getRepositoryDirectory().toString();
openType = LastUsedFile.OPENED_ITEM_TYPE_MASK_GRAPH;
}
if ( fileType != null ) {
props.addOpenTabFile(
fileType, filename, directory, rep != null, rep != null ? rep.getName() : null, openType );
}
}
props.saveProps();
}
public void loadSettings() {
LogLevel logLevel = LogLevel.getLogLevelForCode( props.getLogLevel() );
DefaultLogLevel.setLogLevel( logLevel );
log.setLogLevel( logLevel );
KettleLogStore.getAppender().setMaxNrLines( props.getMaxNrLinesInLog() );
// transMeta.setMaxUndo(props.getMaxUndo());
DBCache.getInstance().setActive( props.useDBCache() );
}
public void changeLooks() {
if ( !selectionTree.isDisposed() ) {
props.setLook( selectionTree );
}
props.setLook( tabfolder.getSwtTabset(), Props.WIDGET_STYLE_TAB );
refreshTree();
refreshGraph();
}
public void undoAction( UndoInterface undoInterface ) {
if ( undoInterface == null ) {
return;
}
TransAction ta = undoInterface.previousUndo();
if ( ta == null ) {
return;
}
setUndoMenu( undoInterface ); // something changed: change the menu
if ( undoInterface instanceof TransMeta ) {
delegates.trans.undoTransformationAction( (TransMeta) undoInterface, ta );
}
if ( undoInterface instanceof JobMeta ) {
delegates.jobs.undoJobAction( (JobMeta) undoInterface, ta );
}
// Put what we undo in focus
if ( undoInterface instanceof TransMeta ) {
TransGraph transGraph = delegates.trans.findTransGraphOfTransformation( (TransMeta) undoInterface );
transGraph.forceFocus();
}
if ( undoInterface instanceof JobMeta ) {
JobGraph jobGraph = delegates.jobs.findJobGraphOfJob( (JobMeta) undoInterface );
jobGraph.forceFocus();
}
}
public void redoAction( UndoInterface undoInterface ) {
if ( undoInterface == null ) {
return;
}
TransAction ta = undoInterface.nextUndo();
if ( ta == null ) {
return;
}
setUndoMenu( undoInterface ); // something changed: change the menu
if ( undoInterface instanceof TransMeta ) {
delegates.trans.redoTransformationAction( (TransMeta) undoInterface, ta );
}
if ( undoInterface instanceof JobMeta ) {
delegates.jobs.redoJobAction( (JobMeta) undoInterface, ta );
}
// Put what we redo in focus
if ( undoInterface instanceof TransMeta ) {
TransGraph transGraph = delegates.trans.findTransGraphOfTransformation( (TransMeta) undoInterface );
transGraph.forceFocus();
}
if ( undoInterface instanceof JobMeta ) {
JobGraph jobGraph = delegates.jobs.findJobGraphOfJob( (JobMeta) undoInterface );
jobGraph.forceFocus();
}
}
/**
* Sets the text and enabled settings for the undo and redo menu items
*
* @param undoInterface
* the object which holds the undo/redo information
*/
public void setUndoMenu( UndoInterface undoInterface ) {
if ( shell.isDisposed() ) {
return;
}
TransAction prev = undoInterface != null ? undoInterface.viewThisUndo() : null;
TransAction next = undoInterface != null ? undoInterface.viewNextUndo() : null;
// Set the menubar text and enabled flags
XulMenuitem item = (XulMenuitem) mainSpoonContainer.getDocumentRoot().getElementById( UNDO_MENU_ITEM );
item.setLabel( prev == null ? UNDO_UNAVAILABLE : BaseMessages.getString(
PKG, "Spoon.Menu.Undo.Available", prev.toString() ) );
item.setDisabled( prev == null );
item = (XulMenuitem) mainSpoonContainer.getDocumentRoot().getElementById( REDO_MENU_ITEM );
item.setLabel( next == null ? REDO_UNAVAILABLE : BaseMessages.getString(
PKG, "Spoon.Menu.Redo.Available", next.toString() ) );
item.setDisabled( next == null );
}
public void addUndoNew( UndoInterface undoInterface, Object[] obj, int[] position ) {
addUndoNew( undoInterface, obj, position, false );
}
public void addUndoNew( UndoInterface undoInterface, Object[] obj, int[] position, boolean nextAlso ) {
undoInterface.addUndo( obj, null, position, null, null, TransMeta.TYPE_UNDO_NEW, nextAlso );
setUndoMenu( undoInterface );
}
// Undo delete object
public void addUndoDelete( UndoInterface undoInterface, Object[] obj, int[] position ) {
addUndoDelete( undoInterface, obj, position, false );
}
// Undo delete object
public void addUndoDelete( UndoInterface undoInterface, Object[] obj, int[] position, boolean nextAlso ) {
undoInterface.addUndo( obj, null, position, null, null, TransMeta.TYPE_UNDO_DELETE, nextAlso );
setUndoMenu( undoInterface );
}
// Change of step, connection, hop or note...
public void addUndoPosition( UndoInterface undoInterface, Object[] obj, int[] pos, Point[] prev, Point[] curr ) {
// It's better to store the indexes of the objects, not the objects
    // themselves!
undoInterface.addUndo( obj, null, pos, prev, curr, JobMeta.TYPE_UNDO_POSITION, false );
setUndoMenu( undoInterface );
}
// Change of step, connection, hop or note...
public void addUndoChange( UndoInterface undoInterface, Object[] from, Object[] to, int[] pos ) {
addUndoChange( undoInterface, from, to, pos, false );
}
// Change of step, connection, hop or note...
public void addUndoChange( UndoInterface undoInterface, Object[] from, Object[] to, int[] pos, boolean nextAlso ) {
undoInterface.addUndo( from, to, pos, null, null, JobMeta.TYPE_UNDO_CHANGE, nextAlso );
setUndoMenu( undoInterface );
}
/**
* Checks *all* the steps in the transformation, puts the result in remarks list
*/
public void checkTrans( TransMeta transMeta ) {
checkTrans( transMeta, false );
}
/**
* Check the steps in a transformation
*
* @param only_selected
* True: Check only the selected steps...
*/
public void checkTrans( TransMeta transMeta, boolean only_selected ) {
if ( transMeta == null ) {
return;
}
TransGraph transGraph = delegates.trans.findTransGraphOfTransformation( transMeta );
if ( transGraph == null ) {
return;
}
CheckTransProgressDialog ctpd =
new CheckTransProgressDialog( shell, transMeta, transGraph.getRemarks(), only_selected );
ctpd.open(); // manages the remarks arraylist...
showLastTransCheck();
}
/**
* Show the remarks of the last transformation check that was run.
*
   * @see #checkTrans(TransMeta)
*/
public void showLastTransCheck() {
TransMeta transMeta = getActiveTransformation();
if ( transMeta == null ) {
return;
}
TransGraph transGraph = delegates.trans.findTransGraphOfTransformation( transMeta );
if ( transGraph == null ) {
return;
}
CheckResultDialog crd = new CheckResultDialog( transMeta, shell, SWT.NONE, transGraph.getRemarks() );
String stepName = crd.open();
if ( stepName != null ) {
// Go to the indicated step!
StepMeta stepMeta = transMeta.findStep( stepName );
if ( stepMeta != null ) {
delegates.steps.editStep( transMeta, stepMeta );
}
}
}
public void analyseImpact( TransMeta transMeta ) {
if ( transMeta == null ) {
return;
}
TransGraph transGraph = delegates.trans.findTransGraphOfTransformation( transMeta );
if ( transGraph == null ) {
return;
}
AnalyseImpactProgressDialog aipd = new AnalyseImpactProgressDialog( shell, transMeta, transGraph.getImpact() );
transGraph.setImpactFinished( aipd.open() );
if ( transGraph.isImpactFinished() ) {
showLastImpactAnalyses( transMeta );
}
}
public void showLastImpactAnalyses( TransMeta transMeta ) {
if ( transMeta == null ) {
return;
}
TransGraph transGraph = delegates.trans.findTransGraphOfTransformation( transMeta );
if ( transGraph == null ) {
return;
}
List<Object[]> rows = new ArrayList<Object[]>();
RowMetaInterface rowMeta = null;
for ( int i = 0; i < transGraph.getImpact().size(); i++ ) {
DatabaseImpact ii = transGraph.getImpact().get( i );
RowMetaAndData row = ii.getRow();
rowMeta = row.getRowMeta();
rows.add( row.getData() );
}
if ( rows.size() > 0 ) {
// Display all the rows...
PreviewRowsDialog prd =
new PreviewRowsDialog( shell, Variables.getADefaultVariableSpace(), SWT.NONE, "-", rowMeta, rows );
prd.setTitleMessage(
// "Impact analyses"
// "Result of analyses:"
BaseMessages.getString( PKG, "Spoon.Dialog.ImpactAnalyses.Title" ), BaseMessages.getString(
PKG, "Spoon.Dialog.ImpactAnalyses.Message" ) );
prd.open();
} else {
MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_INFORMATION );
if ( transGraph.isImpactFinished() ) {
// "As far as I can tell, this transformation has no impact on any database."
mb.setMessage( BaseMessages.getString( PKG, "Spoon.Dialog.TransformationNoImpactOnDatabase.Message" ) );
} else {
// "Please run the impact analyses first on this transformation."
mb.setMessage( BaseMessages.getString( PKG, "Spoon.Dialog.RunImpactAnalysesFirst.Message" ) );
}
mb.setText( BaseMessages.getString( PKG, "Spoon.Dialog.ImpactAnalyses.Title" ) ); // Impact
mb.open();
}
}
public void toClipboard( String clipText ) {
try {
GUIResource.getInstance().toClipboard( clipText );
} catch ( Throwable e ) {
new ErrorDialog(
shell, BaseMessages.getString( PKG, "Spoon.Dialog.ExceptionCopyToClipboard.Title" ), BaseMessages
.getString( PKG, "Spoon.Dialog.ExceptionCopyToClipboard.Message" ), e );
}
}
public String fromClipboard() {
try {
return GUIResource.getInstance().fromClipboard();
} catch ( Throwable e ) {
new ErrorDialog(
shell, BaseMessages.getString( PKG, "Spoon.Dialog.ExceptionPasteFromClipboard.Title" ), BaseMessages
.getString( PKG, "Spoon.Dialog.ExceptionPasteFromClipboard.Message" ), e );
return null;
}
}
/**
* Paste transformation from the clipboard...
*
*/
public void pasteTransformation() {
if ( RepositorySecurityUI.verifyOperations( shell, rep,
RepositoryOperation.MODIFY_TRANSFORMATION, RepositoryOperation.EXECUTE_TRANSFORMATION ) ) {
return;
}
if ( log.isDetailed() ) {
// "Paste transformation from the clipboard!"
log.logDetailed( BaseMessages.getString( PKG, "Spoon.Log.PasteTransformationFromClipboard" ) );
}
String xml = fromClipboard();
try {
Document doc = XMLHandler.loadXMLString( xml );
TransMeta transMeta = new TransMeta( XMLHandler.getSubNode( doc, TransMeta.XML_TAG ), rep );
setTransMetaVariables( transMeta );
addTransGraph( transMeta ); // create a new tab
sharedObjectsFileMap.put( transMeta.getSharedObjects().getFilename(), transMeta.getSharedObjects() );
refreshGraph();
refreshTree();
} catch ( KettleException e ) {
new ErrorDialog(
shell, BaseMessages.getString( PKG, "Spoon.Dialog.ErrorPastingTransformation.Title" ), BaseMessages
.getString( PKG, "Spoon.Dialog.ErrorPastingTransformation.Message" ), e );
}
}
/**
* Paste job from the clipboard...
*
*/
public void pasteJob() {
if ( RepositorySecurityUI.verifyOperations( shell, rep,
RepositoryOperation.MODIFY_JOB, RepositoryOperation.EXECUTE_JOB ) ) {
return;
}
String xml = fromClipboard();
try {
Document doc = XMLHandler.loadXMLString( xml );
JobMeta jobMeta = new JobMeta( XMLHandler.getSubNode( doc, JobMeta.XML_TAG ), rep, this );
addJobGraph( jobMeta ); // create a new tab
refreshGraph();
refreshTree();
} catch ( KettleException e ) {
new ErrorDialog( shell,
        // Error pasting job
        // "An error occurred pasting a job from the clipboard"
BaseMessages.getString( PKG, "Spoon.Dialog.ErrorPastingJob.Title" ), BaseMessages.getString(
PKG, "Spoon.Dialog.ErrorPastingJob.Message" ), e );
}
}
public void copyTransformation( TransMeta transMeta ) {
if ( transMeta == null ) {
return;
}
try {
if ( RepositorySecurityUI.verifyOperations( shell, rep, RepositoryOperation.MODIFY_TRANSFORMATION, RepositoryOperation.EXECUTE_TRANSFORMATION ) ) {
return;
}
toClipboard( XMLHandler.getXMLHeader() + transMeta.getXML() );
} catch ( Exception ex ) {
new ErrorDialog( getShell(), "Error", "Error encoding to XML", ex );
}
}
public void copyJob( JobMeta jobMeta ) {
if ( jobMeta == null ) {
return;
}
if ( RepositorySecurityUI.verifyOperations( shell, rep, RepositoryOperation.MODIFY_JOB, RepositoryOperation.EXECUTE_JOB ) ) {
return;
}
toClipboard( XMLHandler.getXMLHeader() + jobMeta.getXML() );
}
public void copyTransformationImage( TransMeta transMeta ) {
TransGraph transGraph = delegates.trans.findTransGraphOfTransformation( transMeta );
if ( transGraph == null ) {
return;
}
Clipboard clipboard = GUIResource.getInstance().getNewClipboard();
Point area = transMeta.getMaximum();
Image image = transGraph.getTransformationImage( Display.getCurrent(), area.x, area.y, 1.0f );
clipboard.setContents(
new Object[] { image.getImageData() }, new Transfer[] { ImageDataTransfer.getInstance() } );
}
/**
* @return Either a TransMeta or JobMeta object
*/
public HasDatabasesInterface getActiveHasDatabasesInterface() {
TransMeta transMeta = getActiveTransformation();
if ( transMeta != null ) {
return transMeta;
}
return getActiveJob();
}
/**
* Shows a wizard that creates a new database connection...
*
*/
public void createDatabaseWizard() {
HasDatabasesInterface hasDatabasesInterface = getActiveHasDatabasesInterface();
if ( hasDatabasesInterface == null ) {
return; // nowhere to put the new database
}
CreateDatabaseWizard cdw = new CreateDatabaseWizard();
DatabaseMeta newDBInfo = cdw.createAndRunDatabaseWizard( shell, props, hasDatabasesInterface.getDatabases() );
if ( newDBInfo != null ) { // finished
hasDatabasesInterface.addDatabase( newDBInfo );
refreshTree();
refreshGraph();
}
}
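  /**
   * @return the databases defined in the active transformation or job, merged with the databases stored
   *         in the connected repository (if any), de-duplicated by name
   */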
public List<DatabaseMeta> getActiveDatabases() {
Map<String, DatabaseMeta> map = new Hashtable<String, DatabaseMeta>();
HasDatabasesInterface hasDatabasesInterface = getActiveHasDatabasesInterface();
if ( hasDatabasesInterface != null ) {
for ( int i = 0; i < hasDatabasesInterface.nrDatabases(); i++ ) {
map.put( hasDatabasesInterface.getDatabase( i ).getName(), hasDatabasesInterface.getDatabase( i ) );
}
}
if ( rep != null ) {
try {
List<DatabaseMeta> repDBs = rep.readDatabases();
for ( DatabaseMeta databaseMeta : repDBs ) {
map.put( databaseMeta.getName(), databaseMeta );
}
} catch ( Exception e ) {
log.logError( "Unexpected error reading databases from the repository: " + e.toString() );
log.logError( Const.getStackTracker( e ) );
}
}
List<DatabaseMeta> databases = new ArrayList<DatabaseMeta>();
databases.addAll( map.values() );
return databases;
}
/**
* Create a transformation that extracts tables & data from a database.
* <p>
* <p>
*
* 0) Select the database to rip
* <p>
* 1) Select the table in the database to copy
* <p>
* 2) Select the database to dump to
* <p>
* 3) Select the repository directory in which it will end up
* <p>
* 4) Select a name for the new transformation
* <p>
   * 5) Create 1 transformation for the selected table
* <p>
*/
public void copyTableWizard() {
List<DatabaseMeta> databases = getActiveDatabases();
if ( databases.size() == 0 ) {
return; // Nothing to do here
}
final CopyTableWizardPage1 page1 = new CopyTableWizardPage1( "1", databases );
page1.createControl( shell );
final CopyTableWizardPage2 page2 = new CopyTableWizardPage2( "2" );
page2.createControl( shell );
Wizard wizard = new Wizard() {
public boolean performFinish() {
return delegates.db.copyTable( page1.getSourceDatabase(), page1.getTargetDatabase(), page2.getSelection() );
}
/**
* @see org.eclipse.jface.wizard.Wizard#canFinish()
*/
public boolean canFinish() {
return page2.canFinish();
}
};
wizard.addPage( page1 );
wizard.addPage( page2 );
WizardDialog wd = new WizardDialog( shell, wizard );
WizardDialog.setDefaultImage( GUIResource.getInstance().getImageWizard() );
wd.setMinimumPageSize( 700, 400 );
wd.updateSize();
wd.open();
}
public String toString() {
return APP_NAME;
}
public void selectRep( CommandLineOption[] options ) {
RepositoryMeta repositoryMeta;
StringBuffer optionRepname = getCommandLineOption( options, "rep" ).getArgument();
StringBuffer optionFilename = getCommandLineOption( options, "file" ).getArgument();
StringBuffer optionUsername = getCommandLineOption( options, "user" ).getArgument();
StringBuffer optionPassword = getCommandLineOption( options, "pass" ).getArgument();
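    // Decide how to connect: show the repositories dialog, or connect directly using the command line options.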
if ( Const.isEmpty( optionRepname )
&& Const.isEmpty( optionFilename ) && props.showRepositoriesDialogAtStartup() ) {
if ( log.isBasic() ) {
// "Asking for repository"
log.logBasic( BaseMessages.getString( PKG, "Spoon.Log.AskingForRepository" ) );
}
loginDialog = new RepositoriesDialog( shell, null, new ILoginCallback() {
public void onSuccess( Repository repository ) {
setRepository( repository );
SpoonPluginManager.getInstance().notifyLifecycleListeners( SpoonLifeCycleEvent.REPOSITORY_CONNECTED );
}
public void onError( Throwable t ) {
onLoginError( t );
}
public void onCancel() {
// do nothing
}
} );
hideSplash();
loginDialog.show();
showSplash();
} else if ( !Const.isEmpty( optionRepname ) && Const.isEmpty( optionFilename ) ) {
RepositoriesMeta repsInfo = new RepositoriesMeta();
try {
repsInfo.readData();
repositoryMeta = repsInfo.findRepository( optionRepname.toString() );
if ( repositoryMeta != null && !Const.isEmpty( optionUsername ) && !Const.isEmpty( optionPassword ) ) {
// Define and connect to the repository...
Repository repo =
PluginRegistry
.getInstance().loadClass( RepositoryPluginType.class, repositoryMeta, Repository.class );
repo.init( repositoryMeta );
repo.connect( optionUsername != null ? optionUsername.toString() : null, optionPassword != null
? optionPassword.toString() : null );
setRepository( repo );
} else {
if ( !Const.isEmpty( optionUsername ) && !Const.isEmpty( optionPassword ) ) {
String msg = BaseMessages.getString( PKG, "Spoon.Log.NoRepositoriesDefined" );
log.logError( msg ); // "No repositories defined on this system."
MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR );
mb.setMessage( BaseMessages.getString( PKG, "Spoon.Error.Repository.NotFound", optionRepname
.toString() ) );
mb.setText( BaseMessages.getString( PKG, "Spoon.Error.Repository.NotFound.Title" ) );
mb.open();
}
loginDialog = new RepositoriesDialog( shell, null, new ILoginCallback() {
public void onSuccess( Repository repository ) {
setRepository( repository );
SpoonPluginManager.getInstance().notifyLifecycleListeners( SpoonLifeCycleEvent.REPOSITORY_CONNECTED );
}
public void onError( Throwable t ) {
onLoginError( t );
}
public void onCancel() {
            // do nothing
}
} );
hideSplash();
loginDialog.show();
showSplash();
}
} catch ( Exception e ) {
hideSplash();
// Eat the exception but log it...
log.logError( "Error reading repositories xml file", e );
}
}
}
public void handleStartOptions( CommandLineOption[] options ) {
// note that at this point the rep object is populated by previous calls
StringBuffer optionRepname = getCommandLineOption( options, "rep" ).getArgument();
StringBuffer optionFilename = getCommandLineOption( options, "file" ).getArgument();
StringBuffer optionDirname = getCommandLineOption( options, "dir" ).getArgument();
StringBuffer optionTransname = getCommandLineOption( options, "trans" ).getArgument();
StringBuffer optionJobname = getCommandLineOption( options, "job" ).getArgument();
// StringBuffer optionUsername = getCommandLineOption(options,
// "user").getArgument();
// StringBuffer optionPassword = getCommandLineOption(options,
// "pass").getArgument();
try {
// Read kettle transformation specified on command-line?
if ( !Const.isEmpty( optionRepname ) || !Const.isEmpty( optionFilename ) ) {
if ( !Const.isEmpty( optionRepname ) ) {
if ( rep != null ) {
if ( Const.isEmpty( optionDirname ) ) {
optionDirname = new StringBuffer( RepositoryDirectory.DIRECTORY_SEPARATOR );
}
// Options /file, /job and /trans are mutually
// exclusive
int t =
( Const.isEmpty( optionFilename ) ? 0 : 1 )
+ ( Const.isEmpty( optionJobname ) ? 0 : 1 ) + ( Const.isEmpty( optionTransname ) ? 0 : 1 );
if ( t > 1 ) {
// "More then one mutually exclusive options /file, /job and /trans are specified."
log.logError( BaseMessages.getString( PKG, "Spoon.Log.MutuallyExcusive" ) );
} else if ( t == 1 ) {
if ( !Const.isEmpty( optionFilename ) ) {
openFile( optionFilename.toString(), false );
} else {
// OK, if we have a specified job or
// transformation, try to load it...
// If not, keep the repository logged
// in.
RepositoryDirectoryInterface rdi = rep.findDirectory( optionDirname.toString() );
if ( rdi == null ) {
log.logError( BaseMessages.getString( PKG, "Spoon.Log.UnableFindDirectory", optionDirname
.toString() ) ); // "Can't find directory ["+dirname+"] in the repository."
} else {
if ( !Const.isEmpty( optionTransname ) ) {
                  // Load the latest version of the specified transformation
                  TransMeta transMeta =
                    rep.loadTransformation( optionTransname.toString(), rdi, null, true, null );
transMeta.clearChanged();
transMeta.setInternalKettleVariables();
addTransGraph( transMeta );
} else {
                  // Otherwise try to load the specified job, if any (latest version)
                  JobMeta jobMeta = rep.loadJob( optionJobname.toString(), rdi, null, null );
jobMeta.clearChanged();
jobMeta.setInternalKettleVariables();
addJobGraph( jobMeta );
}
}
}
}
} else {
// "No repositories defined on this system."
log.logError( BaseMessages.getString( PKG, "Spoon.Log.NoRepositoriesDefined" ) );
}
} else if ( !Const.isEmpty( optionFilename ) ) {
openFile( optionFilename.toString(), false );
}
}
} catch ( KettleException ke ) {
hideSplash();
log.logError( BaseMessages.getString( PKG, "Spoon.Log.ErrorOccurred" ) + Const.CR + ke.getMessage() );
log.logError( Const.getStackTracker( ke ) );
// do not just eat the exception
new ErrorDialog( shell, BaseMessages.getString( PKG, "Spoon.Log.ErrorOccurred" ), BaseMessages.getString(
PKG, "Spoon.Log.ErrorOccurred" )
+ Const.CR + ke.getMessage(), ke );
rep = null;
}
}
private void loadLastUsedFiles() {
if ( props.openLastFile() ) {
if ( log.isDetailed() ) {
// "Trying to open the last file used."
log.logDetailed( BaseMessages.getString( PKG, "Spoon.Log.TryingOpenLastUsedFile" ) );
}
List<LastUsedFile> lastUsedFiles = props.getOpenTabFiles();
for ( LastUsedFile lastUsedFile : lastUsedFiles ) {
try {
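        // Only reopen repository-based files when we are connected to the repository they came from.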
if ( !lastUsedFile.isSourceRepository()
|| lastUsedFile.isSourceRepository() && rep != null
&& rep.getName().equals( lastUsedFile.getRepositoryName() ) ) {
loadLastUsedFile( lastUsedFile, rep == null ? null : rep.getName(), false );
}
} catch ( Exception e ) {
hideSplash();
new ErrorDialog(
shell, BaseMessages.getString( PKG, "Spoon.LoadLastUsedFile.Exception.Title" ), BaseMessages
.getString( PKG, "Spoon.LoadLastUsedFile.Exception.Message", lastUsedFile.toString() ), e );
}
}
}
}
public void start( CommandLineOption[] options ) throws KettleException {
// Show the repository connection dialog
//
selectRep( options );
// Read the start option parameters
//
handleStartOptions( options );
// Load the last loaded files
//
loadLastUsedFiles();
// Enable menus based on whether user was able to login or not
//
enableMenus();
// enable perspective switching
SpoonPerspectiveManager.getInstance().setForcePerspective( false );
if ( props.showTips() ) {
TipsDialog tip = new TipsDialog( shell );
hideSplash();
tip.open();
}
if ( splash != null ) {
splash.dispose();
splash = null;
}
    // If this is a MILESTONE (development) build and the warning has not been suppressed
if ( !ValueMeta.convertStringToBoolean( System.getProperty( "KETTLE_HIDE_DEVELOPMENT_VERSION_WARNING", "N" ) )
&& Const.RELEASE.equals( Const.ReleaseType.MILESTONE ) ) {
      // warn the user that this is not a production release
MessageBox dialog = new MessageBox( shell, SWT.ICON_WARNING );
dialog.setText( BaseMessages.getString( PKG, "Spoon.Warning.DevelopmentRelease.Title" ) );
dialog.setMessage( BaseMessages.getString(
PKG, "Spoon.Warning.DevelopmentRelease.Message", Const.CR, BuildVersion.getInstance().getVersion() ) );
dialog.open();
}
}
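  /**
   * Runs the SWT event loop until the main shell is disposed, giving the user the option to retry after an
   * unexpected error instead of terminating Spoon right away.
   */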
private void waitForDispose() {
boolean retryAfterError; // Enable the user to retry and
// continue after fatal error
do {
retryAfterError = false; // reset to false after error otherwise
// it will loop forever after
// closing Spoon
try {
while ( getShell() != null && !getShell().isDisposed() ) {
if ( !readAndDispatch() ) {
sleep();
}
}
} catch ( Throwable e ) {
// "An unexpected error occurred in Spoon: probable cause: please close all windows before stopping Spoon! "
log.logError( BaseMessages.getString( PKG, "Spoon.Log.UnexpectedErrorOccurred" )
+ Const.CR + e.getMessage() );
log.logError( Const.getStackTracker( e ) );
try {
new ErrorDialog( shell, BaseMessages.getString( PKG, "Spoon.Log.UnexpectedErrorOccurred" ), BaseMessages
.getString( PKG, "Spoon.Log.UnexpectedErrorOccurred" )
+ Const.CR + e.getMessage(), e );
// Retry dialog
MessageBox mb = new MessageBox( shell, SWT.ICON_QUESTION | SWT.NO | SWT.YES );
mb.setText( BaseMessages.getString( PKG, "Spoon.Log.UnexpectedErrorRetry.Titel" ) );
mb.setMessage( BaseMessages.getString( PKG, "Spoon.Log.UnexpectedErrorRetry.Message" ) );
if ( mb.open() == SWT.YES ) {
retryAfterError = true;
}
} catch ( Throwable e1 ) {
// When the opening of a dialog crashed, we can not do
// anything more here
}
}
} while ( retryAfterError );
if ( !display.isDisposed() ) {
display.update();
}
dispose();
if ( log.isBasic() ) {
log.logBasic( APP_NAME + " " + BaseMessages.getString( PKG, "Spoon.Log.AppHasEnded" ) ); // " has ended."
}
// Close the logfile
if ( fileLoggingEventListener != null ) {
try {
fileLoggingEventListener.close();
} catch ( Exception e ) {
LogChannel.GENERAL.logError( "Error closing logging file", e );
}
KettleLogStore.getAppender().removeLoggingEventListener( fileLoggingEventListener );
}
}
// public Splash splash;
// public CommandLineOption options[];
public static CommandLineOption getCommandLineOption( CommandLineOption[] options, String opt ) {
for ( CommandLineOption option : options ) {
if ( option.getOption().equals( opt ) ) {
return option;
}
}
return null;
}
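  /**
   * Defines the command line options that Spoon understands and parses the given arguments into them,
   * exiting when the arguments cannot be parsed.
   */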
public static CommandLineOption[] getCommandLineArgs( List<String> args ) {
CommandLineOption[] clOptions =
new CommandLineOption[] {
new CommandLineOption( "rep", "Repository name", new StringBuffer() ),
new CommandLineOption( "user", "Repository username", new StringBuffer() ),
new CommandLineOption( "pass", "Repository password", new StringBuffer() ),
new CommandLineOption( "job", "The name of the job to launch", new StringBuffer() ),
new CommandLineOption( "trans", "The name of the transformation to launch", new StringBuffer() ),
new CommandLineOption( "dir", "The directory (don't forget the leading /)", new StringBuffer() ),
new CommandLineOption( "file", "The filename (Transformation in XML) to launch", new StringBuffer() ),
new CommandLineOption(
"level", "The logging level (Basic, Detailed, Debug, Rowlevel, Error, Nothing)",
new StringBuffer() ),
new CommandLineOption( "logfile", "The logging file to write to", new StringBuffer() ),
new CommandLineOption(
"log", "The logging file to write to (deprecated)", new StringBuffer(), false, true ),
new CommandLineOption( "perspective", "The perspective to start in", new StringBuffer(), false, true ) };
// start with the default logger until we find out otherwise
//
log = new LogChannel( APP_NAME );
// Parse the options...
if ( !CommandLineOption.parseArguments( args, clOptions, log ) ) {
log.logError( "Command line option not understood" );
System.exit( 8 );
}
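    // The KETTLE_REPOSITORY, KETTLE_USER and KETTLE_PASSWORD environment variables can pre-populate
    // the corresponding repository connection options.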
String kettleRepname = Const.getEnvironmentVariable( "KETTLE_REPOSITORY", null );
String kettleUsername = Const.getEnvironmentVariable( "KETTLE_USER", null );
String kettlePassword = Const.getEnvironmentVariable( "KETTLE_PASSWORD", null );
if ( !Const.isEmpty( kettleRepname ) ) {
clOptions[0].setArgument( new StringBuffer( kettleRepname ) );
}
if ( !Const.isEmpty( kettleUsername ) ) {
clOptions[1].setArgument( new StringBuffer( kettleUsername ) );
}
if ( !Const.isEmpty( kettlePassword ) ) {
clOptions[2].setArgument( new StringBuffer( kettlePassword ) );
}
return clOptions;
}
private void loadLastUsedFile( LastUsedFile lastUsedFile, String repositoryName ) throws KettleException {
loadLastUsedFile( lastUsedFile, repositoryName, true );
}
private void loadLastUsedFile( LastUsedFile lastUsedFile, String repositoryName, boolean trackIt ) throws KettleException {
boolean useRepository = repositoryName != null;
// Perhaps we need to connect to the repository?
//
if ( lastUsedFile.isSourceRepository() ) {
if ( !Const.isEmpty( lastUsedFile.getRepositoryName() ) ) {
if ( useRepository && !lastUsedFile.getRepositoryName().equalsIgnoreCase( repositoryName ) ) {
          // The file was stored in a different repository than the one we are connected to: don't use the repository.
useRepository = false;
}
}
}
if ( useRepository && lastUsedFile.isSourceRepository() ) {
if ( rep != null ) { // load from this repository...
if ( rep.getName().equalsIgnoreCase( lastUsedFile.getRepositoryName() ) ) {
RepositoryDirectoryInterface rdi = rep.findDirectory( lastUsedFile.getDirectory() );
if ( rdi != null ) {
// Are we loading a transformation or a job?
if ( lastUsedFile.isTransformation() ) {
if ( log.isDetailed() ) {
// "Auto loading transformation ["+lastfiles[0]+"] from repository directory ["+lastdirs[0]+"]"
log.logDetailed( BaseMessages.getString( PKG, "Spoon.Log.AutoLoadingTransformation", lastUsedFile
.getFilename(), lastUsedFile.getDirectory() ) );
}
TransLoadProgressDialog tlpd =
new TransLoadProgressDialog( shell, rep, lastUsedFile.getFilename(), rdi, null );
TransMeta transMeta = tlpd.open();
if ( transMeta != null ) {
if ( trackIt ) {
props.addLastFile( LastUsedFile.FILE_TYPE_TRANSFORMATION, lastUsedFile.getFilename(), rdi
.getPath(), true, rep.getName() );
}
// transMeta.setFilename(lastUsedFile.getFilename());
transMeta.clearChanged();
addTransGraph( transMeta );
refreshTree();
}
} else if ( lastUsedFile.isJob() ) {
JobLoadProgressDialog progressDialog =
new JobLoadProgressDialog( shell, rep, lastUsedFile.getFilename(), rdi, null );
JobMeta jobMeta = progressDialog.open();
if ( jobMeta != null ) {
if ( trackIt ) {
props.addLastFile(
LastUsedFile.FILE_TYPE_JOB, lastUsedFile.getFilename(), rdi.getPath(), true, rep
.getName() );
}
jobMeta.clearChanged();
addJobGraph( jobMeta );
}
}
refreshTree();
}
}
}
}
if ( !lastUsedFile.isSourceRepository() && !Const.isEmpty( lastUsedFile.getFilename() ) ) {
if ( lastUsedFile.isTransformation() ) {
openFile( lastUsedFile.getFilename(), false );
}
if ( lastUsedFile.isJob() ) {
openFile( lastUsedFile.getFilename(), false );
}
refreshTree();
}
}
/**
   * Creates a new SelectValues step between the previous step and the given target step. If the previous step's
   * output fields or the target's required fields cannot be determined, no mapping can be made.
   *
   * @param transMeta
   *          The transformation to add the mapping step to.
   * @param stepMeta
   *          The target step to map against.
*/
// retry of required fields acquisition
public void generateFieldMapping( TransMeta transMeta, StepMeta stepMeta ) {
try {
if ( stepMeta != null ) {
StepMetaInterface smi = stepMeta.getStepMetaInterface();
RowMetaInterface targetFields = smi.getRequiredFields( transMeta );
RowMetaInterface sourceFields = transMeta.getPrevStepFields( stepMeta );
// Build the mapping: let the user decide!!
String[] source = sourceFields.getFieldNames();
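        // Show each source field together with the name of the step it originates from.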
for ( int i = 0; i < source.length; i++ ) {
ValueMetaInterface v = sourceFields.getValueMeta( i );
source[i] += EnterMappingDialog.STRING_ORIGIN_SEPARATOR + v.getOrigin() + ")";
}
String[] target = targetFields.getFieldNames();
EnterMappingDialog dialog = new EnterMappingDialog( shell, source, target );
List<SourceToTargetMapping> mappings = dialog.open();
if ( mappings != null ) {
// OK, so we now know which field maps where.
// This allows us to generate the mapping using a
// SelectValues Step...
SelectValuesMeta svm = new SelectValuesMeta();
svm.allocate( mappings.size(), 0, 0 );
//CHECKSTYLE:Indentation:OFF
for ( int i = 0; i < mappings.size(); i++ ) {
SourceToTargetMapping mapping = mappings.get( i );
svm.getSelectName()[i] = sourceFields.getValueMeta( mapping.getSourcePosition() ).getName();
svm.getSelectRename()[i] = target[mapping.getTargetPosition()];
svm.getSelectLength()[i] = -1;
svm.getSelectPrecision()[i] = -1;
}
// Now that we have the meta-data, create a new step info object
String stepName = stepMeta.getName() + " Mapping";
          // If that name is already taken, get an alternative one.
          stepName = transMeta.getAlternativeStepname( stepName );
StepMeta newStep = new StepMeta( "SelectValues", stepName, svm );
newStep.setLocation( stepMeta.getLocation().x + 20, stepMeta.getLocation().y + 20 );
newStep.setDraw( true );
transMeta.addStep( newStep );
addUndoNew( transMeta, new StepMeta[] { newStep }, new int[] { transMeta.indexOfStep( newStep ) } );
// Redraw stuff...
refreshTree();
refreshGraph();
}
} else {
throw new KettleException( "There is no target to do a field mapping against!" );
}
} catch ( KettleException e ) {
new ErrorDialog(
shell, "Error creating mapping",
"There was an error when Kettle tried to generate a field mapping against the target step", e );
}
}
public void editPartitioning( TransMeta transMeta, StepMeta stepMeta ) {
// Before we start, check if there are any partition schemas defined...
//
String[] schemaNames = transMeta.getPartitionSchemasNames();
if ( schemaNames.length == 0 ) {
MessageBox box = new MessageBox( shell, SWT.ICON_ERROR | SWT.OK );
box.setText( "Create a partition schema" );
box.setMessage( "You first need to create one or more partition schemas in "
+ "the transformation settings dialog before you can select one!" );
box.open();
return;
}
StepPartitioningMeta stepPartitioningMeta = stepMeta.getStepPartitioningMeta();
if ( stepPartitioningMeta == null ) {
stepPartitioningMeta = new StepPartitioningMeta();
}
StepMeta before = (StepMeta) stepMeta.clone();
PluginRegistry registry = PluginRegistry.getInstance();
List<PluginInterface> plugins = registry.getPlugins( PartitionerPluginType.class );
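    // Build the selection lists: the built-in partitioning methods first, followed by the registered
    // partitioner plugins.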
String[] options = new String[StepPartitioningMeta.methodDescriptions.length + plugins.size()];
String[] codes = new String[StepPartitioningMeta.methodDescriptions.length + plugins.size()];
System.arraycopy(
StepPartitioningMeta.methodDescriptions, 0, options, 0, StepPartitioningMeta.methodDescriptions.length );
System.arraycopy( StepPartitioningMeta.methodCodes, 0, codes, 0, StepPartitioningMeta.methodCodes.length );
Iterator<PluginInterface> it = plugins.iterator();
int idx = 0;
while ( it.hasNext() ) {
PluginInterface entry = it.next();
options[StepPartitioningMeta.methodDescriptions.length + idx] = entry.getDescription();
codes[StepPartitioningMeta.methodCodes.length + idx] = entry.getIds()[0];
idx++;
}
for ( int i = 0; i < codes.length; i++ ) {
if ( codes[i].equals( stepPartitioningMeta.getMethod() ) ) {
idx = i;
break;
}
}
EnterSelectionDialog dialog =
new EnterSelectionDialog( shell, options, "Partioning method", "Select the partitioning method" );
String methodDescription = dialog.open( idx );
if ( methodDescription != null ) {
String method = StepPartitioningMeta.methodCodes[StepPartitioningMeta.PARTITIONING_METHOD_NONE];
for ( int i = 0; i < options.length; i++ ) {
if ( options[i].equals( methodDescription ) ) {
method = codes[i];
}
}
try {
int methodType = StepPartitioningMeta.getMethodType( method );
stepPartitioningMeta.setMethodType( methodType );
stepPartitioningMeta.setMethod( method );
switch ( methodType ) {
case StepPartitioningMeta.PARTITIONING_METHOD_NONE:
break;
case StepPartitioningMeta.PARTITIONING_METHOD_MIRROR:
case StepPartitioningMeta.PARTITIONING_METHOD_SPECIAL:
// Set the partitioning schema too.
PartitionSchema partitionSchema = stepPartitioningMeta.getPartitionSchema();
idx = -1;
if ( partitionSchema != null ) {
idx = Const.indexOfString( partitionSchema.getName(), schemaNames );
}
EnterSelectionDialog askSchema =
new EnterSelectionDialog(
shell, schemaNames, "Select a partition schema", "Select the partition schema to use:" );
String schemaName = askSchema.open( idx );
if ( schemaName != null ) {
idx = Const.indexOfString( schemaName, schemaNames );
stepPartitioningMeta.setPartitionSchema( transMeta.getPartitionSchemas().get( idx ) );
}
if ( methodType == StepPartitioningMeta.PARTITIONING_METHOD_SPECIAL ) {
// ask for a field name
StepDialogInterface partitionerDialog;
try {
partitionerDialog =
delegates.steps.getPartitionerDialog( stepMeta, stepPartitioningMeta, transMeta );
partitionerDialog.open();
} catch ( Exception e ) {
new ErrorDialog(
shell, "Error",
"There was an unexpected error while editing the partitioning method specifics:", e );
}
}
break;
default:
break;
}
StepMeta after = (StepMeta) stepMeta.clone();
addUndoChange( transMeta, new StepMeta[] { before }, new StepMeta[] { after }, new int[] { transMeta
.indexOfStep( stepMeta ) } );
refreshGraph();
} catch ( Exception e ) {
new ErrorDialog(
shell, BaseMessages.getString( PKG, "Spoon.ErrorEditingStepPartitioning.Title" ), BaseMessages
.getString( PKG, "Spoon.ErrorEditingStepPartitioning.Message" ), e );
}
}
}
/**
* Select a clustering schema for this step.
*
* @param stepMeta
* The step to set the clustering schema for.
*/
public void editClustering( TransMeta transMeta, StepMeta stepMeta ) {
List<StepMeta> stepMetas = new ArrayList<StepMeta>();
stepMetas.add( stepMeta );
editClustering( transMeta, stepMetas );
}
/**
* Select a clustering schema for this step.
*
* @param stepMetas
* The steps (at least one!) to set the clustering schema for.
*/
public void editClustering( TransMeta transMeta, List<StepMeta> stepMetas ) {
StepMeta stepMeta = stepMetas.get( 0 );
int idx = -1;
if ( stepMeta.getClusterSchema() != null ) {
idx = transMeta.getClusterSchemas().indexOf( stepMeta.getClusterSchema() );
}
String[] clusterSchemaNames = transMeta.getClusterSchemaNames();
EnterSelectionDialog dialog =
new EnterSelectionDialog(
shell, clusterSchemaNames, "Cluster schema", "Select the cluster schema to use (cancel=clear)" );
String schemaName = dialog.open( idx );
if ( schemaName == null ) {
for ( StepMeta step : stepMetas ) {
step.setClusterSchema( null );
}
} else {
ClusterSchema clusterSchema = transMeta.findClusterSchema( schemaName );
for ( StepMeta step : stepMetas ) {
step.setClusterSchema( clusterSchema );
}
}
refreshTree();
refreshGraph();
}
public void createKettleArchive( TransMeta transMeta ) {
if ( transMeta == null ) {
return;
}
JarfileGenerator.generateJarFile( transMeta );
}
/**
* This creates a new partitioning schema, edits it and adds it to the transformation metadata
*
*/
public void newPartitioningSchema( TransMeta transMeta ) {
PartitionSchema partitionSchema = new PartitionSchema();
PartitionSchemaDialog dialog =
new PartitionSchemaDialog( shell, partitionSchema, transMeta.getDatabases(), transMeta );
if ( dialog.open() ) {
transMeta.getPartitionSchemas().add( partitionSchema );
if ( rep != null ) {
try {
if ( !rep.getSecurityProvider().isReadOnly() ) {
rep.save( partitionSchema, Const.VERSION_COMMENT_INITIAL_VERSION, null );
} else {
throw new KettleException( BaseMessages.getString(
PKG, "Spoon.Dialog.Exception.ReadOnlyRepositoryUser" ) );
}
} catch ( KettleException e ) {
new ErrorDialog(
getShell(), BaseMessages.getString( PKG, "Spoon.Dialog.ErrorSavingPartition.Title" ), BaseMessages
.getString( PKG, "Spoon.Dialog.ErrorSavingPartition.Message", partitionSchema.getName() ), e );
}
}
refreshTree();
}
}
private void editPartitionSchema( TransMeta transMeta, PartitionSchema partitionSchema ) {
PartitionSchemaDialog dialog =
new PartitionSchemaDialog( shell, partitionSchema, transMeta.getDatabases(), transMeta );
if ( dialog.open() ) {
refreshTree();
}
}
private void delPartitionSchema( TransMeta transMeta, PartitionSchema partitionSchema ) {
try {
if ( rep != null && partitionSchema.getObjectId() != null ) {
// remove the partition schema from the repository too...
rep.deletePartitionSchema( partitionSchema.getObjectId() );
}
int idx = transMeta.getPartitionSchemas().indexOf( partitionSchema );
transMeta.getPartitionSchemas().remove( idx );
refreshTree();
} catch ( KettleException e ) {
new ErrorDialog(
shell, BaseMessages.getString( PKG, "Spoon.Dialog.ErrorDeletingClusterSchema.Title" ), BaseMessages
.getString( PKG, "Spoon.Dialog.ErrorDeletingClusterSchema.Message" ), e );
}
}
/**
* This creates a new clustering schema, edits it and adds it to the transformation metadata
*
*/
public void newClusteringSchema( TransMeta transMeta ) {
ClusterSchema clusterSchema = new ClusterSchema();
ClusterSchemaDialog dialog = new ClusterSchemaDialog( shell, clusterSchema, transMeta.getSlaveServers() );
if ( dialog.open() ) {
transMeta.getClusterSchemas().add( clusterSchema );
if ( rep != null ) {
try {
if ( !rep.getSecurityProvider().isReadOnly() ) {
rep.save( clusterSchema, Const.VERSION_COMMENT_INITIAL_VERSION, null );
} else {
throw new KettleException( BaseMessages.getString(
PKG, "Spoon.Dialog.Exception.ReadOnlyRepositoryUser" ) );
}
} catch ( KettleException e ) {
new ErrorDialog(
getShell(), BaseMessages.getString( PKG, "Spoon.Dialog.ErrorSavingCluster.Title" ), BaseMessages
.getString( PKG, "Spoon.Dialog.ErrorSavingCluster.Message", clusterSchema.getName() ), e );
}
}
refreshTree();
}
}
private void editClusterSchema( TransMeta transMeta, ClusterSchema clusterSchema ) {
ClusterSchemaDialog dialog = new ClusterSchemaDialog( shell, clusterSchema, transMeta.getSlaveServers() );
if ( dialog.open() ) {
refreshTree();
}
}
private void delClusterSchema( TransMeta transMeta, ClusterSchema clusterSchema ) {
try {
if ( rep != null && clusterSchema.getObjectId() != null ) {
        // remove the cluster schema from the repository too...
rep.deleteClusterSchema( clusterSchema.getObjectId() );
}
int idx = transMeta.getClusterSchemas().indexOf( clusterSchema );
transMeta.getClusterSchemas().remove( idx );
refreshTree();
} catch ( KettleException e ) {
new ErrorDialog(
shell, BaseMessages.getString( PKG, "Spoon.Dialog.ErrorDeletingPartitionSchema.Title" ), BaseMessages
.getString( PKG, "Spoon.Dialog.ErrorDeletingPartitionSchema.Message" ), e );
}
}
/**
   * This creates a new slave server, edits it and adds it to the transformation or job metadata
*
*/
public void newSlaveServer( HasSlaveServersInterface hasSlaveServersInterface ) {
delegates.slaves.newSlaveServer( hasSlaveServersInterface );
}
public void delSlaveServer( HasSlaveServersInterface hasSlaveServersInterface, SlaveServer slaveServer ) {
try {
delegates.slaves.delSlaveServer( hasSlaveServersInterface, slaveServer );
} catch ( KettleException e ) {
new ErrorDialog( shell, BaseMessages.getString( PKG, "Spoon.Dialog.ErrorDeletingSlave.Title" ), BaseMessages
.getString( PKG, "Spoon.Dialog.ErrorDeletingSlave.Message" ), e );
}
}
/**
* Sends transformation to slave server
*
* @param executionConfiguration
*/
public void sendTransformationXMLToSlaveServer( TransMeta transMeta,
TransExecutionConfiguration executionConfiguration ) {
try {
Trans.sendToSlaveServer( transMeta, executionConfiguration, rep, metaStore );
} catch ( Exception e ) {
new ErrorDialog( shell, "Error", "Error sending transformation to server", e );
}
}
public void runFile() {
executeFile( true, false, false, false, false, null, false );
}
public void replayTransformation() {
TransExecutionConfiguration tc = this.getTransExecutionConfiguration();
executeFile(
tc.isExecutingLocally(), tc.isExecutingRemotely(), tc.isExecutingClustered(), false, false, new Date(),
false );
}
public void previewFile() {
executeFile( true, false, false, true, false, null, true );
}
public void debugFile() {
executeFile( true, false, false, false, true, null, true );
}
public void executeFile( boolean local, boolean remote, boolean cluster, boolean preview, boolean debug,
Date replayDate, boolean safe ) {
TransMeta transMeta = getActiveTransformation();
if ( transMeta != null ) {
executeTransformation(
transMeta, local, remote, cluster, preview, debug, replayDate, safe, transExecutionConfiguration
.getLogLevel() );
}
JobMeta jobMeta = getActiveJob();
if ( jobMeta != null ) {
executeJob( jobMeta, local, remote, replayDate, safe, null, 0 );
}
}
public void executeTransformation( final TransMeta transMeta, final boolean local, final boolean remote,
final boolean cluster, final boolean preview, final boolean debug, final Date replayDate,
final boolean safe, final LogLevel logLevel ) {
if ( RepositorySecurityUI.verifyOperations( shell, rep, RepositoryOperation.EXECUTE_TRANSFORMATION ) ) {
return;
}
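    // Hand the execution request off to the SWT display thread asynchronously so the caller is not blocked.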
Thread thread = new Thread() {
public void run() {
getDisplay().asyncExec( new Runnable() {
public void run() {
try {
delegates.trans.executeTransformation(
transMeta, local, remote, cluster, preview, debug, replayDate, safe, logLevel );
} catch ( Exception e ) {
new ErrorDialog(
shell, "Execute transformation", "There was an error during transformation execution", e );
}
}
} );
}
};
thread.start();
}
public void executeJob( JobMeta jobMeta, boolean local, boolean remote, Date replayDate, boolean safe,
String startCopyName, int startCopyNr ) {
if ( RepositorySecurityUI.verifyOperations( shell, rep, RepositoryOperation.EXECUTE_JOB ) ) {
return;
}
try {
delegates.jobs.executeJob( jobMeta, local, remote, replayDate, safe, startCopyName, startCopyNr );
} catch ( Exception e ) {
new ErrorDialog( shell, "Execute job", "There was an error during job execution", e );
}
}
public void addSpoonSlave( SlaveServer slaveServer ) {
delegates.slaves.addSpoonSlave( slaveServer );
}
public void addJobHistory( JobMeta jobMeta, boolean select ) {
JobGraph activeJobGraph = getActiveJobGraph();
if ( activeJobGraph != null ) {
activeJobGraph.jobHistoryDelegate.addJobHistory();
}
// delegates.jobs.addJobHistory(jobMeta, select);
}
public void paste() {
String clipContent = fromClipboard();
if ( clipContent != null ) {
// Load the XML
//
try {
Document document = XMLHandler.loadXMLString( clipContent );
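        // Probe for the known root tags to determine whether the clipboard holds a complete transformation,
        // a complete job, a set of steps or a set of job entries.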
boolean transformation = XMLHandler.getSubNode( document, TransMeta.XML_TAG ) != null;
boolean job = XMLHandler.getSubNode( document, JobMeta.XML_TAG ) != null;
boolean steps = XMLHandler.getSubNode( document, Spoon.XML_TAG_TRANSFORMATION_STEPS ) != null;
boolean jobEntries = XMLHandler.getSubNode( document, Spoon.XML_TAG_JOB_JOB_ENTRIES ) != null;
if ( transformation ) {
pasteTransformation();
} else if ( job ) {
pasteJob();
} else if ( steps ) {
TransGraph transGraph = getActiveTransGraph();
if ( transGraph != null && transGraph.getLastMove() != null ) {
pasteXML( transGraph.getManagedObject(), clipContent, transGraph.screen2real(
transGraph.getLastMove().x, transGraph.getLastMove().y ) );
}
} else if ( jobEntries ) {
JobGraph jobGraph = getActiveJobGraph();
if ( jobGraph != null && jobGraph.getLastMove() != null ) {
pasteXML( jobGraph.getManagedObject(), clipContent, jobGraph.getLastMove() );
}
}
} catch ( KettleXMLException e ) {
log.logError( "Unable to paste", e );
}
}
}
public JobEntryCopy newJobEntry( JobMeta jobMeta, String typeDesc, boolean openit ) {
return delegates.jobs.newJobEntry( jobMeta, typeDesc, openit );
}
public JobEntryDialogInterface getJobEntryDialog( JobEntryInterface jei, JobMeta jobMeta ) {
return delegates.jobs.getJobEntryDialog( jei, jobMeta );
}
public StepDialogInterface getStepEntryDialog( StepMetaInterface stepMeta, TransMeta transMeta, String stepName ) {
try {
return delegates.steps.getStepDialog( stepMeta, transMeta, stepName );
} catch ( Throwable t ) {
log.logError( "Could not create dialog for " + stepMeta.getDialogClassName(), t );
}
return null;
}
public void editJobEntry( JobMeta jobMeta, JobEntryCopy je ) {
delegates.jobs.editJobEntry( jobMeta, je );
}
public void deleteJobEntryCopies( JobMeta jobMeta, JobEntryCopy jobEntry ) {
delegates.jobs.deleteJobEntryCopies( jobMeta, jobEntry );
}
public void pasteXML( JobMeta jobMeta, String clipContent, Point loc ) {
if ( RepositorySecurityUI.verifyOperations( shell, rep,
RepositoryOperation.MODIFY_JOB, RepositoryOperation.EXECUTE_JOB ) ) {
return;
}
delegates.jobs.pasteXML( jobMeta, clipContent, loc );
}
public void newJobHop( JobMeta jobMeta, JobEntryCopy fr, JobEntryCopy to ) {
delegates.jobs.newJobHop( jobMeta, fr, to );
}
/**
* Create a job that extracts tables & data from a database.
* <p>
* <p>
*
* 0) Select the database to rip
* <p>
* 1) Select the tables in the database to rip
* <p>
* 2) Select the database to dump to
* <p>
* 3) Select the repository directory in which it will end up
* <p>
* 4) Select a name for the new job
* <p>
* 5) Create an empty job with the selected name.
* <p>
* 6) Create 1 transformation for every selected table
* <p>
* 7) Add every created transformation to the job & evaluate it
* <p>
*
*/
public void ripDBWizard() {
delegates.jobs.ripDBWizard();
}
public JobMeta ripDB( final List<DatabaseMeta> databases, final String jobName,
final RepositoryDirectory repdir, final String directory, final DatabaseMeta sourceDbInfo,
final DatabaseMeta targetDbInfo, final String[] tables ) {
return delegates.jobs.ripDB( databases, jobName, repdir, directory, sourceDbInfo, targetDbInfo, tables );
}
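// Illustrative sketch (not from the original source): how the wizard steps documented above
// could map onto a direct ripDB() call. Every variable name and literal below is a
// hypothetical placeholder.
//
//   JobMeta rippedJob = ripDB(
//       databasesList,                              // known connections (step 0)
//       "rip_sales_db",                             // name for the new job (step 4)
//       targetRepositoryDir,                        // repository directory (step 3)
//       null,                                       // or a filesystem directory instead
//       sourceDbMeta,                               // database to rip (steps 0/1)
//       targetDbMeta,                               // database to dump to (step 2)
//       new String[] { "CUSTOMERS", "ORDERS" } );   // tables to rip (step 1)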
/**
* Set the core object state.
*
* @param state state to set
*/
public void setCoreObjectsState( int state ) {
coreObjectsState = state;
}
/**
* Get the core object state.
*
* @return state.
*/
public int getCoreObjectsState() {
return coreObjectsState;
}
public LogChannelInterface getLog() {
return log;
}
public Repository getRepository() {
return rep;
}
public void setRepository( Repository rep ) {
this.rep = rep;
try {
// Keep one metastore here...
//
if ( metaStore.getMetaStoreList().size() > 1 ) {
metaStore.getMetaStoreList().remove( 0 );
metaStore.setActiveMetaStoreName( metaStore.getMetaStoreList().get( 0 ).getName() );
}
if ( rep != null ) {
this.capabilities = rep.getRepositoryMeta().getRepositoryCapabilities();
// add a wrapper metastore to the delegation
//
IMetaStore repositoryMetaStore = rep.getMetaStore();
if ( repositoryMetaStore != null ) {
metaStore.addMetaStore( 0, repositoryMetaStore ); // first priority for explicitly connected repositories.
metaStore.setActiveMetaStoreName( repositoryMetaStore.getName() );
log.logBasic( "Connected to metastore : "
+ repositoryMetaStore.getName() + ", added to delegating metastore" );
} else {
log.logBasic( "No metastore found in the repository : "
+ rep.getName() + ", connected? " + rep.isConnected() );
}
}
} catch ( MetaStoreException e ) {
new ErrorDialog(
shell, BaseMessages.getString( PKG, "Spoon.Dialog.ErrorAddingRepositoryMetaStore.Title" ), BaseMessages
.getString( PKG, "Spoon.Dialog.ErrorReadingSharedObjects.Message" ), e );
}
// Registering the UI Support classes
UISupportRegistery.getInstance().registerUISupport(
RepositorySecurityProvider.class, BaseRepositoryExplorerUISupport.class );
UISupportRegistery
.getInstance().registerUISupport( RepositorySecurityManager.class, ManageUserUISupport.class );
if ( rep != null ) {
SpoonPluginManager.getInstance().notifyLifecycleListeners( SpoonLifeCycleEvent.REPOSITORY_CHANGED );
}
delegates.update( this );
enableMenus();
}
public void addMenuListener( String id, Object listener, String methodName ) {
menuListeners.add( new Object[] { id, listener, methodName } );
}
public void addTransGraph( TransMeta transMeta ) {
delegates.trans.addTransGraph( transMeta );
}
public void addJobGraph( JobMeta jobMeta ) {
delegates.jobs.addJobGraph( jobMeta );
}
public boolean addSpoonBrowser( String name, String urlString, LocationListener locationListener ) {
return delegates.tabs.addSpoonBrowser( name, urlString, locationListener );
}
public boolean addSpoonBrowser( String name, String urlString ) {
return delegates.tabs.addSpoonBrowser( name, urlString, null );
}
public TransExecutionConfiguration getTransExecutionConfiguration() {
return transExecutionConfiguration;
}
public void editStepErrorHandling( TransMeta transMeta, StepMeta stepMeta ) {
delegates.steps.editStepErrorHandling( transMeta, stepMeta );
}
public String editStep( TransMeta transMeta, StepMeta stepMeta ) {
return delegates.steps.editStep( transMeta, stepMeta );
}
public void dupeStep( TransMeta transMeta, StepMeta stepMeta ) {
delegates.steps.dupeStep( transMeta, stepMeta );
}
public void delStep( TransMeta transMeta, StepMeta stepMeta ) {
delegates.steps.delStep( transMeta, stepMeta );
}
public String makeTabName( EngineMetaInterface transMeta, boolean showingLocation ) {
return delegates.tabs.makeTabName( transMeta, showingLocation );
}
public void newConnection() {
delegates.db.newConnection();
}
public void getSQL() {
delegates.db.getSQL();
}
public boolean overwritePrompt( String message, String rememberText, String rememberPropertyName ) {
return new PopupOverwritePrompter( shell, props ).overwritePrompt( message, rememberText, rememberPropertyName );
}
public Object[] messageDialogWithToggle( String dialogTitle, Object image, String message, int dialogImageType,
String[] buttonLabels, int defaultIndex, String toggleMessage, boolean toggleState ) {
return GUIResource.getInstance().messageDialogWithToggle(
shell, dialogTitle, (Image) image, message, dialogImageType, buttonLabels, defaultIndex, toggleMessage,
toggleState );
}
public boolean messageBox( final String message, final String text, final boolean allowCancel, final int type ) {
final StringBuffer answer = new StringBuffer( "N" );
display.syncExec( new Runnable() {
@Override
public void run() {
int flags = SWT.OK;
if ( allowCancel ) {
flags |= SWT.CANCEL;
}
switch ( type ) {
case Const.INFO:
flags |= SWT.ICON_INFORMATION;
break;
case Const.ERROR:
flags |= SWT.ICON_ERROR;
break;
case Const.WARNING:
flags |= SWT.ICON_WARNING;
break;
default:
break;
}
MessageBox mb = new MessageBox( shell, flags );
// Set the Body Message
mb.setMessage( message );
// Set the title Message
mb.setText( text );
if ( mb.open() == SWT.OK ) {
answer.setCharAt( 0, 'Y' );
}
}
} );
return "Y".equalsIgnoreCase( answer.toString() );
}
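// Usage sketch (illustrative only; the strings are hypothetical): messageBox() blocks the
// calling thread via Display.syncExec() and returns true only when the user presses OK.
//
//   boolean proceed = messageBox( "Overwrite the existing file?", "Confirm", true, Const.WARNING );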
/**
* @return the previewExecutionConfiguration
*/
public TransExecutionConfiguration getTransPreviewExecutionConfiguration() {
return transPreviewExecutionConfiguration;
}
/**
* @param previewExecutionConfiguration
* the previewExecutionConfiguration to set
*/
public void setTransPreviewExecutionConfiguration( TransExecutionConfiguration previewExecutionConfiguration ) {
this.transPreviewExecutionConfiguration = previewExecutionConfiguration;
}
/**
* @return the debugExecutionConfiguration
*/
public TransExecutionConfiguration getTransDebugExecutionConfiguration() {
return transDebugExecutionConfiguration;
}
/**
* @param debugExecutionConfiguration
* the debugExecutionConfiguration to set
*/
public void setTransDebugExecutionConfiguration( TransExecutionConfiguration debugExecutionConfiguration ) {
this.transDebugExecutionConfiguration = debugExecutionConfiguration;
}
/**
* @param executionConfiguration
* the executionConfiguration to set
*/
public void setTransExecutionConfiguration( TransExecutionConfiguration executionConfiguration ) {
this.transExecutionConfiguration = executionConfiguration;
}
/**
* @return the jobExecutionConfiguration
*/
public JobExecutionConfiguration getJobExecutionConfiguration() {
return jobExecutionConfiguration;
}
/**
* @param jobExecutionConfiguration
* the jobExecutionConfiguration to set
*/
public void setJobExecutionConfiguration( JobExecutionConfiguration jobExecutionConfiguration ) {
this.jobExecutionConfiguration = jobExecutionConfiguration;
}
/*
* public XulToolbar getToolbar() { return toolbar; }
*/
public void update( ChangedFlagInterface o, Object arg ) {
try {
Method m = getClass().getMethod( arg.toString() );
if ( m != null ) {
m.invoke( this );
}
} catch ( Exception e ) {
// ignore... let the other notifiers try to do something
System.out.println( "Unable to update: " + e.getLocalizedMessage() );
}
}
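// Note (illustrative): update() dispatches by reflection, so the notification argument is
// expected to be the name of a public no-arg method on this class. A hypothetical notifier
// passing the string "refreshGraph" would cause this.refreshGraph() to be invoked; lookup or
// invocation failures are swallowed and printed above.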
public void consume( final LifeEventInfo info ) {
// if (PropsUI.getInstance().isListenerDisabled(info.getName()))
// return;
if ( info.hasHint( LifeEventInfo.Hint.DISPLAY_BROWSER ) ) {
display.asyncExec( new Runnable() {
public void run() {
delegates.tabs.addSpoonBrowser( info.getName(), info.getMessage(), false, null );
}
} );
} else {
MessageBox box =
new MessageBox( shell, ( info.getState() != LifeEventInfo.State.SUCCESS
? SWT.ICON_ERROR : SWT.ICON_INFORMATION )
| SWT.OK );
box.setText( info.getName() );
box.setMessage( info.getMessage() );
box.open();
}
}
public void setLog() {
LogSettingsDialog lsd = new LogSettingsDialog( shell, SWT.NONE, props );
lsd.open();
log.setLogLevel( DefaultLogLevel.getLogLevel() );
}
/**
* @return the display
*/
public Display getDisplay() {
return display;
}
public void zoomIn() {
TransGraph transGraph = getActiveTransGraph();
if ( transGraph != null ) {
transGraph.zoomIn();
}
JobGraph jobGraph = getActiveJobGraph();
if ( jobGraph != null ) {
jobGraph.zoomIn();
}
}
public void zoomOut() {
TransGraph transGraph = getActiveTransGraph();
if ( transGraph != null ) {
transGraph.zoomOut();
}
JobGraph jobGraph = getActiveJobGraph();
if ( jobGraph != null ) {
jobGraph.zoomOut();
}
}
public void zoom100Percent() {
TransGraph transGraph = getActiveTransGraph();
if ( transGraph != null ) {
transGraph.zoom100Percent();
}
JobGraph jobGraph = getActiveJobGraph();
if ( jobGraph != null ) {
jobGraph.zoom100Percent();
}
}
public void setParametersAsVariablesInUI( NamedParams namedParameters, VariableSpace space ) {
for ( String param : namedParameters.listParameters() ) {
try {
space.setVariable( param, Const.NVL( namedParameters.getParameterValue( param ), Const.NVL(
namedParameters.getParameterDefault( param ), Const.NVL( space.getVariable( param ), "" ) ) ) );
} catch ( Exception e ) {
// ignore this
}
}
}
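// Precedence note (illustrative example, parameter names are hypothetical): each variable is
// set to the first non-null of 1) the parameter value, 2) the parameter default, 3) the
// variable already present in the space, 4) the empty string. So a parameter "OUTPUT_DIR"
// with no value but default "/tmp/out" ends up with space.getVariable("OUTPUT_DIR") == "/tmp/out".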
public void browseVersionHistory() {
if ( rep == null ) {
return;
}
TransGraph transGraph = getActiveTransGraph();
if ( transGraph != null ) {
transGraph.browseVersionHistory();
}
JobGraph jobGraph = getActiveJobGraph();
if ( jobGraph != null ) {
jobGraph.browseVersionHistory();
}
}
public Trans findActiveTrans( Job job, JobEntryCopy jobEntryCopy ) {
JobEntryTrans jobEntryTrans = job.getActiveJobEntryTransformations().get( jobEntryCopy );
if ( jobEntryTrans == null ) {
return null;
}
return jobEntryTrans.getTrans();
}
public Job findActiveJob( Job job, JobEntryCopy jobEntryCopy ) {
JobEntryJob jobEntryJob = job.getActiveJobEntryJobs().get( jobEntryCopy );
if ( jobEntryJob == null ) {
return null;
}
return jobEntryJob.getJob();
}
public Object getSelectionObject() {
return selectionObject;
}
public RepositoryDirectoryInterface getDefaultSaveLocation( RepositoryElementInterface repositoryElement ) {
try {
if ( getRepository() != defaultSaveLocationRepository ) {
// The repository has changed, reset the defaultSaveLocation
defaultSaveLocation = null;
defaultSaveLocationRepository = null;
}
if ( defaultSaveLocation == null ) {
if ( getRepository() != null ) {
defaultSaveLocation = getRepository().getDefaultSaveDirectory( repositoryElement );
defaultSaveLocationRepository = getRepository();
} else {
defaultSaveLocation = new RepositoryDirectory();
}
}
} catch ( Exception e ) {
throw new RuntimeException( e );
}
return defaultSaveLocation;
}
/* ========================= XulEventSource Methods ========================== */
protected PropertyChangeSupport changeSupport = new PropertyChangeSupport( this );
public void addPropertyChangeListener( PropertyChangeListener listener ) {
changeSupport.addPropertyChangeListener( listener );
}
public void addPropertyChangeListener( String propertyName, PropertyChangeListener listener ) {
changeSupport.addPropertyChangeListener( propertyName, listener );
}
public void removePropertyChangeListener( PropertyChangeListener listener ) {
changeSupport.removePropertyChangeListener( listener );
}
protected void firePropertyChange( String attr, Object previousVal, Object newVal ) {
if ( previousVal == null && newVal == null ) {
return;
}
changeSupport.firePropertyChange( attr, previousVal, newVal );
}
/*
* ========================= End XulEventSource Methods ==========================
*/
/*
* ========================= Start XulEventHandler Methods ==========================
*/
public Object getData() {
return null;
}
public String getName() {
return "spoon";
}
public XulDomContainer getXulDomContainer() {
return getMainSpoonContainer();
}
public void setData( Object arg0 ) {
}
public void setName( String arg0 ) {
}
public void setXulDomContainer( XulDomContainer arg0 ) {
}
public RepositorySecurityManager getSecurityManager() {
return rep.getSecurityManager();
}
public void displayDbDependancies() {
TreeItem[] selection = selectionTree.getSelection();
if ( selection == null || selection.length != 1 ) {
return;
}
// Clear all dependencies for the selected connection
TreeItem parent = selection[0];
if ( parent != null ) {
int nrChilds = parent.getItemCount();
if ( nrChilds > 0 ) {
for ( int i = 0; i < nrChilds; i++ ) {
parent.getItem( i ).dispose();
}
}
}
if ( rep == null ) {
return;
}
try {
final DatabaseMeta databaseMeta = (DatabaseMeta) selectionObject;
String[] jobList = rep.getJobsUsingDatabase( databaseMeta.getObjectId() );
String[] transList = rep.getTransformationsUsingDatabase( databaseMeta.getObjectId() );
if ( jobList.length == 0 && transList.length == 0 ) {
MessageBox box = new MessageBox( shell, SWT.ICON_INFORMATION | SWT.OK );
box.setText( "Connection dependencies" );
box.setMessage( "This connection is not used by a job nor a transformation." );
box.open();
} else {
for ( String aJobList : jobList ) {
if ( aJobList != null ) {
TreeItem tidep = new TreeItem( parent, SWT.NONE );
tidep.setImage( GUIResource.getInstance().getImageJobGraph() );
tidep.setText( aJobList );
}
}
for ( String aTransList : transList ) {
if ( aTransList != null ) {
TreeItem tidep = new TreeItem( parent, SWT.NONE );
tidep.setImage( GUIResource.getInstance().getImageTransGraph() );
tidep.setText( aTransList );
}
}
parent.setExpanded( true );
}
} catch ( Exception e ) {
new ErrorDialog( shell, "Error", "Error getting dependencies! :", e );
}
}
public void fireMenuControlers() {
if ( !Display.getDefault().getThread().equals( Thread.currentThread() ) ) {
display.syncExec( new Runnable() {
public void run() {
fireMenuControlers();
}
} );
return;
}
org.pentaho.ui.xul.dom.Document doc;
if ( mainSpoonContainer != null ) {
doc = mainSpoonContainer.getDocumentRoot();
for ( ISpoonMenuController menuController : menuControllers ) {
menuController.updateMenu( doc );
}
}
}
public void hideSplash() {
if ( splash != null ) {
splash.hide();
}
}
private void showSplash() {
if ( splash != null ) {
splash.show();
}
}
/**
* Hides or shows the main toolbar
*
* @param visible true to show the main toolbar, false to hide it
*/
public void setMainToolbarVisible( boolean visible ) {
mainToolbar.setVisible( visible );
}
public void setMenuBarVisible( boolean visible ) {
mainSpoonContainer.getDocumentRoot().getElementById( "edit" ).setVisible( visible );
mainSpoonContainer.getDocumentRoot().getElementById( "file" ).setVisible( visible );
mainSpoonContainer.getDocumentRoot().getElementById( "view" ).setVisible( visible );
mainSpoonContainer.getDocumentRoot().getElementById( "action" ).setVisible( visible );
mainSpoonContainer.getDocumentRoot().getElementById( "tools" ).setVisible( visible );
mainSpoonContainer.getDocumentRoot().getElementById( "help" ).setVisible( visible );
MenuManager menuManager = getMenuBarManager();
menuManager.getMenu().setVisible( visible );
menuManager.updateAll( true );
}
@Override
protected Control createContents( Composite parent ) {
shell = getShell();
init( null );
openSpoon();
// listeners
//
try {
lifecycleSupport.onStart( this );
} catch ( LifecycleException e ) {
// if severe, we have to quit
MessageBox box = new MessageBox( shell, ( e.isSevere() ? SWT.ICON_ERROR : SWT.ICON_WARNING ) | SWT.OK );
box.setMessage( e.getMessage() );
box.open();
}
try {
start( commandLineOptions );
} catch ( KettleException e ) {
MessageBox box = new MessageBox( shell, SWT.ICON_ERROR | SWT.OK );
box.setMessage( e.getMessage() );
box.open();
}
getMenuBarManager().updateAll( true );
return parent;
}
public void start() {
// We store the UI thread for the getDisplay() method
setBlockOnOpen( false );
try {
open();
waitForDispose();
// runEventLoop2(getShell());
} catch ( Throwable e ) {
LogChannel.GENERAL.logError( "Error starting Spoon shell", e );
}
System.out.println( "stopping" );
}
public String getStartupPerspective() {
return startupPerspective;
}
public DelegatingMetaStore getMetaStore() {
return metaStore;
}
public void setMetaStore( DelegatingMetaStore metaStore ) {
this.metaStore = metaStore;
}
private void onLoginError( Throwable t ) {
if ( t instanceof KettleAuthException ) {
ShowMessageDialog dialog =
new ShowMessageDialog( loginDialog.getShell(), SWT.OK | SWT.ICON_ERROR, BaseMessages.getString(
PKG, "Spoon.Dialog.LoginFailed.Title" ), t.getLocalizedMessage() );
dialog.open();
} else {
new ErrorDialog(
loginDialog.getShell(), BaseMessages.getString( PKG, "Spoon.Dialog.LoginFailed.Title" ), BaseMessages
.getString( PKG, "Spoon.Dialog.LoginFailed.Message", t ), t );
}
}
@Override
protected void handleShellCloseEvent() {
try {
if ( quitFile( true ) ) {
SpoonPluginManager.getInstance().notifyLifecycleListeners( SpoonLifeCycleEvent.SHUTDOWN );
super.handleShellCloseEvent();
}
} catch ( Exception e ) {
LogChannel.GENERAL.logError( "Error closing Spoon", e );
}
}
public void showAuthenticationOptions() {
AuthProviderDialog authProviderDialog = new AuthProviderDialog( shell );
authProviderDialog.show();
}
}
|
PDI-12513 - prompt to close tabs even if welcome tab is active
|
ui/src/org/pentaho/di/ui/spoon/Spoon.java
|
PDI-12513 - prompt to close tabs even if welcome tab is active
|
|
Java
|
apache-2.0
|
5be104efd4c042d8ba878a955ec116430d2a46da
| 0
|
gerryhocks/redpen,redpen-cc/redpen,redpen-cc/redpen,redpen-cc/redpen,kenhys/redpen,kenhys/redpen,recruit-tech/redpen,kenhys/redpen,redpen-cc/redpen,gerryhocks/redpen,redpen-cc/redpen,recruit-tech/redpen,gerryhocks/redpen,recruit-tech/redpen,recruit-tech/redpen,kenhys/redpen,gerryhocks/redpen
|
/*
* redpen: a text inspection tool
* Copyright (c) 2014-2015 Recruit Technologies Co., Ltd. and contributors
* (see CONTRIBUTORS.md)
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cc.redpen.parser.review;
import cc.redpen.RedPenException;
import cc.redpen.config.Configuration;
import cc.redpen.model.Document;
import cc.redpen.model.Paragraph;
import cc.redpen.model.Section;
import cc.redpen.parser.DocumentParser;
import cc.redpen.parser.LineOffset;
import cc.redpen.parser.SentenceExtractor;
import org.junit.Test;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.List;
import static org.junit.Assert.*;
public class ReVIEWParserTest {
@Test
public void ParseBlock() throws Exception {
String sample = "//list[yml_sample][my.yml]{";
ReVIEWParser parser = new ReVIEWParser();
ReVIEWParser.ReVIEWBlock block = parser.parseBlock(new ReVIEWLine(sample, 0));
assertEquals("list", block.type);
assertEquals(2, block.properties.size());
assertEquals("yml_sample", block.properties.get(0));
assertEquals("my.yml", block.properties.get(1));
assertTrue(block.isOpen);
}
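// The sample above exercises the ReVIEW block header syntax "//<type>[prop1][prop2]{":
// "//list[yml_sample][my.yml]{" parses to type "list", two properties, and an open block.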
@Test
public void ParseBlockWithoutProperties() throws Exception {
String sample = "//lead{";
ReVIEWParser parser = new ReVIEWParser();
ReVIEWParser.ReVIEWBlock block = parser.parseBlock(new ReVIEWLine(sample, 0));
assertEquals("lead", block.type);
assertEquals(0, block.properties.size());
assertTrue(block.isOpen);
}
@Test
public void testMultipleShortLine() {
String sampleText = "Tokyu\n" +
"is a good\n" +
"railway company. But there\n" +
"are competitors.";
Document doc = createFileContent(sampleText);
Section firstSections = doc.getSection(0);
Paragraph firstParagraph = firstSections.getParagraph(0);
assertEquals(2, firstParagraph.getNumberOfSentences());
assertEquals("Tokyu is a good railway company.", doc.getSection(0).getParagraph(0).getSentence(0).getContent());
assertEquals(1, doc.getSection(0).getParagraph(0).getSentence(0).getLineNumber());
assertEquals(0, doc.getSection(0).getParagraph(0).getSentence(0).getStartPositionOffset());
assertEquals(32, doc.getSection(0).getParagraph(0).getSentence(0).getOffsetMapSize());
assertEquals(" But there are competitors.", doc.getSection(0).getParagraph(0).getSentence(1).getContent());
assertEquals(3, doc.getSection(0).getParagraph(0).getSentence(1).getLineNumber());
assertEquals(16, doc.getSection(0).getParagraph(0).getSentence(1).getStartPositionOffset());
}
@Test
public void testRemoveTextDecoration() throws UnsupportedEncodingException {
String sampleText = "About @<b>{Gekioko}.\n";
Document doc = createFileContent(sampleText);
assertEquals(1, doc.size());
assertEquals("About Gekioko.", doc.getSection(0).getParagraph(0).getSentence(0).getContent());
}
@Test
public void testGenerateDocumentWithList() {
String sampleText = "There are several railway companies in Japan as follows.\n";
sampleText += "\n";
sampleText += "* Tokyu\n";
sampleText += "** Toyoko Line\n";
sampleText += "** Denentoshi Line\n";
sampleText += "* Keio\n";
sampleText += "* Odakyu\n";
Document doc = createFileContent(sampleText);
assertEquals(5, doc.getSection(0).getListBlock(0).getNumberOfListElements());
assertEquals("Tokyu", doc.getSection(0).getListBlock(0).getListElement(0).getSentence(0).getContent());
assertEquals(1, doc.getSection(0).getListBlock(0).getListElement(0).getLevel());
assertEquals(3, doc.getSection(0).getListBlock(0).getListElement(0).getSentence(0).getLineNumber());
assertEquals(2, doc.getSection(0).getListBlock(0).getListElement(0).getSentence(0).getStartPositionOffset());
assertEquals("Toyoko Line", doc.getSection(0).getListBlock(0).getListElement(1).getSentence(0).getContent());
assertEquals(2, doc.getSection(0).getListBlock(0).getListElement(1).getLevel());
assertEquals(4, doc.getSection(0).getListBlock(0).getListElement(1).getSentence(0).getLineNumber());
assertEquals(3, doc.getSection(0).getListBlock(0).getListElement(1).getSentence(0).getStartPositionOffset());
assertEquals("Denentoshi Line", doc.getSection(0).getListBlock(0).getListElement(2).getSentence(0).getContent());
assertEquals(2, doc.getSection(0).getListBlock(0).getListElement(2).getLevel());
assertEquals(5, doc.getSection(0).getListBlock(0).getListElement(2).getSentence(0).getLineNumber());
assertEquals(3, doc.getSection(0).getListBlock(0).getListElement(2).getSentence(0).getStartPositionOffset());
assertEquals("Keio", doc.getSection(0).getListBlock(0).getListElement(3).getSentence(0).getContent());
assertEquals(1, doc.getSection(0).getListBlock(0).getListElement(3).getLevel());
assertEquals(6, doc.getSection(0).getListBlock(0).getListElement(3).getSentence(0).getLineNumber());
assertEquals(2, doc.getSection(0).getListBlock(0).getListElement(3).getSentence(0).getStartPositionOffset());
assertEquals("Odakyu", doc.getSection(0).getListBlock(0).getListElement(4).getSentence(0).getContent());
assertEquals(1, doc.getSection(0).getListBlock(0).getListElement(4).getLevel());
assertEquals(7, doc.getSection(0).getListBlock(0).getListElement(4).getSentence(0).getLineNumber());
assertEquals(2, doc.getSection(0).getListBlock(0).getListElement(4).getSentence(0).getStartPositionOffset());
}
@Test
public void testLabelledList() {
String sampleText = "= SampleDoc\n" +
"v0.0.2, 2015-11-17\n" +
":last-update-label!:\n" +
"\n" +
"== 用語定義\n" +
"ユビキタス言語を定義します。\n" +
"\n" +
": Some word\n" +
"なにかの意味をのせて用例をのせます。\n" +
"\n" +
": リリース\n" +
"ソフトウェアを顧客に提供することです。\n" +
"\n" +
": redpen\n" +
"RedPen はオープンソースの校正ツールです。RedPen は技術文書が文書規約に従って書かれているかを自動検査します。 現在の RedPen 日本語ドキュメントは十分検査されておりません。校正にはもう少々時間がかかる予定です。誤りなど見つかりましたら、https://github.com/redpen-cc/redpen-doc-ja に Issue 登録しておしらせ頂けると幸いです。";
Document doc = createFileContent(sampleText);
assertEquals(3, doc.getSection(1).getListBlock(0).getNumberOfListElements());
assertEquals("なにかの意味をのせて用例をのせます。", doc.getSection(1).getListBlock(0).getListElement(0).getSentence(0).getContent());
assertEquals(15, doc.getSection(1).getListBlock(0).getListElement(2).getSentence(0).getLineNumber());
}
@Test
public void testComment() {
String sampleText = "#@# BLAH BLAH" +
"\n" +
"Potato";
Document doc = createFileContent(sampleText);
for (Section section : doc) {
for (Paragraph paragraph : section.getParagraphs()) {
paragraph.getSentences().forEach(sentence -> {
assertEquals("Potato", sentence.getContent());
});
}
}
}
@Test
public void testTable() {
String sampleText = "#@# BLAH BLAH" +
"\n" +
"//table[envvars][重要な環境変数]{" +
"名前 意味" +
"-------------------------------------------------------------" +
"PATH コマンドの存在するディレクトリ" +
"TERM 使っている端末の種類。linux・kterm・vt100など" +
"//}";
Document doc = createFileContent(sampleText);
for (Section section : doc) {
for (Paragraph paragraph : section.getParagraphs()) {
paragraph.getSentences().forEach(sentence -> {
assertEquals("Potato", sentence.getContent());
});
}
}
}
@Test
public void testSectionHeader() throws UnsupportedEncodingException {
String sampleText = "= About @<i>{Gekioko}.\n\n" +
"Gekioko means angry.";
Document doc = createFileContent(sampleText);
assertEquals(1, doc.size());
assertEquals("About Gekioko.", doc.getSection(0).getHeaderContent(0).getContent());
}
@Test
public void testDocumentWithBoldWord() {
String sampleText = "It is a @<b>{good} day.";
Document doc = createFileContent(sampleText);
Section firstSections = doc.getSection(0);
Paragraph firstParagraph = firstSections.getParagraph(0);
assertEquals("It is a good day.", firstParagraph.getSentence(0).getContent());
List<LineOffset> expectedOffsets = initializeMappingTable(
new LineOffset(1, 0),
new LineOffset(1, 1),
new LineOffset(1, 2),
new LineOffset(1, 3),
new LineOffset(1, 4),
new LineOffset(1, 5),
new LineOffset(1, 6),
new LineOffset(1, 7),
new LineOffset(1, 13),
new LineOffset(1, 14),
new LineOffset(1, 15),
new LineOffset(1, 16),
new LineOffset(1, 18),
new LineOffset(1, 19),
new LineOffset(1, 20),
new LineOffset(1, 21),
new LineOffset(1, 22));
assertEquals(expectedOffsets.size(), firstParagraph.getSentence(0).getOffsetMapSize());
for (int i = 0; i < expectedOffsets.size(); i++) {
assertEquals(expectedOffsets.get(i), firstParagraph.getSentence(0).getOffset(i).get());
}
}
private static List<LineOffset> initializeMappingTable(LineOffset... offsets) {
List<LineOffset> offsetTable = new ArrayList<>();
for (LineOffset offset : offsets) {
offsetTable.add(offset);
}
return offsetTable;
}
private Document createFileContent(String inputDocumentString) {
DocumentParser parser = DocumentParser.REVIEW;
Document doc = null;
try {
Configuration configuration = Configuration.builder().build();
doc = parser.parse(
inputDocumentString,
new SentenceExtractor(configuration.getSymbolTable()),
configuration.getTokenizer());
} catch (RedPenException e) {
e.printStackTrace();
fail();
}
return doc;
}
}
|
redpen-core/src/test/java/cc/redpen/parser/review/ReVIEWParserTest.java
|
/*
* redpen: a text inspection tool
* Copyright (c) 2014-2015 Recruit Technologies Co., Ltd. and contributors
* (see CONTRIBUTORS.md)
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cc.redpen.parser.review;
import cc.redpen.RedPenException;
import cc.redpen.config.Configuration;
import cc.redpen.model.Document;
import cc.redpen.model.Paragraph;
import cc.redpen.model.Section;
import cc.redpen.parser.DocumentParser;
import cc.redpen.parser.SentenceExtractor;
import org.junit.Test;
import java.io.UnsupportedEncodingException;
import static org.junit.Assert.*;
public class ReVIEWParserTest {
@Test
public void ParseBlock() throws Exception {
String sample = "//list[yml_sample][my.yml]{";
ReVIEWParser parser = new ReVIEWParser();
ReVIEWParser.ReVIEWBlock block = parser.parseBlock(new ReVIEWLine(sample, 0));
assertEquals("list", block.type);
assertEquals(2, block.properties.size());
assertEquals("yml_sample", block.properties.get(0));
assertEquals("my.yml", block.properties.get(1));
assertTrue(block.isOpen);
}
@Test
public void ParseBlockWithoutProperties() throws Exception {
String sample = "//lead{";
ReVIEWParser parser = new ReVIEWParser();
ReVIEWParser.ReVIEWBlock block = parser.parseBlock(new ReVIEWLine(sample, 0));
assertEquals("lead", block.type);
assertEquals(0, block.properties.size());
assertTrue(block.isOpen);
}
@Test
public void testMultipleShortLine() {
String sampleText = "Tokyu\n" +
"is a good\n" +
"railway company. But there\n" +
"are competitors.";
Document doc = createFileContent(sampleText);
Section firstSections = doc.getSection(0);
Paragraph firstParagraph = firstSections.getParagraph(0);
assertEquals(2, firstParagraph.getNumberOfSentences());
assertEquals("Tokyu is a good railway company.", doc.getSection(0).getParagraph(0).getSentence(0).getContent());
assertEquals(1, doc.getSection(0).getParagraph(0).getSentence(0).getLineNumber());
assertEquals(0, doc.getSection(0).getParagraph(0).getSentence(0).getStartPositionOffset());
assertEquals(32, doc.getSection(0).getParagraph(0).getSentence(0).getOffsetMapSize());
assertEquals(" But there are competitors.", doc.getSection(0).getParagraph(0).getSentence(1).getContent());
assertEquals(3, doc.getSection(0).getParagraph(0).getSentence(1).getLineNumber());
assertEquals(16, doc.getSection(0).getParagraph(0).getSentence(1).getStartPositionOffset());
}
@Test
public void testRemoveTextDecoration() throws UnsupportedEncodingException {
String sampleText = "About @<b>{Gekioko}.\n";
Document doc = createFileContent(sampleText);
assertEquals(1, doc.size());
assertEquals("About Gekioko.", doc.getSection(0).getParagraph(0).getSentence(0).getContent());
}
@Test
public void testGenerateDocumentWithList() {
String sampleText = "There are several railway companies in Japan as follows.\n";
sampleText += "\n";
sampleText += "* Tokyu\n";
sampleText += "** Toyoko Line\n";
sampleText += "** Denentoshi Line\n";
sampleText += "* Keio\n";
sampleText += "* Odakyu\n";
Document doc = createFileContent(sampleText);
assertEquals(5, doc.getSection(0).getListBlock(0).getNumberOfListElements());
assertEquals("Tokyu", doc.getSection(0).getListBlock(0).getListElement(0).getSentence(0).getContent());
assertEquals(1, doc.getSection(0).getListBlock(0).getListElement(0).getLevel());
assertEquals(3, doc.getSection(0).getListBlock(0).getListElement(0).getSentence(0).getLineNumber());
assertEquals(2, doc.getSection(0).getListBlock(0).getListElement(0).getSentence(0).getStartPositionOffset());
assertEquals("Toyoko Line", doc.getSection(0).getListBlock(0).getListElement(1).getSentence(0).getContent());
assertEquals(2, doc.getSection(0).getListBlock(0).getListElement(1).getLevel());
assertEquals(4, doc.getSection(0).getListBlock(0).getListElement(1).getSentence(0).getLineNumber());
assertEquals(3, doc.getSection(0).getListBlock(0).getListElement(1).getSentence(0).getStartPositionOffset());
assertEquals("Denentoshi Line", doc.getSection(0).getListBlock(0).getListElement(2).getSentence(0).getContent());
assertEquals(2, doc.getSection(0).getListBlock(0).getListElement(2).getLevel());
assertEquals(5, doc.getSection(0).getListBlock(0).getListElement(2).getSentence(0).getLineNumber());
assertEquals(3, doc.getSection(0).getListBlock(0).getListElement(2).getSentence(0).getStartPositionOffset());
assertEquals("Keio", doc.getSection(0).getListBlock(0).getListElement(3).getSentence(0).getContent());
assertEquals(1, doc.getSection(0).getListBlock(0).getListElement(3).getLevel());
assertEquals(6, doc.getSection(0).getListBlock(0).getListElement(3).getSentence(0).getLineNumber());
assertEquals(2, doc.getSection(0).getListBlock(0).getListElement(3).getSentence(0).getStartPositionOffset());
assertEquals("Odakyu", doc.getSection(0).getListBlock(0).getListElement(4).getSentence(0).getContent());
assertEquals(1, doc.getSection(0).getListBlock(0).getListElement(4).getLevel());
assertEquals(7, doc.getSection(0).getListBlock(0).getListElement(4).getSentence(0).getLineNumber());
assertEquals(2, doc.getSection(0).getListBlock(0).getListElement(4).getSentence(0).getStartPositionOffset());
}
@Test
public void testLabelledList() {
String sampleText = "= SampleDoc\n" +
"v0.0.2, 2015-11-17\n" +
":last-update-label!:\n" +
"\n" +
"== 用語定義\n" +
"ユビキタス言語を定義します。\n" +
"\n" +
": Some word\n" +
"なにかの意味をのせて用例をのせます。\n" +
"\n" +
": リリース\n" +
"ソフトウェアを顧客に提供することです。\n" +
"\n" +
": redpen\n" +
"RedPen はオープンソースの校正ツールです。RedPen は技術文書が文書規約に従って書かれているかを自動検査します。 現在の RedPen 日本語ドキュメントは十分検査されておりません。校正にはもう少々時間がかかる予定です。誤りなど見つかりましたら、https://github.com/redpen-cc/redpen-doc-ja に Issue 登録しておしらせ頂けると幸いです。";
Document doc = createFileContent(sampleText);
assertEquals(3, doc.getSection(1).getListBlock(0).getNumberOfListElements());
assertEquals("なにかの意味をのせて用例をのせます。", doc.getSection(1).getListBlock(0).getListElement(0).getSentence(0).getContent());
assertEquals(15, doc.getSection(1).getListBlock(0).getListElement(2).getSentence(0).getLineNumber());
}
@Test
public void testComment() {
String sampleText = "#@# BLAH BLAH" +
"\n" +
"Potato";
Document doc = createFileContent(sampleText);
for (Section section : doc) {
for (Paragraph paragraph : section.getParagraphs()) {
paragraph.getSentences().forEach(sentence -> {
assertEquals("Potato", sentence.getContent());
});
}
}
}
@Test
public void testTable() {
String sampleText = "#@# BLAH BLAH" +
"\n" +
"//table[envvars][重要な環境変数]{" +
"名前 意味" +
"-------------------------------------------------------------" +
"PATH コマンドの存在するディレクトリ" +
"TERM 使っている端末の種類。linux・kterm・vt100など" +
"//}";
Document doc = createFileContent(sampleText);
for (Section section : doc) {
for (Paragraph paragraph : section.getParagraphs()) {
paragraph.getSentences().forEach(sentence -> {
assertEquals("Potato", sentence.getContent());
});
}
}
}
@Test
public void testSectionHeader() throws UnsupportedEncodingException {
String sampleText = "= About @<i>{Gekioko}.\n\n" +
"Gekioko means angry.";
Document doc = createFileContent(sampleText);
assertEquals(1, doc.size());
assertEquals("About Gekioko.", doc.getSection(0).getHeaderContent(0).getContent());
}
private Document createFileContent(String inputDocumentString) {
DocumentParser parser = DocumentParser.REVIEW;
Document doc = null;
try {
Configuration configuration = Configuration.builder().build();
doc = parser.parse(
inputDocumentString,
new SentenceExtractor(configuration.getSymbolTable()),
configuration.getTokenizer());
} catch (RedPenException e) {
e.printStackTrace();
fail();
}
return doc;
}
}
|
Add a test case for character offsets
|
redpen-core/src/test/java/cc/redpen/parser/review/ReVIEWParserTest.java
|
Add a test case for character offsets
|
|
Java
|
apache-2.0
|
a221f9c6922c707fb2e74a5730b858498aa61008
| 0
|
gxa/gxa,gxa/gxa,gxa/gxa,gxa/gxa,gxa/gxa
|
/*
* Copyright 2008-2010 Microarray Informatics Team, EMBL-European Bioinformatics Institute
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
* For further details of the Gene Expression Atlas project, including source code,
* downloads and documentation, please see:
*
* http://gxa.github.com/gxa
*/
package uk.ac.ebi.gxa.loader.steps;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import uk.ac.ebi.arrayexpress2.magetab.datamodel.MAGETABInvestigation;
import uk.ac.ebi.arrayexpress2.magetab.datamodel.graph.utils.GraphUtils;
import uk.ac.ebi.arrayexpress2.magetab.datamodel.sdrf.node.*;
import uk.ac.ebi.arrayexpress2.magetab.datamodel.sdrf.node.attribute.ArrayDesignAttribute;
import uk.ac.ebi.gxa.analytics.compute.AtlasComputeService;
import uk.ac.ebi.gxa.analytics.compute.ComputeTask;
import uk.ac.ebi.gxa.analytics.compute.RUtil;
import uk.ac.ebi.gxa.exceptions.LogUtil;
import uk.ac.ebi.gxa.loader.AtlasLoaderException;
import uk.ac.ebi.gxa.loader.cache.AtlasLoadCache;
import uk.ac.ebi.gxa.loader.datamatrix.DataMatrixFileBuffer;
import uk.ac.ebi.gxa.loader.service.AtlasLoaderServiceListener;
import uk.ac.ebi.gxa.utils.FileUtil;
import uk.ac.ebi.microarray.atlas.model.Assay;
import uk.ac.ebi.rcloud.server.RServices;
import uk.ac.ebi.rcloud.server.RType.RChar;
import uk.ac.ebi.rcloud.server.RType.RObject;
import javax.annotation.Nonnull;
import java.io.*;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import java.rmi.RemoteException;
import java.util.*;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import static com.google.common.io.ByteStreams.copy;
import static com.google.common.io.Closeables.closeQuietly;
import static uk.ac.ebi.gxa.utils.FileUtil.deleteDirectory;
/**
* Experiment loading step that prepares data matrix to be stored in data files.
* Based on the original handlers code by Tony Burdett.
*
* @author Nikolay Pultsin
*/
public class ArrayDataStep {
private final static Logger log = LoggerFactory.getLogger(ArrayDataStep.class);
private static final String USE_PROCCESSED_FILES = "; Please try using processed experimental data instead";
public static String displayName() {
return "Processing data matrix";
}
private static class RawData {
final File dataDir;
final HashMap<String, String> celFiles = new HashMap<String, String>();
final HashMap<String, Assay> assays = new HashMap<String, Assay>();
RawData() {
dataDir = FileUtil.createTempDirectory("atlas-loader");
}
}
private static final Object COPY_FILE_LOCK = new Object();
private static void copyFile(URL from, File to) throws IOException {
synchronized (COPY_FILE_LOCK) {
URLConnection connection = null;
InputStream is = null;
OutputStream os = null;
try {
connection = from.openConnection();
is = connection.getInputStream();
os = new FileOutputStream(to);
copy(is, os);
} finally {
closeQuietly(os);
closeQuietly(is);
if (connection != null && connection instanceof HttpURLConnection) {
((HttpURLConnection) connection).disconnect();
}
}
}
}
private static final Object EXTRACT_ZIP_LOCK = new Object();
private static void extractZip(File zipFile, File dir) throws IOException {
synchronized (EXTRACT_ZIP_LOCK) {
ZipInputStream zipInputStream = null;
try {
zipInputStream = new ZipInputStream(new FileInputStream(zipFile));
ZipEntry zipEntry = zipInputStream.getNextEntry();
while (zipEntry != null) {
final String entryName = zipEntry.getName();
FileOutputStream fileOutputStream = null;
try {
fileOutputStream = new FileOutputStream(new File(dir, entryName));
copy(zipInputStream, fileOutputStream);
} finally {
closeQuietly(fileOutputStream);
zipInputStream.closeEntry();
}
zipEntry = zipInputStream.getNextEntry();
}
} finally {
closeQuietly(zipInputStream);
}
}
}
public boolean readArrayData(@Nonnull AtlasComputeService computeService, MAGETABInvestigation investigation, AtlasLoaderServiceListener listener, AtlasLoadCache cache) throws AtlasLoaderException {
final URL sdrfURL = investigation.SDRF.getLocation();
final File sdrfDir = new File(sdrfURL.getFile()).getParentFile();
final HashMap<String, RawData> dataByArrayDesign = new HashMap<String, RawData>();
final HashMap<String, File> zipFiles = new HashMap<String, File>();
try {
// Set this variable to false to skip trying to load CEL files from the same
// location as the IDF/SDRF files; the ArrayExpress FTP link will be used instead.
// Set it to true to try the local files first.
boolean useLocalCopy = true;
final Collection<ArrayDataNode> dataNodes =
investigation.SDRF.getNodes(ArrayDataNode.class);
if (dataNodes.isEmpty()) {
log.warn("No data nodes for raw data are defined in " + sdrfURL);
// the experiment loading logic will use the processed files instead
return false;
}
listener.setProgress("Loading CEL files");
for (ArrayDataNode node : dataNodes) {
log.info("Found array data matrix node '" + node.getNodeName() + "'");
final Collection<HybridizationNode> hybridizationNodes = GraphUtils.findUpstreamNodes(node, HybridizationNode.class);
final Collection<AssayNode> assayNodes = GraphUtils.findUpstreamNodes(node, AssayNode.class);
if (hybridizationNodes.size() + assayNodes.size() != 1) {
throw new AtlasLoaderException("ArrayDataNode " + node.getNodeName() + " corresponds to " + (hybridizationNodes.size() + assayNodes.size()) + " assays");
}
final HybridizationNode assayNode =
hybridizationNodes.size() == 0 ? assayNodes.iterator().next() : hybridizationNodes.iterator().next();
Assay assay = cache.fetchAssay(assayNode.getNodeName());
if (assay == null) {
throw new AtlasLoaderException("Cannot fetch an assay for node " + assayNode.getNodeName());
}
final Collection<ScanNode> scanNodes = GraphUtils.findUpstreamNodes(node, ScanNode.class);
if (scanNodes.size() > 1) {
throw new AtlasLoaderException("ArrayDataNode " + node.getNodeName() + " corresponds to " + scanNodes.size() + " scans");
}
final ScanNode scanNode = scanNodes.size() == 1 ? scanNodes.iterator().next() : null;
final List<ArrayDesignAttribute> arrayDesigns = assayNode.arrayDesigns;
if (arrayDesigns.size() != 1) {
throw new AtlasLoaderException("Assay node " + assayNode.getNodeName() + " has " + arrayDesigns.size() + " array designs");
}
final String arrayDesignName = arrayDesigns.get(0).getNodeName();
final String dataFileName = node.getNodeName();
final String scanName = scanNode != null ? scanNode.getNodeName() : assayNode.getNodeName();
// TODO: use a better way to check this if one exists
if (!arrayDesignName.toLowerCase().contains("affy")) {
log.warn("Array design " + arrayDesignName + " is not an Affymetrix");
// For non-Affymetrics chip we don't throw and exception but allow the experiment loading logic
// to silently move to using the processed files instead.
return false;
}
if (dataFileName == null || dataFileName.length() == 0) {
continue;
}
RawData adData = dataByArrayDesign.get(arrayDesignName);
if (adData == null) {
adData = new RawData();
dataByArrayDesign.put(arrayDesignName, adData);
}
if (adData.celFiles.get(dataFileName) != null)
throw new AtlasLoaderException("Error processing file: '" + dataFileName + "' - this file is used twice" + USE_PROCCESSED_FILES);
adData.celFiles.put(dataFileName, scanName);
adData.assays.put(dataFileName, assay);
final File tempFile = new File(adData.dataDir, dataFileName);
if (useLocalCopy) {
final File localFile = new File(sdrfDir, dataFileName);
URL localFileURL;
try {
localFileURL = sdrfURL.getPort() == -1
? new URL(sdrfURL.getProtocol(),
sdrfURL.getHost(),
localFile.toString().replaceAll("\\\\", "/"))
: new URL(sdrfURL.getProtocol(),
sdrfURL.getHost(),
sdrfURL.getPort(),
localFile.toString().replaceAll("\\\\", "/"));
copyFile(localFileURL, tempFile);
} catch (IOException e) {
// ignore
}
}
if (!tempFile.exists() && node.comments != null) {
useLocalCopy = false;
final String zipName = DataUtils.fixZipURL(node.comments.get("ArrayExpress FTP file"));
if (zipName != null) {
File localZipFile = zipFiles.get(zipName);
if (localZipFile == null) {
try {
localZipFile = File.createTempFile("atlas-loader", ".zip");
zipFiles.put(zipName, localZipFile);
copyFile(new URL(zipName), localZipFile);
} catch (IOException e) {
if (localZipFile != null && !localZipFile.delete()) {
log.error("Cannot delete " + localZipFile.getAbsolutePath());
}
log.error("IOException is thrown: " + e.getMessage());
throw new AtlasLoaderException("Error occurred while retrieving raw data files from ArrayExpress ftp site" + USE_PROCCESSED_FILES);
}
}
try {
extractZip(localZipFile, adData.dataDir);
} catch (IOException e) {
log.error("IOException is thrown: " + e.getMessage());
throw new AtlasLoaderException("Error occurred while retrieving raw data files from ArrayExpress ftp site" + USE_PROCCESSED_FILES);
}
}
}
if (!tempFile.exists()) {
throw new AtlasLoaderException("Error occurred while processing raw data files: File '" + dataFileName + "' is not found" + USE_PROCCESSED_FILES);
}
}
listener.setProgress("Processing data in R");
for (Map.Entry<String, RawData> entry : dataByArrayDesign.entrySet()) {
final DataNormalizer normalizer = new DataNormalizer(entry.getValue());
// this method returns null if computation was finished successfully
// or an instance of "try-error" R class in case of failure
// currently we receive instances of "try-error" as RChar objects
final RObject result = computeService.computeTask(normalizer);
if (result != null) {
throw new AtlasLoaderException(
"Something unexpected happened during R processing; returned " +
(result instanceof RChar
? ((RChar) result).getValue()[0]
: result));
}
try {
final File mergedFile = new File(normalizer.mergedFilePath);
final DataMatrixFileBuffer buffer = cache.getDataMatrixFileBuffer(mergedFile.toURL(), null);
final HashMap<String, Assay> assayMap = entry.getValue().assays;
final ArrayList<String> fileNames = normalizer.fileNames;
for (int i = 0; i < fileNames.size(); ++i) {
final Assay assay = assayMap.get(fileNames.get(i));
cache.setAssayDataMatrixRef(assay, buffer.getStorage(), i);
}
if (!mergedFile.delete()) {
log.warn("Cannot delete" + mergedFile.getAbsolutePath());
}
} catch (MalformedURLException e) {
throw LogUtil.createUnexpected("MalformedURLException is thrown: " + e.getMessage());
}
}
return true;
} finally {
for (RawData data : dataByArrayDesign.values()) {
deleteDirectory(data.dataDir);
}
for (File z : zipFiles.values()) {
if (!z.delete()) {
log.warn("Cannot delete " + z.getAbsolutePath());
}
}
}
}
private static class DataNormalizer implements ComputeTask<RObject> {
private final RawData data;
public final ArrayList<String> fileNames = new ArrayList<String>();
public final String pathPrefix;
public final String mergedFilePath;
public DataNormalizer(RawData data) {
this.data = data;
pathPrefix = data.dataDir.getAbsolutePath() + "/";
mergedFilePath = pathPrefix + "merged.txt";
}
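// Sketch of the R source assembled by compute() below (illustrative; the paths and scan
// names are hypothetical):
//
//   files = c('/tmp/atlas-loaderXXXX/sample1.CEL', '/tmp/atlas-loaderXXXX/sample2.CEL')
//   scans = c('scan1', 'scan2')
//   outFile = '/tmp/atlas-loaderXXXX/merged.txt'
//   normalizeOneExperiment(files = files, outFile = outFile, scans = scans, parallel = FALSE)
//
// A non-null result is treated by the caller as an R "try-error" value.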
public RObject compute(RServices R) throws RemoteException {
StringBuilder files = new StringBuilder();
StringBuilder scans = new StringBuilder();
files.append("files = c(");
scans.append("scans = c(");
boolean isFirst = true;
for (Map.Entry<String, String> entry : data.celFiles.entrySet()) {
if (isFirst) {
isFirst = false;
} else {
files.append(", ");
scans.append(", ");
}
fileNames.add(entry.getKey());
files.append("'");
files.append(pathPrefix);
files.append(entry.getKey());
files.append("'");
scans.append("'");
scans.append(entry.getValue());
scans.append("'");
}
files.append(")");
scans.append(")");
log.info(files.toString());
log.info(scans.toString());
log.info("outFile = '" + mergedFilePath + "'");
R.sourceFromBuffer(files.toString());
R.sourceFromBuffer(scans.toString());
R.sourceFromBuffer("outFile = '" + mergedFilePath + "'");
R.sourceFromBuffer(RUtil.getRCodeFromResource("R/normalizeOneExperiment.R"));
final RObject result = R.getObject("normalizeOneExperiment(files = files, outFile = outFile, scans = scans, parallel = FALSE)");
R.sourceFromBuffer("rm(outFile)");
R.sourceFromBuffer("rm(scans)");
R.sourceFromBuffer("rm(files)");
R.sourceFromBuffer(RUtil.getRCodeFromResource("R/cleanupNamespace.R"));
return result;
}
}
}
|
atlas-loader/src/main/java/uk/ac/ebi/gxa/loader/steps/ArrayDataStep.java
|
/*
* Copyright 2008-2010 Microarray Informatics Team, EMBL-European Bioinformatics Institute
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
* For further details of the Gene Expression Atlas project, including source code,
* downloads and documentation, please see:
*
* http://gxa.github.com/gxa
*/
package uk.ac.ebi.gxa.loader.steps;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import uk.ac.ebi.arrayexpress2.magetab.datamodel.MAGETABInvestigation;
import uk.ac.ebi.arrayexpress2.magetab.datamodel.graph.utils.GraphUtils;
import uk.ac.ebi.arrayexpress2.magetab.datamodel.sdrf.node.*;
import uk.ac.ebi.arrayexpress2.magetab.datamodel.sdrf.node.attribute.ArrayDesignAttribute;
import uk.ac.ebi.gxa.analytics.compute.AtlasComputeService;
import uk.ac.ebi.gxa.analytics.compute.ComputeTask;
import uk.ac.ebi.gxa.analytics.compute.RUtil;
import uk.ac.ebi.gxa.exceptions.LogUtil;
import uk.ac.ebi.gxa.loader.AtlasLoaderException;
import uk.ac.ebi.gxa.loader.cache.AtlasLoadCache;
import uk.ac.ebi.gxa.loader.datamatrix.DataMatrixFileBuffer;
import uk.ac.ebi.gxa.loader.service.AtlasLoaderServiceListener;
import uk.ac.ebi.gxa.utils.FileUtil;
import uk.ac.ebi.microarray.atlas.model.Assay;
import uk.ac.ebi.rcloud.server.RServices;
import uk.ac.ebi.rcloud.server.RType.RChar;
import uk.ac.ebi.rcloud.server.RType.RObject;
import javax.annotation.Nonnull;
import java.io.*;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import java.rmi.RemoteException;
import java.util.*;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import static com.google.common.io.ByteStreams.copy;
import static com.google.common.io.Closeables.closeQuietly;
import static uk.ac.ebi.gxa.utils.FileUtil.deleteDirectory;
/**
* Experiment loading step that prepares data matrix to be stored in data files.
* Based on the original handlers code by Tony Burdett.
*
* @author Nikolay Pultsin
*/
public class ArrayDataStep {
private final static Logger log = LoggerFactory.getLogger(ArrayDataStep.class);
private static final String USE_PROCCESSED_FILES = "; Please try using processed experimental data instead";
public static String displayName() {
return "Processing data matrix";
}
private static class RawData {
final File dataDir;
final HashMap<String, String> celFiles = new HashMap<String, String>();
final HashMap<String, Assay> assays = new HashMap<String, Assay>();
RawData() {
dataDir = FileUtil.createTempDirectory("atlas-loader");
}
}
private static final Object COPY_FILE_LOCK = new Object();
private static void copyFile(URL from, File to) throws IOException {
synchronized (COPY_FILE_LOCK) {
URLConnection connection = null;
InputStream is = null;
OutputStream os = null;
try {
connection = from.openConnection();
is = connection.getInputStream();
os = new FileOutputStream(to);
copy(is, os);
} finally {
closeQuietly(os);
closeQuietly(is);
if (connection != null && connection instanceof HttpURLConnection) {
((HttpURLConnection) connection).disconnect();
}
}
}
}
private static final Object EXTRACT_ZIP_LOCK = new Object();
private static void extractZip(File zipFile, File dir) throws IOException {
synchronized (EXTRACT_ZIP_LOCK) {
ZipInputStream zipInputStream = null;
try {
zipInputStream = new ZipInputStream(new FileInputStream(zipFile));
ZipEntry zipEntry = zipInputStream.getNextEntry();
while (zipEntry != null) {
final String entryName = zipEntry.getName();
FileOutputStream fileOutputStream = null;
try {
fileOutputStream = new FileOutputStream(new File(dir, entryName));
copy(zipInputStream, fileOutputStream);
} finally {
closeQuietly(fileOutputStream);
zipInputStream.closeEntry();
}
zipEntry = zipInputStream.getNextEntry();
}
} finally {
closeQuietly(zipInputStream);
}
}
}
public boolean readArrayData(@Nonnull AtlasComputeService computeService, MAGETABInvestigation investigation, AtlasLoaderServiceListener listener, AtlasLoadCache cache) throws AtlasLoaderException {
final URL sdrfURL = investigation.SDRF.getLocation();
final File sdrfDir = new File(sdrfURL.getFile()).getParentFile();
final HashMap<String, RawData> dataByArrayDesign = new HashMap<String, RawData>();
final HashMap<String, File> zipFiles = new HashMap<String, File>();
try {
// Set this variable to false to skip trying to load CEL files from the same
// location as the IDF/SDRF files; the ArrayExpress FTP link will be used instead.
// Set it to true to try the local files first.
boolean useLocalCopy = true;
final Collection<ArrayDataNode> dataNodes =
investigation.SDRF.getNodes(ArrayDataNode.class);
if (dataNodes.isEmpty()) {
log.warn("No data nodes for raw data are defined in " + sdrfURL);
// the experiment loading logic will use the processed files instead
return false;
}
listener.setProgress("Loading CEL files");
for (ArrayDataNode node : dataNodes) {
log.info("Found array data matrix node '" + node.getNodeName() + "'");
final Collection<HybridizationNode> hybridizationNodes = GraphUtils.findUpstreamNodes(node, HybridizationNode.class);
final Collection<AssayNode> assayNodes = GraphUtils.findUpstreamNodes(node, AssayNode.class);
if (hybridizationNodes.size() + assayNodes.size() != 1) {
throw new AtlasLoaderException("ArrayDataNode " + node.getNodeName() + " corresponds to " + (hybridizationNodes.size() + assayNodes.size()) + " assays");
}
final HybridizationNode assayNode =
hybridizationNodes.size() == 0 ? assayNodes.iterator().next() : hybridizationNodes.iterator().next();
Assay assay = cache.fetchAssay(assayNode.getNodeName());
if (assay == null) {
throw new AtlasLoaderException("Cannot fetch an assay for node " + assayNode.getNodeName());
}
final Collection<ScanNode> scanNodes = GraphUtils.findUpstreamNodes(node, ScanNode.class);
if (scanNodes.size() > 1) {
throw new AtlasLoaderException("ArrayDataNode " + node.getNodeName() + " corresponds to " + scanNodes.size() + " scans");
}
final ScanNode scanNode = scanNodes.size() == 1 ? scanNodes.iterator().next() : null;
final List<ArrayDesignAttribute> arrayDesigns = assayNode.arrayDesigns;
if (arrayDesigns.size() != 1) {
throw new AtlasLoaderException("Assay node " + assayNode.getNodeName() + " has " + arrayDesigns.size() + " array designs");
}
final String arrayDesignName = arrayDesigns.get(0).getNodeName();
final String dataFileName = node.getNodeName();
final String scanName = scanNode != null ? scanNode.getNodeName() : assayNode.getNodeName();
// TODO: use a better way to check this if one exists
if (!arrayDesignName.toLowerCase().contains("affy")) {
log.warn("Array design " + arrayDesignName + " is not an Affymetrix");
// For non-Affymetrics chip we don't throw and exception but allow the experiment loading logic
// to silently move to using the processed files instead.
return false;
}
if (dataFileName == null || dataFileName.length() == 0) {
continue;
}
RawData adData = dataByArrayDesign.get(arrayDesignName);
if (adData == null) {
adData = new RawData();
dataByArrayDesign.put(arrayDesignName, adData);
}
if (adData.celFiles.get(dataFileName) != null)
throw new AtlasLoaderException("Error processing file: '" + dataFileName + "' - this file is used twice" + USE_PROCCESSED_FILES);
adData.celFiles.put(dataFileName, scanName);
adData.assays.put(dataFileName, assay);
final File tempFile = new File(adData.dataDir, dataFileName);
if (useLocalCopy) {
final File localFile = new File(sdrfDir, dataFileName);
URL localFileURL;
try {
localFileURL = sdrfURL.getPort() == -1
? new URL(sdrfURL.getProtocol(),
sdrfURL.getHost(),
localFile.toString().replaceAll("\\\\", "/"))
: new URL(sdrfURL.getProtocol(),
sdrfURL.getHost(),
sdrfURL.getPort(),
localFile.toString().replaceAll("\\\\", "/"));
copyFile(localFileURL, tempFile);
} catch (IOException e) {
// ignore
}
}
if (!tempFile.exists() && node.comments != null) {
useLocalCopy = false;
final String zipName = DataUtils.fixZipURL(node.comments.get("ArrayExpress FTP file"));
if (zipName != null) {
File localZipFile = zipFiles.get(zipName);
if (localZipFile == null) {
try {
localZipFile = File.createTempFile("atlas-loader", ".zip");
zipFiles.put(zipName, localZipFile);
copyFile(new URL(zipName), localZipFile);
} catch (IOException e) {
if (localZipFile != null && !localZipFile.delete()) {
log.error("Cannot delete " + localZipFile.getAbsolutePath());
}
throw new AtlasLoaderException("Error: '"+e.getMessage()+"' occurred while retrieving raw data files from ArrayExpress ftp site" + USE_PROCCESSED_FILES);
}
}
try {
extractZip(localZipFile, adData.dataDir);
} catch (IOException e) {
throw new AtlasLoaderException("Error: '"+e.getMessage()+"' occurred while retrieving raw data files from ArrayExpress ftp site" + USE_PROCCESSED_FILES);
}
}
}
if (!tempFile.exists()) {
throw new AtlasLoaderException("Error occurred while processing raw data files: File '" + dataFileName + "' is not found" + USE_PROCCESSED_FILES);
}
}
listener.setProgress("Processing data in R");
for (Map.Entry<String, RawData> entry : dataByArrayDesign.entrySet()) {
final DataNormalizer normalizer = new DataNormalizer(entry.getValue());
// this method returns null if the computation finished successfully,
// or an instance of the "try-error" R class in case of failure;
// currently we receive instances of "try-error" as RChar objects
final RObject result = computeService.computeTask(normalizer);
if (result != null) {
throw new AtlasLoaderException(
"Something unexpected happened during R processing; returned " +
(result instanceof RChar
? ((RChar) result).getValue()[0]
: result));
}
try {
final File mergedFile = new File(normalizer.mergedFilePath);
final DataMatrixFileBuffer buffer = cache.getDataMatrixFileBuffer(mergedFile.toURL(), null);
final HashMap<String, Assay> assayMap = entry.getValue().assays;
final ArrayList<String> fileNames = normalizer.fileNames;
for (int i = 0; i < fileNames.size(); ++i) {
final Assay assay = assayMap.get(fileNames.get(i));
cache.setAssayDataMatrixRef(assay, buffer.getStorage(), i);
}
if (!mergedFile.delete()) {
log.warn("Cannot delete" + mergedFile.getAbsolutePath());
}
} catch (MalformedURLException e) {
throw LogUtil.createUnexpected("MalformedURLException is thrown: " + e.getMessage());
}
}
return true;
} finally {
for (RawData data : dataByArrayDesign.values()) {
deleteDirectory(data.dataDir);
}
for (File z : zipFiles.values()) {
if (!z.delete()) {
log.warn("Cannot delete " + z.getAbsolutePath());
}
}
}
}
private static class DataNormalizer implements ComputeTask<RObject> {
private final RawData data;
public final ArrayList<String> fileNames = new ArrayList<String>();
public final String pathPrefix;
public final String mergedFilePath;
public DataNormalizer(RawData data) {
this.data = data;
pathPrefix = data.dataDir.getAbsolutePath() + "/";
mergedFilePath = pathPrefix + "merged.txt";
}
public RObject compute(RServices R) throws RemoteException {
StringBuilder files = new StringBuilder();
StringBuilder scans = new StringBuilder();
files.append("files = c(");
scans.append("scans = c(");
boolean isFirst = true;
for (Map.Entry<String, String> entry : data.celFiles.entrySet()) {
if (isFirst) {
isFirst = false;
} else {
files.append(", ");
scans.append(", ");
}
fileNames.add(entry.getKey());
files.append("'");
files.append(pathPrefix);
files.append(entry.getKey());
files.append("'");
scans.append("'");
scans.append(entry.getValue());
scans.append("'");
}
files.append(")");
scans.append(")");
log.info(files.toString());
log.info(scans.toString());
log.info("outFile = '" + mergedFilePath + "'");
R.sourceFromBuffer(files.toString());
R.sourceFromBuffer(scans.toString());
R.sourceFromBuffer("outFile = '" + mergedFilePath + "'");
R.sourceFromBuffer(RUtil.getRCodeFromResource("R/normalizeOneExperiment.R"));
final RObject result = R.getObject("normalizeOneExperiment(files = files, outFile = outFile, scans = scans, parallel = FALSE)");
R.sourceFromBuffer("rm(outFile)");
R.sourceFromBuffer("rm(scans)");
R.sourceFromBuffer("rm(files)");
R.sourceFromBuffer(RUtil.getRCodeFromResource("R/cleanupNamespace.R"));
return result;
}
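// Illustrative sketch (not part of the original loader): for two CEL files the buffers
// sourced into R above would look roughly like
//   files = c('/tmp/atlas-raw/A.CEL', '/tmp/atlas-raw/B.CEL')
//   scans = c('scan A', 'scan B')
//   outFile = '/tmp/atlas-raw/merged.txt'
// The paths and scan names here are hypothetical; the real values come from
// data.celFiles and pathPrefix at runtime.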
}
}
|
Ticket 3334 - Code review comments
|
atlas-loader/src/main/java/uk/ac/ebi/gxa/loader/steps/ArrayDataStep.java
|
Ticket 3334 - Code review comments
|
|
Java
|
apache-2.0
|
756aa035a3fc42241fded1896cfd7bd043a85a85
| 0
|
researchstudio-sat/webofneeds,researchstudio-sat/webofneeds,researchstudio-sat/webofneeds,researchstudio-sat/webofneeds,researchstudio-sat/webofneeds,researchstudio-sat/webofneeds,researchstudio-sat/webofneeds
|
package won.utils.im.port;
import java.io.File;
import java.io.FileOutputStream;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Random;
import org.apache.jena.datatypes.BaseDatatype;
import org.apache.jena.datatypes.RDFDatatype;
import org.apache.jena.datatypes.xsd.XSDDatatype;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.ModelFactory;
import org.apache.jena.rdf.model.Property;
import org.apache.jena.rdf.model.Resource;
import org.apache.jena.vocabulary.DC;
import org.apache.jena.vocabulary.RDF;
public class RealEstateNeedGenerator {
static Model model = ModelFactory.createDefaultModel();
static Property won_hasFacet = model.createProperty("http://purl.org/webofneeds/model#hasFacet");
static Property won_is = model.createProperty("http://purl.org/webofneeds/model#is");
static Property won_seeks = model.createProperty("http://purl.org/webofneeds/model#seeks");
static Property won_hasTag = model.createProperty("http://purl.org/webofneeds/model#hasTag");
static Property won_hasLocation = model.createProperty("http://purl.org/webofneeds/model#hasLocation");
static Property won_geoSpatial = model.createProperty("http://purl.org/webofneeds/model#geoSpatial");
static Property won_hasBoundingBox = model.createProperty("http://purl.org/webofneeds/model#hasBoundingBox");
static Property won_hasNorthWestCorner = model
.createProperty("http://purl.org/webofneeds/model#hasNorthWestCorner");
static Property won_hasSouthEastCorner = model
.createProperty("http://purl.org/webofneeds/model#hasSouthEastCorner");
static Property schema_amenityFeature = model.createProperty("http://schema.org/amenityFeature");
static Property schema_floorSize = model.createProperty("http://schema.org/floorSize");
static Property schema_numberOfRooms = model.createProperty("http://schema.org/numberOfRooms");
static Property schema_priceSpecification = model.createProperty("http://schema.org/priceSpecification");
static Property schema_geo = model.createProperty("http://schema.org/geo");
static Property schema_latitude = model.createProperty("http://schema.org/latitude");
static Property schema_longitude = model.createProperty("http://schema.org/longitude");
static Property schema_name = model.createProperty("http://schema.org/name");
static Property schema_description = model.createProperty("http://schema.org/description");
static Property schema_price = model.createProperty("http://schema.org/price");
static Property schema_priceCurrency = model.createProperty("http://schema.org/priceCurrency");
static Property schema_unitCode = model.createProperty("http://schema.org/unitCode");
static Property schema_value = model.createProperty("http://schema.org/value");
static RDFDatatype schema_Text = new BaseDatatype("http://schema.org/Text");
static RDFDatatype bigdata_geoSpatialDatatype = new BaseDatatype("http://www.bigdata.com/rdf/geospatial/literals/v1#lat-lon");
static HashMap<String, String>[] locations = new HashMap[10];
static String[] amenities = { "Balcony", "Parkingspace", "Garden", "Bathtub", "furnished",
"Parquetflooring", "Elevator", "Cellar", "Pool", "Sauna", "accessible" };
public static void main(String[] args) {
initializeLocations();
generateNeeds();
}
private static void generateNeeds() {
File parentFolder = new File("sample_needs");
parentFolder.mkdirs();
Arrays.stream(parentFolder.listFiles()).forEach(f -> f.delete());
final int N = 10000;
Random random = new Random();
for (int i = 0; i < N; i++) {
String rnd = Long.toHexString(random.nextLong());
String needURI = "https://localhost:8443/won/resource/event/" + "real_estate_sample_" + rnd + "#need";
model = ModelFactory.createDefaultModel();
setPrefixes();
Resource need = model.createResource(needURI);
Resource isPart = model.createResource();
Resource seeksPart = model.createResource();
Resource won_Need = model.createResource("http://purl.org/webofneeds/model#Need");
Resource won_OwnerFacet = model.createResource("http://purl.org/webofneeds/model#OwnerFacet");
// method signatures: branch, probability that detail is added, min, max
isPart = addTitle(isPart, 1.0, i);
isPart = addDescription(isPart, 1.0);
isPart = addLocation(isPart, 1.0);
isPart = addAmenities(isPart, 0.8, 1, 4);
isPart = addFloorSize(isPart, 0.8, 28, 250);
isPart = addNumberOfRooms(isPart, 0.8, 1, 9);
isPart = addPriceSpecification(isPart, 1.0, 250, 2200);
isPart.addProperty(won_hasTag, "RentOutRealEstate");
seeksPart.addProperty(won_hasTag, "SearchRealEstateToRent");
need.addProperty(RDF.type, won_Need);
need.addProperty(won_hasFacet, won_OwnerFacet);
need.addProperty(won_is, isPart);
need.addProperty(won_seeks, seeksPart);
try {
FileOutputStream out = new FileOutputStream(new File(parentFolder, "real_estate_need_" + rnd + ".trig"));
model.write(out, "TURTLE");
out.close();
} catch (Exception e) {
throw new RuntimeException(e);
}
}
System.out.println("generated " + N + " sample needs");
}
private static Resource addTitle(Resource resource, double probability, int counter) {
if (Math.random() < (1.0 - probability)) {
return resource;
}
resource.addProperty(DC.title, "Sample Real Estate Need " + counter);
return resource;
}
private static Resource addDescription(Resource resource, double probability) {
if (Math.random() < (1.0 - probability)) {
return resource;
}
resource.addProperty(DC.description, "This is a sample offer that was automatically generated.");
return resource;
}
private static Resource addLocation(Resource resource, double probability) {
if (Math.random() < (1.0 - probability)) {
return resource;
}
// pick a location
int locNr = (int) (Math.random() * 10);
String nwlat = locations[locNr].get("nwlat");
String nwlng = locations[locNr].get("nwlng");
String selat = locations[locNr].get("selat");
String selng = locations[locNr].get("selng");
String lat = locations[locNr].get("lat");
String lng = locations[locNr].get("lng");
String name = locations[locNr].get("name");
Resource locationResource = model.createResource();
Resource boundingBoxResource = model.createResource();
Resource nwCornerResource = model.createResource();
Resource seCornerResource = model.createResource();
Resource geoResource = model.createResource();
Resource schema_Place = model.createResource("http://schema.org/Place");
Resource schema_GeoCoordinates = model.createResource("http://schema.org/GeoCoordinates");
resource.addProperty(won_hasLocation, locationResource);
locationResource.addProperty(RDF.type, schema_Place);
locationResource.addProperty(schema_name, name);
locationResource.addProperty(schema_geo, geoResource);
geoResource.addProperty(RDF.type, schema_GeoCoordinates);
geoResource.addProperty(schema_latitude, lat);
geoResource.addProperty(schema_longitude, lng);
// add bigdata specific value: "<subj> won:geoSpatial "48.225073#16.358398"^^<http://www.bigdata.com/rdf/geospatial/literals/v1#lat-lon>"
geoResource.addProperty(won_geoSpatial, lat+"#"+lng, bigdata_geoSpatialDatatype);
locationResource.addProperty(won_hasBoundingBox, boundingBoxResource);
boundingBoxResource.addProperty(won_hasNorthWestCorner, nwCornerResource);
nwCornerResource.addProperty(RDF.type, schema_GeoCoordinates);
nwCornerResource.addProperty(schema_latitude, nwlat);
nwCornerResource.addProperty(schema_longitude, nwlng);
boundingBoxResource.addProperty(won_hasSouthEastCorner, seCornerResource);
seCornerResource.addProperty(RDF.type, schema_GeoCoordinates);
seCornerResource.addProperty(schema_latitude, selat);
seCornerResource.addProperty(schema_longitude, selng);
return resource;
}
private static Resource addAmenities(Resource resource, double probability, int min, int max) {
if (Math.random() < (1.0 - probability)) {
return resource;
}
int numberOfAmenities = (int) (Math.random() * Math.abs(max - min + 1) + min);
Collections.shuffle(Arrays.asList(amenities));
Resource schema_LocationFeatureSpecification = model
.createResource("http://schema.org/LocationFeatureSpecification");
for (int j = 0; j < numberOfAmenities; j++) {
Resource amenityResource = model.createResource();
resource.addProperty(schema_amenityFeature, amenityResource);
amenityResource.addProperty(RDF.type, schema_LocationFeatureSpecification);
amenityResource.addProperty(schema_value, amenities[j], schema_Text);
}
return resource;
}
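// Illustrative sketch (assumption, not emitted verbatim by this generator): with the fixed
// structure above each amenity gets its own blank node, e.g. in Turtle:
//   _:isPart s:amenityFeature [ a s:LocationFeatureSpecification ; s:value "Balcony"^^s:Text ] ,
//                             [ a s:LocationFeatureSpecification ; s:value "Garden"^^s:Text ] .
// The previous version attached all amenity names to a single feature node instead.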
private static Resource addFloorSize(Resource resource, double probability, int min, int max) {
if (Math.random() < (1.0 - probability)) {
return resource;
}
int floorSize = (int) (Math.random() * Math.abs(max - min + 1)) + min;
Resource floorSizeResource = model.createResource();
Resource schema_QuantitativeValue = model.createResource("http://schema.org/QuantitativeValue");
resource.addProperty(schema_floorSize, floorSizeResource);
floorSizeResource.addProperty(RDF.type, schema_QuantitativeValue);
floorSizeResource.addProperty(schema_unitCode, "MTK");
floorSizeResource.addProperty(schema_value, Integer.toString(floorSize), XSDDatatype.XSDfloat);
return resource;
}
private static Resource addNumberOfRooms(Resource resource, double probability, int min, int max) {
if (Math.random() < (1.0 - probability)) {
return resource;
}
int numberOfRooms = (int) (Math.random() * Math.abs(max - min + 1)) + min;
resource.addProperty(schema_numberOfRooms, Integer.toString(numberOfRooms), XSDDatatype.XSDfloat);
return resource;
}
private static Resource addPriceSpecification(Resource resource, double probability, double min, double max) {
if (Math.random() < (1.0 - probability)) {
return resource;
}
int price = (int) (Math.random() * Math.abs(max - min + 1) + min);
Resource schema_CompoundPriceSpecification = model
.createResource("http://schema.org/CompoundPriceSpecification");
Resource priceSpecificationResource = model.createResource();
resource.addProperty(schema_priceSpecification, priceSpecificationResource);
priceSpecificationResource.addProperty(RDF.type, schema_CompoundPriceSpecification);
priceSpecificationResource.addProperty(schema_description, "total rent per month");
priceSpecificationResource.addProperty(schema_price, Integer.toString(price), XSDDatatype.XSDfloat);
priceSpecificationResource.addProperty(schema_priceCurrency, "EUR");
return resource;
}
private static void initializeLocations() {
HashMap<String, String> loc0 = new HashMap<String, String>();
loc0.put("nwlat", "48.385349");
loc0.put("nwlng", "16.821063");
loc0.put("selat", "48.309745");
loc0.put("selng", "16.729174");
loc0.put("lat", "48.288651");
loc0.put("lng", "16.705195");
loc0.put("name", "Gemeinde Weikendorf, Bezirk Gänserndorf, Lower Austria, 2253, Austria");
locations[0] = loc0;
HashMap<String, String> loc1 = new HashMap<String, String>();
loc1.put("nwlat", "48.213814");
loc1.put("nwlng", "16.340870");
loc1.put("selat", "48.236309");
loc1.put("selng", "16.370149");
loc1.put("lat", "48.225073");
loc1.put("lng", "16.358398");
loc1.put("name", "Alsergrund, Vienna, 1090, Austria");
locations[1] = loc1;
HashMap<String, String> loc2 = new HashMap<String, String>();
loc2.put("nwlat", "48.145908");
loc2.put("nwlng", "14.126198");
loc2.put("selat", "48.465908");
loc2.put("selng", "14.446198");
loc2.put("lat", "48.305908");
loc2.put("lng", "14.286198");
loc2.put("name", "Linz, Upper Austria, 4010, Austria");
locations[2] = loc2;
HashMap<String, String> loc3 = new HashMap<String, String>();
loc3.put("nwlat", "46.910256");
loc3.put("nwlng", "15.278572");
loc3.put("selat", "47.230256");
loc3.put("selng", "15.598572");
loc3.put("lat", "47.070256");
loc3.put("lng", "15.438572");
loc3.put("name", "Graz, Styria, 8011, Austria");
locations[3] = loc3;
HashMap<String, String> loc4 = new HashMap<String, String>();
loc4.put("nwlat", "47.638135");
loc4.put("nwlng", "12.886481");
loc4.put("selat", "47.958135");
loc4.put("selng", "13.206481");
loc4.put("lat", "47.798135");
loc4.put("lng", "13.046481");
loc4.put("name", "Salzburg, 5020, Austria");
locations[4] = loc4;
HashMap<String, String> loc5 = new HashMap<String, String>();
loc5.put("nwlat", "48.164398");
loc5.put("nwlng", "15.582912");
loc5.put("selat", "48.244399");
loc5.put("selng", "15.662912");
loc5.put("lat", "48.204399");
loc5.put("lng", "15.622912");
loc5.put("name", "St. Pölten, Lower Austria, 3102, Austria");
locations[5] = loc5;
HashMap<String, String> loc6 = new HashMap<String, String>();
loc6.put("nwlat", "47.480016");
loc6.put("nwlng", "9.654882");
loc6.put("selat", "47.534581");
loc6.put("selng", "9.807672");
loc6.put("lat", "47.502578");
loc6.put("lng", "9.747292");
loc6.put("name", "Bregenz, Vorarlberg, Austria");
locations[6] = loc6;
HashMap<String, String> loc7 = new HashMap<String, String>();
loc7.put("nwlat", "46.782816");
loc7.put("nwlng", "14.467960");
loc7.put("selat", "46.462816");
loc7.put("selng", "14.147960");
loc7.put("lat", "46.622816");
loc7.put("lng", "14.307960");
loc7.put("name", "Klagenfurt, Klagenfurt am Wörthersee, Carinthia, 9020, Austria");
locations[7] = loc7;
HashMap<String, String> loc8 = new HashMap<String, String>();
loc8.put("nwlat", "47.425430");
loc8.put("nwlng", "11.552769");
loc8.put("selat", "47.105430");
loc8.put("selng", "11.232769");
loc8.put("lat", "47.265430");
loc8.put("lng", "11.392769");
loc8.put("name", "Innsbruck, Tyrol, 6020, Austria");
locations[8] = loc8;
HashMap<String, String> loc9 = new HashMap<String, String>();
loc9.put("nwlat", "48.145711");
loc9.put("nwlng", "16.560306");
loc9.put("selat", "47.951363");
loc9.put("selng", "16.253757");
loc9.put("lat", "47.875098");
loc9.put("lng", "15.866162");
loc9.put("name", "Bezirk Baden, Lower Austria, Austria");
locations[9] = loc9;
}
private static void setPrefixes() {
model.setNsPrefix("conn", "https://localhost:8443/won/resource/connection/");
model.setNsPrefix("need", "https://localhost:8443/won/resource/need/");
model.setNsPrefix("local", "https://localhost:8443/won/resource/");
model.setNsPrefix("event", "https://localhost:8443/won/resource/event/");
model.setNsPrefix("msg", "http://purl.org/webofneeds/message#");
model.setNsPrefix("won", "http://purl.org/webofneeds/model#");
model.setNsPrefix("woncrypt", "http://purl.org/webofneeds/woncrypt#");
model.setNsPrefix("cert", "http://www.w3.org/ns/auth/cert#");
model.setNsPrefix("geo", "http://www.w3.org/2003/01/geo/wgs84_pos#");
model.setNsPrefix("sig", "http://icp.it-risk.iwvi.uni-koblenz.de/ontologies/signature.owl#");
model.setNsPrefix("s", "http://schema.org/");
model.setNsPrefix("sh", "http://www.w3.org/ns/shacl#");
model.setNsPrefix("ldp", "http://www.w3.org/ns/ldp#");
model.setNsPrefix("sioc", "http://rdfs.org/sioc/ns#");
}
}
|
webofneeds/won-utils/won-utils-import/src/main/java/won/utils/im/port/RealEstateNeedGenerator.java
|
package won.utils.im.port;
import java.io.File;
import java.io.FileOutputStream;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Random;
import org.apache.jena.datatypes.BaseDatatype;
import org.apache.jena.datatypes.RDFDatatype;
import org.apache.jena.datatypes.xsd.XSDDatatype;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.ModelFactory;
import org.apache.jena.rdf.model.Property;
import org.apache.jena.rdf.model.Resource;
import org.apache.jena.vocabulary.DC;
import org.apache.jena.vocabulary.RDF;
public class RealEstateNeedGenerator {
static Model model = ModelFactory.createDefaultModel();
static Property won_hasFacet = model.createProperty("http://purl.org/webofneeds/model#hasFacet");
static Property won_is = model.createProperty("http://purl.org/webofneeds/model#is");
static Property won_seeks = model.createProperty("http://purl.org/webofneeds/model#seeks");
static Property won_hasTag = model.createProperty("http://purl.org/webofneeds/model#hasTag");
static Property won_hasLocation = model.createProperty("http://purl.org/webofneeds/model#hasLocation");
static Property won_geoSpatial = model.createProperty("http://purl.org/webofneeds/model#geoSpatial");
static Property won_hasBoundingBox = model.createProperty("http://purl.org/webofneeds/model#hasBoundingBox");
static Property won_hasNorthWestCorner = model
.createProperty("http://purl.org/webofneeds/model#hasNorthWestCorner");
static Property won_hasSouthEastCorner = model
.createProperty("http://purl.org/webofneeds/model#hasSouthEastCorner");
static Property schema_amenityFeature = model.createProperty("http://schema.org/amenityFeature");
static Property schema_floorSize = model.createProperty("http://schema.org/floorSize");
static Property schema_numberOfRooms = model.createProperty("http://schema.org/numberOfRooms");
static Property schema_priceSpecification = model.createProperty("http://schema.org/priceSpecification");
static Property schema_geo = model.createProperty("http://schema.org/geo");
static Property schema_latitude = model.createProperty("http://schema.org/latitude");
static Property schema_longitude = model.createProperty("http://schema.org/longitude");
static Property schema_name = model.createProperty("http://schema.org/name");
static Property schema_description = model.createProperty("http://schema.org/description");
static Property schema_price = model.createProperty("http://schema.org/price");
static Property schema_priceCurrency = model.createProperty("http://schema.org/priceCurrency");
static Property schema_unitCode = model.createProperty("http://schema.org/unitCode");
static Property schema_value = model.createProperty("http://schema.org/value");
static RDFDatatype bigdata_geoSpatialDatatype = new BaseDatatype("http://www.bigdata.com/rdf/geospatial/literals/v1#lat-lon");
static HashMap<String, String>[] locations = new HashMap[10];
static String[] amenities = { "Balcony", "Parkingspace", "Garden", "Bathtub", "furnished",
"Parquetflooring", "Elevator", "Cellar", "Pool", "Sauna", "accessible" };
public static void main(String[] args) {
initializeLocations();
generateNeeds();
}
private static void generateNeeds() {
File parentFolder = new File("sample_needs");
parentFolder.mkdirs();
Arrays.stream(parentFolder.listFiles()).forEach(f -> f.delete());
final int N = 10000;
Random random = new Random();
for (int i = 0; i < N; i++) {
String rnd = Long.toHexString(random.nextLong());
String needURI = "https://localhost:8443/won/resource/event/" + "real_estate_sample_" + rnd + "#need";
model = ModelFactory.createDefaultModel();
setPrefixes();
Resource need = model.createResource(needURI);
Resource isPart = model.createResource();
Resource seeksPart = model.createResource();
Resource won_Need = model.createResource("http://purl.org/webofneeds/model#Need");
Resource won_OwnerFacet = model.createResource("http://purl.org/webofneeds/model#OwnerFacet");
// method signatures: branch, probability that detail is added, min, max
isPart = addTitle(isPart, 1.0, i);
isPart = addDescription(isPart, 1.0);
isPart = addLocation(isPart, 1.0);
isPart = addAmenities(isPart, 0.8, 1, 4);
isPart = addFloorSize(isPart, 0.8, 28, 250);
isPart = addNumberOfRooms(isPart, 0.8, 1, 9);
isPart = addPriceSpecification(isPart, 1.0, 250, 2200);
isPart.addProperty(won_hasTag, "RentOutRealEstate");
seeksPart.addProperty(won_hasTag, "SearchRealEstateToRent");
need.addProperty(RDF.type, won_Need);
need.addProperty(won_hasFacet, won_OwnerFacet);
need.addProperty(won_is, isPart);
need.addProperty(won_seeks, seeksPart);
try {
FileOutputStream out = new FileOutputStream(new File(parentFolder, "real_estate_need_" + rnd + ".trig"));
model.write(out, "TURTLE");
out.close();
} catch (Exception e) {
throw new RuntimeException(e);
}
}
System.out.println("generated " + N + " sample needs");
}
private static Resource addTitle(Resource resource, double probability, int counter) {
if (Math.random() < (1.0 - probability)) {
return resource;
}
resource.addProperty(DC.title, "Sample Real Estate Need " + counter);
return resource;
}
private static Resource addDescription(Resource resource, double probability) {
if (Math.random() < (1.0 - probability)) {
return resource;
}
resource.addProperty(DC.description, "This is a sample offer that was automatically generated.");
return resource;
}
private static Resource addLocation(Resource resource, double probability) {
if (Math.random() < (1.0 - probability)) {
return resource;
}
// pick a location
int locNr = (int) (Math.random() * 10);
String nwlat = locations[locNr].get("nwlat");
String nwlng = locations[locNr].get("nwlng");
String selat = locations[locNr].get("selat");
String selng = locations[locNr].get("selng");
String lat = locations[locNr].get("lat");
String lng = locations[locNr].get("lng");
String name = locations[locNr].get("name");
Resource locationResource = model.createResource();
Resource boundingBoxResource = model.createResource();
Resource nwCornerResource = model.createResource();
Resource seCornerResource = model.createResource();
Resource geoResource = model.createResource();
Resource schema_Place = model.createResource("http://schema.org/Place");
Resource schema_GeoCoordinates = model.createResource("http://schema.org/GeoCoordinates");
resource.addProperty(won_hasLocation, locationResource);
locationResource.addProperty(RDF.type, schema_Place);
locationResource.addProperty(schema_name, name);
locationResource.addProperty(schema_geo, geoResource);
geoResource.addProperty(RDF.type, schema_GeoCoordinates);
geoResource.addProperty(schema_latitude, lat);
geoResource.addProperty(schema_longitude, lng);
// add bigdata specific value: "<subj> won:geoSpatial "48.225073#16.358398"^^<http://www.bigdata.com/rdf/geospatial/literals/v1#lat-lon>"
geoResource.addProperty(won_geoSpatial, lat+"#"+lng, bigdata_geoSpatialDatatype);
locationResource.addProperty(won_hasBoundingBox, boundingBoxResource);
boundingBoxResource.addProperty(won_hasNorthWestCorner, nwCornerResource);
nwCornerResource.addProperty(RDF.type, schema_GeoCoordinates);
nwCornerResource.addProperty(schema_latitude, nwlat);
nwCornerResource.addProperty(schema_longitude, nwlng);
boundingBoxResource.addProperty(won_hasSouthEastCorner, seCornerResource);
seCornerResource.addProperty(RDF.type, schema_GeoCoordinates);
seCornerResource.addProperty(schema_latitude, selat);
seCornerResource.addProperty(schema_longitude, selng);
return resource;
}
private static Resource addAmenities(Resource resource, double probability, int min, int max) {
if (Math.random() < (1.0 - probability)) {
return resource;
}
int numberOfAmenities = (int) (Math.random() * Math.abs(max - min + 1) + min);
Collections.shuffle(Arrays.asList(amenities));
Resource amenityResource = model.createResource();
Resource schema_LocationFeatureSpecification = model
.createResource("http://schema.org/LocationFeatureSpecification");
resource.addProperty(schema_amenityFeature, amenityResource);
amenityResource.addProperty(RDF.type, schema_LocationFeatureSpecification);
for (int j = 0; j < numberOfAmenities; j++) {
amenityResource.addProperty(schema_name, amenities[j]);
}
return resource;
}
private static Resource addFloorSize(Resource resource, double probability, int min, int max) {
if (Math.random() < (1.0 - probability)) {
return resource;
}
int floorSize = (int) (Math.random() * Math.abs(max - min + 1)) + min;
Resource floorSizeResource = model.createResource();
Resource schema_QuantitativeValue = model.createResource("http://schema.org/QuantitativeValue");
resource.addProperty(schema_floorSize, floorSizeResource);
floorSizeResource.addProperty(RDF.type, schema_QuantitativeValue);
floorSizeResource.addProperty(schema_unitCode, "MTK");
floorSizeResource.addProperty(schema_value, Integer.toString(floorSize), XSDDatatype.XSDfloat);
return resource;
}
private static Resource addNumberOfRooms(Resource resource, double probability, int min, int max) {
if (Math.random() < (1.0 - probability)) {
return resource;
}
int numberOfRooms = (int) (Math.random() * Math.abs(max - min + 1)) + min;
resource.addProperty(schema_numberOfRooms, Integer.toString(numberOfRooms), XSDDatatype.XSDfloat);
return resource;
}
private static Resource addPriceSpecification(Resource resource, double probability, double min, double max) {
if (Math.random() < (1.0 - probability)) {
return resource;
}
int price = (int) (Math.random() * Math.abs(max - min + 1) + min);
Resource schema_CompoundPriceSpecification = model
.createResource("http://schema.org/CompoundPriceSpecification");
Resource priceSpecificationResource = model.createResource();
resource.addProperty(schema_priceSpecification, priceSpecificationResource);
priceSpecificationResource.addProperty(RDF.type, schema_CompoundPriceSpecification);
priceSpecificationResource.addProperty(schema_description, "total rent per month");
priceSpecificationResource.addProperty(schema_price, Integer.toString(price), XSDDatatype.XSDfloat);
priceSpecificationResource.addProperty(schema_priceCurrency, "EUR");
return resource;
}
private static void initializeLocations() {
HashMap<String, String> loc0 = new HashMap<String, String>();
loc0.put("nwlat", "48.385349");
loc0.put("nwlng", "16.821063");
loc0.put("selat", "48.309745");
loc0.put("selng", "16.729174");
loc0.put("lat", "48.288651");
loc0.put("lng", "16.705195");
loc0.put("name", "Gemeinde Weikendorf, Bezirk Gänserndorf, Lower Austria, 2253, Austria");
locations[0] = loc0;
HashMap<String, String> loc1 = new HashMap<String, String>();
loc1.put("nwlat", "48.213814");
loc1.put("nwlng", "16.340870");
loc1.put("selat", "48.236309");
loc1.put("selng", "16.370149");
loc1.put("lat", "48.225073");
loc1.put("lng", "16.358398");
loc1.put("name", "Alsergrund, Vienna, 1090, Austria");
locations[1] = loc1;
HashMap<String, String> loc2 = new HashMap<String, String>();
loc2.put("nwlat", "48.145908");
loc2.put("nwlng", "14.126198");
loc2.put("selat", "48.465908");
loc2.put("selng", "14.446198");
loc2.put("lat", "48.305908");
loc2.put("lng", "14.286198");
loc2.put("name", "Linz, Upper Austria, 4010, Austria");
locations[2] = loc2;
HashMap<String, String> loc3 = new HashMap<String, String>();
loc3.put("nwlat", "46.910256");
loc3.put("nwlng", "15.278572");
loc3.put("selat", "47.230256");
loc3.put("selng", "15.598572");
loc3.put("lat", "47.070256");
loc3.put("lng", "15.438572");
loc3.put("name", "Graz, Styria, 8011, Austria");
locations[3] = loc3;
HashMap<String, String> loc4 = new HashMap<String, String>();
loc4.put("nwlat", "47.638135");
loc4.put("nwlng", "12.886481");
loc4.put("selat", "47.958135");
loc4.put("selng", "13.206481");
loc4.put("lat", "47.798135");
loc4.put("lng", "13.046481");
loc4.put("name", "Salzburg, 5020, Austria");
locations[4] = loc4;
HashMap<String, String> loc5 = new HashMap<String, String>();
loc5.put("nwlat", "48.164398");
loc5.put("nwlng", "15.582912");
loc5.put("selat", "48.244399");
loc5.put("selng", "15.662912");
loc5.put("lat", "48.204399");
loc5.put("lng", "15.622912");
loc5.put("name", "St. Pölten, Lower Austria, 3102, Austria");
locations[5] = loc5;
HashMap<String, String> loc6 = new HashMap<String, String>();
loc6.put("nwlat", "47.480016");
loc6.put("nwlng", "9.654882");
loc6.put("selat", "47.534581");
loc6.put("selng", "9.807672");
loc6.put("lat", "47.502578");
loc6.put("lng", "9.747292");
loc6.put("name", "Bregenz, Vorarlberg, Austria");
locations[6] = loc6;
HashMap<String, String> loc7 = new HashMap<String, String>();
loc7.put("nwlat", "46.782816");
loc7.put("nwlng", "14.467960");
loc7.put("selat", "46.462816");
loc7.put("selng", "14.147960");
loc7.put("lat", "46.622816");
loc7.put("lng", "14.307960");
loc7.put("name", "Klagenfurt, Klagenfurt am Wörthersee, Carinthia, 9020, Austria");
locations[7] = loc7;
HashMap<String, String> loc8 = new HashMap<String, String>();
loc8.put("nwlat", "47.425430");
loc8.put("nwlng", "11.552769");
loc8.put("selat", "47.105430");
loc8.put("selng", "11.232769");
loc8.put("lat", "47.265430");
loc8.put("lng", "11.392769");
loc8.put("name", "Innsbruck, Tyrol, 6020, Austria");
locations[8] = loc8;
HashMap<String, String> loc9 = new HashMap<String, String>();
loc9.put("nwlat", "48.145711");
loc9.put("nwlng", "16.560306");
loc9.put("selat", "47.951363");
loc9.put("selng", "16.253757");
loc9.put("lat", "47.875098");
loc9.put("lng", "15.866162");
loc9.put("name", "Bezirk Baden, Lower Austria, Austria");
locations[9] = loc9;
}
private static void setPrefixes() {
model.setNsPrefix("conn", "https://localhost:8443/won/resource/connection/");
model.setNsPrefix("need", "https://localhost:8443/won/resource/need/");
model.setNsPrefix("local", "https://localhost:8443/won/resource/");
model.setNsPrefix("event", "https://localhost:8443/won/resource/event/");
model.setNsPrefix("msg", "http://purl.org/webofneeds/message#");
model.setNsPrefix("won", "http://purl.org/webofneeds/model#");
model.setNsPrefix("woncrypt", "http://purl.org/webofneeds/woncrypt#");
model.setNsPrefix("cert", "http://www.w3.org/ns/auth/cert#");
model.setNsPrefix("geo", "http://www.w3.org/2003/01/geo/wgs84_pos#");
model.setNsPrefix("sig", "http://icp.it-risk.iwvi.uni-koblenz.de/ontologies/signature.owl#");
model.setNsPrefix("s", "http://schema.org/");
model.setNsPrefix("sh", "http://www.w3.org/ns/shacl#");
model.setNsPrefix("ldp", "http://www.w3.org/ns/ldp#");
model.setNsPrefix("sioc", "http://rdfs.org/sioc/ns#");
}
}
|
Fix amenity feature structure
|
webofneeds/won-utils/won-utils-import/src/main/java/won/utils/im/port/RealEstateNeedGenerator.java
|
Fix amenity feature structure
|
|
Java
|
apache-2.0
|
b0fd7fae8e4eb945cbbfb3c1bd6a8a84e61bbd90
| 0
|
anylineorg/anyline,anylineorg/anyline
|
package org.anyline.jdbc.config.db.impl.oracle;
import org.anyline.dao.AnylineDao;
import org.anyline.entity.DataRow;
import org.anyline.entity.DataSet;
import org.anyline.entity.OrderStore;
import org.anyline.entity.PageNavi;
import org.anyline.jdbc.config.db.SQLAdapter;
import org.anyline.jdbc.config.db.impl.BasicSQLAdapter;
import org.anyline.jdbc.config.db.run.RunSQL;
import org.anyline.jdbc.entity.*;
import org.anyline.util.*;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.jdbc.support.rowset.SqlRowSet;
import org.springframework.stereotype.Repository;
import java.lang.reflect.Field;
import java.sql.ResultSet;
import java.sql.Timestamp;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.util.*;
@Repository("anyline.jdbc.sql.adapter.oracle")
public class SQLAdapterImpl extends BasicSQLAdapter implements SQLAdapter, InitializingBean {
@Autowired(required = false)
@Qualifier("anyline.dao")
protected AnylineDao dao;
public DB_TYPE type(){
return DB_TYPE.ORACLE;
}
@Value("${anyline.jdbc.delimiter.oracle:}")
private String delimiter;
@Override
public void afterPropertiesSet() throws Exception {
setDelimiter(delimiter);
}
public SQLAdapterImpl(){
delimiterFr = "";
delimiterTo = "";
}
/* *****************************************************************************************************************
*
* DML
*
* *****************************************************************************************************************/
@Override
public String parseFinalQueryTxt(RunSQL run){
StringBuilder builder = new StringBuilder();
String cols = run.getFetchColumns();
PageNavi navi = run.getPageNavi();
String sql = run.getBaseQueryTxt();
OrderStore orders = run.getOrderStore();
int first = 0;
int last = 0;
String order = "";
if(null != orders){
order = orders.getRunText(getDelimiterFr()+getDelimiterTo());
}
if(null != navi){
first = navi.getFirstRow();
last = navi.getLastRow();
}
if(null == navi){
builder.append(sql).append("\n").append(order);
}else{
// pagination
builder.append("SELECT "+cols+" FROM( \n");
builder.append("SELECT TAB_I.* ,ROWNUM AS ROW_NUMBER \n");
builder.append("FROM( \n");
builder.append(sql);
builder.append("\n").append(order);
builder.append(") TAB_I \n");
builder.append(") TAB_O WHERE ROW_NUMBER >= "+(first+1)+" AND ROW_NUMBER <= "+(last+1));
}
return builder.toString();
}
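/*
 * Illustrative sketch (assumption, not produced verbatim by this adapter): for a page with
 * firstRow=0 and lastRow=19 the method above wraps the base query in the classic Oracle
 * ROWNUM pattern, yielding roughly:
 *   SELECT <cols> FROM(
 *     SELECT TAB_I.* ,ROWNUM AS ROW_NUMBER
 *     FROM( <base query> <order by> ) TAB_I
 *   ) TAB_O WHERE ROW_NUMBER >= 1 AND ROW_NUMBER <= 20
 */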
@Override
public String concat(String ... args){
return concatOr(args);
}
/**
* Batch insert
* INSERT ALL
* INTO T (ID, NAME) VALUES (1,'N1')
* INTO T (ID, NAME) VALUES (2,'N2')
* INTO T (ID, NAME) VALUES (3,'N3')
* SELECT 1 FROM DUAL
* @param builder builder
* @param dest dest
* @param keys keys
*/
@Override
public void createInsertsTxt(StringBuilder builder, String dest, DataSet set, List<String> keys){
builder.append("INSERT ALL \n");
String head = "INTO " + dest + " (";
int keySize = keys.size();
for(int i=0; i<keySize; i++){
String key = keys.get(i);
head += key;
if(i<keySize-1){
head += ", ";
}
}
head += ") ";
int dataSize = set.size();
for(int i=0; i<dataSize; i++){
DataRow row = set.getRow(i);
if(null == row){
continue;
}
if(row.hasPrimaryKeys() && null != primaryCreater && BasicUtil.isEmpty(row.getPrimaryValue())){
String pk = row.getPrimaryKey();
if(null == pk){
pk = ConfigTable.getString("DEFAULT_PRIMARY_KEY", "ID");
}
row.put(pk, primaryCreater.createPrimary(this.type(),dest.replace(getDelimiterFr(), "").replace(getDelimiterTo(), ""), pk, null));
}
builder.append(head).append("VALUES ");
insertValue(builder, row, keys);
builder.append(" \n");
}
builder.append("SELECT 1 FROM DUAL");
}
@Override
public void createInsertsTxt(StringBuilder builder, String dest, Collection list, List<String> keys){
if(list instanceof DataSet){
DataSet set = (DataSet) list;
createInsertsTxt(builder, dest, set, keys);
return;
}
builder.append("INSERT ALL \n");
String head = "INTO " + dest + " (";
int keySize = keys.size();
for(int i=0; i<keySize; i++){
String key = keys.get(i);
head += key;
if(i<keySize-1){
head += ", ";
}
}
head += ") ";
for(Object obj:list){
if(obj instanceof DataRow) {
DataRow row = (DataRow)obj;
if (row.hasPrimaryKeys() && null != primaryCreater && BasicUtil.isEmpty(row.getPrimaryValue())) {
String pk = row.getPrimaryKey();
if (null == pk) {
pk = ConfigTable.getString("DEFAULT_PRIMARY_KEY", "ID");
}
row.put(pk, primaryCreater.createPrimary(type(), dest.replace(getDelimiterFr(), "").replace(getDelimiterTo(), ""), pk, null));
}
builder.append(head).append("VALUES ");
insertValue(builder, row, keys);
}else{
String pk = null;
Object pv = null;
if(AdapterProxy.hasAdapter()){
pk = AdapterProxy.primaryKey(obj.getClass());
pv = AdapterProxy.primaryValue(obj);
AdapterProxy.createPrimaryValue(obj);
}else{
pk = DataRow.DEFAULT_PRIMARY_KEY;
pv = BeanUtil.getFieldValue(obj, pk);
if(null != primaryCreater && null == pv){
pv = primaryCreater.createPrimary(type(),dest.replace(getDelimiterFr(), "").replace(getDelimiterTo(), ""), pk, null);
BeanUtil.setFieldValue(obj, pk, pv);
}
}
builder.append(head).append("VALUES ");
insertValue(builder, obj, keys);
}
builder.append(" \n");
}
builder.append("SELECT 1 FROM DUAL");
}
/* *****************************************************************************************************************
*
* metadata
*
* =================================================================================================================
* table
* master table
* partition table
* column
* tag
* index
* constraint
*
******************************************************************************************************************/
/* *****************************************************************************************************************
* table
* -----------------------------------------------------------------------------------------------------------------
* public List<String> buildQueryTableRunSQL(String catalog, String schema, String pattern, String types);
* public LinkedHashMap<String, Table> tables(int index, boolean create, String catalog, String schema, LinkedHashMap<String, Table> tables, DataSet set) throws Exception;
* public LinkedHashMap<String, Table> tables(boolean create, String catalog, String schema, LinkedHashMap<String, Table> tables, ResultSet set) throws Exception;
******************************************************************************************************************/
/**
* Query tables
* @param catalog catalog
* @param schema schema
* @param pattern pattern
* @param types types
* @return String
*/
@Override
public List<String> buildQueryTableRunSQL(String catalog, String schema, String pattern, String types) throws Exception{
return super.buildQueryTableRunSQL(catalog, schema, pattern, types);
}
@Override
public LinkedHashMap<String, Table> tables(int index, boolean create, String catalog, String schema, LinkedHashMap<String, Table> tables, DataSet set) throws Exception{
return super.tables(index, create, catalog, schema, tables, set);
}
@Override
public LinkedHashMap<String, Table> tables(boolean create, String catalog, String schema, LinkedHashMap<String, Table> tables, ResultSet set) throws Exception{
return super.tables(create, catalog, schema, tables, set);
}
/* *****************************************************************************************************************
* master table
* -----------------------------------------------------------------------------------------------------------------
* public List<String> buildQueryMasterTableRunSQL(String catalog, String schema, String pattern, String types);
* public LinkedHashMap<String, MasterTable> mtables(int index, boolean create, String catalog, String schema, LinkedHashMap<String, MasterTable> tables, DataSet set) throws Exception;
* public LinkedHashMap<String, MasterTable> mtables(boolean create, String catalog, String schema, LinkedHashMap<String, MasterTable> tables, ResultSet set) throws Exception;
******************************************************************************************************************/
/**
* Query master tables
* @param catalog catalog
* @param schema schema
* @param pattern pattern
* @param types types
* @return String
*/
@Override
public List<String> buildQueryMasterTableRunSQL(String catalog, String schema, String pattern, String types) throws Exception{
return super.buildQueryMasterTableRunSQL(catalog, schema, pattern, types);
}
/**
* Extract table structures from a JDBC result set
* ResultSet set = con.getMetaData().getTables()
* @param create whether to create entries that were not found in the previous step
* @param catalog catalog
* @param schema schema
* @param set query result
* @return List
*/
@Override
public LinkedHashMap<String, MasterTable> mtables(boolean create, String catalog, String schema, LinkedHashMap<String, MasterTable> tables, ResultSet set) throws Exception{
return super.mtables(create, catalog, schema, tables, set);
}
/**
* Extract table structures from the result of the SQL generated in the previous step
* @param index index of the SQL statement
* @param create whether to create entries that were not found in the previous step
* @param catalog catalog
* @param schema schema
* @param tables result of the previous step
* @param set set
* @return tables
* @throws Exception
*/
@Override
public LinkedHashMap<String, MasterTable> mtables(int index, boolean create, String catalog, String schema, LinkedHashMap<String, MasterTable> tables, DataSet set) throws Exception{
return super.mtables(index, create, catalog, schema, tables, set);
}
/* *****************************************************************************************************************
* partition table
* -----------------------------------------------------------------------------------------------------------------
* public List<String> buildQueryPartitionTableRunSQL(String catalog, String schema, String pattern, String types);
* public List<String> buildQueryPartitionTableRunSQL(MasterTable master);
* public LinkedHashMap<String, PartitionTable> ptables(int index, boolean create, MasterTable master, String catalog, String schema, LinkedHashMap<String, PartitionTable> tables, DataSet set) throws Exception;
* public LinkedHashMap<String, PartitionTable> ptables(boolean create, String catalog, MasterTable master, String schema, LinkedHashMap<String, PartitionTable> tables, ResultSet set) throws Exception;
******************************************************************************************************************/
/**
* Query partition tables
* @param catalog catalog
* @param schema schema
* @param pattern pattern
* @param types types
* @return String
*/
@Override
public List<String> buildQueryPartitionTableRunSQL(String catalog, String schema, String pattern, String types) throws Exception{
return super.buildQueryPartitionTableRunSQL(catalog, schema, pattern, types);
}
@Override
public List<String> buildQueryPartitionTableRunSQL(MasterTable master) throws Exception{
return super.buildQueryPartitionTableRunSQL(master);
}
/**
* Build Table objects from the query result set
* @param index index of the SQL statement, matching the order returned by buildQueryMasterTableRunSQL
* @param create whether to create entries that were not found in the previous step
* @param master master table
* @param catalog catalog
* @param schema schema
* @param tables result of the previous step
* @param set set
* @return tables
* @throws Exception
*/
@Override
public LinkedHashMap<String, PartitionTable> ptables(int index, boolean create, MasterTable master, String catalog, String schema, LinkedHashMap<String, PartitionTable> tables, DataSet set) throws Exception{
return super.ptables(index, create, master, catalog, schema, tables, set);
}
/**
* Build from JDBC metadata
* @param create whether to create entries that were not found in the previous step
* @param master master table
* @param catalog catalog
* @param schema schema
* @param tables tables
* @param set set
* @return tables
* @throws Exception
*/
@Override
public LinkedHashMap<String, PartitionTable> ptables(boolean create, String catalog, MasterTable master, String schema, LinkedHashMap<String, PartitionTable> tables, ResultSet set) throws Exception{
return super.ptables(create, catalog, master, schema, tables, set);
}
/* *****************************************************************************************************************
* column
* -----------------------------------------------------------------------------------------------------------------
* public List<String> buildQueryColumnRunSQL(Table table, boolean metadata);
* public LinkedHashMap<String, Column> columns(int index, boolean create, Table table, LinkedHashMap<String, Column> columns, DataSet set) throws Exception;
* public LinkedHashMap<String, Column> columns(boolean create, Table table, LinkedHashMap<String, Column> columns, SqlRowSet set) throws Exception;
* public LinkedHashMap<String, Column> columns(boolean create, Table table, LinkedHashMap<String, Column> columns, ResultSet set) throws Exception;
******************************************************************************************************************/
/**
* Query the columns of a table
* @param table table
* @return sql
*/
@Override
public List<String> buildQueryColumnRunSQL(Table table, boolean metadata) throws Exception{
return super.buildQueryColumnRunSQL(table, metadata);
}
/**
*
* @param index index of the SQL statement, matching the order returned by buildQueryColumnRunSQL
* @param create whether to create entries that were not found in the previous step
* @param table table
* @param columns result of the previous step
* @param set set
* @return columns columns
* @throws Exception
*/
@Override
public LinkedHashMap<String, Column> columns(int index, boolean create, Table table, LinkedHashMap<String, Column> columns, DataSet set) throws Exception{
return super.columns(index, create, table, columns, set);
}
@Override
public LinkedHashMap<String, Column> columns(boolean create, Table table, LinkedHashMap<String, Column> columns, SqlRowSet set) throws Exception{
return super.columns(create, table, columns, set);
}
@Override
public LinkedHashMap<String, Column> columns(boolean create, Table table, LinkedHashMap<String, Column> columns, ResultSet set) throws Exception{
return super.columns(create, table, columns, set);
}
/* *****************************************************************************************************************
* tag
* -----------------------------------------------------------------------------------------------------------------
* public List<String> buildQueryTagRunSQL(Table table, boolean metadata);
* public LinkedHashMap<String, Tag> tags(int index, boolean create, Table table, LinkedHashMap<String, Tag> tags, DataSet set) throws Exception;
* public LinkedHashMap<String, Tag> tags(boolean create, Table table, LinkedHashMap<String, Tag> tags, SqlRowSet set) throws Exception;
* public LinkedHashMap<String, Tag> tags(boolean create, Table table, LinkedHashMap<String, Tag> tags, ResultSet set) throws Exception;
******************************************************************************************************************/
/**
*
* @param table table
* @param metadata whether to read from JDBC metadata or query the system tables
* @return sqls
*/
@Override
public List<String> buildQueryTagRunSQL(Table table, boolean metadata) throws Exception{
return super.buildQueryTagRunSQL(table, metadata);
}
/**
* Build Tag objects from the query result set
* @param index index of the query SQL statement, matching the order returned by buildQueryTagRunSQL
* @param create whether to create entries that were not found in the previous step
* @param table table
* @param tags tags
* @param set set
* @return tags tags
* @throws Exception
*/
@Override
public LinkedHashMap<String, Tag> tags(int index, boolean create, Table table, LinkedHashMap<String, Tag> tags, DataSet set) throws Exception{
return super.tags(index, create, table, tags, set);
}
@Override
public LinkedHashMap<String, Tag> tags(boolean create, Table table, LinkedHashMap<String, Tag> tags, SqlRowSet set) throws Exception{
return super.tags(create, table, tags, set);
}
@Override
public LinkedHashMap<String, Tag> tags(boolean create, Table table, LinkedHashMap<String, Tag> tags, ResultSet set) throws Exception{
return super.tags(create, table, tags, set);
}
/* *****************************************************************************************************************
* index
* -----------------------------------------------------------------------------------------------------------------
* public List<String> buildQueryIndexRunSQL(Table table, boolean metadata);
* public LinkedHashMap<String, Index> indexs(int index, boolean create, Table table, LinkedHashMap<String, Index> indexs, DataSet set) throws Exception;
* public LinkedHashMap<String, Index> indexs(boolean create, Table table, LinkedHashMap<String, Index> indexs, SqlRowSet set) throws Exception;
* public LinkedHashMap<String, Index> indexs(boolean create, Table table, LinkedHashMap<String, Index> indexs, ResultSet set) throws Exception;
******************************************************************************************************************/
/**
* Query the indexes of a table
* @param table table
* @param metadata whether to read from JDBC metadata or query the system tables
* @return sql
*/
@Override
public List<String> buildQueryIndexRunSQL(Table table, boolean metadata) throws Exception{
return super.buildQueryIndexRunSQL(table, metadata);
}
/**
*
* @param index index of the query SQL statement, matching the order returned by buildQueryIndexRunSQL
* @param create whether to create entries that were not found in the previous step
* @param table table
* @param indexs indexs
* @param set set
* @return indexs indexs
* @throws Exception
*/
@Override
public LinkedHashMap<String, Index> indexs(int index, boolean create, Table table, LinkedHashMap<String, Index> indexs, DataSet set) throws Exception{
return super.indexs(index, create, table, indexs, set);
}
@Override
public LinkedHashMap<String, Index> indexs(boolean create, Table table, LinkedHashMap<String, Index> indexs, SqlRowSet set) throws Exception{
return super.indexs(create, table, indexs, set);
}
@Override
public LinkedHashMap<String, Index> indexs(boolean create, Table table, LinkedHashMap<String, Index> indexs, ResultSet set) throws Exception{
return super.indexs(create, table, indexs, set);
}
/* *****************************************************************************************************************
* constraint
* -----------------------------------------------------------------------------------------------------------------
* public List<String> buildQueryConstraintRunSQL(Table table, boolean metadata);
* public LinkedHashMap<String, Constraint> constraints(int constraint,boolean create, Table table, LinkedHashMap<String, Constraint> constraints, DataSet set) throws Exception;
* public LinkedHashMap<String, Constraint> constraints(boolean create, Table table, LinkedHashMap<String, Constraint> constraints, SqlRowSet set) throws Exception;
* public LinkedHashMap<String, Constraint> constraints(boolean create, Table table, LinkedHashMap<String, Constraint> constraints, ResultSet set) throws Exception;
******************************************************************************************************************/
/**
* Query the constraints of a table
* @param table table
* @param metadata whether to read from JDBC metadata or query the system tables
* @return sqls
*/
@Override
public List<String> buildQueryConstraintRunSQL(Table table, boolean metadata) throws Exception{
return super.buildQueryConstraintRunSQL(table, metadata);
}
/**
* Build Constraint objects from the query result set
* @param index index of the query SQL statement, matching the order returned by buildQueryConstraintRunSQL
* @param create whether to create entries that were not found in the previous step
* @param table table
* @param constraints result of the previous step
* @param set set
* @return constraints constraints
* @throws Exception
*/
@Override
public LinkedHashMap<String, Constraint> constraints(int index , boolean create, Table table, LinkedHashMap<String, Constraint> constraints, DataSet set) throws Exception{
return super.constraints(index, create, table, constraints, set);
}
@Override
public LinkedHashMap<String, Constraint> constraints(boolean create, Table table, LinkedHashMap<String, Constraint> constraints, SqlRowSet set) throws Exception{
return super.constraints(create, table, constraints, set);
}
@Override
public LinkedHashMap<String, Constraint> constraints(boolean create, Table table, LinkedHashMap<String, Constraint> constraints, ResultSet set) throws Exception{
return super.constraints(create, table, constraints, set);
}
/* *****************************************************************************************************************
*
* DDL
*
* =================================================================================================================
* table
* master table
* partition table
* column
* tag
* index
* constraint
*
******************************************************************************************************************/
/* *****************************************************************************************************************
* table
* -----------------------------------------------------------------------------------------------------------------
* public String buildCreateRunSQL(Table table);
* public String buildAlterRunSQL(Table table);
* public String buildRenameRunSQL(Table table);
* public String buildChangeCommentRunSQL(Table table);
* public String buildDropRunSQL(Table table);
* public StringBuilder checkTableExists(StringBuilder builder, boolean exists)
* public StringBuilder primary(StringBuilder builder, Table table)
* public StringBuilder comment(StringBuilder builder, Table table)
* public StringBuilder name(StringBuilder builder, Table table)
******************************************************************************************************************/
@Override
public String buildCreateRunSQL(Table table) throws Exception{
return super.buildCreateRunSQL(table);
}
@Override
public String buildAlterRunSQL(Table table) throws Exception{
return super.buildAlterRunSQL(table);
}
/**
* Rename a table
* ALTER TABLE A RENAME TO B;
* @param table table
* @return String
*/
@Override
public String buildRenameRunSQL(Table table) throws Exception {
StringBuilder builder = new StringBuilder();
builder.append("ALTER TABLE ");
name(builder, table);
builder.append(" RENAME TO ");
name(builder, table.getUpdate());
return builder.toString();
}
/**
* Change the table comment
* COMMENT ON TABLE T IS 'ABC';
* @param table table
* @return String
*/
@Override
public String buildChangeCommentRunSQL(Table table) throws Exception{
String comment = table.getComment();
if(BasicUtil.isNotEmpty(comment)) {
StringBuilder builder = new StringBuilder();
builder.append("COMMENT ON TABLE ");
name(builder, table);
builder.append(" IS '").append(comment).append("'");
return builder.toString();
}else{
return null;
}
}
/**
* Drop a table
* @param table table
* @return String
*/
@Override
public String buildDropRunSQL(Table table) throws Exception{
return super.buildDropRunSQL(table);
}
@Override
public StringBuilder checkTableExists(StringBuilder builder, boolean exists){
return builder;
}
/**
* Primary key
* CONSTRAINT PK_BS_DEV PRIMARY KEY (ID ASC)
* @param builder builder
* @param table table
* @return builder
*/
@Override
public StringBuilder primary(StringBuilder builder, Table table){
List<Column> pks = table.primarys();
if(pks.size()>0){
builder.append(",CONSTRAINT ").append("PK_").append(table.getName()).append(" PRIMARY KEY (");
int idx = 0;
for(Column pk:pks){
if(idx > 0){
builder.append(",");
}
SQLUtil.delimiter(builder, pk.getName(), getDelimiterFr(), getDelimiterTo());
String order = pk.getOrder();
if(null != order){
builder.append(" ").append(order);
}
}
builder.append(")");
}
return builder;
}
/**
* Table comment
*
* @param builder builder
* @param table table
* @return builder
*/
@Override
public StringBuilder comment(StringBuilder builder, Table table){
return super.comment(builder, table);
}
/**
* Build the fully qualified table name
* @param builder builder
* @param table table
* @return StringBuilder
*/
@Override
public StringBuilder name(StringBuilder builder, Table table){
return super.name(builder, table);
}
/* *****************************************************************************************************************
* master table
* -----------------------------------------------------------------------------------------------------------------
* public String buildCreateRunSQL(MasterTable table);
* public String buildAlterRunSQL(MasterTable table);
* public String buildDropRunSQL(MasterTable table);
* public String buildRenameRunSQL(MasterTable table);
* public String buildChangeCommentRunSQL(MasterTable table);
******************************************************************************************************************/
/**
	 * Create a master table
* @param table table
* @return String
*/
@Override
public String buildCreateRunSQL(MasterTable table) throws Exception{
return super.buildCreateRunSQL(table);
}
@Override
public String buildAlterRunSQL(MasterTable table) throws Exception{
return super.buildAlterRunSQL(table);
}
@Override
public String buildDropRunSQL(MasterTable table) throws Exception{
return super.buildDropRunSQL(table);
}
@Override
public String buildRenameRunSQL(MasterTable table) throws Exception{
return super.buildRenameRunSQL(table);
}
@Override
public String buildChangeCommentRunSQL(MasterTable table) throws Exception{
return super.buildChangeCommentRunSQL(table);
}
/* *****************************************************************************************************************
* partition table
* -----------------------------------------------------------------------------------------------------------------
* public String buildCreateRunSQL(PartitionTable table);
* public String buildAlterRunSQL(PartitionTable table);
* public String buildDropRunSQL(PartitionTable table);
* public String buildRenameRunSQL(PartitionTable table);
* public String buildChangeCommentRunSQL(PartitionTable table);
******************************************************************************************************************/
/**
	 * Create a partition table
* @param table table
* @return String
*/
@Override
public String buildCreateRunSQL(PartitionTable table) throws Exception{
return super.buildCreateRunSQL(table);
}
@Override
public String buildAlterRunSQL(PartitionTable table) throws Exception{
return super.buildAlterRunSQL(table);
}
@Override
public String buildDropRunSQL(PartitionTable table) throws Exception{
return super.buildDropRunSQL(table);
}
@Override
public String buildRenameRunSQL(PartitionTable table) throws Exception{
return super.buildRenameRunSQL(table);
}
@Override
public String buildChangeCommentRunSQL(PartitionTable table) throws Exception{
return super.buildChangeCommentRunSQL(table);
}
/* *****************************************************************************************************************
* column
* -----------------------------------------------------------------------------------------------------------------
* public String alterColumnKeyword()
* public String buildAddRunSQL(Column column)
* public List<String> buildAlterRunSQL(Column column)
* public String buildDropRunSQL(Column column)
* public String buildRenameRunSQL(Column column)
* public List<String> buildChangeTypeRunSQL(Column column)
* public String buildChangeDefaultRunSQL(Column column)
* public String buildChangeNullableRunSQL(Column column)
* public String buildChangeCommentRunSQL(Column column)
* public StringBuilder define(StringBuilder builder, Column column)
* public StringBuilder type(StringBuilder builder, Column column)
* public StringBuilder nullable(StringBuilder builder, Column column)
* public StringBuilder charset(StringBuilder builder, Column column)
* public StringBuilder defaultValue(StringBuilder builder, Column column)
* public StringBuilder increment(StringBuilder builder, Column column)
* public StringBuilder onupdate(StringBuilder builder, Column column)
* public StringBuilder position(StringBuilder builder, Column column)
* public StringBuilder comment(StringBuilder builder, Column column)
******************************************************************************************************************/
@Override
public String alterColumnKeyword(){
return "ALTER";
}
/**
	 * Add a column
	 * ALTER TABLE HR_USER ADD UPT_TIME datetime CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci DEFAULT NULL ON UPDATE CURRENT_TIMESTAMP comment 'update time' AFTER ID;
* @param column column
* @return String
*/
@Override
public String buildAddRunSQL(Column column) throws Exception{
column.setCreater(this);
StringBuilder builder = new StringBuilder();
Table table = column.getTable();
builder.append("ALTER TABLE ");
name(builder, table);
//Column update = column.getUpdate();
//if(null == update){
			// add the column
builder.append(" ADD ");
SQLUtil.delimiter(builder, column.getName(), getDelimiterFr(), getDelimiterTo()).append(" ");
define(builder, column);
//}
return builder.toString();
}
/**
	 * Alter a column ALTER TABLE HR_USER CHANGE UPT_TIME UPT_TIME datetime DEFAULT NULL ON UPDATE CURRENT_TIMESTAMP comment 'update time' AFTER ID;
* @param column column
* @return List
*/
@Override
public List<String> buildAlterRunSQL(Column column) throws Exception{
return super.buildAlterRunSQL(column);
}
/**
	 * Drop a column
* ALTER TABLE HR_USER DROP COLUMN NAME;
* @param column column
* @return String
*/
@Override
public String buildDropRunSQL(Column column) throws Exception{
return super.buildDropRunSQL(column);
}
/**
	 * Rename a column
	 *
	 * ALTER TABLE table_name RENAME COLUMN old_name TO new_name
* @param column column
* @return String
*/
@Override
public String buildRenameRunSQL(Column column) throws Exception{
StringBuilder builder = new StringBuilder();
builder.append("ALTER TABLE ");
name(builder, column.getTable());
builder.append(" RENAME COLUMN ");
SQLUtil.delimiter(builder, column.getName(), getDelimiterFr(), getDelimiterTo());
builder.append(" TO ");
SQLUtil.delimiter(builder, column.getNewName(), getDelimiterFr(), getDelimiterTo());
return builder.toString();
}
/**
	 * Change the column data type
* 1.ADD NEW COLUMN
* 2.FORMAT VALUE
* 3.MOVE VALUE
* alter table tb modify (name nvarchar2(20))
* @param column column
* @return sql
*/
public List<String> buildChangeTypeRunSQL(Column column) throws Exception{
List<String> sqls = new ArrayList<>();
Column update = column.getUpdate();
String name = column.getName();
String type = column.getTypeName();
if(type.contains("(")){
type = type.substring(0,type.indexOf("("));
}
String uname = update.getName();
String utype = update.getTypeName();
if(uname.endsWith("_TMP_UPDATE_TYPE")){
sqls.add(buildDropRunSQL(update));
}else {
if (utype != null && utype.contains("(")) {
utype = utype.substring(0, utype.indexOf("("));
}
if (!type.equals(utype)) {
String tmp_name = column.getName() + "_TMP_UPDATE_TYPE";
update.setName(tmp_name);
String rename = buildRenameRunSQL(column);
sqls.add(rename);
update.setName(uname);
String add = buildAddRunSQL(update);
sqls.add(add);
StringBuilder builder = new StringBuilder();
builder.append("UPDATE ");
name(builder, column.getTable());
builder.append(" SET ");
SQLUtil.delimiter(builder, uname, getDelimiterFr(), getDelimiterTo());
builder.append(" = ");
SQLUtil.delimiter(builder, tmp_name, getDelimiterFr(), getDelimiterTo());
sqls.add(builder.toString());
column.setName(tmp_name);
String drop = buildDropRunSQL(column);
sqls.add(drop);
column.setName(name);
update.setName(tmp_name);
} else {
StringBuilder builder = new StringBuilder();
builder.append("ALTER TABLE ");
name(builder, column.getTable());
builder.append(" MODIFY(");
SQLUtil.delimiter(builder, column.getName(), getDelimiterFr(), getDelimiterTo()).append(" ");
type(builder, column.getUpdate());
builder.append(")");
sqls.add(builder.toString());
}
}
//column.setName(name);
return sqls;
}
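	/* Illustrative sketch only (hypothetical table and column; the exact statements depend on the
	 * superclass renderers and on the configured delimiters): changing column NAME on table T_USER
	 * to a different type makes buildChangeTypeRunSQL above emit a sequence roughly like
	 *
	 * ALTER TABLE T_USER RENAME COLUMN NAME TO NAME_TMP_UPDATE_TYPE
	 * ALTER TABLE T_USER ADD NAME <new type>
	 * UPDATE T_USER SET NAME = NAME_TMP_UPDATE_TYPE
	 * ALTER TABLE T_USER DROP COLUMN NAME_TMP_UPDATE_TYPE
	 *
	 * i.e. the old column is renamed aside, a new column with the target type is added, the values are
	 * copied across, and the temporary column is dropped.
	 */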
/**
	 * Change the default value
* ALTER TABLE MY_TEST_TABLE MODIFY B DEFAULT 2
* @param column column
* @return String
*/
@Override
public String buildChangeDefaultRunSQL(Column column) throws Exception{
Object def = column.getDefaultValue();
StringBuilder builder = new StringBuilder();
builder.append("ALTER TABLE ");
name(builder, column.getTable()).append(" MODIFY ");
SQLUtil.delimiter(builder, column.getName(), getDelimiterFr(), getDelimiterTo());
builder.append(" DEFAULT ");
if(null != def){
format(builder, def);
}else{
builder.append("NULL");
}
return builder.toString();
}
/**
	 * Change the NOT NULL constraint
* ALTER TABLE T MODIFY C NOT NULL ;
* @param column column
* @return String
*/
@Override
public String buildChangeNullableRunSQL(Column column) throws Exception{
int nullable = column.isNullable();
int uNullable = column.getUpdate().isNullable();
if(nullable != -1 && uNullable != -1){
if(nullable == uNullable){
return null;
}
StringBuilder builder = new StringBuilder();
builder.append("ALTER TABLE ");
name(builder, column.getTable()).append(" MODIFY ");
SQLUtil.delimiter(builder, column.getName(), getDelimiterFr(), getDelimiterTo());
if(uNullable == 0){
builder.append(" NOT ");
}
builder.append("NULL");
return builder.toString();
}
return null;
}
/**
	 * Change the column comment
* COMMENT ON COLUMN T.ID IS 'ABC'
* @param column column
* @return String
*/
@Override
public String buildChangeCommentRunSQL(Column column) throws Exception{
String comment = column.getComment();
if(BasicUtil.isNotEmpty(comment)) {
StringBuilder builder = new StringBuilder();
builder.append("COMMENT ON COLUMN ");
name(builder, column.getTable()).append(".");
SQLUtil.delimiter(builder, column.getName(), getDelimiterFr(), getDelimiterTo());
builder.append(" IS '").append(comment).append("'");
return builder.toString();
}else{
return null;
}
}
/**
	 * Column definition
* @param builder builder
* @param column column
* @return builder
*/
@Override
public StringBuilder define(StringBuilder builder, Column column){
return super.define(builder, column);
}
/**
	 * Data type clause
* @param builder builder
* @param column column
* @return builder
*/
@Override
public StringBuilder type(StringBuilder builder, Column column){
return super.type(builder, column);
}
/**
	 * Nullable clause
* @param builder builder
* @param column column
* @return builder
*/
@Override
public StringBuilder nullable(StringBuilder builder, Column column){
return super.nullable(builder, column);
}
/**
	 * Character set clause
* @param builder builder
* @param column column
* @return builder
*/
@Override
public StringBuilder charset(StringBuilder builder, Column column){
return super.charset(builder, column);
}
/**
	 * Default value clause
* @param builder builder
* @param column column
* @return builder
*/
@Override
public StringBuilder defaultValue(StringBuilder builder, Column column){
return super.defaultValue(builder, column);
}
/**
	 * Auto-increment clause
* @param builder builder
* @param column column
* @return builder
*/
@Override
public StringBuilder increment(StringBuilder builder, Column column){
return super.increment(builder, column);
}
/**
	 * ON UPDATE clause
* @param builder builder
* @param column column
* @return builder
*/
@Override
public StringBuilder onupdate(StringBuilder builder, Column column){
return super.onupdate(builder, column);
}
/**
	 * Column position clause
*
* @param builder builder
* @param column column
* @return builder
*/
@Override
public StringBuilder position(StringBuilder builder, Column column){
return super.position(builder, column);
}
/**
	 * Column comment clause
*
* @param builder builder
* @param column column
* @return builder
*/
@Override
public StringBuilder comment(StringBuilder builder, Column column){
return super.comment(builder, column);
}
/* *****************************************************************************************************************
* tag
* -----------------------------------------------------------------------------------------------------------------
* public String buildAddRunSQL(Tag tag);
* public List<String> buildAlterRunSQL(Tag tag);
* public String buildDropRunSQL(Tag tag);
* public String buildRenameRunSQL(Tag tag);
* public String buildChangeDefaultRunSQL(Tag tag);
* public String buildChangeNullableRunSQL(Tag tag);
* public String buildChangeCommentRunSQL(Tag tag);
* public List<String> buildChangeTypeRunSQL(Tag tag);
******************************************************************************************************************/
/**
	 * Add a tag
	 * ALTER TABLE HR_USER ADD TAG UPT_TIME datetime CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci DEFAULT NULL ON UPDATE CURRENT_TIMESTAMP comment 'update time' AFTER ID;
* @param tag tag
* @return String
*/
@Override
public String buildAddRunSQL(Tag tag) throws Exception{
return super.buildAddRunSQL(tag);
}
/**
	 * Alter a tag ALTER TABLE HR_USER CHANGE UPT_TIME UPT_TIME datetime DEFAULT NULL ON UPDATE CURRENT_TIMESTAMP comment 'update time' AFTER ID;
* @param tag tag
* @return List
*/
@Override
public List<String> buildAlterRunSQL(Tag tag) throws Exception{
return super.buildAlterRunSQL(tag);
}
/**
	 * Drop a tag
* ALTER TABLE HR_USER DROP TAG NAME;
* @param tag tag
* @return String
*/
@Override
public String buildDropRunSQL(Tag tag) throws Exception{
return super.buildDropRunSQL(tag);
}
/**
	 * Rename a tag
	 *
	 * Usually not called directly; invoked internally by buildAlterRunSQL when needed
* @param tag tag
* @return String
*/
@Override
public String buildRenameRunSQL(Tag tag) throws Exception{
return super.buildRenameRunSQL(tag);
}
/**
	 * Change the default value
	 *
	 * Usually not called directly; invoked internally by buildAlterRunSQL when needed
* @param tag tag
* @return String
*/
@Override
public String buildChangeDefaultRunSQL(Tag tag) throws Exception{
return super.buildChangeDefaultRunSQL(tag);
}
/**
	 * Change the NOT NULL constraint
	 *
	 * Usually not called directly; invoked internally by buildAlterRunSQL when needed
* @param tag tag
* @return String
*/
@Override
public String buildChangeNullableRunSQL(Tag tag) throws Exception{
return super.buildChangeNullableRunSQL(tag);
}
/**
	 * Change the comment
	 *
	 * Usually not called directly; invoked internally by buildAlterRunSQL when needed
* @param tag tag
* @return String
*/
@Override
public String buildChangeCommentRunSQL(Tag tag) throws Exception{
return super.buildChangeCommentRunSQL(tag);
}
/**
	 * Change the data type
	 *
	 * Usually not called directly; invoked internally by buildAlterRunSQL when needed
* @param tag tag
* @return sql
*/
@Override
public List<String> buildChangeTypeRunSQL(Tag tag) throws Exception{
return super.buildChangeTypeRunSQL(tag);
}
/* *****************************************************************************************************************
* index
* -----------------------------------------------------------------------------------------------------------------
* public String buildAddRunSQL(Index index) throws Exception
* public List<String> buildAlterRunSQL(Index index) throws Exception
* public String buildDropRunSQL(Index index) throws Exception
* public String buildRenameRunSQL(Index index) throws Exception
******************************************************************************************************************/
/**
	 * Add an index
* @param index index
* @return String
*/
@Override
public String buildAddRunSQL(Index index) throws Exception{
return super.buildAddRunSQL(index);
}
/**
	 * Alter an index
	 * may generate multiple SQL statements
* @param index index
* @return List
*/
@Override
public List<String> buildAlterRunSQL(Index index) throws Exception{
return super.buildAlterRunSQL(index);
}
/**
	 * Drop an index
* @param index index
* @return String
*/
@Override
public String buildDropRunSQL(Index index) throws Exception{
return super.buildDropRunSQL(index);
}
/**
	 * Rename an index
	 * Usually not called directly; invoked internally by buildAlterRunSQL when needed
* @param index index
* @return String
*/
@Override
public String buildRenameRunSQL(Index index) throws Exception{
return super.buildRenameRunSQL(index);
}
/* *****************************************************************************************************************
* constraint
* -----------------------------------------------------------------------------------------------------------------
* public String buildAddRunSQL(Constraint constraint) throws Exception
* public List<String> buildAlterRunSQL(Constraint constraint) throws Exception
* public String buildDropRunSQL(Constraint constraint) throws Exception
* public String buildRenameRunSQL(Constraint index) throws Exception
******************************************************************************************************************/
/**
	 * Add a constraint
* @param constraint constraint
* @return String
*/
@Override
public String buildAddRunSQL(Constraint constraint) throws Exception{
return super.buildAddRunSQL(constraint);
}
/**
	 * Alter a constraint
	 * may generate multiple SQL statements
* @param constraint constraint
* @return List
*/
@Override
public List<String> buildAlterRunSQL(Constraint constraint) throws Exception{
return super.buildAlterRunSQL(constraint);
}
/**
	 * Drop a constraint
* @param constraint constraint
* @return String
*/
@Override
public String buildDropRunSQL(Constraint constraint) throws Exception{
return super.buildDropRunSQL(constraint);
}
/**
	 * Rename a constraint
	 * Usually not called directly; invoked internally by buildAlterRunSQL when needed
* @param index index
* @return String
*/
@Override
public String buildRenameRunSQL(Constraint index) throws Exception{
return super.buildRenameRunSQL(index);
}
/* *****************************************************************************************************************
*
* common
*------------------------------------------------------------------------------------------------------------------
* public boolean isBooleanColumn(Column column)
* public boolean isNumberColumn(Column column)
* public boolean isCharColumn(Column column)
* public String buildInValue(SQL_BUILD_IN_VALUE value)
* public String type2type(String type)
* public String type2class(String type)
* public void value(StringBuilder builder, Object obj, String key)
******************************************************************************************************************/
@Override
public boolean isBooleanColumn(Column column) {
return super.isBooleanColumn(column);
}
/**
	 * Whether the column is numeric
* @param column column
* @return boolean
*/
@Override
public boolean isNumberColumn(Column column){
return super.isNumberColumn(column);
}
@Override
public boolean isCharColumn(Column column) {
return super.isCharColumn(column);
}
/**
	 * Built-in function value
* @param value SQL_BUILD_IN_VALUE
* @return String
*/
public String buildInValue(SQL_BUILD_IN_VALUE value){
if(value == SQL_BUILD_IN_VALUE.CURRENT_TIME){
return "sysdate";
}
return null;
}
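	/**
	 * Map a generic type name to this dialect's equivalent,
	 * e.g. type2type("datetime") returns "TIMESTAMP" and type2type("double") returns "DECIMAL";
	 * any other name falls through to the superclass mapping.
	 */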
@Override
public String type2type(String type){
if(null != type){
type = type.toUpperCase();
if("DATETIME".equals(type)){
return "TIMESTAMP";
}
if("DOUBLE".equals(type)){
return "DECIMAL";
}
}
return super.type2type(type);
}
@Override
public String type2class(String type){
return super.type2class(type);
}
@Override
public void value(StringBuilder builder, Object obj, String key){
Object value = null;
if(obj instanceof DataRow){
value = ((DataRow)obj).get(key);
}else if(AdapterProxy.hasAdapter()){
Field field = AdapterProxy.field(obj.getClass(), key);
value = BeanUtil.getFieldValue(obj, field);
}else{
value = BeanUtil.getFieldValue(obj, key);
}
if(null == value || "NULL".equals(value)){
builder.append("null");
}else if(value instanceof String){
format(builder, value);
}else if(value instanceof Timestamp
|| value instanceof java.util.Date
|| value instanceof java.sql.Date
|| value instanceof LocalDate
|| value instanceof LocalTime
|| value instanceof LocalDateTime
){
Date date = DateUtil.parse(value);
builder.append("TO_DATE('").append(DateUtil.format(date,DateUtil.FORMAT_DATE_TIME)).append("','yyyy-mm-dd hh24:mi:ss')");
}else if(value instanceof Date){
builder.append("TO_DATE('").append(DateUtil.format((Date)value,DateUtil.FORMAT_DATE_TIME)).append("','yyyy-mm-dd hh24:mi:ss')");
}else if(value instanceof Number || value instanceof Boolean){
builder.append(value);
}else{
builder.append(value);
}
}
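	/* Illustrative note (assuming DateUtil.FORMAT_DATE_TIME is "yyyy-MM-dd HH:mm:ss"): a java.util.Date
	 * of 2023-01-01 12:30:00 is appended by value() above as
	 * TO_DATE('2023-01-01 12:30:00','yyyy-mm-dd hh24:mi:ss')
	 * so date/time literals carry an explicit format mask instead of depending on the session's NLS settings.
	 */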
}
|
anyline-jdbc-dialect/anyline-jdbc-oracle/src/main/java/org/anyline/jdbc/config/db/impl/oracle/SQLAdapterImpl.java
|
package org.anyline.jdbc.config.db.impl.oracle;
import org.anyline.dao.AnylineDao;
import org.anyline.entity.DataRow;
import org.anyline.entity.DataSet;
import org.anyline.entity.OrderStore;
import org.anyline.entity.PageNavi;
import org.anyline.jdbc.config.db.SQLAdapter;
import org.anyline.jdbc.config.db.impl.BasicSQLAdapter;
import org.anyline.jdbc.config.db.run.RunSQL;
import org.anyline.jdbc.entity.*;
import org.anyline.util.*;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.jdbc.support.rowset.SqlRowSet;
import org.springframework.stereotype.Repository;
import java.lang.reflect.Field;
import java.sql.ResultSet;
import java.sql.Timestamp;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.util.*;
@Repository("anyline.jdbc.sql.adapter.oracle")
public class SQLAdapterImpl extends BasicSQLAdapter implements SQLAdapter, InitializingBean {
@Autowired(required = false)
@Qualifier("anyline.dao")
protected AnylineDao dao;
public DB_TYPE type(){
return DB_TYPE.ORACLE;
}
@Value("${anyline.jdbc.delimiter.oracle:}")
private String delimiter;
@Override
public void afterPropertiesSet() throws Exception {
setDelimiter(delimiter);
}
public SQLAdapterImpl(){
delimiterFr = "";
delimiterTo = "";
}
/* *****************************************************************************************************************
*
* DML
*
* *****************************************************************************************************************/
@Override
public String parseFinalQueryTxt(RunSQL run){
StringBuilder builder = new StringBuilder();
String cols = run.getFetchColumns();
PageNavi navi = run.getPageNavi();
String sql = run.getBaseQueryTxt();
OrderStore orders = run.getOrderStore();
int first = 0;
int last = 0;
String order = "";
if(null != orders){
order = orders.getRunText(getDelimiterFr()+getDelimiterTo());
}
if(null != navi){
first = navi.getFirstRow();
last = navi.getLastRow();
}
if(null == navi){
builder.append(sql).append("\n").append(order);
}else{
			// pagination
builder.append("SELECT "+cols+" FROM( \n");
builder.append("SELECT TAB_I.* ,ROWNUM AS ROW_NUMBER \n");
builder.append("FROM( \n");
builder.append(sql);
builder.append("\n").append(order);
builder.append(") TAB_I \n");
builder.append(") TAB_O WHERE ROW_NUMBER >= "+(first+1)+" AND ROW_NUMBER <= "+(last+1));
}
return builder.toString();
}
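	/* Illustrative sketch only (hypothetical base query, assuming the PageNavi covers rows 0-9 and
	 * getFetchColumns() returns "*"): parseFinalQueryTxt above wraps the ordered base query twice so that
	 * ROWNUM is assigned after the ORDER BY and can then be filtered through the ROW_NUMBER alias:
	 *
	 * SELECT * FROM(
	 * SELECT TAB_I.* ,ROWNUM AS ROW_NUMBER
	 * FROM(
	 * SELECT ID,NAME FROM HR_USER ORDER BY ID
	 * ) TAB_I
	 * ) TAB_O WHERE ROW_NUMBER >= 1 AND ROW_NUMBER <= 10
	 */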
@Override
public String concat(String ... args){
return concatOr(args);
}
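	// concatOr(...) is expected to join the arguments with the || operator, which is how Oracle concatenates strings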
/**
	 * Batch insert
* INSERT ALL
* INTO T (ID, NAME) VALUES (1,'N1')
* INTO T (ID, NAME) VALUES (2,'N2')
* INTO T (ID, NAME) VALUES (3,'N3')
* SELECT 1 FROM DUAL
* @param builder builder
* @param dest dest
* @param keys keys
*/
@Override
public void createInsertsTxt(StringBuilder builder, String dest, DataSet set, List<String> keys){
builder.append("INSERT ALL \n");
String head = "INTO " + dest + " (";
int keySize = keys.size();
for(int i=0; i<keySize; i++){
String key = keys.get(i);
head += key;
if(i<keySize-1){
head += ", ";
}
}
head += ") ";
int dataSize = set.size();
for(int i=0; i<dataSize; i++){
DataRow row = set.getRow(i);
if(null == row){
continue;
}
if(row.hasPrimaryKeys() && null != primaryCreater && BasicUtil.isEmpty(row.getPrimaryValue())){
String pk = row.getPrimaryKey();
if(null == pk){
pk = ConfigTable.getString("DEFAULT_PRIMARY_KEY", "ID");
}
row.put(pk, primaryCreater.createPrimary(this.type(),dest.replace(getDelimiterFr(), "").replace(getDelimiterTo(), ""), pk, null));
}
builder.append(head).append("VALUES ");
insertValue(builder, row, keys);
builder.append(" \n");
}
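		// Oracle's INSERT ALL syntax requires a trailing subquery; SELECT 1 FROM DUAL serves as a no-op terminator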
builder.append("SELECT 1 FROM DUAL");
}
@Override
public void createInsertsTxt(StringBuilder builder, String dest, Collection list, List<String> keys){
if(list instanceof DataSet){
DataSet set = (DataSet) list;
createInsertsTxt(builder, dest, set, keys);
return;
}
builder.append("INSERT ALL \n");
String head = "INTO " + dest + " (";
int keySize = keys.size();
for(int i=0; i<keySize; i++){
String key = keys.get(i);
head += key;
if(i<keySize-1){
head += ", ";
}
}
head += ") ";
for(Object obj:list){
if(obj instanceof DataRow) {
DataRow row = (DataRow)obj;
if (row.hasPrimaryKeys() && null != primaryCreater && BasicUtil.isEmpty(row.getPrimaryValue())) {
String pk = row.getPrimaryKey();
if (null == pk) {
pk = ConfigTable.getString("DEFAULT_PRIMARY_KEY", "ID");
}
row.put(pk, primaryCreater.createPrimary(type(), dest.replace(getDelimiterFr(), "").replace(getDelimiterTo(), ""), pk, null));
}
builder.append(head).append("VALUES ");
insertValue(builder, row, keys);
}else{
String pk = null;
Object pv = null;
if(AdapterProxy.hasAdapter()){
pk = AdapterProxy.primaryKey(obj.getClass());
pv = AdapterProxy.primaryValue(obj);
AdapterProxy.createPrimaryValue(obj);
}else{
pk = DataRow.DEFAULT_PRIMARY_KEY;
pv = BeanUtil.getFieldValue(obj, pk);
if(null != primaryCreater && null == pv){
pv = primaryCreater.createPrimary(type(),dest.replace(getDelimiterFr(), "").replace(getDelimiterTo(), ""), pk, null);
BeanUtil.setFieldValue(obj, pk, pv);
}
}
builder.append(head).append("VALUES ");
insertValue(builder, obj, keys);
}
builder.append(" \n");
}
builder.append("SELECT 1 FROM DUAL");
}
/* *****************************************************************************************************************
*
* metadata
*
* =================================================================================================================
	 * table
	 * master table
	 * partition table
	 * column
	 * tag
	 * index
	 * constraint
*
******************************************************************************************************************/
/* *****************************************************************************************************************
* table
* -----------------------------------------------------------------------------------------------------------------
* public List<String> buildQueryTableRunSQL(String catalog, String schema, String pattern, String types);
* public LinkedHashMap<String, Table> tables(int index, boolean create, String catalog, String schema, LinkedHashMap<String, Table> tables, DataSet set) throws Exception;
* public LinkedHashMap<String, Table> tables(boolean create, String catalog, String schema, LinkedHashMap<String, Table> tables, ResultSet set) throws Exception;
******************************************************************************************************************/
/**
	 * Query tables
* @param catalog catalog
* @param schema schema
* @param pattern pattern
* @param types types
* @return String
*/
@Override
public List<String> buildQueryTableRunSQL(String catalog, String schema, String pattern, String types) throws Exception{
return super.buildQueryTableRunSQL(catalog, schema, pattern, types);
}
@Override
public LinkedHashMap<String, Table> tables(int index, boolean create, String catalog, String schema, LinkedHashMap<String, Table> tables, DataSet set) throws Exception{
return super.tables(index, create, catalog, schema, tables, set);
}
@Override
public LinkedHashMap<String, Table> tables(boolean create, String catalog, String schema, LinkedHashMap<String, Table> tables, ResultSet set) throws Exception{
return super.tables(create, catalog, schema, tables, set);
}
/* *****************************************************************************************************************
* master table
* -----------------------------------------------------------------------------------------------------------------
* public List<String> buildQueryMasterTableRunSQL(String catalog, String schema, String pattern, String types);
* public LinkedHashMap<String, MasterTable> mtables(int index, boolean create, String catalog, String schema, LinkedHashMap<String, MasterTable> tables, DataSet set) throws Exception;
* public LinkedHashMap<String, MasterTable> mtables(boolean create, String catalog, String schema, LinkedHashMap<String, MasterTable> tables, ResultSet set) throws Exception;
******************************************************************************************************************/
/**
	 * Query master tables
* @param catalog catalog
* @param schema schema
* @param pattern pattern
* @param types types
* @return String
*/
@Override
public List<String> buildQueryMasterTableRunSQL(String catalog, String schema, String pattern, String types) throws Exception{
return super.buildQueryMasterTableRunSQL(catalog, schema, pattern, types);
}
/**
	 * Extract table metadata from a JDBC result set
	 * ResultSet set = con.getMetaData().getTables()
	 * @param create whether to create entries that were not found in the previous step
	 * @param catalog catalog
	 * @param schema schema
	 * @param set query result
* @return List
*/
@Override
public LinkedHashMap<String, MasterTable> mtables(boolean create, String catalog, String schema, LinkedHashMap<String, MasterTable> tables, ResultSet set) throws Exception{
return super.mtables(create, catalog, schema, tables, set);
}
/**
	 * Extract table metadata from the result set of the SQL generated in the previous step
	 * @param index index of the generated SQL statement
	 * @param create whether to create entries that were not found in the previous step
	 * @param catalog catalog
	 * @param schema schema
	 * @param tables result of the previous step
* @param set set
* @return tables
* @throws Exception
*/
@Override
public LinkedHashMap<String, MasterTable> mtables(int index, boolean create, String catalog, String schema, LinkedHashMap<String, MasterTable> tables, DataSet set) throws Exception{
return super.mtables(index, create, catalog, schema, tables, set);
}
/* *****************************************************************************************************************
* partition table
* -----------------------------------------------------------------------------------------------------------------
* public List<String> buildQueryPartitionTableRunSQL(String catalog, String schema, String pattern, String types);
* public List<String> buildQueryPartitionTableRunSQL(MasterTable master);
* public LinkedHashMap<String, PartitionTable> ptables(int index, boolean create, MasterTable master, String catalog, String schema, LinkedHashMap<String, PartitionTable> tables, DataSet set) throws Exception;
* public LinkedHashMap<String, PartitionTable> ptables(boolean create, String catalog, MasterTable master, String schema, LinkedHashMap<String, PartitionTable> tables, ResultSet set) throws Exception;
******************************************************************************************************************/
/**
	 * Query partition tables
* @param catalog catalog
* @param schema schema
* @param pattern pattern
* @param types types
* @return String
*/
@Override
public List<String> buildQueryPartitionTableRunSQL(String catalog, String schema, String pattern, String types) throws Exception{
return super.buildQueryPartitionTableRunSQL(catalog, schema, pattern, types);
}
@Override
public List<String> buildQueryPartitionTableRunSQL(MasterTable master) throws Exception{
return super.buildQueryPartitionTableRunSQL(master);
}
/**
	 * Build PartitionTable objects from the query result set
	 * @param index index of the generated SQL statement, matching the order returned by buildQueryMasterTableRunSQL
	 * @param create whether to create entries that were not found in the previous step
	 * @param master master table
	 * @param catalog catalog
	 * @param schema schema
	 * @param tables result of the previous step
* @param set set
* @return tables
* @throws Exception
*/
@Override
public LinkedHashMap<String, PartitionTable> ptables(int index, boolean create, MasterTable master, String catalog, String schema, LinkedHashMap<String, PartitionTable> tables, DataSet set) throws Exception{
return super.ptables(index, create, master, catalog, schema, tables, set);
}
/**
	 * Build PartitionTable objects from JDBC metadata
	 * @param create whether to create entries that were not found in the previous step
	 * @param master master table
* @param catalog catalog
* @param schema schema
* @param tables tables
* @param set set
* @return tables
* @throws Exception
*/
@Override
public LinkedHashMap<String, PartitionTable> ptables(boolean create, String catalog, MasterTable master, String schema, LinkedHashMap<String, PartitionTable> tables, ResultSet set) throws Exception{
return super.ptables(create, catalog, master, schema, tables, set);
}
/* *****************************************************************************************************************
* column
* -----------------------------------------------------------------------------------------------------------------
* public List<String> buildQueryColumnRunSQL(Table table, boolean metadata);
* public LinkedHashMap<String, Column> columns(int index, boolean create, Table table, LinkedHashMap<String, Column> columns, DataSet set) throws Exception;
* public LinkedHashMap<String, Column> columns(boolean create, Table table, LinkedHashMap<String, Column> columns, SqlRowSet set) throws Exception;
* public LinkedHashMap<String, Column> columns(boolean create, Table table, LinkedHashMap<String, Column> columns, ResultSet set) throws Exception;
******************************************************************************************************************/
/**
	 * Query the columns of a table
* @param table table
* @return sql
*/
@Override
public List<String> buildQueryColumnRunSQL(Table table, boolean metadata) throws Exception{
return super.buildQueryColumnRunSQL(table, metadata);
}
/**
	 * Build Column objects from the query result set
	 * @param index index of the generated SQL statement, matching the order returned by buildQueryColumnRunSQL
	 * @param create whether to create entries that were not found in the previous step
	 * @param table table
	 * @param columns result of the previous step
	 * @param set set
	 * @return columns
* @throws Exception
*/
@Override
public LinkedHashMap<String, Column> columns(int index, boolean create, Table table, LinkedHashMap<String, Column> columns, DataSet set) throws Exception{
return super.columns(index, create, table, columns, set);
}
@Override
public LinkedHashMap<String, Column> columns(boolean create, Table table, LinkedHashMap<String, Column> columns, SqlRowSet set) throws Exception{
return super.columns(create, table, columns, set);
}
@Override
public LinkedHashMap<String, Column> columns(boolean create, Table table, LinkedHashMap<String, Column> columns, ResultSet set) throws Exception{
return super.columns(create, table, columns, set);
}
/* *****************************************************************************************************************
* tag
* -----------------------------------------------------------------------------------------------------------------
* public List<String> buildQueryTagRunSQL(Table table, boolean metadata);
* public LinkedHashMap<String, Tag> tags(int index, boolean create, Table table, LinkedHashMap<String, Tag> tags, DataSet set) throws Exception;
* public LinkedHashMap<String, Tag> tags(boolean create, Table table, LinkedHashMap<String, Tag> tags, SqlRowSet set) throws Exception;
* public LinkedHashMap<String, Tag> tags(boolean create, Table table, LinkedHashMap<String, Tag> tags, ResultSet set) throws Exception;
******************************************************************************************************************/
/**
	 * Query the tags of a table
	 * @param table table
	 * @param metadata whether to read from JDBC metadata or query the system tables
* @return sqls
*/
@Override
public List<String> buildQueryTagRunSQL(Table table, boolean metadata) throws Exception{
return super.buildQueryTagRunSQL(table, metadata);
}
/**
	 * Build Tag objects from the query result set
	 * @param index index of the query SQL statement, matching the order returned by buildQueryTagRunSQL
	 * @param create whether to create entries that were not found in the previous step
	 * @param table table
	 * @param tags tags
	 * @param set set
	 * @return tags
* @throws Exception
*/
@Override
public LinkedHashMap<String, Tag> tags(int index, boolean create, Table table, LinkedHashMap<String, Tag> tags, DataSet set) throws Exception{
return super.tags(index, create, table, tags, set);
}
@Override
public LinkedHashMap<String, Tag> tags(boolean create, Table table, LinkedHashMap<String, Tag> tags, SqlRowSet set) throws Exception{
return super.tags(create, table, tags, set);
}
@Override
public LinkedHashMap<String, Tag> tags(boolean create, Table table, LinkedHashMap<String, Tag> tags, ResultSet set) throws Exception{
return super.tags(create, table, tags, set);
}
/* *****************************************************************************************************************
* index
* -----------------------------------------------------------------------------------------------------------------
* public List<String> buildQueryIndexRunSQL(Table table, boolean metadata);
* public LinkedHashMap<String, Index> indexs(int index, boolean create, Table table, LinkedHashMap<String, Index> indexs, DataSet set) throws Exception;
* public LinkedHashMap<String, Index> indexs(boolean create, Table table, LinkedHashMap<String, Index> indexs, SqlRowSet set) throws Exception;
* public LinkedHashMap<String, Index> indexs(boolean create, Table table, LinkedHashMap<String, Index> indexs, ResultSet set) throws Exception;
******************************************************************************************************************/
/**
	 * Query the indexes of a table
	 * @param table table
	 * @param metadata whether to read from JDBC metadata or query the system tables
* @return sql
*/
@Override
public List<String> buildQueryIndexRunSQL(Table table, boolean metadata) throws Exception{
return super.buildQueryIndexRunSQL(table, metadata);
}
/**
	 * Build Index objects from the query result set
	 * @param index index of the query SQL statement, matching the order returned by buildQueryIndexRunSQL
	 * @param create whether to create entries that were not found in the previous step
	 * @param table table
	 * @param indexs indexs
	 * @param set set
	 * @return indexs
* @throws Exception
*/
@Override
public LinkedHashMap<String, Index> indexs(int index, boolean create, Table table, LinkedHashMap<String, Index> indexs, DataSet set) throws Exception{
return super.indexs(index, create, table, indexs, set);
}
@Override
public LinkedHashMap<String, Index> indexs(boolean create, Table table, LinkedHashMap<String, Index> indexs, SqlRowSet set) throws Exception{
return super.indexs(create, table, indexs, set);
}
@Override
public LinkedHashMap<String, Index> indexs(boolean create, Table table, LinkedHashMap<String, Index> indexs, ResultSet set) throws Exception{
return super.indexs(create, table, indexs, set);
}
/* *****************************************************************************************************************
* constraint
* -----------------------------------------------------------------------------------------------------------------
* public List<String> buildQueryConstraintRunSQL(Table table, boolean metadata);
* public LinkedHashMap<String, Constraint> constraints(int constraint,boolean create, Table table, LinkedHashMap<String, Constraint> constraints, DataSet set) throws Exception;
* public LinkedHashMap<String, Constraint> constraints(boolean create, Table table, LinkedHashMap<String, Constraint> constraints, SqlRowSet set) throws Exception;
* public LinkedHashMap<String, Constraint> constraints(boolean create, Table table, LinkedHashMap<String, Constraint> constraints, ResultSet set) throws Exception;
******************************************************************************************************************/
/**
	 * Query the constraints of a table
	 * @param table table
	 * @param metadata whether to read from JDBC metadata or query the system tables
* @return sqls
*/
@Override
public List<String> buildQueryConstraintRunSQL(Table table, boolean metadata) throws Exception{
return super.buildQueryConstraintRunSQL(table, metadata);
}
/**
	 * Build Constraint objects from the query result set
	 * @param index index of the query SQL statement, matching the order returned by buildQueryConstraintRunSQL
	 * @param create whether to create entries that were not found in the previous step
	 * @param table table
	 * @param constraints result of the previous step
	 * @param set set
	 * @return constraints
* @throws Exception
*/
@Override
public LinkedHashMap<String, Constraint> constraints(int index , boolean create, Table table, LinkedHashMap<String, Constraint> constraints, DataSet set) throws Exception{
return super.constraints(index, create, table, constraints, set);
}
@Override
public LinkedHashMap<String, Constraint> constraints(boolean create, Table table, LinkedHashMap<String, Constraint> constraints, SqlRowSet set) throws Exception{
return super.constraints(create, table, constraints, set);
}
@Override
public LinkedHashMap<String, Constraint> constraints(boolean create, Table table, LinkedHashMap<String, Constraint> constraints, ResultSet set) throws Exception{
return super.constraints(create, table, constraints, set);
}
/* *****************************************************************************************************************
*
* DDL
*
* =================================================================================================================
	 * table
	 * master table
	 * partition table
	 * column
	 * tag
	 * index
	 * constraint
*
******************************************************************************************************************/
/* *****************************************************************************************************************
* table
* -----------------------------------------------------------------------------------------------------------------
* public String buildCreateRunSQL(Table table);
* public String buildAlterRunSQL(Table table);
* public String buildRenameRunSQL(Table table);
* public String buildChangeCommentRunSQL(Table table);
* public String buildDropRunSQL(Table table);
* public StringBuilder checkTableExists(StringBuilder builder, boolean exists)
* public StringBuilder primary(StringBuilder builder, Table table)
* public StringBuilder comment(StringBuilder builder, Table table)
* public StringBuilder name(StringBuilder builder, Table table)
******************************************************************************************************************/
@Override
public String buildCreateRunSQL(Table table) throws Exception{
return super.buildCreateRunSQL(table);
}
@Override
public String buildAlterRunSQL(Table table) throws Exception{
return super.buildAlterRunSQL(table);
}
/**
	 * Rename a table
* ALTER TABLE A RENAME TO B;
* @param table table
* @return String
*/
@Override
public String buildRenameRunSQL(Table table) throws Exception {
StringBuilder builder = new StringBuilder();
builder.append("ALTER TABLE ");
name(builder, table);
builder.append(" RENAME TO ");
name(builder, table.getUpdate());
return builder.toString();
}
/**
	 * Change the table comment
* COMMENT ON TABLE T IS 'ABC';
* @param table table
* @return String
*/
@Override
public String buildChangeCommentRunSQL(Table table) throws Exception{
String comment = table.getComment();
if(BasicUtil.isNotEmpty(comment)) {
StringBuilder builder = new StringBuilder();
builder.append("COMMENT ON TABLE ");
name(builder, table);
builder.append(" IS '").append(comment).append("'");
return builder.toString();
}else{
return null;
}
}
/**
	 * Drop a table
* @param table table
* @return String
*/
@Override
public String buildDropRunSQL(Table table) throws Exception{
return super.buildDropRunSQL(table);
}
@Override
public StringBuilder checkTableExists(StringBuilder builder, boolean exists){
return builder;
}
/**
	 * Primary key clause
* CONSTRAINT PK_BS_DEV PRIMARY KEY (ID ASC)
* @param builder builder
* @param table table
* @return builder
*/
@Override
public StringBuilder primary(StringBuilder builder, Table table){
List<Column> pks = table.primarys();
if(pks.size()>0){
builder.append(",CONSTRAINT ").append("PK_").append(table.getName()).append(" PRIMARY KEY (");
int idx = 0;
for(Column pk:pks){
if(idx > 0){
builder.append(",");
}
SQLUtil.delimiter(builder, pk.getName(), getDelimiterFr(), getDelimiterTo());
String order = pk.getOrder();
if(null != order){
builder.append(" ").append(order);
			}
			idx ++;
		}
builder.append(")");
}
return builder;
}
/**
	 * Table comment clause
*
* @param builder builder
* @param table table
* @return builder
*/
@Override
public StringBuilder comment(StringBuilder builder, Table table){
return super.comment(builder, table);
}
/**
	 * Build the fully qualified table name
* @param builder builder
* @param table table
* @return StringBuilder
*/
@Override
public StringBuilder name(StringBuilder builder, Table table){
return super.name(builder, table);
}
/* *****************************************************************************************************************
* master table
* -----------------------------------------------------------------------------------------------------------------
* public String buildCreateRunSQL(MasterTable table);
* public String buildAlterRunSQL(MasterTable table);
* public String buildDropRunSQL(MasterTable table);
* public String buildRenameRunSQL(MasterTable table);
* public String buildChangeCommentRunSQL(MasterTable table);
******************************************************************************************************************/
/**
	 * Create a master table
* @param table table
* @return String
*/
@Override
public String buildCreateRunSQL(MasterTable table) throws Exception{
return super.buildCreateRunSQL(table);
}
@Override
public String buildAlterRunSQL(MasterTable table) throws Exception{
return super.buildAlterRunSQL(table);
}
@Override
public String buildDropRunSQL(MasterTable table) throws Exception{
return super.buildDropRunSQL(table);
}
@Override
public String buildRenameRunSQL(MasterTable table) throws Exception{
return super.buildRenameRunSQL(table);
}
@Override
public String buildChangeCommentRunSQL(MasterTable table) throws Exception{
return super.buildChangeCommentRunSQL(table);
}
/* *****************************************************************************************************************
* partition table
* -----------------------------------------------------------------------------------------------------------------
* public String buildCreateRunSQL(PartitionTable table);
* public String buildAlterRunSQL(PartitionTable table);
* public String buildDropRunSQL(PartitionTable table);
* public String buildRenameRunSQL(PartitionTable table);
* public String buildChangeCommentRunSQL(PartitionTable table);
******************************************************************************************************************/
/**
	 * Create a partition table
* @param table table
* @return String
*/
@Override
public String buildCreateRunSQL(PartitionTable table) throws Exception{
return super.buildCreateRunSQL(table);
}
@Override
public String buildAlterRunSQL(PartitionTable table) throws Exception{
return super.buildAlterRunSQL(table);
}
@Override
public String buildDropRunSQL(PartitionTable table) throws Exception{
return super.buildDropRunSQL(table);
}
@Override
public String buildRenameRunSQL(PartitionTable table) throws Exception{
return super.buildRenameRunSQL(table);
}
@Override
public String buildChangeCommentRunSQL(PartitionTable table) throws Exception{
return super.buildChangeCommentRunSQL(table);
}
/* *****************************************************************************************************************
* column
* -----------------------------------------------------------------------------------------------------------------
* public String alterColumnKeyword()
* public String buildAddRunSQL(Column column)
* public List<String> buildAlterRunSQL(Column column)
* public String buildDropRunSQL(Column column)
* public String buildRenameRunSQL(Column column)
* public List<String> buildChangeTypeRunSQL(Column column)
* public String buildChangeDefaultRunSQL(Column column)
* public String buildChangeNullableRunSQL(Column column)
* public String buildChangeCommentRunSQL(Column column)
* public StringBuilder define(StringBuilder builder, Column column)
* public StringBuilder type(StringBuilder builder, Column column)
* public StringBuilder nullable(StringBuilder builder, Column column)
* public StringBuilder charset(StringBuilder builder, Column column)
* public StringBuilder defaultValue(StringBuilder builder, Column column)
* public StringBuilder increment(StringBuilder builder, Column column)
* public StringBuilder onupdate(StringBuilder builder, Column column)
* public StringBuilder position(StringBuilder builder, Column column)
* public StringBuilder comment(StringBuilder builder, Column column)
******************************************************************************************************************/
@Override
public String alterColumnKeyword(){
return "ALTER";
}
/**
	 * Add a column
	 * ALTER TABLE HR_USER ADD UPT_TIME datetime CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci DEFAULT NULL ON UPDATE CURRENT_TIMESTAMP comment 'update time' AFTER ID;
* @param column column
* @return String
*/
@Override
public String buildAddRunSQL(Column column) throws Exception{
column.setCreater(this);
StringBuilder builder = new StringBuilder();
Table table = column.getTable();
builder.append("ALTER TABLE ");
name(builder, table);
//Column update = column.getUpdate();
//if(null == update){
			// add the column
builder.append(" ADD ");
SQLUtil.delimiter(builder, column.getName(), getDelimiterFr(), getDelimiterTo()).append(" ");
define(builder, column);
//}
return builder.toString();
}
/**
	 * Alter a column ALTER TABLE HR_USER CHANGE UPT_TIME UPT_TIME datetime DEFAULT NULL ON UPDATE CURRENT_TIMESTAMP comment 'update time' AFTER ID;
* @param column column
* @return List
*/
@Override
public List<String> buildAlterRunSQL(Column column) throws Exception{
return super.buildAlterRunSQL(column);
}
/**
	 * Drop a column
* ALTER TABLE HR_USER DROP COLUMN NAME;
* @param column column
* @return String
*/
@Override
public String buildDropRunSQL(Column column) throws Exception{
return super.buildDropRunSQL(column);
}
/**
	 * Rename a column
	 *
	 * ALTER TABLE table_name RENAME COLUMN old_name TO new_name
* @param column column
* @return String
*/
@Override
public String buildRenameRunSQL(Column column) throws Exception{
StringBuilder builder = new StringBuilder();
builder.append("ALTER TABLE ");
name(builder, column.getTable());
builder.append(" RENAME COLUMN ");
SQLUtil.delimiter(builder, column.getName(), getDelimiterFr(), getDelimiterTo());
builder.append(" TO ");
SQLUtil.delimiter(builder, column.getNewName(), getDelimiterFr(), getDelimiterTo());
return builder.toString();
}
/**
	 * Change the column data type
* 1.ADD NEW COLUMN
* 2.FORMAT VALUE
* 3.MOVE VALUE
* alter table tb modify (name nvarchar2(20))
* @param column column
* @return sql
*/
public List<String> buildChangeTypeRunSQL(Column column) throws Exception{
List<String> sqls = new ArrayList<>();
Column update = column.getUpdate();
String name = column.getName();
String type = column.getTypeName();
if(type.contains("(")){
type = type.substring(0,type.indexOf("("));
}
String uname = update.getName();
String utype = update.getTypeName();
if(uname.endsWith("_TMP_UPDATE_TYPE")){
sqls.add(buildDropRunSQL(update));
}else {
if (utype != null && utype.contains("(")) {
utype = utype.substring(0, utype.indexOf("("));
}
if (!type.equals(utype)) {
String tmp_name = column.getName() + "_TMP_UPDATE_TYPE";
update.setName(tmp_name);
String rename = buildRenameRunSQL(column);
sqls.add(rename);
update.setName(uname);
String add = buildAddRunSQL(update);
sqls.add(add);
StringBuilder builder = new StringBuilder();
builder.append("UPDATE ");
name(builder, column.getTable());
builder.append(" SET ");
SQLUtil.delimiter(builder, uname, getDelimiterFr(), getDelimiterTo());
builder.append(" = ");
SQLUtil.delimiter(builder, tmp_name, getDelimiterFr(), getDelimiterTo());
sqls.add(builder.toString());
column.setName(tmp_name);
String drop = buildDropRunSQL(column);
sqls.add(drop);
column.setName(name);
update.setName(tmp_name);
} else {
StringBuilder builder = new StringBuilder();
builder.append("ALTER TABLE ");
name(builder, column.getTable());
builder.append(" MODIFY(");
SQLUtil.delimiter(builder, column.getName(), getDelimiterFr(), getDelimiterTo()).append(" ");
type(builder, column.getUpdate());
builder.append(")");
sqls.add(builder.toString());
}
}
//column.setName(name);
return sqls;
}
/**
	 * Change the default value
* ALTER TABLE MY_TEST_TABLE MODIFY B DEFAULT 2
* @param column column
* @return String
*/
@Override
public String buildChangeDefaultRunSQL(Column column) throws Exception{
Object def = column.getDefaultValue();
StringBuilder builder = new StringBuilder();
builder.append("ALTER TABLE ");
name(builder, column.getTable()).append(" MODIFY ");
SQLUtil.delimiter(builder, column.getName(), getDelimiterFr(), getDelimiterTo());
builder.append(" DEFAULT ");
if(null != def){
format(builder, def);
}else{
builder.append("NULL");
}
return builder.toString();
}
/**
	 * Change the NOT NULL constraint
* ALTER TABLE T MODIFY C NOT NULL ;
* @param column column
* @return String
*/
@Override
public String buildChangeNullableRunSQL(Column column) throws Exception{
int nullable = column.isNullable();
int uNullable = column.getUpdate().isNullable();
if(nullable != -1 && uNullable != -1){
if(nullable == uNullable){
return null;
}
StringBuilder builder = new StringBuilder();
builder.append("ALTER TABLE ");
name(builder, column.getTable()).append(" MODIFY ");
SQLUtil.delimiter(builder, column.getName(), getDelimiterFr(), getDelimiterTo());
if(uNullable == 0){
builder.append(" NOT ");
}
builder.append("NULL");
return builder.toString();
}
return null;
}
/**
	 * Change the column comment
* COMMENT ON COLUMN T.ID IS 'ABC'
* @param column column
* @return String
*/
@Override
public String buildChangeCommentRunSQL(Column column) throws Exception{
String comment = column.getComment();
if(BasicUtil.isNotEmpty(comment)) {
StringBuilder builder = new StringBuilder();
builder.append("COMMENT ON COLUMN ");
name(builder, column.getTable()).append(".");
SQLUtil.delimiter(builder, column.getName(), getDelimiterFr(), getDelimiterTo());
builder.append(" IS '").append(comment).append("'");
return builder.toString();
}else{
return null;
}
}
/**
	 * Column definition
* @param builder builder
* @param column column
* @return builder
*/
@Override
public StringBuilder define(StringBuilder builder, Column column){
return super.define(builder, column);
}
/**
	 * Data type clause
* @param builder builder
* @param column column
* @return builder
*/
@Override
public StringBuilder type(StringBuilder builder, Column column){
return super.type(builder, column);
}
/**
	 * Nullable clause
* @param builder builder
* @param column column
* @return builder
*/
@Override
public StringBuilder nullable(StringBuilder builder, Column column){
return super.nullable(builder, column);
}
/**
	 * Character set clause
* @param builder builder
* @param column column
* @return builder
*/
@Override
public StringBuilder charset(StringBuilder builder, Column column){
return super.charset(builder, column);
}
/**
	 * Default value clause
* @param builder builder
* @param column column
* @return builder
*/
@Override
public StringBuilder defaultValue(StringBuilder builder, Column column){
return super.defaultValue(builder, column);
}
/**
	 * Auto-increment clause
* @param builder builder
* @param column column
* @return builder
*/
@Override
public StringBuilder increment(StringBuilder builder, Column column){
return super.increment(builder, column);
}
/**
	 * ON UPDATE clause
* @param builder builder
* @param column column
* @return builder
*/
@Override
public StringBuilder onupdate(StringBuilder builder, Column column){
return super.onupdate(builder, column);
}
/**
	 * Column position clause
*
* @param builder builder
* @param column column
* @return builder
*/
@Override
public StringBuilder position(StringBuilder builder, Column column){
return super.position(builder, column);
}
/**
	 * Column comment clause
*
* @param builder builder
* @param column column
* @return builder
*/
@Override
public StringBuilder comment(StringBuilder builder, Column column){
return super.comment(builder, column);
}
/* *****************************************************************************************************************
* tag
* -----------------------------------------------------------------------------------------------------------------
* public String buildAddRunSQL(Tag tag);
* public List<String> buildAlterRunSQL(Tag tag);
* public String buildDropRunSQL(Tag tag);
* public String buildRenameRunSQL(Tag tag);
* public String buildChangeDefaultRunSQL(Tag tag);
* public String buildChangeNullableRunSQL(Tag tag);
* public String buildChangeCommentRunSQL(Tag tag);
* public List<String> buildChangeTypeRunSQL(Tag tag);
******************************************************************************************************************/
/**
	 * Add a tag
	 * ALTER TABLE HR_USER ADD TAG UPT_TIME datetime CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci DEFAULT NULL ON UPDATE CURRENT_TIMESTAMP comment 'update time' AFTER ID;
* @param tag tag
* @return String
*/
@Override
public String buildAddRunSQL(Tag tag) throws Exception{
return super.buildAddRunSQL(tag);
}
/**
	 * Alter a tag ALTER TABLE HR_USER CHANGE UPT_TIME UPT_TIME datetime DEFAULT NULL ON UPDATE CURRENT_TIMESTAMP comment 'update time' AFTER ID;
* @param tag tag
* @return List
*/
@Override
public List<String> buildAlterRunSQL(Tag tag) throws Exception{
return super.buildAlterRunSQL(tag);
}
/**
	 * Drop a tag
* ALTER TABLE HR_USER DROP TAG NAME;
* @param tag tag
* @return String
*/
@Override
public String buildDropRunSQL(Tag tag) throws Exception{
return super.buildDropRunSQL(tag);
}
/**
	 * Rename a tag
	 *
	 * Usually not called directly; invoked internally by buildAlterRunSQL when needed
* @param tag tag
* @return String
*/
@Override
public String buildRenameRunSQL(Tag tag) throws Exception{
return super.buildRenameRunSQL(tag);
}
/**
	 * Change the default value
	 *
	 * Usually not called directly; invoked internally by buildAlterRunSQL when needed
* @param tag tag
* @return String
*/
@Override
public String buildChangeDefaultRunSQL(Tag tag) throws Exception{
return super.buildChangeDefaultRunSQL(tag);
}
/**
	 * Change the NOT NULL constraint
	 *
	 * Usually not called directly; invoked internally by buildAlterRunSQL when needed
* @param tag tag
* @return String
*/
@Override
public String buildChangeNullableRunSQL(Tag tag) throws Exception{
return super.buildChangeNullableRunSQL(tag);
}
/**
	 * Change the comment
	 *
	 * Usually not called directly; invoked internally by buildAlterRunSQL when needed
* @param tag tag
* @return String
*/
@Override
public String buildChangeCommentRunSQL(Tag tag) throws Exception{
return super.buildChangeCommentRunSQL(tag);
}
/**
	 * Change the data type
	 *
	 * Usually not called directly; invoked internally by buildAlterRunSQL when needed
* @param tag tag
* @return sql
*/
@Override
public List<String> buildChangeTypeRunSQL(Tag tag) throws Exception{
return super.buildChangeTypeRunSQL(tag);
}
/* *****************************************************************************************************************
* index
* -----------------------------------------------------------------------------------------------------------------
* public String buildAddRunSQL(Index index) throws Exception
* public List<String> buildAlterRunSQL(Index index) throws Exception
* public String buildDropRunSQL(Index index) throws Exception
* public String buildRenameRunSQL(Index index) throws Exception
******************************************************************************************************************/
/**
	 * Add an index
* @param index index
* @return String
*/
@Override
public String buildAddRunSQL(Index index) throws Exception{
return super.buildAddRunSQL(index);
}
/**
	 * Alter an index
	 * May generate multiple SQL statements
* @param index index
* @return List
*/
@Override
public List<String> buildAlterRunSQL(Index index) throws Exception{
return super.buildAlterRunSQL(index);
}
/**
	 * Drop an index
* @param index index
* @return String
*/
@Override
public String buildDropRunSQL(Index index) throws Exception{
return super.buildDropRunSQL(index);
}
/**
	 * Rename an index
	 * Usually not called directly; when needed it is invoked internally by buildAlterRunSQL
* @param index index
* @return String
*/
@Override
public String buildRenameRunSQL(Index index) throws Exception{
return super.buildRenameRunSQL(index);
}
/* *****************************************************************************************************************
* constraint
* -----------------------------------------------------------------------------------------------------------------
* public String buildAddRunSQL(Constraint constraint) throws Exception
* public List<String> buildAlterRunSQL(Constraint constraint) throws Exception
* public String buildDropRunSQL(Constraint constraint) throws Exception
* public String buildRenameRunSQL(Constraint index) throws Exception
******************************************************************************************************************/
/**
	 * Add a constraint
* @param constraint constraint
* @return String
*/
@Override
public String buildAddRunSQL(Constraint constraint) throws Exception{
return super.buildAddRunSQL(constraint);
}
/**
	 * Alter a constraint
	 * May generate multiple SQL statements
* @param constraint constraint
* @return List
*/
@Override
public List<String> buildAlterRunSQL(Constraint constraint) throws Exception{
return super.buildAlterRunSQL(constraint);
}
/**
	 * Drop a constraint
* @param constraint constraint
* @return String
*/
@Override
public String buildDropRunSQL(Constraint constraint) throws Exception{
return super.buildDropRunSQL(constraint);
}
/**
	 * Rename a constraint
	 * Usually not called directly; when needed it is invoked internally by buildAlterRunSQL
* @param index index
* @return String
*/
@Override
public String buildRenameRunSQL(Constraint index) throws Exception{
return super.buildRenameRunSQL(index);
}
/* *****************************************************************************************************************
*
* common
*------------------------------------------------------------------------------------------------------------------
* public boolean isBooleanColumn(Column column)
* public boolean isNumberColumn(Column column)
* public boolean isCharColumn(Column column)
* public String buildInValue(SQL_BUILD_IN_VALUE value)
* public String type2type(String type)
* public String type2class(String type)
* public void value(StringBuilder builder, Object obj, String key)
******************************************************************************************************************/
@Override
public boolean isBooleanColumn(Column column) {
return super.isBooleanColumn(column);
}
/**
	 * Whether the column is a numeric type
* @param column column
* @return boolean
*/
@Override
public boolean isNumberColumn(Column column){
return super.isNumberColumn(column);
}
@Override
public boolean isCharColumn(Column column) {
return super.isCharColumn(column);
}
/**
	 * Built-in function value
* @param value SQL_BUILD_IN_VALUE
* @return String
*/
public String buildInValue(SQL_BUILD_IN_VALUE value){
if(value == SQL_BUILD_IN_VALUE.CURRENT_TIME){
return "sysdate";
}
return null;
}
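	// Illustrative usage sketch (the "adapter" variable is only for illustration): on Oracle the
	// CURRENT_TIME built-in value maps to the "sysdate" pseudo column, so
	//   String now = adapter.buildInValue(SQL_BUILD_IN_VALUE.CURRENT_TIME); // -> "sysdate"
	// and any other built-in value falls through to null here.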
@Override
public String type2type(String type){
if(null != type){
type = type.toUpperCase();
if("DATETIME".equals(type)){
return "TIMESTAMP";
}
}
return super.type2type(type);
}
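	// Illustrative conversion sketch: Oracle has no DATETIME column type, so
	//   type2type("datetime") // -> "TIMESTAMP"
	// while every other type name is delegated unchanged to the generic mapping in the superclass.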
@Override
public String type2class(String type){
return super.type2class(type);
}
@Override
public void value(StringBuilder builder, Object obj, String key){
Object value = null;
if(obj instanceof DataRow){
value = ((DataRow)obj).get(key);
}else if(AdapterProxy.hasAdapter()){
Field field = AdapterProxy.field(obj.getClass(), key);
value = BeanUtil.getFieldValue(obj, field);
}else{
value = BeanUtil.getFieldValue(obj, key);
}
if(null == value || "NULL".equals(value)){
builder.append("null");
}else if(value instanceof String){
format(builder, value);
}else if(value instanceof Timestamp
|| value instanceof java.util.Date
|| value instanceof java.sql.Date
|| value instanceof LocalDate
|| value instanceof LocalTime
|| value instanceof LocalDateTime
){
Date date = DateUtil.parse(value);
builder.append("TO_DATE('").append(DateUtil.format(date,DateUtil.FORMAT_DATE_TIME)).append("','yyyy-mm-dd hh24:mi:ss')");
}else if(value instanceof Date){
builder.append("TO_DATE('").append(DateUtil.format((Date)value,DateUtil.FORMAT_DATE_TIME)).append("','yyyy-mm-dd hh24:mi:ss')");
}else if(value instanceof Number || value instanceof Boolean){
builder.append(value);
}else{
builder.append(value);
}
}
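	// Illustrative output sketch (assumes DateUtil.FORMAT_DATE_TIME is a "yyyy-MM-dd HH:mm:ss" style pattern):
	// for a java.util.Date of 2021-01-01 08:30:00 the builder receives
	//   TO_DATE('2021-01-01 08:30:00','yyyy-mm-dd hh24:mi:ss')
	// while strings go through format(builder, value) and numbers/booleans are appended as-is.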
}
|
oracle type2type
|
anyline-jdbc-dialect/anyline-jdbc-oracle/src/main/java/org/anyline/jdbc/config/db/impl/oracle/SQLAdapterImpl.java
|
oracle type2type
|
|
Java
|
apache-2.0
|
88b3406f2fc841b22ec2aa4208942b64876d20f1
| 0
|
jyemin/mongo-java-driver,jyemin/mongo-java-driver,rozza/mongo-java-driver,gianpaj/mongo-java-driver,davydotcom/mongo-java-driver,jsonking/mongo-java-driver,rozza/mongo-java-driver,wanggc/mongo-java-driver,jsonking/mongo-java-driver,kay-kim/mongo-java-driver,davydotcom/mongo-java-driver,PSCGroup/mongo-java-driver,wanggc/mongo-java-driver
|
// DBCollection.java
/**
* Copyright (C) 2008 10gen Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mongodb;
// Mongo
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.bson.types.ObjectId;
/** This class provides a skeleton implementation of a database collection.
* <p>A typical invocation sequence is thus
* <blockquote><pre>
 * Mongo mongo = new Mongo( new DBAddress( "localhost", 27017 ) );
* DB db = mongo.getDB( "mydb" );
* DBCollection collection = db.getCollection( "test" );
* </pre></blockquote>
* @dochub collections
*/
@SuppressWarnings("unchecked")
public abstract class DBCollection {
/**
* Saves document(s) to the database.
* if doc doesn't have an _id, one will be added
* you can get the _id that was added from doc after the insert
*
* @param arr array of documents to save
* @param concern the write concern
* @return
* @throws MongoException
* @dochub insert
*/
public WriteResult insert(DBObject[] arr , WriteConcern concern ) throws MongoException {
return insert( arr, concern, getDBEncoder());
}
/**
* Saves document(s) to the database.
* if doc doesn't have an _id, one will be added
* you can get the _id that was added from doc after the insert
*
* @param arr array of documents to save
* @param concern the write concern
* @param encoder the DBEncoder to use
* @return
* @throws MongoException
* @dochub insert
*/
public abstract WriteResult insert(DBObject[] arr , WriteConcern concern, DBEncoder encoder) throws MongoException;
/**
* Inserts a document into the database.
* if doc doesn't have an _id, one will be added
* you can get the _id that was added from doc after the insert
*
* @param o
* @param concern the write concern
* @return
* @throws MongoException
* @dochub insert
*/
public WriteResult insert(DBObject o , WriteConcern concern )
throws MongoException {
return insert( new DBObject[]{ o } , concern );
}
/**
* Saves document(s) to the database.
* if doc doesn't have an _id, one will be added
* you can get the _id that was added from doc after the insert
*
* @param arr array of documents to save
* @return
* @throws MongoException
* @dochub insert
*/
public WriteResult insert(DBObject ... arr)
throws MongoException {
return insert( arr , getWriteConcern() );
}
/**
* Saves document(s) to the database.
* if doc doesn't have an _id, one will be added
* you can get the _id that was added from doc after the insert
*
* @param arr array of documents to save
* @return
* @throws MongoException
* @dochub insert
*/
public WriteResult insert(WriteConcern concern, DBObject ... arr)
throws MongoException {
return insert( arr, concern );
}
/**
* Saves document(s) to the database.
* if doc doesn't have an _id, one will be added
* you can get the _id that was added from doc after the insert
*
* @param list list of documents to save
* @return
* @throws MongoException
* @dochub insert
*/
public WriteResult insert(List<DBObject> list )
throws MongoException {
return insert( list, getWriteConcern() );
}
/**
* Saves document(s) to the database.
* if doc doesn't have an _id, one will be added
* you can get the _id that was added from doc after the insert
*
* @param list list of documents to save
* @param concern the write concern
* @return
* @throws MongoException
* @dochub insert
*/
public WriteResult insert(List<DBObject> list, WriteConcern concern )
throws MongoException {
return insert( list.toArray( new DBObject[list.size()] ) , concern );
}
/**
* Performs an update operation.
* @param q search query for old object to update
* @param o object with which to update <tt>q</tt>
* @param upsert if the database should create the element if it does not exist
* @param multi if the update should be applied to all objects matching (db version 1.1.3 and above). An object will
* not be inserted if it does not exist in the collection and upsert=true and multi=true.
* See <a href="http://www.mongodb.org/display/DOCS/Atomic+Operations">http://www.mongodb.org/display/DOCS/Atomic+Operations</a>
* @param concern the write concern
* @return
* @throws MongoException
* @dochub update
*/
public WriteResult update( DBObject q , DBObject o , boolean upsert , boolean multi , WriteConcern concern ) throws MongoException {
return update( q, o, upsert, multi, concern, getDBEncoder());
}
/**
* Performs an update operation.
* @param q search query for old object to update
* @param o object with which to update <tt>q</tt>
* @param upsert if the database should create the element if it does not exist
* @param multi if the update should be applied to all objects matching (db version 1.1.3 and above). An object will
* not be inserted if it does not exist in the collection and upsert=true and multi=true.
* See <a href="http://www.mongodb.org/display/DOCS/Atomic+Operations">http://www.mongodb.org/display/DOCS/Atomic+Operations</a>
* @param concern the write concern
* @param encoder the DBEncoder to use
* @return
* @throws MongoException
* @dochub update
*/
public abstract WriteResult update( DBObject q , DBObject o , boolean upsert , boolean multi , WriteConcern concern, DBEncoder encoder ) throws MongoException ;
/**
* calls {@link DBCollection#update(com.mongodb.DBObject, com.mongodb.DBObject, boolean, boolean, com.mongodb.WriteConcern)} with default WriteConcern.
* @param q search query for old object to update
* @param o object with which to update <tt>q</tt>
* @param upsert if the database should create the element if it does not exist
* @param multi if the update should be applied to all objects matching (db version 1.1.3 and above)
* See http://www.mongodb.org/display/DOCS/Atomic+Operations
* @return
* @throws MongoException
* @dochub update
*/
public WriteResult update( DBObject q , DBObject o , boolean upsert , boolean multi )
throws MongoException {
return update( q , o , upsert , multi , getWriteConcern() );
}
/**
* calls {@link DBCollection#update(com.mongodb.DBObject, com.mongodb.DBObject, boolean, boolean)} with upsert=false and multi=false
* @param q search query for old object to update
* @param o object with which to update <tt>q</tt>
* @return
* @throws MongoException
* @dochub update
*/
public WriteResult update( DBObject q , DBObject o ) throws MongoException {
return update( q , o , false , false );
}
/**
* calls {@link DBCollection#update(com.mongodb.DBObject, com.mongodb.DBObject, boolean, boolean)} with upsert=false and multi=true
* @param q search query for old object to update
* @param o object with which to update <tt>q</tt>
* @return
* @throws MongoException
* @dochub update
*/
public WriteResult updateMulti( DBObject q , DBObject o ) throws MongoException {
return update( q , o , false , true );
}
/**
* Adds any necessary fields to a given object before saving it to the collection.
* @param o object to which to add the fields
*/
protected abstract void doapply( DBObject o );
/**
* Removes objects from the database collection.
* @param o the object that documents to be removed must match
* @param concern WriteConcern for this operation
* @return
* @throws MongoException
* @dochub remove
*/
public WriteResult remove( DBObject o , WriteConcern concern ) throws MongoException {
return remove( o, concern, getDBEncoder());
}
/**
* Removes objects from the database collection.
* @param o the object that documents to be removed must match
* @param concern WriteConcern for this operation
* @param encoder the DBEncoder to use
* @return
* @throws MongoException
* @dochub remove
*/
public abstract WriteResult remove( DBObject o , WriteConcern concern, DBEncoder encoder ) throws MongoException ;
/**
* calls {@link DBCollection#remove(com.mongodb.DBObject, com.mongodb.WriteConcern)} with the default WriteConcern
* @param o the object that documents to be removed must match
* @return
* @throws MongoException
* @dochub remove
*/
public WriteResult remove( DBObject o )
throws MongoException {
return remove( o , getWriteConcern() );
}
/**
* Finds objects
*/
abstract Iterator<DBObject> __find( DBObject ref , DBObject fields , int numToSkip , int batchSize , int limit, int options, ReadPreference readPref, DBDecoder decoder ) throws MongoException ;
/**
* Calls {@link DBCollection#find(com.mongodb.DBObject, com.mongodb.DBObject, int, int)} and applies the query options
* @param query query used to search
* @param fields the fields of matching objects to return
* @param numToSkip number of objects to skip
* @param batchSize the batch size. This option has a complex behavior, see {@link DBCursor#batchSize(int) }
* @param options - see Bytes QUERYOPTION_*
* @return the cursor
* @throws MongoException
* @dochub find
*/
@Deprecated
public final DBCursor find( DBObject query , DBObject fields , int numToSkip , int batchSize , int options ) throws MongoException{
return find(query, fields, numToSkip, batchSize).addOption(options);
}
/**
* Finds objects from the database that match a query.
* A DBCursor object is returned, that can be iterated to go through the results.
*
* @param query query used to search
* @param fields the fields of matching objects to return
* @param numToSkip number of objects to skip
* @param batchSize the batch size. This option has a complex behavior, see {@link DBCursor#batchSize(int) }
* @return the cursor
* @throws MongoException
* @dochub find
*/
@Deprecated
public final DBCursor find( DBObject query , DBObject fields , int numToSkip , int batchSize ) {
DBCursor cursor = find(query, fields).skip(numToSkip).batchSize(batchSize);
return cursor;
}
// ------
/**
* Finds an object by its id.
* This compares the passed in value to the _id field of the document
*
* @param obj any valid object
* @return the object, if found, otherwise <code>null</code>
* @throws MongoException
*/
public final DBObject findOne( Object obj )
throws MongoException {
return findOne(obj, null);
}
/**
* Finds an object by its id.
* This compares the passed in value to the _id field of the document
*
* @param obj any valid object
* @param fields fields to return
* @return the object, if found, otherwise <code>null</code>
* @dochub find
*/
public final DBObject findOne( Object obj, DBObject fields ) {
Iterator<DBObject> iterator = __find( new BasicDBObject("_id", obj), fields, 0, -1, 0, getOptions(), getReadPreference(), getDecoder() );
return (iterator != null ? iterator.next() : null);
}
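    // Illustrative usage sketch (the "coll" variable is an assumption, obtained elsewhere via db.getCollection(...)):
    //   DBObject doc = coll.findOne(new ObjectId("4c2209f9f3924d31102bd84a"));
    // compares the given value against the _id field of the stored documents.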
/**
* Finds the first document in the query and updates it.
* @param query query to match
* @param fields fields to be returned
* @param sort sort to apply before picking first document
* @param remove if true, document found will be removed
* @param update update to apply
* @param returnNew if true, the updated document is returned, otherwise the old document is returned (or it would be lost forever)
* @param upsert do upsert (insert if document not present)
* @return the document
*/
public DBObject findAndModify(DBObject query, DBObject fields, DBObject sort, boolean remove, DBObject update, boolean returnNew, boolean upsert) {
BasicDBObject cmd = new BasicDBObject( "findandmodify", _name);
if (query != null && !query.keySet().isEmpty())
cmd.append( "query", query );
if (fields != null && !fields.keySet().isEmpty())
cmd.append( "fields", fields );
if (sort != null && !sort.keySet().isEmpty())
cmd.append( "sort", sort );
if (remove)
cmd.append( "remove", remove );
else {
if (update != null && !update.keySet().isEmpty()) {
                // if the 1st key doesn't start with $, the object would be inserted as-is, so it needs to be checked
String key = update.keySet().iterator().next();
if (key.charAt(0) != '$')
_checkObject(update, false, false);
cmd.append( "update", update );
}
if (returnNew)
cmd.append( "new", returnNew );
if (upsert)
cmd.append( "upsert", upsert );
}
if (remove && !(update == null || update.keySet().isEmpty() || returnNew))
throw new MongoException("FindAndModify: Remove cannot be mixed with the Update, or returnNew params!");
CommandResult res = this._db.command( cmd );
if (res.ok() || res.getErrorMessage().equals( "No matching object found" ))
return (DBObject) res.get( "value" );
res.throwOnError();
return null;
}
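    // Illustrative usage sketch ("coll" and the "state"/"created" fields are assumptions, not part of this class):
    //   DBObject claimed = coll.findAndModify(
    //           new BasicDBObject("state", "ready"),                               // query
    //           null,                                                              // fields
    //           new BasicDBObject("created", 1),                                   // sort
    //           false,                                                             // remove
    //           new BasicDBObject("$set", new BasicDBObject("state", "running")),  // update
    //           true,                                                              // returnNew
    //           false);                                                            // upsert
    // which atomically claims the oldest "ready" document and returns its updated form.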
/**
* calls {@link DBCollection#findAndModify(com.mongodb.DBObject, com.mongodb.DBObject, com.mongodb.DBObject, boolean, com.mongodb.DBObject, boolean, boolean)}
* with fields=null, remove=false, returnNew=false, upsert=false
* @param query
* @param sort
* @param update
* @return the old document
*/
public DBObject findAndModify( DBObject query , DBObject sort , DBObject update){
return findAndModify( query, null, sort, false, update, false, false);
}
/**
* calls {@link DBCollection#findAndModify(com.mongodb.DBObject, com.mongodb.DBObject, com.mongodb.DBObject, boolean, com.mongodb.DBObject, boolean, boolean)}
* with fields=null, sort=null, remove=false, returnNew=false, upsert=false
* @param query
* @param update
* @return the old document
*/
public DBObject findAndModify( DBObject query , DBObject update ) {
return findAndModify( query, null, null, false, update, false, false );
}
/**
* calls {@link DBCollection#findAndModify(com.mongodb.DBObject, com.mongodb.DBObject, com.mongodb.DBObject, boolean, com.mongodb.DBObject, boolean, boolean)}
* with fields=null, sort=null, remove=true, returnNew=false, upsert=false
* @param query
* @return the removed document
*/
public DBObject findAndRemove( DBObject query ) {
return findAndModify( query, null, null, true, null, false, false );
}
// --- START INDEX CODE ---
/**
* calls {@link DBCollection#createIndex(com.mongodb.DBObject, com.mongodb.DBObject)} with default index options
* @param keys an object with a key set of the fields desired for the index
* @throws MongoException
*/
public final void createIndex( final DBObject keys )
throws MongoException {
createIndex( keys , defaultOptions( keys ) );
}
/**
* Forces creation of an index on a set of fields, if one does not already exist.
* @param keys
* @param options
* @throws MongoException
*/
public void createIndex( DBObject keys , DBObject options ) throws MongoException {
createIndex( keys, options, getDBEncoder());
}
/**
* Forces creation of an index on a set of fields, if one does not already exist.
* @param keys
* @param options
* @param encoder the DBEncoder to use
* @throws MongoException
*/
public abstract void createIndex( DBObject keys , DBObject options, DBEncoder encoder ) throws MongoException;
/**
* Creates an ascending index on a field with default options, if one does not already exist.
* @param name name of field to index on
*/
public final void ensureIndex( final String name ){
ensureIndex( new BasicDBObject( name , 1 ) );
}
/**
* calls {@link DBCollection#ensureIndex(com.mongodb.DBObject, com.mongodb.DBObject)} with default options
* @param keys an object with a key set of the fields desired for the index
* @throws MongoException
*/
public final void ensureIndex( final DBObject keys )
throws MongoException {
ensureIndex( keys , defaultOptions( keys ) );
}
/**
* calls {@link DBCollection#ensureIndex(com.mongodb.DBObject, java.lang.String, boolean)} with unique=false
* @param keys fields to use for index
* @param name an identifier for the index
* @throws MongoException
* @dochub indexes
*/
public void ensureIndex( DBObject keys , String name )
throws MongoException {
ensureIndex( keys , name , false );
}
/**
* Ensures an index on this collection (that is, the index will be created if it does not exist).
* @param keys fields to use for index
* @param name an identifier for the index. If null or empty, the default name will be used.
* @param unique if the index should be unique
* @throws MongoException
*/
public void ensureIndex( DBObject keys , String name , boolean unique )
throws MongoException {
DBObject options = defaultOptions( keys );
if (name != null && name.length()>0)
options.put( "name" , name );
if ( unique )
options.put( "unique" , Boolean.TRUE );
ensureIndex( keys , options );
}
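    // Illustrative usage sketch ("coll" and the "email" field are assumptions):
    //   coll.ensureIndex(new BasicDBObject("email", 1), "email_unique_idx", true);
    // requests a unique ascending index; repeated calls are cheap because the generated options are cached
    // in _createdIndexes by the ensureIndex overload below.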
/**
* Creates an index on a set of fields, if one does not already exist.
* @param keys an object with a key set of the fields desired for the index
* @param optionsIN options for the index (name, unique, etc)
* @throws MongoException
*/
public final void ensureIndex( final DBObject keys , final DBObject optionsIN )
throws MongoException {
if ( checkReadOnly( false ) ) return;
final DBObject options = defaultOptions( keys );
for ( String k : optionsIN.keySet() )
options.put( k , optionsIN.get( k ) );
final String name = options.get( "name" ).toString();
if ( _createdIndexes.contains( name ) )
return;
createIndex( keys , options );
_createdIndexes.add( name );
}
/**
* Clears all indices that have not yet been applied to this collection.
*/
public void resetIndexCache(){
_createdIndexes.clear();
}
DBObject defaultOptions( DBObject keys ){
DBObject o = new BasicDBObject();
o.put( "name" , genIndexName( keys ) );
o.put( "ns" , _fullName );
return o;
}
/**
* Convenience method to generate an index name from the set of fields it is over.
* @param keys the names of the fields used in this index
* @return a string representation of this index's fields
*/
public static String genIndexName( DBObject keys ){
StringBuilder name = new StringBuilder();
for ( String s : keys.keySet() ){
if ( name.length() > 0 )
name.append( '_' );
name.append( s ).append( '_' );
Object val = keys.get( s );
if ( val instanceof Number || val instanceof String )
name.append( val.toString().replace( ' ', '_' ) );
}
return name.toString();
}
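    // Illustrative result sketch: for a compound key built as
    //   DBObject keys = BasicDBObjectBuilder.start().add("a", 1).add("b", -1).get();
    // genIndexName(keys) yields "a_1_b_-1"; string values such as "2d" are embedded the same way,
    // with any spaces replaced by underscores.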
// --- END INDEX CODE ---
/**
* Set hint fields for this collection (to optimize queries).
* @param lst a list of <code>DBObject</code>s to be used as hints
*/
public void setHintFields( List<DBObject> lst ){
_hintFields = lst;
}
/**
* Queries for an object in this collection.
* @param ref object for which to search
* @return an iterator over the results
* @dochub find
*/
public final DBCursor find( DBObject ref ){
return new DBCursor( this, ref, null, getReadPreference());
}
/**
* Queries for an object in this collection.
*
* <p>
* An empty DBObject will match every document in the collection.
* Regardless of fields specified, the _id fields are always returned.
* </p>
* <p>
* An example that returns the "x" and "_id" fields for every document
* in the collection that has an "x" field:
* </p>
* <blockquote><pre>
* BasicDBObject keys = new BasicDBObject();
* keys.put("x", 1);
*
* DBCursor cursor = collection.find(new BasicDBObject(), keys);
* </pre></blockquote>
*
* @param ref object for which to search
* @param keys fields to return
* @return a cursor to iterate over results
* @dochub find
*/
public final DBCursor find( DBObject ref , DBObject keys ){
return new DBCursor( this, ref, keys, getReadPreference());
}
/**
* Queries for all objects in this collection.
* @return a cursor which will iterate over every object
* @dochub find
*/
public final DBCursor find(){
return new DBCursor( this, null, null, getReadPreference());
}
/**
* Returns a single object from this collection.
* @return the object found, or <code>null</code> if the collection is empty
* @throws MongoException
*/
public final DBObject findOne()
throws MongoException {
return findOne( new BasicDBObject() );
}
/**
* Returns a single object from this collection matching the query.
* @param o the query object
* @return the object found, or <code>null</code> if no such object exists
* @throws MongoException
*/
public final DBObject findOne( DBObject o )
throws MongoException {
return findOne( o, null, getReadPreference());
}
/**
* Returns a single object from this collection matching the query.
* @param o the query object
* @param fields fields to return
* @return the object found, or <code>null</code> if no such object exists
* @dochub find
*/
public final DBObject findOne( DBObject o, DBObject fields ) {
return findOne( o, fields, getReadPreference());
}
/**
* Returns a single object from this collection matching the query.
* @param o the query object
* @param fields fields to return
* @return the object found, or <code>null</code> if no such object exists
* @dochub find
*/
public final DBObject findOne( DBObject o, DBObject fields, ReadPreference readPref ) {
Iterator<DBObject> i = __find( o , fields , 0 , -1 , 0, getOptions(), readPref, getDecoder() );
DBObject obj = (i == null ? null : i.next());
if ( obj != null && ( fields != null && fields.keySet().size() > 0 ) ){
obj.markAsPartialObject();
}
return obj;
}
// Only create a new decoder if there is a decoder factory explicitly set on the collection. Otherwise return null
// so that DBPort will use a cached decoder from the default factory.
private DBDecoder getDecoder() {
return _decoderFactory != null ? _decoderFactory.create() : null;
}
// Only create a new encoder if there is an encoder factory explicitly set on the collection. Otherwise return null
// to allow DB to create its own or use a cached one.
private DBEncoder getDBEncoder() {
return _encoderFactory != null ? _encoderFactory.create() : null;
}
/**
* calls {@link DBCollection#apply(com.mongodb.DBObject, boolean)} with ensureID=true
* @param o <code>DBObject</code> to which to add fields
* @return the modified parameter object
*/
public final Object apply( DBObject o ){
return apply( o , true );
}
/**
* calls {@link DBCollection#doapply(com.mongodb.DBObject)}, optionally adding an automatic _id field
* @param jo object to add fields to
* @param ensureID whether to add an <code>_id</code> field
* @return the modified object <code>o</code>
*/
public final Object apply( DBObject jo , boolean ensureID ){
Object id = jo.get( "_id" );
if ( ensureID && id == null ){
id = ObjectId.get();
jo.put( "_id" , id );
}
doapply( jo );
return id;
}
/**
* calls {@link DBCollection#save(com.mongodb.DBObject, com.mongodb.WriteConcern)} with default WriteConcern
* @param jo the <code>DBObject</code> to save
* will add <code>_id</code> field to jo if needed
* @return
*/
public final WriteResult save( DBObject jo ) {
return save(jo, getWriteConcern());
}
/**
* Saves an object to this collection (does insert or update based on the object _id).
* @param jo the <code>DBObject</code> to save
* @param concern the write concern
* @return
* @throws MongoException
*/
public final WriteResult save( DBObject jo, WriteConcern concern )
throws MongoException {
if ( checkReadOnly( true ) )
return null;
_checkObject( jo , false , false );
Object id = jo.get( "_id" );
if ( id == null || ( id instanceof ObjectId && ((ObjectId)id).isNew() ) ){
if ( id != null && id instanceof ObjectId )
((ObjectId)id).notNew();
if ( concern == null )
return insert( jo );
else
return insert( jo, concern );
}
DBObject q = new BasicDBObject();
q.put( "_id" , id );
if ( concern == null )
return update( q , jo , true , false );
else
return update( q , jo , true , false , concern );
}
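    // Illustrative usage sketch ("coll" and the field names are assumptions):
    //   BasicDBObject doc = new BasicDBObject("name", "Ada");
    //   coll.save(doc);   // no _id yet -> handled as an insert, filling in doc.get("_id")
    //   coll.save(doc);   // _id now present -> handled as an upsert on { _id : ... }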
// ---- DB COMMANDS ----
/**
* Drops all indices from this collection
* @throws MongoException
*/
public void dropIndexes()
throws MongoException {
dropIndexes( "*" );
}
/**
* Drops an index from this collection
* @param name the index name
* @throws MongoException
*/
public void dropIndexes( String name )
throws MongoException {
DBObject cmd = BasicDBObjectBuilder.start()
.add( "deleteIndexes" , getName() )
.add( "index" , name )
.get();
resetIndexCache();
CommandResult res = _db.command( cmd );
if (res.ok() || res.getErrorMessage().equals( "ns not found" ))
return;
res.throwOnError();
}
/**
* Drops (deletes) this collection. Use with care.
* @throws MongoException
*/
public void drop()
throws MongoException {
resetIndexCache();
CommandResult res =_db.command( BasicDBObjectBuilder.start().add( "drop" , getName() ).get() );
if (res.ok() || res.getErrorMessage().equals( "ns not found" ))
return;
res.throwOnError();
}
/**
* returns the number of documents in this collection.
* @return
* @throws MongoException
*/
public long count()
throws MongoException {
return getCount(new BasicDBObject(), null);
}
/**
* returns the number of documents that match a query.
* @param query query to match
* @return
* @throws MongoException
*/
public long count(DBObject query)
throws MongoException {
return getCount(query, null);
}
/**
* calls {@link DBCollection#getCount(com.mongodb.DBObject, com.mongodb.DBObject)} with an empty query and null fields.
* @return number of documents that match query
* @throws MongoException
*/
public long getCount()
throws MongoException {
return getCount(new BasicDBObject(), null);
}
/**
* calls {@link DBCollection#getCount(com.mongodb.DBObject, com.mongodb.DBObject)} with null fields.
* @param query query to match
* @return
* @throws MongoException
*/
public long getCount(DBObject query)
throws MongoException {
return getCount(query, null);
}
/**
* calls {@link DBCollection#getCount(com.mongodb.DBObject, com.mongodb.DBObject, long, long)} with limit=0 and skip=0
* @param query query to match
* @param fields fields to return
* @return
* @throws MongoException
*/
public long getCount(DBObject query, DBObject fields)
throws MongoException {
return getCount( query , fields , 0 , 0 );
}
/**
* Returns the number of documents in the collection
* that match the specified query
*
* @param query query to select documents to count
* @param fields fields to return
* @param limit limit the count to this value
* @param skip number of entries to skip
* @return number of documents that match query and fields
* @throws MongoException
*/
public long getCount(DBObject query, DBObject fields, long limit, long skip )
throws MongoException {
BasicDBObject cmd = new BasicDBObject();
cmd.put("count", getName());
cmd.put("query", query);
if (fields != null) {
cmd.put("fields", fields);
}
if ( limit > 0 )
cmd.put( "limit" , limit );
if ( skip > 0 )
cmd.put( "skip" , skip );
CommandResult res = _db.command(cmd,getOptions());
if ( ! res.ok() ){
String errmsg = res.getErrorMessage();
if ( errmsg.equals("ns does not exist") ||
errmsg.equals("ns missing" ) ){
// for now, return 0 - lets pretend it does exist
return 0;
}
res.throwOnError();
}
return res.getLong("n");
}
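    // Illustrative usage sketch ("coll" and the "status" field are assumptions):
    //   long active = coll.getCount(new BasicDBObject("status", "active"), null, 100, 0);
    // caps the server-side count at 100 matches; a missing collection is reported as 0 rather than as an error.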
/**
* Calls {@link DBCollection#rename(java.lang.String, boolean)} with dropTarget=false
* @param newName new collection name (not a full namespace)
* @return the new collection
* @throws MongoException
*/
public DBCollection rename( String newName )
throws MongoException {
return rename(newName, false);
}
/**
     * Renames this collection to newName.
* @param newName new collection name (not a full namespace)
* @param dropTarget if a collection with the new name exists, whether or not to drop it
* @return the new collection
* @throws MongoException
*/
public DBCollection rename( String newName, boolean dropTarget )
throws MongoException {
CommandResult ret =
_db.getSisterDB( "admin" )
.command( BasicDBObjectBuilder.start()
.add( "renameCollection" , _fullName )
.add( "to" , _db._name + "." + newName )
.add( "dropTarget" , dropTarget )
.get() );
ret.throwOnError();
resetIndexCache();
return _db.getCollection( newName );
}
/**
* calls {@link DBCollection#group(com.mongodb.DBObject, com.mongodb.DBObject, com.mongodb.DBObject, java.lang.String, java.lang.String)} with finalize=null
* @param key - { a : true }
* @param cond - optional condition on query
* @param reduce javascript reduce function
* @param initial initial value for first match on a key
* @return
* @throws MongoException
* @see <a href="http://www.mongodb.org/display/DOCS/Aggregation">http://www.mongodb.org/display/DOCS/Aggregation</a>
*/
public DBObject group( DBObject key , DBObject cond , DBObject initial , String reduce )
throws MongoException {
return group( key , cond , initial , reduce , null );
}
/**
* Applies a group operation
* @param key - { a : true }
* @param cond - optional condition on query
* @param reduce javascript reduce function
* @param initial initial value for first match on a key
* @param finalize An optional function that can operate on the result(s) of the reduce function.
* @return
* @throws MongoException
* @see <a href="http://www.mongodb.org/display/DOCS/Aggregation">http://www.mongodb.org/display/DOCS/Aggregation</a>
*/
public DBObject group( DBObject key , DBObject cond , DBObject initial , String reduce , String finalize )
throws MongoException {
GroupCommand cmd = new GroupCommand(this, key, cond, initial, reduce, finalize);
return group( cmd );
}
/**
* Applies a group operation
* @param cmd the group command
* @return
* @throws MongoException
* @see <a href="http://www.mongodb.org/display/DOCS/Aggregation">http://www.mongodb.org/display/DOCS/Aggregation</a>
*/
public DBObject group( GroupCommand cmd ) {
CommandResult res = _db.command( cmd.toDBObject(), getOptions() );
res.throwOnError();
return (DBObject)res.get( "retval" );
}
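    // Illustrative usage sketch ("coll" and the "dept" field are assumptions): counting documents per dept:
    //   GroupCommand cmd = new GroupCommand(coll,
    //           new BasicDBObject("dept", true),               // key
    //           null,                                          // cond
    //           new BasicDBObject("count", 0),                 // initial
    //           "function(obj, prev){ prev.count += 1; }",     // reduce
    //           null);                                         // finalize
    //   DBObject perDept = coll.group(cmd);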
/**
* @deprecated prefer the {@link DBCollection#group(com.mongodb.GroupCommand)} which is more standard
* Applies a group operation
* @param args object representing the arguments to the group function
* @return
* @throws MongoException
* @see <a href="http://www.mongodb.org/display/DOCS/Aggregation">http://www.mongodb.org/display/DOCS/Aggregation</a>
*/
@Deprecated
public DBObject group( DBObject args )
throws MongoException {
args.put( "ns" , getName() );
CommandResult res = _db.command( new BasicDBObject( "group" , args ), getOptions() );
res.throwOnError();
return (DBObject)res.get( "retval" );
}
/**
* find distinct values for a key
* @param key
* @return
*/
public List distinct( String key ){
return distinct( key , new BasicDBObject() );
}
/**
* find distinct values for a key
* @param key
* @param query query to match
* @return
*/
public List distinct( String key , DBObject query ){
DBObject c = BasicDBObjectBuilder.start()
.add( "distinct" , getName() )
.add( "key" , key )
.add( "query" , query )
.get();
CommandResult res = _db.command( c, getOptions() );
res.throwOnError();
return (List)(res.get( "values" ));
}
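    // Illustrative usage sketch ("coll", "country" and "active" are assumptions):
    //   List countries = coll.distinct("country", new BasicDBObject("active", true));
    // returns the raw "values" array produced by the distinct command.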
/**
* performs a map reduce operation
* Runs the command in REPLACE output mode (saves to named collection)
*
* @param map
* map function in javascript code
* @param outputTarget
* optional - leave null if want to use temp collection
* @param reduce
* reduce function in javascript code
* @param query
* to match
* @return
* @throws MongoException
* @dochub mapreduce
*/
public MapReduceOutput mapReduce( String map , String reduce , String outputTarget , DBObject query ) throws MongoException{
return mapReduce( new MapReduceCommand( this , map , reduce , outputTarget , MapReduceCommand.OutputType.REPLACE, query ) );
}
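    // Illustrative usage sketch (the JavaScript bodies, the "dept" field and the output collection name are assumptions):
    //   MapReduceOutput out = coll.mapReduce(
    //           "function(){ emit(this.dept, 1); }",
    //           "function(key, values){ var total = 0; values.forEach(function(v){ total += v; }); return total; }",
    //           "dept_counts",            // output collection, REPLACE mode
    //           new BasicDBObject());     // match every document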
/**
* performs a map reduce operation
* Specify an outputType to control job execution
* * INLINE - Return results inline
* * REPLACE - Replace the output collection with the job output
* * MERGE - Merge the job output with the existing contents of outputTarget
* * REDUCE - Reduce the job output with the existing contents of
* outputTarget
*
* @param map
* map function in javascript code
* @param outputTarget
* optional - leave null if want to use temp collection
* @param outputType
* set the type of job output
* @param reduce
* reduce function in javascript code
* @param query
* to match
* @return
* @throws MongoException
* @dochub mapreduce
*/
public MapReduceOutput mapReduce( String map , String reduce , String outputTarget , MapReduceCommand.OutputType outputType , DBObject query )
throws MongoException{
return mapReduce( new MapReduceCommand( this , map , reduce , outputTarget , outputType , query ) );
}
/**
* performs a map reduce operation
*
* @param command
* object representing the parameters
* @return
* @throws MongoException
*/
public MapReduceOutput mapReduce( MapReduceCommand command ) throws MongoException{
DBObject cmd = command.toDBObject();
// if type in inline, then query options like slaveOk is fine
CommandResult res = null;
if (command.getOutputType() == MapReduceCommand.OutputType.INLINE)
res = _db.command( cmd, getOptions(), command.getReadPreference() != null ? command.getReadPreference() : getReadPreference() );
else
res = _db.command( cmd );
res.throwOnError();
return new MapReduceOutput( this , cmd, res );
}
/**
* performs a map reduce operation
*
* @param command
* object representing the parameters
* @return
* @throws MongoException
*/
public MapReduceOutput mapReduce( DBObject command ) throws MongoException{
if ( command.get( "mapreduce" ) == null && command.get( "mapReduce" ) == null )
throw new IllegalArgumentException( "need mapreduce arg" );
CommandResult res = _db.command( command );
res.throwOnError();
return new MapReduceOutput( this , command, res );
}
/**
* Return a list of the indexes for this collection. Each object
* in the list is the "info document" from MongoDB
*
* @return list of index documents
*/
public List<DBObject> getIndexInfo() {
BasicDBObject cmd = new BasicDBObject();
cmd.put("ns", getFullName());
DBCursor cur = _db.getCollection("system.indexes").find(cmd);
List<DBObject> list = new ArrayList<DBObject>();
while(cur.hasNext()) {
list.add(cur.next());
}
return list;
}
/**
* Drops an index from this collection
* @param keys keys of the index
* @throws MongoException
*/
public void dropIndex( DBObject keys )
throws MongoException {
dropIndexes( genIndexName( keys ) );
}
/**
* Drops an index from this collection
* @param name name of index to drop
* @throws MongoException
*/
public void dropIndex( String name )
throws MongoException {
dropIndexes( name );
}
/**
* gets the collections statistics ("collstats" command)
* @return
*/
public CommandResult getStats() {
return getDB().command(new BasicDBObject("collstats", getName()), getOptions());
}
/**
* returns whether or not this is a capped collection
* @return
*/
public boolean isCapped() {
CommandResult stats = getStats();
Object capped = stats.get("capped");
return(capped != null && (Integer)capped == 1);
}
// ------
/**
* Initializes a new collection. No operation is actually performed on the database.
* @param base database in which to create the collection
* @param name the name of the collection
*/
protected DBCollection( DB base , String name ){
_db = base;
_name = name;
_fullName = _db.getName() + "." + name;
_options = new Bytes.OptionHolder( _db._options );
}
protected DBObject _checkObject( DBObject o , boolean canBeNull , boolean query ){
if ( o == null ){
if ( canBeNull )
return null;
throw new IllegalArgumentException( "can't be null" );
}
if ( o.isPartialObject() && ! query )
throw new IllegalArgumentException( "can't save partial objects" );
if ( ! query ){
_checkKeys(o);
}
return o;
}
/**
* Checks key strings for invalid characters.
*/
private void _checkKeys( DBObject o ) {
for ( String s : o.keySet() ){
validateKey ( s );
Object inner = o.get( s );
if ( inner instanceof DBObject ) {
_checkKeys( (DBObject)inner );
} else if ( inner instanceof Map ) {
_checkKeys( (Map<String, Object>)inner );
}
}
}
/**
* Checks key strings for invalid characters.
*/
private void _checkKeys( Map<String, Object> o ) {
for ( String s : o.keySet() ){
validateKey ( s );
Object inner = o.get( s );
if ( inner instanceof DBObject ) {
_checkKeys( (DBObject)inner );
} else if ( inner instanceof Map ) {
_checkKeys( (Map<String, Object>)inner );
}
}
}
/**
* Check for invalid key names
* @param s the string field/key to check
* @exception IllegalArgumentException if the key is not valid.
*/
private void validateKey(String s ) {
if ( s.contains( "." ) )
throw new IllegalArgumentException( "fields stored in the db can't have . in them. (Bad Key: '" + s + "')" );
if ( s.startsWith( "$" ) )
throw new IllegalArgumentException( "fields stored in the db can't start with '$' (Bad Key: '" + s + "')" );
}
/**
* Finds a collection that is prefixed with this collection's name.
* A typical use of this might be
* <blockquote><pre>
* DBCollection users = mongo.getCollection( "wiki" ).getCollection( "users" );
* </pre></blockquote>
* Which is equivalent to
     * <blockquote><pre>
* DBCollection users = mongo.getCollection( "wiki.users" );
* </pre></blockquote>
* @param n the name of the collection to find
* @return the matching collection
*/
public DBCollection getCollection( String n ){
return _db.getCollection( _name + "." + n );
}
/**
* Returns the name of this collection.
* @return the name of this collection
*/
public String getName(){
return _name;
}
/**
* Returns the full name of this collection, with the database name as a prefix.
* @return the name of this collection
*/
public String getFullName(){
return _fullName;
}
/**
* Returns the database this collection is a member of.
* @return this collection's database
*/
public DB getDB(){
return _db;
}
/**
* Returns if this collection's database is read-only
* @param strict if an exception should be thrown if the database is read-only
* @return if this collection's database is read-only
* @throws RuntimeException if the database is read-only and <code>strict</code> is set
*/
protected boolean checkReadOnly( boolean strict ){
if ( ! _db._readOnly )
return false;
if ( ! strict )
return true;
throw new IllegalStateException( "db is read only" );
}
@Override
public int hashCode(){
return _fullName.hashCode();
}
@Override
public boolean equals( Object o ){
return o == this;
}
@Override
public String toString(){
return _name;
}
/**
* Sets a default class for objects in this collection; null resets the class to nothing.
* @param c the class
* @throws IllegalArgumentException if <code>c</code> is not a DBObject
*/
public void setObjectClass( Class c ){
if ( c == null ){
// reset
_wrapper = null;
_objectClass = null;
return;
}
if ( ! DBObject.class.isAssignableFrom( c ) )
throw new IllegalArgumentException( c.getName() + " is not a DBObject" );
_objectClass = c;
if ( ReflectionDBObject.class.isAssignableFrom( c ) )
_wrapper = ReflectionDBObject.getWrapper( c );
else
_wrapper = null;
}
/**
* Gets the default class for objects in the collection
* @return the class
*/
public Class getObjectClass(){
return _objectClass;
}
/**
* sets the internal class
* @param path
* @param c
*/
public void setInternalClass( String path , Class c ){
_internalClass.put( path , c );
}
/**
* gets the internal class
* @param path
* @return
*/
protected Class getInternalClass( String path ){
Class c = _internalClass.get( path );
if ( c != null )
return c;
if ( _wrapper == null )
return null;
return _wrapper.getInternalClass( path );
}
/**
* Set the write concern for this collection. Will be used for
* writes to this collection. Overrides any setting of write
* concern at the DB level. See the documentation for
* {@link WriteConcern} for more information.
*
* @param concern write concern to use
*/
public void setWriteConcern( WriteConcern concern ){
_concern = concern;
}
/**
* Get the write concern for this collection.
* @return
*/
public WriteConcern getWriteConcern(){
if ( _concern != null )
return _concern;
return _db.getWriteConcern();
}
/**
* Sets the read preference for this collection. Will be used as default
* for reads from this collection; overrides DB & Connection level settings.
     * See the documentation for {@link ReadPreference} for more information.
*
* @param preference Read Preference to use
*/
public void setReadPreference( ReadPreference preference ){
_readPref = preference;
}
/**
* Gets the read preference
* @return
*/
public ReadPreference getReadPreference(){
if ( _readPref != null )
return _readPref;
return _db.getReadPreference();
}
/**
* makes this query ok to run on a slave node
*
* @deprecated Replaced with ReadPreference.SECONDARY
* @see com.mongodb.ReadPreference.SECONDARY
*/
@Deprecated
public void slaveOk(){
addOption( Bytes.QUERYOPTION_SLAVEOK );
}
/**
* adds a default query option
* @param option
*/
public void addOption( int option ){
_options.add(option);
}
/**
* sets the default query options
* @param options
*/
public void setOptions( int options ){
_options.set(options);
}
/**
* resets the default query options
*/
public void resetOptions(){
_options.reset();
}
/**
* gets the default query options
* @return
*/
public int getOptions(){
return _options.get();
}
/**
     * Set a custom decoder factory for this collection. Set to null to use the default from MongoOptions.
* @param fact the factory to set.
*/
public void setDBDecoderFactory(DBDecoderFactory fact) {
_decoderFactory = fact;
}
/**
* Get the decoder factory for this collection. A null return value means that the default from MongoOptions
* is being used.
* @return the factory
*/
public DBDecoderFactory getDBDecoderFactory() {
return _decoderFactory;
}
/**
     * Set a custom encoder factory for this collection. Set to null to use the default from MongoOptions.
* @param fact the factory to set.
*/
public void setDBEncoderFactory(DBEncoderFactory fact) {
_encoderFactory = fact;
}
/**
* Get the encoder factory for this collection. A null return value means that the default from MongoOptions
* is being used.
* @return the factory
*/
public DBEncoderFactory getDBEncoderFactory() {
return _encoderFactory;
}
final DB _db;
final protected String _name;
final protected String _fullName;
protected List<DBObject> _hintFields;
private WriteConcern _concern = null;
private ReadPreference _readPref = null;
private DBDecoderFactory _decoderFactory;
private DBEncoderFactory _encoderFactory;
final Bytes.OptionHolder _options;
protected Class _objectClass = null;
private Map<String,Class> _internalClass = Collections.synchronizedMap( new HashMap<String,Class>() );
private ReflectionDBObject.JavaWrapper _wrapper = null;
final private Set<String> _createdIndexes = new HashSet<String>();
}
|
src/main/com/mongodb/DBCollection.java
|
// DBCollection.java
/**
* Copyright (C) 2008 10gen Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mongodb;
// Mongo
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.bson.types.ObjectId;
/** This class provides a skeleton implementation of a database collection.
* <p>A typical invocation sequence is thus
* <blockquote><pre>
 * Mongo mongo = new Mongo( new DBAddress( "localhost", 27017 ) );
* DB db = mongo.getDB( "mydb" );
* DBCollection collection = db.getCollection( "test" );
* </pre></blockquote>
* @dochub collections
*/
@SuppressWarnings("unchecked")
public abstract class DBCollection {
/**
* Saves document(s) to the database.
* if doc doesn't have an _id, one will be added
* you can get the _id that was added from doc after the insert
*
* @param arr array of documents to save
* @param concern the write concern
* @return
* @throws MongoException
* @dochub insert
*/
public WriteResult insert(DBObject[] arr , WriteConcern concern ) throws MongoException {
return insert( arr, concern, getDBEncoderFactory().create() );
}
/**
* Saves document(s) to the database.
* if doc doesn't have an _id, one will be added
* you can get the _id that was added from doc after the insert
*
* @param arr array of documents to save
* @param concern the write concern
* @param encoder the DBEncoder to use
* @return
* @throws MongoException
* @dochub insert
*/
public abstract WriteResult insert(DBObject[] arr , WriteConcern concern, DBEncoder encoder) throws MongoException;
/**
* Inserts a document into the database.
* if doc doesn't have an _id, one will be added
* you can get the _id that was added from doc after the insert
*
* @param o
* @param concern the write concern
* @return
* @throws MongoException
* @dochub insert
*/
public WriteResult insert(DBObject o , WriteConcern concern )
throws MongoException {
return insert( new DBObject[]{ o } , concern );
}
/**
* Saves document(s) to the database.
* if doc doesn't have an _id, one will be added
* you can get the _id that was added from doc after the insert
*
* @param arr array of documents to save
* @return
* @throws MongoException
* @dochub insert
*/
public WriteResult insert(DBObject ... arr)
throws MongoException {
return insert( arr , getWriteConcern() );
}
/**
* Saves document(s) to the database.
* if doc doesn't have an _id, one will be added
* you can get the _id that was added from doc after the insert
*
* @param arr array of documents to save
* @return
* @throws MongoException
* @dochub insert
*/
public WriteResult insert(WriteConcern concern, DBObject ... arr)
throws MongoException {
return insert( arr, concern );
}
/**
* Saves document(s) to the database.
* if doc doesn't have an _id, one will be added
* you can get the _id that was added from doc after the insert
*
* @param list list of documents to save
* @return
* @throws MongoException
* @dochub insert
*/
public WriteResult insert(List<DBObject> list )
throws MongoException {
return insert( list, getWriteConcern() );
}
/**
* Saves document(s) to the database.
* if doc doesn't have an _id, one will be added
* you can get the _id that was added from doc after the insert
*
* @param list list of documents to save
* @param concern the write concern
* @return
* @throws MongoException
* @dochub insert
*/
public WriteResult insert(List<DBObject> list, WriteConcern concern )
throws MongoException {
return insert( list.toArray( new DBObject[list.size()] ) , concern );
}
/**
* Performs an update operation.
* @param q search query for old object to update
* @param o object with which to update <tt>q</tt>
* @param upsert if the database should create the element if it does not exist
* @param multi if the update should be applied to all objects matching (db version 1.1.3 and above). An object will
* not be inserted if it does not exist in the collection and upsert=true and multi=true.
* See <a href="http://www.mongodb.org/display/DOCS/Atomic+Operations">http://www.mongodb.org/display/DOCS/Atomic+Operations</a>
* @param concern the write concern
* @return
* @throws MongoException
* @dochub update
*/
public WriteResult update( DBObject q , DBObject o , boolean upsert , boolean multi , WriteConcern concern ) throws MongoException {
return update( q, o, upsert, multi, concern, getDBEncoderFactory().create() );
}
/**
* Performs an update operation.
* @param q search query for old object to update
* @param o object with which to update <tt>q</tt>
* @param upsert if the database should create the element if it does not exist
* @param multi if the update should be applied to all objects matching (db version 1.1.3 and above). An object will
* not be inserted if it does not exist in the collection and upsert=true and multi=true.
* See <a href="http://www.mongodb.org/display/DOCS/Atomic+Operations">http://www.mongodb.org/display/DOCS/Atomic+Operations</a>
* @param concern the write concern
* @param encoder the DBEncoder to use
* @return
* @throws MongoException
* @dochub update
*/
public abstract WriteResult update( DBObject q , DBObject o , boolean upsert , boolean multi , WriteConcern concern, DBEncoder encoder ) throws MongoException ;
/**
* calls {@link DBCollection#update(com.mongodb.DBObject, com.mongodb.DBObject, boolean, boolean, com.mongodb.WriteConcern)} with default WriteConcern.
* @param q search query for old object to update
* @param o object with which to update <tt>q</tt>
* @param upsert if the database should create the element if it does not exist
* @param multi if the update should be applied to all objects matching (db version 1.1.3 and above)
* See http://www.mongodb.org/display/DOCS/Atomic+Operations
* @return
* @throws MongoException
* @dochub update
*/
public WriteResult update( DBObject q , DBObject o , boolean upsert , boolean multi )
throws MongoException {
return update( q , o , upsert , multi , getWriteConcern() );
}
/**
* calls {@link DBCollection#update(com.mongodb.DBObject, com.mongodb.DBObject, boolean, boolean)} with upsert=false and multi=false
* @param q search query for old object to update
* @param o object with which to update <tt>q</tt>
* @return
* @throws MongoException
* @dochub update
*/
public WriteResult update( DBObject q , DBObject o ) throws MongoException {
return update( q , o , false , false );
}
/**
* calls {@link DBCollection#update(com.mongodb.DBObject, com.mongodb.DBObject, boolean, boolean)} with upsert=false and multi=true
* @param q search query for old object to update
* @param o object with which to update <tt>q</tt>
* @return
* @throws MongoException
* @dochub update
*/
public WriteResult updateMulti( DBObject q , DBObject o ) throws MongoException {
return update( q , o , false , true );
}
/**
* Adds any necessary fields to a given object before saving it to the collection.
* @param o object to which to add the fields
*/
protected abstract void doapply( DBObject o );
/**
* Removes objects from the database collection.
* @param o the object that documents to be removed must match
* @param concern WriteConcern for this operation
* @return
* @throws MongoException
* @dochub remove
*/
public WriteResult remove( DBObject o , WriteConcern concern ) throws MongoException {
return remove( o, concern, getDBEncoderFactory().create() );
}
/**
* Removes objects from the database collection.
* @param o the object that documents to be removed must match
* @param concern WriteConcern for this operation
* @param encoder the DBEncoder to use
* @return
* @throws MongoException
* @dochub remove
*/
public abstract WriteResult remove( DBObject o , WriteConcern concern, DBEncoder encoder ) throws MongoException ;
/**
* calls {@link DBCollection#remove(com.mongodb.DBObject, com.mongodb.WriteConcern)} with the default WriteConcern
* @param o the object that documents to be removed must match
* @return
* @throws MongoException
* @dochub remove
*/
public WriteResult remove( DBObject o )
throws MongoException {
return remove( o , getWriteConcern() );
}
/**
* Finds objects
*/
abstract Iterator<DBObject> __find( DBObject ref , DBObject fields , int numToSkip , int batchSize , int limit, int options, ReadPreference readPref, DBDecoder decoder ) throws MongoException ;
/**
* Calls {@link DBCollection#find(com.mongodb.DBObject, com.mongodb.DBObject, int, int)} and applies the query options
* @param query query used to search
* @param fields the fields of matching objects to return
* @param numToSkip number of objects to skip
* @param batchSize the batch size. This option has a complex behavior, see {@link DBCursor#batchSize(int) }
* @param options - see Bytes QUERYOPTION_*
* @return the cursor
* @throws MongoException
* @dochub find
*/
@Deprecated
public final DBCursor find( DBObject query , DBObject fields , int numToSkip , int batchSize , int options ) throws MongoException{
return find(query, fields, numToSkip, batchSize).addOption(options);
}
/**
* Finds objects from the database that match a query.
* A DBCursor object is returned, that can be iterated to go through the results.
*
* @param query query used to search
* @param fields the fields of matching objects to return
* @param numToSkip number of objects to skip
* @param batchSize the batch size. This option has a complex behavior, see {@link DBCursor#batchSize(int) }
* @return the cursor
* @throws MongoException
* @dochub find
*/
@Deprecated
public final DBCursor find( DBObject query , DBObject fields , int numToSkip , int batchSize ) {
DBCursor cursor = find(query, fields).skip(numToSkip).batchSize(batchSize);
return cursor;
}
// ------
/**
* Finds an object by its id.
* This compares the passed in value to the _id field of the document
*
* @param obj any valid object
* @return the object, if found, otherwise <code>null</code>
* @throws MongoException
*/
public final DBObject findOne( Object obj )
throws MongoException {
return findOne(obj, null);
}
/**
* Finds an object by its id.
* This compares the passed in value to the _id field of the document
*
* @param obj any valid object
* @param fields fields to return
* @return the object, if found, otherwise <code>null</code>
* @dochub find
*/
public final DBObject findOne( Object obj, DBObject fields ) {
Iterator<DBObject> iterator = __find( new BasicDBObject("_id", obj), fields, 0, -1, 0, getOptions(), getReadPreference(), getDecoder() );
return (iterator != null && iterator.hasNext() ? iterator.next() : null);
}
/**
* Finds the first document in the query and updates it.
* @param query query to match
* @param fields fields to be returned
* @param sort sort to apply before picking first document
* @param remove if true, document found will be removed
* @param update update to apply
* @param returnNew if true, the updated document is returned; otherwise the document as it was before modification is returned
* @param upsert do upsert (insert if document not present)
* @return the document
*/
public DBObject findAndModify(DBObject query, DBObject fields, DBObject sort, boolean remove, DBObject update, boolean returnNew, boolean upsert) {
BasicDBObject cmd = new BasicDBObject( "findandmodify", _name);
if (query != null && !query.keySet().isEmpty())
cmd.append( "query", query );
if (fields != null && !fields.keySet().isEmpty())
cmd.append( "fields", fields );
if (sort != null && !sort.keySet().isEmpty())
cmd.append( "sort", sort );
if (remove)
cmd.append( "remove", remove );
else {
if (update != null && !update.keySet().isEmpty()) {
// if the 1st key doesn't start with $, then the object will be inserted as is, need to check it
String key = update.keySet().iterator().next();
if (key.charAt(0) != '$')
_checkObject(update, false, false);
cmd.append( "update", update );
}
if (returnNew)
cmd.append( "new", returnNew );
if (upsert)
cmd.append( "upsert", upsert );
}
if (remove && !(update == null || update.keySet().isEmpty() || returnNew))
throw new MongoException("FindAndModify: Remove cannot be mixed with the Update, or returnNew params!");
CommandResult res = this._db.command( cmd );
if (res.ok() || res.getErrorMessage().equals( "No matching object found" ))
return (DBObject) res.get( "value" );
res.throwOnError();
return null;
}
/**
* calls {@link DBCollection#findAndModify(com.mongodb.DBObject, com.mongodb.DBObject, com.mongodb.DBObject, boolean, com.mongodb.DBObject, boolean, boolean)}
* with fields=null, remove=false, returnNew=false, upsert=false
* @param query
* @param sort
* @param update
* @return the old document
*/
public DBObject findAndModify( DBObject query , DBObject sort , DBObject update){
return findAndModify( query, null, sort, false, update, false, false);
}
/**
* calls {@link DBCollection#findAndModify(com.mongodb.DBObject, com.mongodb.DBObject, com.mongodb.DBObject, boolean, com.mongodb.DBObject, boolean, boolean)}
* with fields=null, sort=null, remove=false, returnNew=false, upsert=false
* @param query
* @param update
* @return the old document
*/
public DBObject findAndModify( DBObject query , DBObject update ) {
return findAndModify( query, null, null, false, update, false, false );
}
/**
* calls {@link DBCollection#findAndModify(com.mongodb.DBObject, com.mongodb.DBObject, com.mongodb.DBObject, boolean, com.mongodb.DBObject, boolean, boolean)}
* with fields=null, sort=null, remove=true, returnNew=false, upsert=false
* @param query
* @return the removed document
*/
public DBObject findAndRemove( DBObject query ) {
return findAndModify( query, null, null, true, null, false, false );
}
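// Illustrative usage sketch (not part of the driver): using findAndModify as an atomic
// counter. The "counters" collection and the "seq" field are hypothetical.
//
//   DBObject query  = new BasicDBObject("_id", "orderId");
//   DBObject update = new BasicDBObject("$inc", new BasicDBObject("seq", 1));
//   // returnNew=true returns the incremented document, upsert=true creates it on first use
//   DBObject next = counters.findAndModify(query, null, null, false, update, true, true);
//   // the shorter overloads return the pre-modification document:
//   DBObject previous = counters.findAndModify(query, update);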
// --- START INDEX CODE ---
/**
* calls {@link DBCollection#createIndex(com.mongodb.DBObject, com.mongodb.DBObject)} with default index options
* @param keys an object with a key set of the fields desired for the index
* @throws MongoException
*/
public final void createIndex( final DBObject keys )
throws MongoException {
createIndex( keys , defaultOptions( keys ) );
}
/**
* Forces creation of an index on a set of fields, if one does not already exist.
* @param keys
* @param options
* @throws MongoException
*/
public void createIndex( DBObject keys , DBObject options ) throws MongoException {
createIndex( keys, options, getDBEncoderFactory().create() );
}
/**
* Forces creation of an index on a set of fields, if one does not already exist.
* @param keys
* @param options
* @param encoder the DBEncoder to use
* @throws MongoException
*/
public abstract void createIndex( DBObject keys , DBObject options, DBEncoder encoder ) throws MongoException;
/**
* Creates an ascending index on a field with default options, if one does not already exist.
* @param name name of field to index on
*/
public final void ensureIndex( final String name ){
ensureIndex( new BasicDBObject( name , 1 ) );
}
/**
* calls {@link DBCollection#ensureIndex(com.mongodb.DBObject, com.mongodb.DBObject)} with default options
* @param keys an object with a key set of the fields desired for the index
* @throws MongoException
*/
public final void ensureIndex( final DBObject keys )
throws MongoException {
ensureIndex( keys , defaultOptions( keys ) );
}
/**
* calls {@link DBCollection#ensureIndex(com.mongodb.DBObject, java.lang.String, boolean)} with unique=false
* @param keys fields to use for index
* @param name an identifier for the index
* @throws MongoException
* @dochub indexes
*/
public void ensureIndex( DBObject keys , String name )
throws MongoException {
ensureIndex( keys , name , false );
}
/**
* Ensures an index on this collection (that is, the index will be created if it does not exist).
* @param keys fields to use for index
* @param name an identifier for the index. If null or empty, the default name will be used.
* @param unique if the index should be unique
* @throws MongoException
*/
public void ensureIndex( DBObject keys , String name , boolean unique )
throws MongoException {
DBObject options = defaultOptions( keys );
if (name != null && name.length()>0)
options.put( "name" , name );
if ( unique )
options.put( "unique" , Boolean.TRUE );
ensureIndex( keys , options );
}
/**
* Creates an index on a set of fields, if one does not already exist.
* @param keys an object with a key set of the fields desired for the index
* @param optionsIN options for the index (name, unique, etc)
* @throws MongoException
*/
public final void ensureIndex( final DBObject keys , final DBObject optionsIN )
throws MongoException {
if ( checkReadOnly( false ) ) return;
final DBObject options = defaultOptions( keys );
for ( String k : optionsIN.keySet() )
options.put( k , optionsIN.get( k ) );
final String name = options.get( "name" ).toString();
if ( _createdIndexes.contains( name ) )
return;
createIndex( keys , options );
_createdIndexes.add( name );
}
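// Illustrative usage sketch (not part of the driver): ensuring a unique index on a
// hypothetical "email" field, and a compound index created with an explicit options document.
// The "users" collection and the index names are hypothetical.
//
//   users.ensureIndex(new BasicDBObject("email", 1), "email_unique_idx", true);
//   DBObject opts = new BasicDBObject("name", "name_age_idx").append("background", true);
//   users.ensureIndex(new BasicDBObject("lastName", 1).append("age", -1), opts);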
/**
* Clears all indices that have not yet been applied to this collection.
*/
public void resetIndexCache(){
_createdIndexes.clear();
}
DBObject defaultOptions( DBObject keys ){
DBObject o = new BasicDBObject();
o.put( "name" , genIndexName( keys ) );
o.put( "ns" , _fullName );
return o;
}
/**
* Convenience method to generate an index name from the set of fields it is over.
* @param keys the names of the fields used in this index
* @return a string representation of this index's fields
*/
public static String genIndexName( DBObject keys ){
StringBuilder name = new StringBuilder();
for ( String s : keys.keySet() ){
if ( name.length() > 0 )
name.append( '_' );
name.append( s ).append( '_' );
Object val = keys.get( s );
if ( val instanceof Number || val instanceof String )
name.append( val.toString().replace( ' ', '_' ) );
}
return name.toString();
}
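// Illustrative example (not part of the driver) of the generated name: each field is joined
// with its direction value, so a hypothetical compound key yields
//
//   DBCollection.genIndexName(new BasicDBObject("lastName", 1).append("age", -1));
//   // -> "lastName_1_age_-1"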
// --- END INDEX CODE ---
/**
* Set hint fields for this collection (to optimize queries).
* @param lst a list of <code>DBObject</code>s to be used as hints
*/
public void setHintFields( List<DBObject> lst ){
_hintFields = lst;
}
/**
* Queries for an object in this collection.
* @param ref object for which to search
* @return an iterator over the results
* @dochub find
*/
public final DBCursor find( DBObject ref ){
return new DBCursor( this, ref, null, getReadPreference());
}
/**
* Queries for an object in this collection.
*
* <p>
* An empty DBObject will match every document in the collection.
* Regardless of fields specified, the _id fields are always returned.
* </p>
* <p>
* An example that returns the "x" and "_id" fields for every document
* in the collection that has an "x" field:
* </p>
* <blockquote><pre>
* BasicDBObject keys = new BasicDBObject();
* keys.put("x", 1);
*
* DBCursor cursor = collection.find(new BasicDBObject(), keys);
* </pre></blockquote>
*
* @param ref object for which to search
* @param keys fields to return
* @return a cursor to iterate over results
* @dochub find
*/
public final DBCursor find( DBObject ref , DBObject keys ){
return new DBCursor( this, ref, keys, getReadPreference());
}
/**
* Queries for all objects in this collection.
* @return a cursor which will iterate over every object
* @dochub find
*/
public final DBCursor find(){
return new DBCursor( this, null, null, getReadPreference());
}
/**
* Returns a single object from this collection.
* @return the object found, or <code>null</code> if the collection is empty
* @throws MongoException
*/
public final DBObject findOne()
throws MongoException {
return findOne( new BasicDBObject() );
}
/**
* Returns a single object from this collection matching the query.
* @param o the query object
* @return the object found, or <code>null</code> if no such object exists
* @throws MongoException
*/
public final DBObject findOne( DBObject o )
throws MongoException {
return findOne( o, null, getReadPreference());
}
/**
* Returns a single object from this collection matching the query.
* @param o the query object
* @param fields fields to return
* @return the object found, or <code>null</code> if no such object exists
* @dochub find
*/
public final DBObject findOne( DBObject o, DBObject fields ) {
return findOne( o, fields, getReadPreference());
}
/**
* Returns a single object from this collection matching the query.
* @param o the query object
* @param fields fields to return
* @param readPref the read preference to use for this query
* @return the object found, or <code>null</code> if no such object exists
* @dochub find
*/
public final DBObject findOne( DBObject o, DBObject fields, ReadPreference readPref ) {
Iterator<DBObject> i = __find( o , fields , 0 , -1 , 0, getOptions(), readPref, getDecoder() );
DBObject obj = (i != null && i.hasNext() ? i.next() : null);
if ( obj != null && ( fields != null && fields.keySet().size() > 0 ) ){
obj.markAsPartialObject();
}
return obj;
}
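// Illustrative usage sketch (not part of the driver): fetching a single document with a
// field projection. The "users" collection and the field names are hypothetical.
//
//   DBObject fields = new BasicDBObject("email", 1).append("lastName", 1);
//   DBObject doc = users.findOne(new BasicDBObject("email", "ada@example.org"), fields);
//   // doc is null if nothing matches; otherwise only _id, email and lastName are populated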
// Only create a new decoder if there is a decoder factory explicitly set on the collection. Otherwise return null
// so that DBPort will use a cached decoder from the default factory.
private DBDecoder getDecoder() {
return _decoderFactory != null ? _decoderFactory.create() : null;
}
/**
* calls {@link DBCollection#apply(com.mongodb.DBObject, boolean)} with ensureID=true
* @param o <code>DBObject</code> to which to add fields
* @return the modified parameter object
*/
public final Object apply( DBObject o ){
return apply( o , true );
}
/**
* calls {@link DBCollection#doapply(com.mongodb.DBObject)}, optionally adding an automatic _id field
* @param jo object to add fields to
* @param ensureID whether to add an <code>_id</code> field
* @return the modified object <code>o</code>
*/
public final Object apply( DBObject jo , boolean ensureID ){
Object id = jo.get( "_id" );
if ( ensureID && id == null ){
id = ObjectId.get();
jo.put( "_id" , id );
}
doapply( jo );
return id;
}
/**
* calls {@link DBCollection#save(com.mongodb.DBObject, com.mongodb.WriteConcern)} with default WriteConcern
* @param jo the <code>DBObject</code> to save
* will add <code>_id</code> field to jo if needed
* @return
*/
public final WriteResult save( DBObject jo ) {
return save(jo, getWriteConcern());
}
/**
* Saves an object to this collection (does insert or update based on the object _id).
* @param jo the <code>DBObject</code> to save
* @param concern the write concern
* @return
* @throws MongoException
*/
public final WriteResult save( DBObject jo, WriteConcern concern )
throws MongoException {
if ( checkReadOnly( true ) )
return null;
_checkObject( jo , false , false );
Object id = jo.get( "_id" );
if ( id == null || ( id instanceof ObjectId && ((ObjectId)id).isNew() ) ){
if ( id != null && id instanceof ObjectId )
((ObjectId)id).notNew();
if ( concern == null )
return insert( jo );
else
return insert( jo, concern );
}
DBObject q = new BasicDBObject();
q.put( "_id" , id );
if ( concern == null )
return update( q , jo , true , false );
else
return update( q , jo , true , false , concern );
}
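// Illustrative usage sketch (not part of the driver): save() behaves like insert() when the
// document has no _id (or a new ObjectId), and otherwise upserts by _id. The "users"
// collection is hypothetical.
//
//   DBObject doc = new BasicDBObject("email", "ada@example.org");
//   users.save(doc);                 // no _id yet -> insert, an ObjectId _id is added
//   doc.put("verified", true);
//   users.save(doc);                 // _id present -> update({_id: ...}, doc, upsert=true)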
// ---- DB COMMANDS ----
/**
* Drops all indices from this collection
* @throws MongoException
*/
public void dropIndexes()
throws MongoException {
dropIndexes( "*" );
}
/**
* Drops an index from this collection
* @param name the index name
* @throws MongoException
*/
public void dropIndexes( String name )
throws MongoException {
DBObject cmd = BasicDBObjectBuilder.start()
.add( "deleteIndexes" , getName() )
.add( "index" , name )
.get();
resetIndexCache();
CommandResult res = _db.command( cmd );
if (res.ok() || res.getErrorMessage().equals( "ns not found" ))
return;
res.throwOnError();
}
/**
* Drops (deletes) this collection. Use with care.
* @throws MongoException
*/
public void drop()
throws MongoException {
resetIndexCache();
CommandResult res =_db.command( BasicDBObjectBuilder.start().add( "drop" , getName() ).get() );
if (res.ok() || res.getErrorMessage().equals( "ns not found" ))
return;
res.throwOnError();
}
/**
* returns the number of documents in this collection.
* @return
* @throws MongoException
*/
public long count()
throws MongoException {
return getCount(new BasicDBObject(), null);
}
/**
* returns the number of documents that match a query.
* @param query query to match
* @return
* @throws MongoException
*/
public long count(DBObject query)
throws MongoException {
return getCount(query, null);
}
/**
* calls {@link DBCollection#getCount(com.mongodb.DBObject, com.mongodb.DBObject)} with an empty query and null fields.
* @return number of documents that match query
* @throws MongoException
*/
public long getCount()
throws MongoException {
return getCount(new BasicDBObject(), null);
}
/**
* calls {@link DBCollection#getCount(com.mongodb.DBObject, com.mongodb.DBObject)} with null fields.
* @param query query to match
* @return
* @throws MongoException
*/
public long getCount(DBObject query)
throws MongoException {
return getCount(query, null);
}
/**
* calls {@link DBCollection#getCount(com.mongodb.DBObject, com.mongodb.DBObject, long, long)} with limit=0 and skip=0
* @param query query to match
* @param fields fields to return
* @return
* @throws MongoException
*/
public long getCount(DBObject query, DBObject fields)
throws MongoException {
return getCount( query , fields , 0 , 0 );
}
/**
* Returns the number of documents in the collection
* that match the specified query
*
* @param query query to select documents to count
* @param fields fields to return
* @param limit limit the count to this value
* @param skip number of entries to skip
* @return number of documents that match query and fields
* @throws MongoException
*/
public long getCount(DBObject query, DBObject fields, long limit, long skip )
throws MongoException {
BasicDBObject cmd = new BasicDBObject();
cmd.put("count", getName());
cmd.put("query", query);
if (fields != null) {
cmd.put("fields", fields);
}
if ( limit > 0 )
cmd.put( "limit" , limit );
if ( skip > 0 )
cmd.put( "skip" , skip );
CommandResult res = _db.command(cmd,getOptions());
if ( ! res.ok() ){
String errmsg = res.getErrorMessage();
if ( errmsg.equals("ns does not exist") ||
errmsg.equals("ns missing" ) ){
// for now, return 0 - let's pretend the collection exists but is empty
return 0;
}
res.throwOnError();
}
return res.getLong("n");
}
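// Illustrative usage sketch (not part of the driver): counting documents that match a query,
// optionally bounded by limit/skip. The "users" collection and "status" field are hypothetical.
//
//   long active = users.getCount(new BasicDBObject("status", "active"));
//   long firstHundredActive = users.getCount(new BasicDBObject("status", "active"), null, 100, 0);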
/**
* Calls {@link DBCollection#rename(java.lang.String, boolean)} with dropTarget=false
* @param newName new collection name (not a full namespace)
* @return the new collection
* @throws MongoException
*/
public DBCollection rename( String newName )
throws MongoException {
return rename(newName, false);
}
/**
* Renames this collection to <code>newName</code>.
* @param newName new collection name (not a full namespace)
* @param dropTarget if a collection with the new name exists, whether or not to drop it
* @return the new collection
* @throws MongoException
*/
public DBCollection rename( String newName, boolean dropTarget )
throws MongoException {
CommandResult ret =
_db.getSisterDB( "admin" )
.command( BasicDBObjectBuilder.start()
.add( "renameCollection" , _fullName )
.add( "to" , _db._name + "." + newName )
.add( "dropTarget" , dropTarget )
.get() );
ret.throwOnError();
resetIndexCache();
return _db.getCollection( newName );
}
/**
* calls {@link DBCollection#group(com.mongodb.DBObject, com.mongodb.DBObject, com.mongodb.DBObject, java.lang.String, java.lang.String)} with finalize=null
* @param key - { a : true }
* @param cond - optional condition on query
* @param reduce javascript reduce function
* @param initial initial value for first match on a key
* @return
* @throws MongoException
* @see <a href="http://www.mongodb.org/display/DOCS/Aggregation">http://www.mongodb.org/display/DOCS/Aggregation</a>
*/
public DBObject group( DBObject key , DBObject cond , DBObject initial , String reduce )
throws MongoException {
return group( key , cond , initial , reduce , null );
}
/**
* Applies a group operation
* @param key - { a : true }
* @param cond - optional condition on query
* @param reduce javascript reduce function
* @param initial initial value for first match on a key
* @param finalize An optional function that can operate on the result(s) of the reduce function.
* @return
* @throws MongoException
* @see <a href="http://www.mongodb.org/display/DOCS/Aggregation">http://www.mongodb.org/display/DOCS/Aggregation</a>
*/
public DBObject group( DBObject key , DBObject cond , DBObject initial , String reduce , String finalize )
throws MongoException {
GroupCommand cmd = new GroupCommand(this, key, cond, initial, reduce, finalize);
return group( cmd );
}
/**
* Applies a group operation
* @param cmd the group command
* @return
* @throws MongoException
* @see <a href="http://www.mongodb.org/display/DOCS/Aggregation">http://www.mongodb.org/display/DOCS/Aggregation</a>
*/
public DBObject group( GroupCommand cmd ) {
CommandResult res = _db.command( cmd.toDBObject(), getOptions() );
res.throwOnError();
return (DBObject)res.get( "retval" );
}
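// Illustrative usage sketch (not part of the driver): grouping documents of a hypothetical
// "employees" collection by a "dept" field and summing a "salary" field with a JavaScript
// reduce function.
//
//   DBObject key = new BasicDBObject("dept", true);
//   DBObject cond = new BasicDBObject("active", true);
//   DBObject initial = new BasicDBObject("total", 0);
//   String reduce = "function(doc, out){ out.total += doc.salary; }";
//   DBObject result = employees.group(key, cond, initial, reduce);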
/**
* Applies a group operation
* @deprecated prefer the {@link DBCollection#group(com.mongodb.GroupCommand)} which is more standard
* @param args object representing the arguments to the group function
* @return
* @throws MongoException
* @see <a href="http://www.mongodb.org/display/DOCS/Aggregation">http://www.mongodb.org/display/DOCS/Aggregation</a>
*/
@Deprecated
public DBObject group( DBObject args )
throws MongoException {
args.put( "ns" , getName() );
CommandResult res = _db.command( new BasicDBObject( "group" , args ), getOptions() );
res.throwOnError();
return (DBObject)res.get( "retval" );
}
/**
* Finds the distinct values for a key.
* @param key the field for which to return the distinct values
* @return a list of the distinct values
*/
public List distinct( String key ){
return distinct( key , new BasicDBObject() );
}
/**
* Finds the distinct values for a key, restricted to documents matching a query.
* @param key the field for which to return the distinct values
* @param query query to match
* @return a list of the distinct values
*/
public List distinct( String key , DBObject query ){
DBObject c = BasicDBObjectBuilder.start()
.add( "distinct" , getName() )
.add( "key" , key )
.add( "query" , query )
.get();
CommandResult res = _db.command( c, getOptions() );
res.throwOnError();
return (List)(res.get( "values" ));
}
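// Illustrative usage sketch (not part of the driver): listing the distinct values of a
// hypothetical "country" field, optionally restricted by a query.
//
//   List countries = users.distinct("country");
//   List activeCountries = users.distinct("country", new BasicDBObject("status", "active"));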
/**
* performs a map reduce operation
* Runs the command in REPLACE output mode (saves to named collection)
*
* @param map
* map function in javascript code
* @param outputTarget
* optional - leave null if want to use temp collection
* @param reduce
* reduce function in javascript code
* @param query
* to match
* @return
* @throws MongoException
* @dochub mapreduce
*/
public MapReduceOutput mapReduce( String map , String reduce , String outputTarget , DBObject query ) throws MongoException{
return mapReduce( new MapReduceCommand( this , map , reduce , outputTarget , MapReduceCommand.OutputType.REPLACE, query ) );
}
/**
* performs a map reduce operation
* Specify an outputType to control job execution
* * INLINE - Return results inline
* * REPLACE - Replace the output collection with the job output
* * MERGE - Merge the job output with the existing contents of outputTarget
* * REDUCE - Reduce the job output with the existing contents of
* outputTarget
*
* @param map
* map function in javascript code
* @param outputTarget
* optional - leave null if want to use temp collection
* @param outputType
* set the type of job output
* @param reduce
* reduce function in javascript code
* @param query
* to match
* @return
* @throws MongoException
* @dochub mapreduce
*/
public MapReduceOutput mapReduce( String map , String reduce , String outputTarget , MapReduceCommand.OutputType outputType , DBObject query )
throws MongoException{
return mapReduce( new MapReduceCommand( this , map , reduce , outputTarget , outputType , query ) );
}
/**
* performs a map reduce operation
*
* @param command
* object representing the parameters
* @return
* @throws MongoException
*/
public MapReduceOutput mapReduce( MapReduceCommand command ) throws MongoException{
DBObject cmd = command.toDBObject();
// if the output type is inline, then query options like slaveOk are fine
CommandResult res = null;
if (command.getOutputType() == MapReduceCommand.OutputType.INLINE)
res = _db.command( cmd, getOptions(), command.getReadPreference() != null ? command.getReadPreference() : getReadPreference() );
else
res = _db.command( cmd );
res.throwOnError();
return new MapReduceOutput( this , cmd, res );
}
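// Illustrative usage sketch (not part of the driver): a word-count style map/reduce over a
// hypothetical "articles" collection (each document assumed to have a "tags" array), writing
// the result to a hypothetical "tag_counts" collection.
//
//   String map = "function(){ this.tags.forEach(function(t){ emit(t, 1); }); }";
//   String reduce = "function(key, values){ var n = 0; values.forEach(function(v){ n += v; }); return n; }";
//   MapReduceCommand cmd = new MapReduceCommand(articles, map, reduce, "tag_counts",
//                                               MapReduceCommand.OutputType.REPLACE, null);
//   MapReduceOutput out = articles.mapReduce(cmd);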
/**
* performs a map reduce operation
*
* @param command
* object representing the parameters
* @return
* @throws MongoException
*/
public MapReduceOutput mapReduce( DBObject command ) throws MongoException{
if ( command.get( "mapreduce" ) == null && command.get( "mapReduce" ) == null )
throw new IllegalArgumentException( "need mapreduce arg" );
CommandResult res = _db.command( command );
res.throwOnError();
return new MapReduceOutput( this , command, res );
}
/**
* Return a list of the indexes for this collection. Each object
* in the list is the "info document" from MongoDB
*
* @return list of index documents
*/
public List<DBObject> getIndexInfo() {
BasicDBObject cmd = new BasicDBObject();
cmd.put("ns", getFullName());
DBCursor cur = _db.getCollection("system.indexes").find(cmd);
List<DBObject> list = new ArrayList<DBObject>();
while(cur.hasNext()) {
list.add(cur.next());
}
return list;
}
/**
* Drops an index from this collection
* @param keys keys of the index
* @throws MongoException
*/
public void dropIndex( DBObject keys )
throws MongoException {
dropIndexes( genIndexName( keys ) );
}
/**
* Drops an index from this collection
* @param name name of index to drop
* @throws MongoException
*/
public void dropIndex( String name )
throws MongoException {
dropIndexes( name );
}
/**
* gets the collections statistics ("collstats" command)
* @return
*/
public CommandResult getStats() {
return getDB().command(new BasicDBObject("collstats", getName()), getOptions());
}
/**
* returns whether or not this is a capped collection
* @return
*/
public boolean isCapped() {
CommandResult stats = getStats();
Object capped = stats.get("capped");
return (capped != null && (Integer) capped == 1);
}
// ------
/**
* Initializes a new collection. No operation is actually performed on the database.
* @param base database in which to create the collection
* @param name the name of the collection
*/
protected DBCollection( DB base , String name ){
_db = base;
_name = name;
_fullName = _db.getName() + "." + name;
_options = new Bytes.OptionHolder( _db._options );
_encoderFactory = _db.getMongo().getMongoOptions().dbEncoderFactory;
}
protected DBObject _checkObject( DBObject o , boolean canBeNull , boolean query ){
if ( o == null ){
if ( canBeNull )
return null;
throw new IllegalArgumentException( "can't be null" );
}
if ( o.isPartialObject() && ! query )
throw new IllegalArgumentException( "can't save partial objects" );
if ( ! query ){
_checkKeys(o);
}
return o;
}
/**
* Checks key strings for invalid characters.
*/
private void _checkKeys( DBObject o ) {
for ( String s : o.keySet() ){
validateKey ( s );
Object inner = o.get( s );
if ( inner instanceof DBObject ) {
_checkKeys( (DBObject)inner );
} else if ( inner instanceof Map ) {
_checkKeys( (Map<String, Object>)inner );
}
}
}
/**
* Checks key strings for invalid characters.
*/
private void _checkKeys( Map<String, Object> o ) {
for ( String s : o.keySet() ){
validateKey ( s );
Object inner = o.get( s );
if ( inner instanceof DBObject ) {
_checkKeys( (DBObject)inner );
} else if ( inner instanceof Map ) {
_checkKeys( (Map<String, Object>)inner );
}
}
}
/**
* Check for invalid key names
* @param s the string field/key to check
* @exception IllegalArgumentException if the key is not valid.
*/
private void validateKey(String s ) {
if ( s.contains( "." ) )
throw new IllegalArgumentException( "fields stored in the db can't have . in them. (Bad Key: '" + s + "')" );
if ( s.startsWith( "$" ) )
throw new IllegalArgumentException( "fields stored in the db can't start with '$' (Bad Key: '" + s + "')" );
}
/**
* Finds a collection that is prefixed with this collection's name.
* A typical use of this might be
* <blockquote><pre>
* DBCollection users = mongo.getCollection( "wiki" ).getCollection( "users" );
* </pre></blockquote>
* Which is equivalent to
* <pre><blockquote>
* DBCollection users = mongo.getCollection( "wiki.users" );
* </pre></blockquote>
* @param n the name of the collection to find
* @return the matching collection
*/
public DBCollection getCollection( String n ){
return _db.getCollection( _name + "." + n );
}
/**
* Returns the name of this collection.
* @return the name of this collection
*/
public String getName(){
return _name;
}
/**
* Returns the full name of this collection, with the database name as a prefix.
* @return the name of this collection
*/
public String getFullName(){
return _fullName;
}
/**
* Returns the database this collection is a member of.
* @return this collection's database
*/
public DB getDB(){
return _db;
}
/**
* Returns if this collection's database is read-only
* @param strict if an exception should be thrown if the database is read-only
* @return if this collection's database is read-only
* @throws RuntimeException if the database is read-only and <code>strict</code> is set
*/
protected boolean checkReadOnly( boolean strict ){
if ( ! _db._readOnly )
return false;
if ( ! strict )
return true;
throw new IllegalStateException( "db is read only" );
}
@Override
public int hashCode(){
return _fullName.hashCode();
}
@Override
public boolean equals( Object o ){
return o == this;
}
@Override
public String toString(){
return _name;
}
/**
* Sets a default class for objects in this collection; null resets the class to nothing.
* @param c the class
* @throws IllegalArgumentException if <code>c</code> is not a DBObject
*/
public void setObjectClass( Class c ){
if ( c == null ){
// reset
_wrapper = null;
_objectClass = null;
return;
}
if ( ! DBObject.class.isAssignableFrom( c ) )
throw new IllegalArgumentException( c.getName() + " is not a DBObject" );
_objectClass = c;
if ( ReflectionDBObject.class.isAssignableFrom( c ) )
_wrapper = ReflectionDBObject.getWrapper( c );
else
_wrapper = null;
}
/**
* Gets the default class for objects in the collection
* @return the class
*/
public Class getObjectClass(){
return _objectClass;
}
/**
* sets the internal class
* @param path
* @param c
*/
public void setInternalClass( String path , Class c ){
_internalClass.put( path , c );
}
/**
* gets the internal class
* @param path
* @return
*/
protected Class getInternalClass( String path ){
Class c = _internalClass.get( path );
if ( c != null )
return c;
if ( _wrapper == null )
return null;
return _wrapper.getInternalClass( path );
}
/**
* Set the write concern for this collection. Will be used for
* writes to this collection. Overrides any setting of write
* concern at the DB level. See the documentation for
* {@link WriteConcern} for more information.
*
* @param concern write concern to use
*/
public void setWriteConcern( WriteConcern concern ){
_concern = concern;
}
/**
* Get the write concern for this collection.
* @return
*/
public WriteConcern getWriteConcern(){
if ( _concern != null )
return _concern;
return _db.getWriteConcern();
}
/**
* Sets the read preference for this collection. Will be used as default
* for reads from this collection; overrides DB and Connection level settings.
* See the documentation for {@link ReadPreference} for more information.
*
* @param preference Read Preference to use
*/
public void setReadPreference( ReadPreference preference ){
_readPref = preference;
}
/**
* Gets the read preference
* @return
*/
public ReadPreference getReadPreference(){
if ( _readPref != null )
return _readPref;
return _db.getReadPreference();
}
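// Illustrative usage sketch (not part of the driver): overriding the database-level defaults
// for a single collection. The "users" collection handle is hypothetical; WriteConcern.SAFE
// and the ReadPreference.SECONDARY constant referenced by slaveOk() below are assumed to be
// available in this driver version.
//
//   users.setWriteConcern(WriteConcern.SAFE);          // wait for acknowledgement on writes
//   users.setReadPreference(ReadPreference.SECONDARY); // prefer secondary nodes for reads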
/**
* makes this query ok to run on a slave node
*
* @deprecated Replaced with ReadPreference.SECONDARY
* @see com.mongodb.ReadPreference.SECONDARY
*/
@Deprecated
public void slaveOk(){
addOption( Bytes.QUERYOPTION_SLAVEOK );
}
/**
* adds a default query option
* @param option
*/
public void addOption( int option ){
_options.add( option );
}
/**
* sets the default query options
* @param options
*/
public void setOptions( int options ){
_options.set( options );
}
/**
* resets the default query options
*/
public void resetOptions(){
_options.reset();
}
/**
* gets the default query options
* @return
*/
public int getOptions(){
return _options.get();
}
/**
* Set a custom decoder factory for this collection. Set to null to use the default from MongoOptions.
* @param fact the factory to set.
*/
public void setDBDecoderFactory(DBDecoderFactory fact) {
_decoderFactory = fact;
}
/**
* Get the decoder factory for this collection. A null return value means that the default from MongoOptions
* is being used.
* @return the factory
*/
public DBDecoderFactory getDBDecoderFactory() {
return _decoderFactory;
}
/**
* Set a custom encoder factory for this collection. Set to null to use the default from MongoOptions.
* @param fact the factory to set.
*/
public void setDBEncoderFactory(DBEncoderFactory fact) {
if (fact == null)
_encoderFactory = _db.getMongo().getMongoOptions().dbEncoderFactory;
else
_encoderFactory = fact;
}
/**
* Get the encoder factory for this collection. A null return value means that the default from MongoOptions
* is being used.
* @return the factory
*/
public DBEncoderFactory getDBEncoderFactory() {
return _encoderFactory;
}
final DB _db;
final protected String _name;
final protected String _fullName;
protected List<DBObject> _hintFields;
private WriteConcern _concern = null;
private ReadPreference _readPref = null;
private DBDecoderFactory _decoderFactory;
private DBEncoderFactory _encoderFactory;
final Bytes.OptionHolder _options;
protected Class _objectClass = null;
private Map<String,Class> _internalClass = Collections.synchronizedMap( new HashMap<String,Class>() );
private ReflectionDBObject.JavaWrapper _wrapper = null;
final private Set<String> _createdIndexes = new HashSet<String>();
}
|
JAVA-505: Made encoder creation work just like decoder creation
|
src/main/com/mongodb/DBCollection.java
|
JAVA-505: Made encoder creation work just like decoder creation
|
|
Java
|
apache-2.0
|
fd8f3c280a80139945337e84fa788430c1b3d116
| 0
|
eg-zhang/h2o-2,eg-zhang/h2o-2,elkingtonmcb/h2o-2,h2oai/h2o,111t8e/h2o-2,100star/h2o,elkingtonmcb/h2o-2,100star/h2o,vbelakov/h2o,rowhit/h2o-2,rowhit/h2o-2,h2oai/h2o-2,rowhit/h2o-2,rowhit/h2o-2,rowhit/h2o-2,rowhit/h2o-2,h2oai/h2o,vbelakov/h2o,h2oai/h2o-2,calvingit21/h2o-2,calvingit21/h2o-2,111t8e/h2o-2,111t8e/h2o-2,elkingtonmcb/h2o-2,111t8e/h2o-2,calvingit21/h2o-2,calvingit21/h2o-2,h2oai/h2o,elkingtonmcb/h2o-2,elkingtonmcb/h2o-2,100star/h2o,h2oai/h2o,h2oai/h2o,vbelakov/h2o,eg-zhang/h2o-2,rowhit/h2o-2,calvingit21/h2o-2,vbelakov/h2o,100star/h2o,eg-zhang/h2o-2,h2oai/h2o,vbelakov/h2o,h2oai/h2o,h2oai/h2o-2,calvingit21/h2o-2,elkingtonmcb/h2o-2,calvingit21/h2o-2,eg-zhang/h2o-2,h2oai/h2o,h2oai/h2o-2,vbelakov/h2o,111t8e/h2o-2,eg-zhang/h2o-2,vbelakov/h2o,rowhit/h2o-2,eg-zhang/h2o-2,h2oai/h2o,eg-zhang/h2o-2,111t8e/h2o-2,rowhit/h2o-2,100star/h2o,h2oai/h2o-2,100star/h2o,elkingtonmcb/h2o-2,h2oai/h2o-2,elkingtonmcb/h2o-2,elkingtonmcb/h2o-2,rowhit/h2o-2,h2oai/h2o-2,vbelakov/h2o,100star/h2o,elkingtonmcb/h2o-2,calvingit21/h2o-2,h2oai/h2o-2,111t8e/h2o-2,111t8e/h2o-2,vbelakov/h2o,h2oai/h2o,calvingit21/h2o-2,calvingit21/h2o-2,h2oai/h2o-2,vbelakov/h2o,100star/h2o,h2oai/h2o-2,eg-zhang/h2o-2,111t8e/h2o-2,111t8e/h2o-2,100star/h2o,eg-zhang/h2o-2
|
package water.api;
import water.AbstractBuildVersion;
import water.H2O;
/**
* Summary page referencing all tutorials.
*
* @author michal
*/
public class Tutorials extends HTMLOnlyRequest {
@Override protected String build(Response response) {
AbstractBuildVersion abv = H2O.getBuildVersion();
String branchName = abv.branchName();
String buildNumber = abv.buildNumber();
String documentationUrl = "http://s3.amazonaws.com/h2o-release/h2o/" + branchName + "/" + buildNumber + "/docs-website";
String RPackageDocumentationUrl = documentationUrl + "/Ruser/top.html";
return "<div class='container'><div class='hero-unit' style='overflow: hidden'>"
+ "<style scoped='scoped'>"
+ " h2 { font-size:18px; }"
+ " p { font-size:16px; }"
+ "</style>"
+ "<h1>H<sub>2</sub>O Tutorials</h1>"
+ "<blockquote><small>A unique way to explore H<sub>2</sub>O</small></blockquote>"
+ "</div>"
+ "<div class='row'>"
+ "<div class='span2 col'>"
+ "<h2>Use H<sub>2</sub>O from R</h2>"
+ "<div style='background-color:#006dcc;color:white;background-image:linear-gradient(to bottom,#08c,#04c);text-align:center;font-size:70px;font-weight:bold;height:100px;line-height:100px;border-radius:15px;max-width:110px;margin-bottom:5px'>R</div>"
+ "<p>H<sub>2</sub>O supports both R and R Studio.</p>"
+ "<a href='" + RPackageDocumentationUrl + "' class='btn btn-primary'>Try it!</a>"
+ "</div>"
+ "<div class='span2 col'>"
+ " <h2>Random Forest</h2>"
+ "<p>Random Forest is a classical machine learning method for classification and regression. Learn how to use it with H<sub>2</sub>O for better predictions.</it></p>"
+ "<a href='/TutorialRFIris.html' class='btn btn-primary'>Try it!</a>"
+ "</div>"
+ "<div class='span2 col'>"
+ " <h2>GBM</h2>"
+ "<p>GBM uses gradient boosted trees for classification and regression, and is one of the most powerful machine learning methods in H<sub>2</sub>O.</p>"
+ "<a href='/TutorialGBM.html' class='btn btn-primary'>Try it!</a>"
+ "</div>"
+ "<div class='span2 col'>"
+ "<h2>GLM</h2>"
+ "<p>Generalized linear model is a generalization of linear regression. Experience its unique power and blazing speed on top of H<sub>2</sub>O.</p>"
+ "<a href='/TutorialGLMProstate.html' class='btn btn-primary'>Try it!</a>"
+ "</div>"
+ "<div class='span2 col'>"
+ "<h2>K-Means</h2>"
+ "<p>Perform clustering analysis with H<sub>2</sub>O. K-means is a highly scalable clustering algorithm for unsupervised learning on big data.</p>"
+ "<a href='/TutorialKMeans.html' class='btn btn-primary'>Try it!</a>"
+ "</div>"
+ "<div class='span2 col'>"
+ "<h2>Deep Learning</h2>"
+ "<p>H<sub>2</sub>O's distributed Deep Learning gives you the power of deep neural networks for highest accuracy for classification and regression.</p>"
+ "<a href='/TutorialDeepLearning.html' class='btn btn-primary'>Try it!</a>"
+ "</div>"
+ "</div>"
+ "</div>";
}
}
|
src/main/java/water/api/Tutorials.java
|
package water.api;
import water.AbstractBuildVersion;
import water.H2O;
/**
* Summary page referencing all tutorials.
*
* @author michal
*/
public class Tutorials extends HTMLOnlyRequest {
@Override protected String build(Response response) {
AbstractBuildVersion abv = H2O.getBuildVersion();
String branchName = abv.branchName();
String buildNumber = abv.buildNumber();
String documentationUrl = "http://s3.amazonaws.com/h2o-release/h2o/" + branchName + "/" + buildNumber + "/docs-website/index.html";
return "<div class='container'><div class='hero-unit' style='overflow: hidden'>"
+ "<style scoped='scoped'>"
+ " h2 { font-size:18px; }"
+ " p { font-size:16px; }"
+ "</style>"
+ "<h1>H<sub>2</sub>O Tutorials</h1>"
+ "<blockquote><small>A unique way to explore H<sub>2</sub>O</small></blockquote>"
+ "</div>"
+ "<div class='row'>"
+ "<div class='span2 col'>"
+ "<h2>Use H<sub>2</sub>O from R</h2>"
+ "<div style='background-color:#006dcc;color:white;background-image:linear-gradient(to bottom,#08c,#04c);text-align:center;font-size:70px;font-weight:bold;height:100px;line-height:100px;border-radius:15px;max-width:110px;margin-bottom:5px'>R</div>"
+ "<p>H<sub>2</sub>O supports both R and R Studio.</p>"
+ "<a href='" + documentationUrl + "#R' class='btn btn-primary'>Try it!</a>"
+ "</div>"
+ "<div class='span2 col'>"
+ " <h2>Random Forest</h2>"
+ "<p>Random Forest is a classical machine learning method for classification and regression. Learn how to use it with H<sub>2</sub>O for better predictions.</it></p>"
+ "<a href='/TutorialRFIris.html' class='btn btn-primary'>Try it!</a>"
+ "</div>"
+ "<div class='span2 col'>"
+ " <h2>GBM</h2>"
+ "<p>GBM uses gradient boosted trees for classification and regression, and is one of the most powerful machine learning methods in H<sub>2</sub>O.</p>"
+ "<a href='/TutorialGBM.html' class='btn btn-primary'>Try it!</a>"
+ "</div>"
+ "<div class='span2 col'>"
+ "<h2>GLM</h2>"
+ "<p>Generalized linear model is a generalization of linear regression. Experience its unique power and blazing speed on top of H<sub>2</sub>O.</p>"
+ "<a href='/TutorialGLMProstate.html' class='btn btn-primary'>Try it!</a>"
+ "</div>"
+ "<div class='span2 col'>"
+ "<h2>K-Means</h2>"
+ "<p>Perform clustering analysis with H<sub>2</sub>O. K-means is a highly scalable clustering algorithm for unsupervised learning on big data.</p>"
+ "<a href='/TutorialKMeans.html' class='btn btn-primary'>Try it!</a>"
+ "</div>"
+ "<div class='span2 col'>"
+ "<h2>Deep Learning</h2>"
+ "<p>H<sub>2</sub>O's distributed Deep Learning gives you the power of deep neural networks for highest accuracy for classification and regression.</p>"
+ "<a href='/TutorialDeepLearning.html' class='btn btn-primary'>Try it!</a>"
+ "</div>"
+ "</div>"
+ "</div>";
}
}
|
Fix R tutorials tryit link.
|
src/main/java/water/api/Tutorials.java
|
Fix R tutorials tryit link.
|
|
Java
|
apache-2.0
|
bb574c79b5863b4cbce8e80657b14231efed196d
| 0
|
getlantern/lantern-java,getlantern/lantern-java,lqch14102/lantern,getlantern/lantern-java,getlantern/lantern-java,getlantern/lantern-java,lqch14102/lantern,lqch14102/lantern,lqch14102/lantern,getlantern/lantern-java,lqch14102/lantern,lqch14102/lantern,getlantern/lantern-java,lqch14102/lantern,getlantern/lantern-java,getlantern/lantern-java,lqch14102/lantern,lqch14102/lantern
|
package org.lantern;
import java.io.IOException;
import java.net.MalformedURLException;
import java.security.SecureRandom;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang.StringUtils;
import org.eclipse.jetty.server.Handler;
import org.eclipse.jetty.server.Request;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.handler.DefaultHandler;
import org.eclipse.jetty.server.handler.HandlerList;
import org.eclipse.jetty.server.handler.ResourceHandler;
import org.eclipse.jetty.server.nio.SelectChannelConnector;
import org.eclipse.jetty.util.resource.Resource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Launcher and secure path handler for Jetty.
*/
public class JettyLauncher {
private final Logger log = LoggerFactory.getLogger(getClass());
private final SecureRandom sr = new SecureRandom();
private final String secureBase = "/"+String.valueOf(sr.nextLong());
private final int randomPort = LanternUtils.randomPort();
private final String fullBasePath =
"http://localhost:"+randomPort+secureBase;
private Server server = new Server();
public void start() {
final SelectChannelConnector connector = new SelectChannelConnector();
connector.setHost("127.0.0.1");
connector.setPort(randomPort);
server.addConnector(connector);
final ResourceHandler rh = new FileServingResourceHandler();
rh.setDirectoriesListed(false);
rh.setAliases(false);
//rh.setWelcomeFiles(new String[]{ "lanternmap.html" });
rh.setResourceBase("viz/skel");
final HandlerList handlers = new HandlerList();
handlers.setHandlers(
new Handler[] { rh, new DefaultHandler() });
server.setHandler(handlers);
final Thread serve = new Thread(new Runnable() {
@Override
public void run() {
try {
server.start();
server.join();
} catch (final Exception e) {
log.error("Exception on HTTP server");
}
}
}, "HTTP-Server-Thread");
serve.setDaemon(true);
serve.start();
}
private final class FileServingResourceHandler extends ResourceHandler {
@Override public void handle(final String target,
final Request baseRequest, final HttpServletRequest request,
final HttpServletResponse response)
throws IOException, ServletException {
if (!target.startsWith(secureBase)) {
// This can happen quite often, as the pages we serve
// themselves don't know about the secure base. As long as
// they get referred by the secure base, however, we're all
// good.
log.info("Got request without secure base!!");
final String referer = request.getHeader("Referer");
if (referer == null || !referer.startsWith(fullBasePath)) {
log.error("Got request with bad referer: {}", referer);
response.getOutputStream().close();
return;
}
}
super.handle(target, baseRequest, request, response);
}
@Override
public Resource getResource(final String path)
throws MalformedURLException {
if (!path.startsWith(secureBase)) {
log.info("Requesting unstripped: {}", path);
return super.getResource(path);
}
final String stripped =
StringUtils.substringAfter(path, secureBase);
log.info("Requesting stripped: {}", stripped);
return super.getResource(stripped);
}
}
public void openBrowserWhenReady() {
while(!server.isRunning()) {
try {
Thread.sleep(200);
} catch (final InterruptedException e) {
log.info("Interrupted?");
}
}
final String url = fullBasePath + "/lanternmap.html";
LanternUtils.browseUrl(url);
}
public static void main (final String[] args) {
final JettyLauncher jl = LanternHub.jettyLauncher();
System.out.println("Starting!!");
jl.start();
System.out.println("Opening browser!!");
jl.openBrowserWhenReady();
try {
Thread.sleep(200000);
} catch (InterruptedException e) {
}
}
}
|
src/main/java/org/lantern/JettyLauncher.java
|
package org.lantern;
import java.io.IOException;
import java.net.MalformedURLException;
import java.security.SecureRandom;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang.StringUtils;
import org.eclipse.jetty.server.Handler;
import org.eclipse.jetty.server.Request;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.handler.DefaultHandler;
import org.eclipse.jetty.server.handler.HandlerList;
import org.eclipse.jetty.server.handler.ResourceHandler;
import org.eclipse.jetty.server.nio.SelectChannelConnector;
import org.eclipse.jetty.util.resource.Resource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Launcher and secure path handler for Jetty.
*/
public class JettyLauncher {
private final Logger log = LoggerFactory.getLogger(getClass());
private final SecureRandom sr = new SecureRandom();
private final String secureBase = "/"+String.valueOf(sr.nextLong());
private Server server = new Server();
public void start() {
final SelectChannelConnector connector = new SelectChannelConnector();
connector.setPort(8080);
server.addConnector(connector);
final ResourceHandler rh = new FileServingResourceHandler();
rh.setDirectoriesListed(false);
rh.setAliases(false);
//rh.setWelcomeFiles(new String[]{ "lanternmap.html" });
rh.setResourceBase("viz/skel");
final HandlerList handlers = new HandlerList();
handlers.setHandlers(
new Handler[] { rh, new DefaultHandler() });
server.setHandler(handlers);
final Thread serve = new Thread(new Runnable() {
@Override
public void run() {
try {
server.start();
server.join();
} catch (final Exception e) {
log.error("Exception on HTTP server");
}
}
}, "HTTP-Server-Thread");
serve.setDaemon(true);
serve.start();
}
private final class FileServingResourceHandler extends ResourceHandler {
@Override public void handle(final String target,
final Request baseRequest, final HttpServletRequest request,
final HttpServletResponse response)
throws IOException, ServletException {
if (!target.startsWith(secureBase)) {
response.getOutputStream().close();
return;
}
super.handle(target, baseRequest, request, response);
}
@Override
public Resource getResource(final String path)
throws MalformedURLException {
final String stripped =
StringUtils.substringAfter(path, secureBase);
return super.getResource(stripped);
}
}
public void openBrowserWhenReady() {
while(!server.isRunning()) {
try {
Thread.sleep(200);
} catch (final InterruptedException e) {
log.info("Interrupted?");
}
}
final String url = "http://localhost:8080"+secureBase+"/lanternmap.html";
LanternUtils.browseUrl(url);
}
public static void main (final String[] args) {
final JettyLauncher jl = LanternHub.jettyLauncher();
System.out.println("Starting!!");
jl.start();
System.out.println("Opening browser!!");
jl.openBrowserWhenReady();
try {
Thread.sleep(200000);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
|
Lots of tweaks to local server
|
src/main/java/org/lantern/JettyLauncher.java
|
Lots of tweaks to local server
|
|
Java
|
apache-2.0
|
ad72159b0e3ff641e3a08754297c81278b94d94e
| 0
|
qtvbwfn/dubbo,qtvbwfn/dubbo,alibaba/dubbo,fengyie007/dubbo,yuyijq/dubbo,qtvbwfn/dubbo,alibaba/dubbo,aglne/dubbo,bpzhang/dubbo,lovepoem/dubbo,wuwen5/dubbo,qtvbwfn/dubbo,wuwen5/dubbo,aglne/dubbo,yuyijq/dubbo,bpzhang/dubbo,fengyie007/dubbo,fengyie007/dubbo,lovepoem/dubbo,lovepoem/dubbo,wuwen5/dubbo,aglne/dubbo,bpzhang/dubbo
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.configcenter.support.zookeeper;
import org.apache.dubbo.common.utils.CollectionUtils;
import org.apache.dubbo.common.utils.StringUtils;
import org.apache.dubbo.configcenter.ConfigChangeEvent;
import org.apache.dubbo.configcenter.ConfigChangeType;
import org.apache.dubbo.configcenter.ConfigurationListener;
import org.apache.dubbo.remoting.zookeeper.DataListener;
import org.apache.dubbo.remoting.zookeeper.EventType;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArraySet;
import java.util.concurrent.CountDownLatch;
/**
*
*/
public class CacheListener implements DataListener {
private static final int MIN_PATH_DEPTH = 5;
private Map<String, Set<ConfigurationListener>> keyListeners = new ConcurrentHashMap<>();
private CountDownLatch initializedLatch;
private String rootPath;
public CacheListener(String rootPath, CountDownLatch initializedLatch) {
this.rootPath = rootPath;
this.initializedLatch = initializedLatch;
}
public void addListener(String key, ConfigurationListener configurationListener) {
Set<ConfigurationListener> listeners = this.keyListeners.computeIfAbsent(key, k -> new CopyOnWriteArraySet<>());
listeners.add(configurationListener);
}
public void removeListener(String key, ConfigurationListener configurationListener) {
Set<ConfigurationListener> listeners = this.keyListeners.get(key);
if (listeners != null) {
listeners.remove(configurationListener);
}
}
/**
* This is used to convert a configuration nodePath into a key
* TODO doc
*
* @param path
* @return key (nodePath less the config root path)
*/
private String pathToKey(String path) {
if (StringUtils.isEmpty(path)) {
return path;
}
return path.replace(rootPath + "/", "").replaceAll("/", ".");
}
@Override
public void dataChanged(String path, Object value, EventType eventType) {
if (eventType == null) {
return;
}
if (eventType == EventType.INITIALIZED) {
initializedLatch.countDown();
return;
}
if (path == null || (value == null && eventType != EventType.NodeDeleted)) {
return;
}
// TODO We limit the notification of config changes to a specific path level, for example
// /dubbo/config/service/configurators, other config changes not in this level will not get notified,
// say /dubbo/config/dubbo.properties
if (path.split("/").length >= MIN_PATH_DEPTH) {
String key = pathToKey(path);
ConfigChangeType changeType;
switch (eventType) {
case NodeCreated:
changeType = ConfigChangeType.ADDED;
break;
case NodeDeleted:
changeType = ConfigChangeType.DELETED;
break;
case NodeDataChanged:
changeType = ConfigChangeType.MODIFIED;
break;
default:
return;
}
ConfigChangeEvent configChangeEvent = new ConfigChangeEvent(key, (String) value, changeType);
Set<ConfigurationListener> listeners = keyListeners.get(key);
if (CollectionUtils.isNotEmpty(listeners)) {
listeners.forEach(listener -> listener.process(configChangeEvent));
}
}
}
}
|
dubbo-configcenter/dubbo-configcenter-zookeeper/src/main/java/org/apache/dubbo/configcenter/support/zookeeper/CacheListener.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.configcenter.support.zookeeper;
import org.apache.dubbo.common.utils.CollectionUtils;
import org.apache.dubbo.common.utils.StringUtils;
import org.apache.dubbo.configcenter.ConfigChangeEvent;
import org.apache.dubbo.configcenter.ConfigChangeType;
import org.apache.dubbo.configcenter.ConfigurationListener;
import org.apache.dubbo.remoting.zookeeper.DataListener;
import org.apache.dubbo.remoting.zookeeper.EventType;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArraySet;
import java.util.concurrent.CountDownLatch;
/**
*
*/
public class CacheListener implements DataListener {
private Map<String, Set<ConfigurationListener>> keyListeners = new ConcurrentHashMap<>();
private CountDownLatch initializedLatch;
private String rootPath;
public CacheListener(String rootPath, CountDownLatch initializedLatch) {
this.rootPath = rootPath;
this.initializedLatch = initializedLatch;
}
public void addListener(String key, ConfigurationListener configurationListener) {
Set<ConfigurationListener> listeners = this.keyListeners.computeIfAbsent(key, k -> new CopyOnWriteArraySet<>());
listeners.add(configurationListener);
}
public void removeListener(String key, ConfigurationListener configurationListener) {
Set<ConfigurationListener> listeners = this.keyListeners.get(key);
if (listeners != null) {
listeners.remove(configurationListener);
}
}
/**
* This is used to convert a configuration nodePath into a key
* TODO doc
*
* @param path
* @return key (nodePath less the config root path)
*/
private String pathToKey(String path) {
if (StringUtils.isEmpty(path)) {
return path;
}
return path.replace(rootPath + "/", "").replaceAll("/", ".");
}
@Override
public void dataChanged(String path, Object value, EventType eventType) {
if (eventType == null) {
return;
}
if (eventType == EventType.INITIALIZED) {
initializedLatch.countDown();
return;
}
if (path == null || (value == null && eventType != EventType.NodeDeleted)) {
return;
}
// TODO We limit the notification of config changes to a specific path level, for example
// /dubbo/config/service/configurators, other config changes not in this level will not get notified,
// say /dubbo/config/dubbo.properties
if (path.split("/").length >= 5) {
String key = pathToKey(path);
ConfigChangeType changeType;
switch (eventType) {
case NodeCreated:
changeType = ConfigChangeType.ADDED;
break;
case NodeDeleted:
changeType = ConfigChangeType.DELETED;
break;
case NodeDataChanged:
changeType = ConfigChangeType.MODIFIED;
break;
default:
return;
}
ConfigChangeEvent configChangeEvent = new ConfigChangeEvent(key, (String) value, changeType);
Set<ConfigurationListener> listeners = keyListeners.get(key);
if (CollectionUtils.isNotEmpty(listeners)) {
listeners.forEach(listener -> listener.process(configChangeEvent));
}
}
}
}
|
Fixes #3625 (#3730)
use constant to replace magic number
|
dubbo-configcenter/dubbo-configcenter-zookeeper/src/main/java/org/apache/dubbo/configcenter/support/zookeeper/CacheListener.java
|
Fixes #3625 (#3730)
|
|
Java
|
apache-2.0
|
1c998eed73a1312b2e4178d42ee99c1b62fd1fbe
| 0
|
fazlan-nazeem/carbon-apimgt,malinthaprasan/carbon-apimgt,uvindra/carbon-apimgt,wso2/carbon-apimgt,jaadds/carbon-apimgt,ruks/carbon-apimgt,Rajith90/carbon-apimgt,chamilaadhi/carbon-apimgt,chamilaadhi/carbon-apimgt,fazlan-nazeem/carbon-apimgt,prasa7/carbon-apimgt,isharac/carbon-apimgt,chamindias/carbon-apimgt,fazlan-nazeem/carbon-apimgt,praminda/carbon-apimgt,wso2/carbon-apimgt,chamindias/carbon-apimgt,uvindra/carbon-apimgt,chamindias/carbon-apimgt,bhathiya/carbon-apimgt,fazlan-nazeem/carbon-apimgt,sanjeewa-malalgoda/carbon-apimgt,tharikaGitHub/carbon-apimgt,prasa7/carbon-apimgt,bhathiya/carbon-apimgt,chamindias/carbon-apimgt,prasa7/carbon-apimgt,praminda/carbon-apimgt,jaadds/carbon-apimgt,malinthaprasan/carbon-apimgt,isharac/carbon-apimgt,malinthaprasan/carbon-apimgt,chamilaadhi/carbon-apimgt,Rajith90/carbon-apimgt,uvindra/carbon-apimgt,isharac/carbon-apimgt,sanjeewa-malalgoda/carbon-apimgt,tharindu1st/carbon-apimgt,isharac/carbon-apimgt,bhathiya/carbon-apimgt,tharikaGitHub/carbon-apimgt,jaadds/carbon-apimgt,jaadds/carbon-apimgt,tharindu1st/carbon-apimgt,Rajith90/carbon-apimgt,tharikaGitHub/carbon-apimgt,ruks/carbon-apimgt,praminda/carbon-apimgt,Rajith90/carbon-apimgt,wso2/carbon-apimgt,ruks/carbon-apimgt,tharikaGitHub/carbon-apimgt,prasa7/carbon-apimgt,wso2/carbon-apimgt,malinthaprasan/carbon-apimgt,chamilaadhi/carbon-apimgt,bhathiya/carbon-apimgt,tharindu1st/carbon-apimgt,sanjeewa-malalgoda/carbon-apimgt,ruks/carbon-apimgt,uvindra/carbon-apimgt,tharindu1st/carbon-apimgt,sanjeewa-malalgoda/carbon-apimgt
|
/*
* Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.apimgt.keymgt.handlers;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.apimgt.api.APIManagementException;
import org.wso2.carbon.apimgt.api.model.AccessTokenInfo;
import org.wso2.carbon.apimgt.impl.APIConstants;
import org.wso2.carbon.apimgt.impl.dto.APIKeyValidationInfoDTO;
import org.wso2.carbon.apimgt.impl.utils.APIUtil;
import org.wso2.carbon.apimgt.keymgt.APIKeyMgtException;
import org.wso2.carbon.apimgt.keymgt.SubscriptionDataHolder;
import org.wso2.carbon.apimgt.keymgt.model.SubscriptionDataStore;
import org.wso2.carbon.apimgt.keymgt.model.entity.API;
import org.wso2.carbon.apimgt.keymgt.model.entity.ApiPolicy;
import org.wso2.carbon.apimgt.keymgt.model.entity.Application;
import org.wso2.carbon.apimgt.keymgt.model.entity.ApplicationKeyMapping;
import org.wso2.carbon.apimgt.keymgt.model.entity.ApplicationPolicy;
import org.wso2.carbon.apimgt.keymgt.model.entity.Subscription;
import org.wso2.carbon.apimgt.keymgt.model.entity.SubscriptionPolicy;
import org.wso2.carbon.apimgt.keymgt.model.exception.DataLoadingException;
import org.wso2.carbon.apimgt.keymgt.model.impl.SubscriptionDataLoaderImpl;
import org.wso2.carbon.apimgt.keymgt.service.TokenValidationContext;
import org.wso2.carbon.apimgt.keymgt.token.TokenGenerator;
import org.wso2.carbon.apimgt.keymgt.util.APIKeyMgtDataHolder;
import org.wso2.carbon.utils.multitenancy.MultitenantConstants;
import org.wso2.carbon.utils.multitenancy.MultitenantUtils;
import java.util.ArrayList;
import java.util.List;
public abstract class AbstractKeyValidationHandler implements KeyValidationHandler {
private static final Log log = LogFactory.getLog(AbstractKeyValidationHandler.class);
@Override
public boolean validateSubscription(TokenValidationContext validationContext) throws APIKeyMgtException {
if (validationContext == null || validationContext.getValidationInfoDTO() == null) {
return false;
}
if (validationContext.isCacheHit()) {
return true;
}
APIKeyValidationInfoDTO dto = validationContext.getValidationInfoDTO();
if (validationContext.getTokenInfo() != null) {
if (validationContext.getTokenInfo().isApplicationToken()) {
dto.setUserType(APIConstants.ACCESS_TOKEN_USER_TYPE_APPLICATION);
} else {
dto.setUserType("APPLICATION_USER");
}
AccessTokenInfo tokenInfo = validationContext.getTokenInfo();
// This block checks if a Token of Application Type is trying to access a resource protected with
// Application Token
if (!hasTokenRequiredAuthLevel(validationContext.getRequiredAuthenticationLevel(), tokenInfo)) {
dto.setAuthorized(false);
dto.setValidationStatus(APIConstants.KeyValidationStatus.API_AUTH_INCORRECT_ACCESS_TOKEN_TYPE);
return false;
}
}
boolean state = false;
try {
if (log.isDebugEnabled()) {
log.debug("Before validating subscriptions : " + dto);
log.debug("Validation Info : { context : " + validationContext.getContext() + " , " + "version : "
+ validationContext.getVersion() + " , consumerKey : " + dto.getConsumerKey() + " }");
}
state = validateSubscriptionDetails(validationContext.getContext(), validationContext.getVersion(),
dto.getConsumerKey(), dto.getKeyManager(), dto);
if (log.isDebugEnabled()) {
log.debug("After validating subscriptions : " + dto);
}
} catch (APIManagementException e) {
log.error("Error Occurred while validating subscription.", e);
}
return state;
}
/**
* Determines whether the provided token is an ApplicationToken.
*
* @param tokenInfo - Access Token Information
*/
protected void setTokenType(AccessTokenInfo tokenInfo) {
}
/**
* Resources protected with Application token type can only be accessed using Application Access Tokens. This method
* verifies if a particular resource can be accessed using the obtained token.
*
* @param authScheme Type of token required by the resource (Application | User Token)
* @param tokenInfo Details about the Token
* @return {@code true} if token is of the type required, {@code false} otherwise.
*/
protected boolean hasTokenRequiredAuthLevel(String authScheme,
AccessTokenInfo tokenInfo) {
if (authScheme == null || authScheme.isEmpty() || tokenInfo == null) {
return false;
}
if (APIConstants.AUTH_APPLICATION_LEVEL_TOKEN.equals(authScheme)) {
return tokenInfo.isApplicationToken();
} else if (APIConstants.AUTH_APPLICATION_USER_LEVEL_TOKEN.equals(authScheme)) {
return !tokenInfo.isApplicationToken();
}
return true;
}
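    // Illustrative behaviour of hasTokenRequiredAuthLevel, derived from the checks above
    // (the token variables are assumed for the example):
    //   hasTokenRequiredAuthLevel(APIConstants.AUTH_APPLICATION_LEVEL_TOKEN, applicationToken)      -> true
    //   hasTokenRequiredAuthLevel(APIConstants.AUTH_APPLICATION_USER_LEVEL_TOKEN, applicationToken) -> false
    //   hasTokenRequiredAuthLevel("Any", anyToken)                                                  -> true (no restriction)
    //   hasTokenRequiredAuthLevel(null, anyToken)                                                   -> false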
@Override
public boolean generateConsumerToken(TokenValidationContext validationContext) throws APIKeyMgtException {
TokenGenerator generator = APIKeyMgtDataHolder.getTokenGenerator();
try {
String jwt = generator.generateToken(validationContext);
validationContext.getValidationInfoDTO().setEndUserToken(jwt);
return true;
} catch (APIManagementException e) {
log.error("Error occurred while generating JWT. ", e);
}
return false;
}
@Override
public APIKeyValidationInfoDTO validateSubscription(String apiContext, String apiVersion, String consumerKey,
String keyManager) {
APIKeyValidationInfoDTO apiKeyValidationInfoDTO = new APIKeyValidationInfoDTO();
try {
if (log.isDebugEnabled()) {
log.debug("Before validating subscriptions");
log.debug("Validation Info : { context : " + apiContext + " , " + "version : "
+ apiVersion + " , consumerKey : " + consumerKey + " }");
}
validateSubscriptionDetails(apiContext, apiVersion, consumerKey, keyManager, apiKeyValidationInfoDTO);
if (log.isDebugEnabled()) {
log.debug("After validating subscriptions");
}
} catch (APIManagementException e) {
log.error("Error Occurred while validating subscription.", e);
}
return apiKeyValidationInfoDTO;
}
private boolean validateSubscriptionDetails(String context, String version, String consumerKey, String keyManager,
APIKeyValidationInfoDTO infoDTO) throws APIManagementException {
boolean defaultVersionInvoked = false;
String apiTenantDomain = MultitenantUtils.getTenantDomainFromRequestURL(context);
if (apiTenantDomain == null) {
apiTenantDomain = MultitenantConstants.SUPER_TENANT_DOMAIN_NAME;
}
int apiOwnerTenantId = APIUtil.getTenantIdFromTenantDomain(apiTenantDomain);
// Check if the api version has been prefixed with _default_
if (version != null && version.startsWith(APIConstants.DEFAULT_VERSION_PREFIX)) {
defaultVersionInvoked = true;
// Remove the prefix from the version.
version = version.split(APIConstants.DEFAULT_VERSION_PREFIX)[1];
}
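        // Illustrative example (the prefix literal is assumed to be "_default_"): a version value of
        // "_default_1.0.0" marks a default-version invocation and is normalised to "1.0.0" by the
        // split above before the subscription lookup runs.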
validateSubscriptionDetails(infoDTO, context, version, consumerKey, keyManager, defaultVersionInvoked);
return infoDTO.isAuthorized();
}
private APIKeyValidationInfoDTO validateSubscriptionDetails(APIKeyValidationInfoDTO infoDTO, String context,
String version, String consumerKey, String keyManager, boolean defaultVersionInvoked) {
String apiTenantDomain = MultitenantUtils.getTenantDomainFromRequestURL(context);
if (apiTenantDomain == null) {
apiTenantDomain = MultitenantConstants.SUPER_TENANT_DOMAIN_NAME;
}
int tenantId = APIUtil.getTenantIdFromTenantDomain(apiTenantDomain);
API api = null;
ApplicationKeyMapping key = null;
Application app = null;
Subscription sub = null;
SubscriptionDataStore datastore = SubscriptionDataHolder.getInstance()
.getTenantSubscriptionStore(apiTenantDomain);
        //TODO add a check to see whether the datastore is initialized, and load data using the REST API if it is not
if (datastore != null) {
api = datastore.getApiByContextAndVersion(context, version);
if (api == null && (context.startsWith("/" + version)
|| context.startsWith("/t/" + apiTenantDomain + "/" + version))) {
                // For the websocket default version the context arrives as the version, so get the default API for that context
api = datastore.getDefaultApiByContext(context);
}
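            // Illustrative case (values assumed): for a websocket call the context may arrive as
            // "/1.0.0" while version is "1.0.0", so the default API registered under that context is
            // looked up instead of a context-and-version match.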
if (api != null) {
key = datastore.getKeyMappingByKeyAndKeyManager(consumerKey, keyManager);
if (key != null) {
app = datastore.getApplicationById(key.getApplicationId());
if (app != null) {
sub = datastore.getSubscriptionById(app.getId(), api.getApiId());
if (sub != null) {
if (log.isDebugEnabled()) {
log.debug("All information is retrieved from the inmemory data store.");
}
} else {
if (log.isDebugEnabled()) {
log.debug("Valid subscription not found for appId " + app.getId() + " and apiId "
+ api.getApiId());
}
loadInfoFromRestAPIAndValidate(api, app, key, sub, context, version, consumerKey,
keyManager, datastore, apiTenantDomain, infoDTO, tenantId);
}
} else {
if (log.isDebugEnabled()) {
log.debug("Application not found in the datastore for id " + key.getApplicationId());
}
loadInfoFromRestAPIAndValidate(api, app, key, sub, context, version, consumerKey, keyManager,
datastore, apiTenantDomain, infoDTO, tenantId);
}
} else {
if (log.isDebugEnabled()) {
log.debug(
"Application keymapping not found in the datastore for id consumerKey " + consumerKey);
}
loadInfoFromRestAPIAndValidate(api, app, key, sub, context, version, consumerKey, keyManager,
datastore, apiTenantDomain, infoDTO, tenantId);
}
} else {
if (log.isDebugEnabled()) {
log.debug("API not found in the datastore for " + context + ":" + version);
}
loadInfoFromRestAPIAndValidate(api, app, key, sub, context, version, consumerKey, keyManager, datastore,
apiTenantDomain, infoDTO, tenantId);
}
} else {
log.error("Subscription datastore is null for tenant domain " + apiTenantDomain);
loadInfoFromRestAPIAndValidate(api, app, key, sub, context, version, consumerKey, keyManager, datastore,
apiTenantDomain, infoDTO, tenantId);
}
if (api != null && app != null && key != null && sub != null) {
validate(infoDTO, apiTenantDomain, tenantId, datastore, api, key, app, sub, keyManager);
} else if (!infoDTO.isAuthorized() && infoDTO.getValidationStatus() == 0) {
//Scenario where validation failed and message is not set
infoDTO.setValidationStatus(APIConstants.KeyValidationStatus.API_AUTH_RESOURCE_FORBIDDEN);
}
return infoDTO;
}
private void loadInfoFromRestAPIAndValidate(API api, Application app, ApplicationKeyMapping key, Subscription sub,
String context, String version, String consumerKey, String keyManager, SubscriptionDataStore datastore, String apiTenantDomain, APIKeyValidationInfoDTO infoDTO, int tenantId) {
        // TODO Load using a single REST API call.
if(log.isDebugEnabled()) {
log.debug("Loading missing information in the datastore by invoking the Rest API");
}
try {
// only loading if the api is not found previously
if (api == null) {
api = new SubscriptionDataLoaderImpl().getApi(context, version);
if (api != null && api.getApiId() != 0) {
// load to the memory
log.debug("Loading API to the in-memory datastore.");
datastore.addOrUpdateAPI(api);
}
}
// only loading if the key is not found previously
if (key == null) {
key = new SubscriptionDataLoaderImpl().getKeyMapping(consumerKey);
if (key != null && !StringUtils.isEmpty(key.getConsumerKey())) {
// load to the memory
log.debug("Loading Keymapping to the in-memory datastore.");
datastore.addOrUpdateApplicationKeyMapping(key);
}
}
            // check whether the API and key are still missing
            if (api == null || key == null) {
                // invalid request; nothing to do, return without further processing
if (log.isDebugEnabled()) {
if (api == null) {
log.debug("API not found for the " + context + " " + version);
}
if (key == null) {
log.debug("KeyMapping not found for the " + consumerKey);
}
}
return;
} else {
//go further and load missing objects
if(app == null) {
app = new SubscriptionDataLoaderImpl().getApplicationById(key.getApplicationId());
if(app != null && app.getId() != 0) {
// load to the memory
log.debug("Loading Application to the in-memory datastore.");
datastore.addOrUpdateApplication(app);
} else {
log.debug("Application not found.");
}
}
if (app != null) {
sub = new SubscriptionDataLoaderImpl().getSubscriptionById(Integer.toString(api.getApiId()),
Integer.toString(app.getId()));
if(sub != null && !StringUtils.isEmpty(sub.getSubscriptionId())) {
// load to the memory
log.debug("Loading Subscription to the in-memory datastore.");
datastore.addOrUpdateSubscription(sub);
validate(infoDTO, apiTenantDomain, tenantId, datastore, api, key, app, sub, keyManager);
}
}
}
} catch (DataLoadingException e) {
log.error("Error while connecting the backend for loading subscription related data ", e);
}
}
private APIKeyValidationInfoDTO validate(APIKeyValidationInfoDTO infoDTO, String apiTenantDomain, int tenantId,
SubscriptionDataStore datastore, API api, ApplicationKeyMapping key, Application app, Subscription sub,
String keyManager) {
String subscriptionStatus = sub.getSubscriptionState();
String type = key.getKeyType();
if (APIConstants.SubscriptionStatus.BLOCKED.equals(subscriptionStatus)) {
infoDTO.setValidationStatus(APIConstants.KeyValidationStatus.API_BLOCKED);
infoDTO.setAuthorized(false);
return infoDTO;
} else if (APIConstants.SubscriptionStatus.ON_HOLD.equals(subscriptionStatus)
|| APIConstants.SubscriptionStatus.REJECTED.equals(subscriptionStatus)) {
infoDTO.setValidationStatus(APIConstants.KeyValidationStatus.SUBSCRIPTION_INACTIVE);
infoDTO.setAuthorized(false);
return infoDTO;
} else if (APIConstants.SubscriptionStatus.PROD_ONLY_BLOCKED.equals(subscriptionStatus)
&& !APIConstants.API_KEY_TYPE_SANDBOX.equals(type)) {
infoDTO.setValidationStatus(APIConstants.KeyValidationStatus.API_BLOCKED);
infoDTO.setType(type);
infoDTO.setAuthorized(false);
return infoDTO;
}
infoDTO.setTier(sub.getPolicyId());
infoDTO.setSubscriber(app.getSubName());
infoDTO.setApplicationId(app.getId().toString());
infoDTO.setApiName(api.getApiName());
infoDTO.setApiPublisher(api.getApiProvider());
infoDTO.setApplicationName(app.getName());
infoDTO.setApplicationTier(app.getPolicy());
infoDTO.setType(type);
// Advanced Level Throttling Related Properties
String apiTier = api.getApiTier();
String subscriberUserId = sub.getSubscriptionId();
String subscriberTenant = MultitenantUtils.getTenantDomain(app.getSubName());
ApplicationPolicy appPolicy = datastore.getApplicationPolicyByName(app.getPolicy(),
tenantId);
if (appPolicy == null) {
try {
appPolicy = new SubscriptionDataLoaderImpl()
.getApplicationPolicy(app.getPolicy(), apiTenantDomain);
datastore.addOrUpdateApplicationPolicy(appPolicy);
} catch (DataLoadingException e) {
log.error("Error while loading ApplicationPolicy");
}
}
SubscriptionPolicy subPolicy = datastore.getSubscriptionPolicyByName(sub.getPolicyId(),
tenantId);
if (subPolicy == null) {
try {
subPolicy = new SubscriptionDataLoaderImpl()
.getSubscriptionPolicy(sub.getPolicyId(), apiTenantDomain);
datastore.addOrUpdateSubscriptionPolicy(subPolicy);
} catch (DataLoadingException e) {
log.error("Error while loading SubscriptionPolicy");
}
}
ApiPolicy apiPolicy = datastore.getApiPolicyByName(api.getApiTier(), tenantId);
boolean isContentAware = false;
if (appPolicy.isContentAware() || subPolicy.isContentAware()
|| (apiPolicy != null && apiPolicy.isContentAware())) {
isContentAware = true;
}
infoDTO.setContentAware(isContentAware);
        // TODO this must be implemented as part of the throttling implementation.
int spikeArrest = 0;
String apiLevelThrottlingKey = "api_level_throttling_key";
if (subPolicy.getRateLimitCount() > 0) {
spikeArrest = subPolicy.getRateLimitCount();
}
String spikeArrestUnit = null;
if (subPolicy.getRateLimitTimeUnit() != null) {
spikeArrestUnit = subPolicy.getRateLimitTimeUnit();
}
boolean stopOnQuotaReach = subPolicy.isStopOnQuotaReach();
int graphQLMaxDepth = 0;
if (subPolicy.getGraphQLMaxDepth() > 0) {
graphQLMaxDepth = subPolicy.getGraphQLMaxDepth();
}
int graphQLMaxComplexity = 0;
if (subPolicy.getGraphQLMaxComplexity() > 0) {
graphQLMaxComplexity = subPolicy.getGraphQLMaxComplexity();
}
List<String> list = new ArrayList<String>();
list.add(apiLevelThrottlingKey);
infoDTO.setSpikeArrestLimit(spikeArrest);
infoDTO.setSpikeArrestUnit(spikeArrestUnit);
infoDTO.setStopOnQuotaReach(stopOnQuotaReach);
infoDTO.setSubscriberTenantDomain(subscriberTenant);
infoDTO.setGraphQLMaxDepth(graphQLMaxDepth);
infoDTO.setGraphQLMaxComplexity(graphQLMaxComplexity);
if (apiTier != null && apiTier.trim().length() > 0) {
infoDTO.setApiTier(apiTier);
}
        // We also need to set the throttling data list associated with the given API. This needs to hold the
        // policy ID and condition ID list for all throttling tiers associated with this API.
infoDTO.setThrottlingDataList(list);
infoDTO.setAuthorized(true);
return infoDTO;
}
}
|
components/apimgt/org.wso2.carbon.apimgt.keymgt/src/main/java/org/wso2/carbon/apimgt/keymgt/handlers/AbstractKeyValidationHandler.java
|
/*
* Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.apimgt.keymgt.handlers;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.apimgt.api.APIManagementException;
import org.wso2.carbon.apimgt.api.model.AccessTokenInfo;
import org.wso2.carbon.apimgt.impl.APIConstants;
import org.wso2.carbon.apimgt.impl.dto.APIKeyValidationInfoDTO;
import org.wso2.carbon.apimgt.impl.utils.APIUtil;
import org.wso2.carbon.apimgt.keymgt.APIKeyMgtException;
import org.wso2.carbon.apimgt.keymgt.SubscriptionDataHolder;
import org.wso2.carbon.apimgt.keymgt.model.SubscriptionDataStore;
import org.wso2.carbon.apimgt.keymgt.model.entity.API;
import org.wso2.carbon.apimgt.keymgt.model.entity.ApiPolicy;
import org.wso2.carbon.apimgt.keymgt.model.entity.Application;
import org.wso2.carbon.apimgt.keymgt.model.entity.ApplicationKeyMapping;
import org.wso2.carbon.apimgt.keymgt.model.entity.ApplicationPolicy;
import org.wso2.carbon.apimgt.keymgt.model.entity.Subscription;
import org.wso2.carbon.apimgt.keymgt.model.entity.SubscriptionPolicy;
import org.wso2.carbon.apimgt.keymgt.model.exception.DataLoadingException;
import org.wso2.carbon.apimgt.keymgt.model.impl.SubscriptionDataLoaderImpl;
import org.wso2.carbon.apimgt.keymgt.service.TokenValidationContext;
import org.wso2.carbon.apimgt.keymgt.token.TokenGenerator;
import org.wso2.carbon.apimgt.keymgt.util.APIKeyMgtDataHolder;
import org.wso2.carbon.utils.multitenancy.MultitenantConstants;
import org.wso2.carbon.utils.multitenancy.MultitenantUtils;
import java.util.ArrayList;
import java.util.List;
public abstract class AbstractKeyValidationHandler implements KeyValidationHandler {
private static final Log log = LogFactory.getLog(AbstractKeyValidationHandler.class);
@Override
public boolean validateSubscription(TokenValidationContext validationContext) throws APIKeyMgtException {
if (validationContext == null || validationContext.getValidationInfoDTO() == null) {
return false;
}
if (validationContext.isCacheHit()) {
return true;
}
APIKeyValidationInfoDTO dto = validationContext.getValidationInfoDTO();
if (validationContext.getTokenInfo() != null) {
if (validationContext.getTokenInfo().isApplicationToken()) {
dto.setUserType(APIConstants.ACCESS_TOKEN_USER_TYPE_APPLICATION);
} else {
dto.setUserType("APPLICATION_USER");
}
AccessTokenInfo tokenInfo = validationContext.getTokenInfo();
// This block checks if a Token of Application Type is trying to access a resource protected with
// Application Token
if (!hasTokenRequiredAuthLevel(validationContext.getRequiredAuthenticationLevel(), tokenInfo)) {
dto.setAuthorized(false);
dto.setValidationStatus(APIConstants.KeyValidationStatus.API_AUTH_INCORRECT_ACCESS_TOKEN_TYPE);
return false;
}
}
boolean state = false;
try {
if (log.isDebugEnabled()) {
log.debug("Before validating subscriptions : " + dto);
log.debug("Validation Info : { context : " + validationContext.getContext() + " , " + "version : "
+ validationContext.getVersion() + " , consumerKey : " + dto.getConsumerKey() + " }");
}
state = validateSubscriptionDetails(validationContext.getContext(), validationContext.getVersion(),
dto.getConsumerKey(), dto.getKeyManager(), dto);
if (log.isDebugEnabled()) {
log.debug("After validating subscriptions : " + dto);
}
} catch (APIManagementException e) {
log.error("Error Occurred while validating subscription.", e);
}
return state;
}
/**
* Determines whether the provided token is an ApplicationToken.
*
* @param tokenInfo - Access Token Information
*/
protected void setTokenType(AccessTokenInfo tokenInfo) {
}
/**
* Resources protected with Application token type can only be accessed using Application Access Tokens. This method
* verifies if a particular resource can be accessed using the obtained token.
*
* @param authScheme Type of token required by the resource (Application | User Token)
* @param tokenInfo Details about the Token
* @return {@code true} if token is of the type required, {@code false} otherwise.
*/
protected boolean hasTokenRequiredAuthLevel(String authScheme,
AccessTokenInfo tokenInfo) {
if (authScheme == null || authScheme.isEmpty() || tokenInfo == null) {
return false;
}
if (APIConstants.AUTH_APPLICATION_LEVEL_TOKEN.equals(authScheme)) {
return tokenInfo.isApplicationToken();
} else if (APIConstants.AUTH_APPLICATION_USER_LEVEL_TOKEN.equals(authScheme)) {
return !tokenInfo.isApplicationToken();
}
return true;
}
@Override
public boolean generateConsumerToken(TokenValidationContext validationContext) throws APIKeyMgtException {
TokenGenerator generator = APIKeyMgtDataHolder.getTokenGenerator();
try {
String jwt = generator.generateToken(validationContext);
validationContext.getValidationInfoDTO().setEndUserToken(jwt);
return true;
} catch (APIManagementException e) {
log.error("Error occurred while generating JWT. ", e);
}
return false;
}
@Override
public APIKeyValidationInfoDTO validateSubscription(String apiContext, String apiVersion, String consumerKey,
String keyManager) {
APIKeyValidationInfoDTO apiKeyValidationInfoDTO = new APIKeyValidationInfoDTO();
try {
if (log.isDebugEnabled()) {
log.debug("Before validating subscriptions");
log.debug("Validation Info : { context : " + apiContext + " , " + "version : "
+ apiVersion + " , consumerKey : " + consumerKey + " }");
}
validateSubscriptionDetails(apiContext, apiVersion, consumerKey, keyManager, apiKeyValidationInfoDTO);
if (log.isDebugEnabled()) {
log.debug("After validating subscriptions");
}
} catch (APIManagementException e) {
log.error("Error Occurred while validating subscription.", e);
}
return apiKeyValidationInfoDTO;
}
private boolean validateSubscriptionDetails(String context, String version, String consumerKey, String keyManager,
APIKeyValidationInfoDTO infoDTO) throws APIManagementException {
boolean defaultVersionInvoked = false;
String apiTenantDomain = MultitenantUtils.getTenantDomainFromRequestURL(context);
if (apiTenantDomain == null) {
apiTenantDomain = MultitenantConstants.SUPER_TENANT_DOMAIN_NAME;
}
int apiOwnerTenantId = APIUtil.getTenantIdFromTenantDomain(apiTenantDomain);
// Check if the api version has been prefixed with _default_
if (version != null && version.startsWith(APIConstants.DEFAULT_VERSION_PREFIX)) {
defaultVersionInvoked = true;
// Remove the prefix from the version.
version = version.split(APIConstants.DEFAULT_VERSION_PREFIX)[1];
}
validateSubscriptionDetails(infoDTO, context, version, consumerKey, keyManager, defaultVersionInvoked);
return infoDTO.isAuthorized();
}
private APIKeyValidationInfoDTO validateSubscriptionDetails(APIKeyValidationInfoDTO infoDTO, String context,
String version, String consumerKey, String keyManager, boolean defaultVersionInvoked) {
String apiTenantDomain = MultitenantUtils.getTenantDomainFromRequestURL(context);
if (apiTenantDomain == null) {
apiTenantDomain = MultitenantConstants.SUPER_TENANT_DOMAIN_NAME;
}
int tenantId = APIUtil.getTenantIdFromTenantDomain(apiTenantDomain);
API api = null;
ApplicationKeyMapping key = null;
Application app = null;
Subscription sub = null;
SubscriptionDataStore datastore = SubscriptionDataHolder.getInstance()
.getTenantSubscriptionStore(apiTenantDomain);
        //TODO add a check to see whether the datastore is initialized, and load data using the REST API if it is not
if (datastore != null) {
if (version != null && context.startsWith("/" + version)) {
api = datastore.getDefaultApiByContext(context);
} else {
api = datastore.getApiByContextAndVersion(context, version);
}
if (api != null) {
key = datastore.getKeyMappingByKeyAndKeyManager(consumerKey, keyManager);
if (key != null) {
app = datastore.getApplicationById(key.getApplicationId());
if (app != null) {
sub = datastore.getSubscriptionById(app.getId(), api.getApiId());
if (sub != null) {
if (log.isDebugEnabled()) {
log.debug("All information is retrieved from the inmemory data store.");
}
} else {
if (log.isDebugEnabled()) {
log.debug("Valid subscription not found for appId " + app.getId() + " and apiId "
+ api.getApiId());
}
loadInfoFromRestAPIAndValidate(api, app, key, sub, context, version, consumerKey,
keyManager, datastore, apiTenantDomain, infoDTO, tenantId);
}
} else {
if (log.isDebugEnabled()) {
log.debug("Application not found in the datastore for id " + key.getApplicationId());
}
loadInfoFromRestAPIAndValidate(api, app, key, sub, context, version, consumerKey, keyManager,
datastore, apiTenantDomain, infoDTO, tenantId);
}
} else {
if (log.isDebugEnabled()) {
log.debug(
"Application keymapping not found in the datastore for id consumerKey " + consumerKey);
}
loadInfoFromRestAPIAndValidate(api, app, key, sub, context, version, consumerKey, keyManager,
datastore, apiTenantDomain, infoDTO, tenantId);
}
} else {
if (log.isDebugEnabled()) {
log.debug("API not found in the datastore for " + context + ":" + version);
}
loadInfoFromRestAPIAndValidate(api, app, key, sub, context, version, consumerKey, keyManager, datastore,
apiTenantDomain, infoDTO, tenantId);
}
} else {
log.error("Subscription datastore is null for tenant domain " + apiTenantDomain);
loadInfoFromRestAPIAndValidate(api, app, key, sub, context, version, consumerKey, keyManager, datastore,
apiTenantDomain, infoDTO, tenantId);
}
if (api != null && app != null && key != null && sub != null) {
validate(infoDTO, apiTenantDomain, tenantId, datastore, api, key, app, sub, keyManager);
} else if (!infoDTO.isAuthorized() && infoDTO.getValidationStatus() == 0) {
//Scenario where validation failed and message is not set
infoDTO.setValidationStatus(APIConstants.KeyValidationStatus.API_AUTH_RESOURCE_FORBIDDEN);
}
return infoDTO;
}
private void loadInfoFromRestAPIAndValidate(API api, Application app, ApplicationKeyMapping key, Subscription sub,
String context, String version, String consumerKey, String keyManager, SubscriptionDataStore datastore, String apiTenantDomain, APIKeyValidationInfoDTO infoDTO, int tenantId) {
        // TODO Load using a single REST API call.
if(log.isDebugEnabled()) {
log.debug("Loading missing information in the datastore by invoking the Rest API");
}
try {
// only loading if the api is not found previously
if (api == null) {
api = new SubscriptionDataLoaderImpl().getApi(context, version);
if (api != null && api.getApiId() != 0) {
// load to the memory
log.debug("Loading API to the in-memory datastore.");
datastore.addOrUpdateAPI(api);
}
}
// only loading if the key is not found previously
if (key == null) {
key = new SubscriptionDataLoaderImpl().getKeyMapping(consumerKey);
if (key != null && !StringUtils.isEmpty(key.getConsumerKey())) {
// load to the memory
log.debug("Loading Keymapping to the in-memory datastore.");
datastore.addOrUpdateApplicationKeyMapping(key);
}
}
// check whether still api and keys are not found
if(api == null || key == null) {
// invalid request. nothing to do. return without any further processing
if (log.isDebugEnabled()) {
if (api == null) {
log.debug("API not found for the " + context + " " + version);
}
if (key == null) {
log.debug("KeyMapping not found for the " + consumerKey);
}
}
return;
} else {
//go further and load missing objects
if(app == null) {
app = new SubscriptionDataLoaderImpl().getApplicationById(key.getApplicationId());
if(app != null && app.getId() != 0) {
// load to the memory
log.debug("Loading Application to the in-memory datastore.");
datastore.addOrUpdateApplication(app);
} else {
log.debug("Application not found.");
}
}
if (app != null) {
sub = new SubscriptionDataLoaderImpl().getSubscriptionById(Integer.toString(api.getApiId()),
Integer.toString(app.getId()));
if(sub != null && !StringUtils.isEmpty(sub.getSubscriptionId())) {
// load to the memory
log.debug("Loading Subscription to the in-memory datastore.");
datastore.addOrUpdateSubscription(sub);
validate(infoDTO, apiTenantDomain, tenantId, datastore, api, key, app, sub, keyManager);
}
}
}
} catch (DataLoadingException e) {
log.error("Error while connecting the backend for loading subscription related data ", e);
}
}
private APIKeyValidationInfoDTO validate(APIKeyValidationInfoDTO infoDTO, String apiTenantDomain, int tenantId,
SubscriptionDataStore datastore, API api, ApplicationKeyMapping key, Application app, Subscription sub,
String keyManager) {
String subscriptionStatus = sub.getSubscriptionState();
String type = key.getKeyType();
if (APIConstants.SubscriptionStatus.BLOCKED.equals(subscriptionStatus)) {
infoDTO.setValidationStatus(APIConstants.KeyValidationStatus.API_BLOCKED);
infoDTO.setAuthorized(false);
return infoDTO;
} else if (APIConstants.SubscriptionStatus.ON_HOLD.equals(subscriptionStatus)
|| APIConstants.SubscriptionStatus.REJECTED.equals(subscriptionStatus)) {
infoDTO.setValidationStatus(APIConstants.KeyValidationStatus.SUBSCRIPTION_INACTIVE);
infoDTO.setAuthorized(false);
return infoDTO;
} else if (APIConstants.SubscriptionStatus.PROD_ONLY_BLOCKED.equals(subscriptionStatus)
&& !APIConstants.API_KEY_TYPE_SANDBOX.equals(type)) {
infoDTO.setValidationStatus(APIConstants.KeyValidationStatus.API_BLOCKED);
infoDTO.setType(type);
infoDTO.setAuthorized(false);
return infoDTO;
}
infoDTO.setTier(sub.getPolicyId());
infoDTO.setSubscriber(app.getSubName());
infoDTO.setApplicationId(app.getId().toString());
infoDTO.setApiName(api.getApiName());
infoDTO.setApiPublisher(api.getApiProvider());
infoDTO.setApplicationName(app.getName());
infoDTO.setApplicationTier(app.getPolicy());
infoDTO.setType(type);
// Advanced Level Throttling Related Properties
String apiTier = api.getApiTier();
String subscriberUserId = sub.getSubscriptionId();
String subscriberTenant = MultitenantUtils.getTenantDomain(app.getSubName());
ApplicationPolicy appPolicy = datastore.getApplicationPolicyByName(app.getPolicy(),
tenantId);
if (appPolicy == null) {
try {
appPolicy = new SubscriptionDataLoaderImpl()
.getApplicationPolicy(app.getPolicy(), apiTenantDomain);
datastore.addOrUpdateApplicationPolicy(appPolicy);
} catch (DataLoadingException e) {
log.error("Error while loading ApplicationPolicy");
}
}
SubscriptionPolicy subPolicy = datastore.getSubscriptionPolicyByName(sub.getPolicyId(),
tenantId);
if (subPolicy == null) {
try {
subPolicy = new SubscriptionDataLoaderImpl()
.getSubscriptionPolicy(sub.getPolicyId(), apiTenantDomain);
datastore.addOrUpdateSubscriptionPolicy(subPolicy);
} catch (DataLoadingException e) {
log.error("Error while loading SubscriptionPolicy");
}
}
ApiPolicy apiPolicy = datastore.getApiPolicyByName(api.getApiTier(), tenantId);
boolean isContentAware = false;
if (appPolicy.isContentAware() || subPolicy.isContentAware()
|| (apiPolicy != null && apiPolicy.isContentAware())) {
isContentAware = true;
}
infoDTO.setContentAware(isContentAware);
        // TODO this must be implemented as part of the throttling implementation.
int spikeArrest = 0;
String apiLevelThrottlingKey = "api_level_throttling_key";
if (subPolicy.getRateLimitCount() > 0) {
spikeArrest = subPolicy.getRateLimitCount();
}
String spikeArrestUnit = null;
if (subPolicy.getRateLimitTimeUnit() != null) {
spikeArrestUnit = subPolicy.getRateLimitTimeUnit();
}
boolean stopOnQuotaReach = subPolicy.isStopOnQuotaReach();
int graphQLMaxDepth = 0;
if (subPolicy.getGraphQLMaxDepth() > 0) {
graphQLMaxDepth = subPolicy.getGraphQLMaxDepth();
}
int graphQLMaxComplexity = 0;
if (subPolicy.getGraphQLMaxComplexity() > 0) {
graphQLMaxComplexity = subPolicy.getGraphQLMaxComplexity();
}
List<String> list = new ArrayList<String>();
list.add(apiLevelThrottlingKey);
infoDTO.setSpikeArrestLimit(spikeArrest);
infoDTO.setSpikeArrestUnit(spikeArrestUnit);
infoDTO.setStopOnQuotaReach(stopOnQuotaReach);
infoDTO.setSubscriberTenantDomain(subscriberTenant);
infoDTO.setGraphQLMaxDepth(graphQLMaxDepth);
infoDTO.setGraphQLMaxComplexity(graphQLMaxComplexity);
if (apiTier != null && apiTier.trim().length() > 0) {
infoDTO.setApiTier(apiTier);
}
        // We also need to set the throttling data list associated with the given API. This needs to hold the
        // policy ID and condition ID list for all throttling tiers associated with this API.
infoDTO.setThrottlingDataList(list);
infoDTO.setAuthorized(true);
return infoDTO;
}
}
|
Update logic
|
components/apimgt/org.wso2.carbon.apimgt.keymgt/src/main/java/org/wso2/carbon/apimgt/keymgt/handlers/AbstractKeyValidationHandler.java
|
Update logic
|
|
Java
|
apache-2.0
|
73e722784d610266d84e1bf75d1db16e9ad3c2d6
| 0
|
Genymobile/scrcpy,Genymobile/scrcpy,Genymobile/scrcpy
|
package com.genymobile.scrcpy;
import com.genymobile.scrcpy.wrappers.ContentProvider;
import com.genymobile.scrcpy.wrappers.InputManager;
import com.genymobile.scrcpy.wrappers.ServiceManager;
import com.genymobile.scrcpy.wrappers.SurfaceControl;
import com.genymobile.scrcpy.wrappers.WindowManager;
import android.graphics.Rect;
import android.os.Build;
import android.os.IBinder;
import android.os.RemoteException;
import android.view.IRotationWatcher;
import android.view.InputEvent;
public final class Device {
public static final int POWER_MODE_OFF = SurfaceControl.POWER_MODE_OFF;
public static final int POWER_MODE_NORMAL = SurfaceControl.POWER_MODE_NORMAL;
public interface RotationListener {
void onRotationChanged(int rotation);
}
private final ServiceManager serviceManager = new ServiceManager();
private ScreenInfo screenInfo;
private RotationListener rotationListener;
/**
* Logical display identifier
*/
private final int displayId;
/**
* The surface flinger layer stack associated with this logical display
*/
private final int layerStack;
private final boolean supportsInputEvents;
public Device(Options options) {
displayId = options.getDisplayId();
DisplayInfo displayInfo = serviceManager.getDisplayManager().getDisplayInfo(displayId);
if (displayInfo == null) {
int[] displayIds = serviceManager.getDisplayManager().getDisplayIds();
throw new InvalidDisplayIdException(displayId, displayIds);
}
int displayInfoFlags = displayInfo.getFlags();
screenInfo = ScreenInfo.computeScreenInfo(displayInfo, options.getCrop(), options.getMaxSize(), options.getLockedVideoOrientation());
layerStack = displayInfo.getLayerStack();
serviceManager.getWindowManager().registerRotationWatcher(new IRotationWatcher.Stub() {
@Override
public void onRotationChanged(int rotation) {
synchronized (Device.this) {
screenInfo = screenInfo.withDeviceRotation(rotation);
// notify
if (rotationListener != null) {
rotationListener.onRotationChanged(rotation);
}
}
}
}, displayId);
if ((displayInfoFlags & DisplayInfo.FLAG_SUPPORTS_PROTECTED_BUFFERS) == 0) {
Ln.w("Display doesn't have FLAG_SUPPORTS_PROTECTED_BUFFERS flag, mirroring can be restricted");
}
// main display or any display on Android >= Q
supportsInputEvents = displayId == 0 || Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q;
if (!supportsInputEvents) {
Ln.w("Input events are not supported for secondary displays before Android 10");
}
}
public synchronized ScreenInfo getScreenInfo() {
return screenInfo;
}
public int getLayerStack() {
return layerStack;
}
public Point getPhysicalPoint(Position position) {
// it hides the field on purpose, to read it with a lock
@SuppressWarnings("checkstyle:HiddenField")
ScreenInfo screenInfo = getScreenInfo(); // read with synchronization
// ignore the locked video orientation, the events will apply in coordinates considered in the physical device orientation
Size unlockedVideoSize = screenInfo.getUnlockedVideoSize();
int reverseVideoRotation = screenInfo.getReverseVideoRotation();
// reverse the video rotation to apply the events
Position devicePosition = position.rotate(reverseVideoRotation);
Size clientVideoSize = devicePosition.getScreenSize();
if (!unlockedVideoSize.equals(clientVideoSize)) {
// The client sends a click relative to a video with wrong dimensions,
// the device may have been rotated since the event was generated, so ignore the event
return null;
}
Rect contentRect = screenInfo.getContentRect();
Point point = devicePosition.getPoint();
int convertedX = contentRect.left + point.getX() * contentRect.width() / unlockedVideoSize.getWidth();
int convertedY = contentRect.top + point.getY() * contentRect.height() / unlockedVideoSize.getHeight();
return new Point(convertedX, convertedY);
}
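    // Worked example of the scaling above (all numbers assumed for illustration): with
    // contentRect = (left=0, top=0, width=1080, height=1920) and an unlocked video size of 540x960,
    // a client point at (270, 480) maps to (0 + 270*1080/540, 0 + 480*1920/960) = (540, 960).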
public static String getDeviceName() {
return Build.MODEL;
}
public boolean supportsInputEvents() {
return supportsInputEvents;
}
public boolean injectInputEvent(InputEvent inputEvent, int mode) {
if (!supportsInputEvents()) {
throw new AssertionError("Could not inject input event if !supportsInputEvents()");
}
if (displayId != 0 && !InputManager.setDisplayId(inputEvent, displayId)) {
return false;
}
return serviceManager.getInputManager().injectInputEvent(inputEvent, mode);
}
public boolean isScreenOn() {
return serviceManager.getPowerManager().isScreenOn();
}
public synchronized void setRotationListener(RotationListener rotationListener) {
this.rotationListener = rotationListener;
}
public void expandNotificationPanel() {
serviceManager.getStatusBarManager().expandNotificationsPanel();
}
public void collapsePanels() {
serviceManager.getStatusBarManager().collapsePanels();
}
public String getClipboardText() {
CharSequence s = serviceManager.getClipboardManager().getText();
if (s == null) {
return null;
}
return s.toString();
}
public void setClipboardText(String text) {
boolean ok = serviceManager.getClipboardManager().setText(text);
if (ok) {
Ln.i("Device clipboard set");
}
}
/**
* @param mode one of the {@code SCREEN_POWER_MODE_*} constants
*/
public void setScreenPowerMode(int mode) {
IBinder d = SurfaceControl.getBuiltInDisplay();
if (d == null) {
Ln.e("Could not get built-in display");
return;
}
boolean ok = SurfaceControl.setDisplayPowerMode(d, mode);
if (ok) {
Ln.i("Device screen turned " + (mode == Device.POWER_MODE_OFF ? "off" : "on"));
}
}
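    // Illustrative calls (assumed usage, not taken from this file):
    //   device.setScreenPowerMode(Device.POWER_MODE_OFF);    // turn the mirrored display off
    //   device.setScreenPowerMode(Device.POWER_MODE_NORMAL); // restore it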
/**
* Disable auto-rotation (if enabled), set the screen rotation and re-enable auto-rotation (if it was enabled).
*/
public void rotateDevice() {
WindowManager wm = serviceManager.getWindowManager();
boolean accelerometerRotation = !wm.isRotationFrozen();
int currentRotation = wm.getRotation();
int newRotation = (currentRotation & 1) ^ 1; // 0->1, 1->0, 2->1, 3->0
String newRotationString = newRotation == 0 ? "portrait" : "landscape";
Ln.i("Device rotation requested: " + newRotationString);
wm.freezeRotation(newRotation);
// restore auto-rotate if necessary
if (accelerometerRotation) {
wm.thawRotation();
}
}
public ContentProvider createSettingsProvider() {
return serviceManager.getActivityManager().createSettingsProvider();
}
}
|
server/src/main/java/com/genymobile/scrcpy/Device.java
|
package com.genymobile.scrcpy;
import com.genymobile.scrcpy.wrappers.ContentProvider;
import com.genymobile.scrcpy.wrappers.InputManager;
import com.genymobile.scrcpy.wrappers.ServiceManager;
import com.genymobile.scrcpy.wrappers.SurfaceControl;
import com.genymobile.scrcpy.wrappers.WindowManager;
import android.graphics.Rect;
import android.os.Build;
import android.os.IBinder;
import android.os.RemoteException;
import android.view.IRotationWatcher;
import android.view.InputEvent;
public final class Device {
public static final int POWER_MODE_OFF = SurfaceControl.POWER_MODE_OFF;
public static final int POWER_MODE_NORMAL = SurfaceControl.POWER_MODE_NORMAL;
public interface RotationListener {
void onRotationChanged(int rotation);
}
private final ServiceManager serviceManager = new ServiceManager();
private ScreenInfo screenInfo;
private RotationListener rotationListener;
/**
* Logical display identifier
*/
private final int displayId;
/**
* The surface flinger layer stack associated with this logical display
*/
private final int layerStack;
private final boolean supportsInputEvents;
public Device(Options options) {
displayId = options.getDisplayId();
DisplayInfo displayInfo = serviceManager.getDisplayManager().getDisplayInfo(displayId);
if (displayInfo == null) {
int[] displayIds = serviceManager.getDisplayManager().getDisplayIds();
throw new InvalidDisplayIdException(displayId, displayIds);
}
int displayInfoFlags = displayInfo.getFlags();
screenInfo = ScreenInfo.computeScreenInfo(displayInfo, options.getCrop(), options.getMaxSize(), options.getLockedVideoOrientation());
layerStack = displayInfo.getLayerStack();
serviceManager.getWindowManager().registerRotationWatcher(new IRotationWatcher.Stub() {
@Override
public void onRotationChanged(int rotation) throws RemoteException {
synchronized (Device.this) {
screenInfo = screenInfo.withDeviceRotation(rotation);
// notify
if (rotationListener != null) {
rotationListener.onRotationChanged(rotation);
}
}
}
}, displayId);
if ((displayInfoFlags & DisplayInfo.FLAG_SUPPORTS_PROTECTED_BUFFERS) == 0) {
Ln.w("Display doesn't have FLAG_SUPPORTS_PROTECTED_BUFFERS flag, mirroring can be restricted");
}
// main display or any display on Android >= Q
supportsInputEvents = displayId == 0 || Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q;
if (!supportsInputEvents) {
Ln.w("Input events are not supported for secondary displays before Android 10");
}
}
public synchronized ScreenInfo getScreenInfo() {
return screenInfo;
}
public int getLayerStack() {
return layerStack;
}
public Point getPhysicalPoint(Position position) {
// it hides the field on purpose, to read it with a lock
@SuppressWarnings("checkstyle:HiddenField")
ScreenInfo screenInfo = getScreenInfo(); // read with synchronization
// ignore the locked video orientation, the events will apply in coordinates considered in the physical device orientation
Size unlockedVideoSize = screenInfo.getUnlockedVideoSize();
int reverseVideoRotation = screenInfo.getReverseVideoRotation();
// reverse the video rotation to apply the events
Position devicePosition = position.rotate(reverseVideoRotation);
Size clientVideoSize = devicePosition.getScreenSize();
if (!unlockedVideoSize.equals(clientVideoSize)) {
// The client sends a click relative to a video with wrong dimensions,
// the device may have been rotated since the event was generated, so ignore the event
return null;
}
Rect contentRect = screenInfo.getContentRect();
Point point = devicePosition.getPoint();
int convertedX = contentRect.left + point.getX() * contentRect.width() / unlockedVideoSize.getWidth();
int convertedY = contentRect.top + point.getY() * contentRect.height() / unlockedVideoSize.getHeight();
return new Point(convertedX, convertedY);
}
public static String getDeviceName() {
return Build.MODEL;
}
public boolean supportsInputEvents() {
return supportsInputEvents;
}
public boolean injectInputEvent(InputEvent inputEvent, int mode) {
if (!supportsInputEvents()) {
throw new AssertionError("Could not inject input event if !supportsInputEvents()");
}
if (displayId != 0 && !InputManager.setDisplayId(inputEvent, displayId)) {
return false;
}
return serviceManager.getInputManager().injectInputEvent(inputEvent, mode);
}
public boolean isScreenOn() {
return serviceManager.getPowerManager().isScreenOn();
}
public synchronized void setRotationListener(RotationListener rotationListener) {
this.rotationListener = rotationListener;
}
public void expandNotificationPanel() {
serviceManager.getStatusBarManager().expandNotificationsPanel();
}
public void collapsePanels() {
serviceManager.getStatusBarManager().collapsePanels();
}
public String getClipboardText() {
CharSequence s = serviceManager.getClipboardManager().getText();
if (s == null) {
return null;
}
return s.toString();
}
public void setClipboardText(String text) {
boolean ok = serviceManager.getClipboardManager().setText(text);
if (ok) {
Ln.i("Device clipboard set");
}
}
/**
* @param mode one of the {@code SCREEN_POWER_MODE_*} constants
*/
public void setScreenPowerMode(int mode) {
IBinder d = SurfaceControl.getBuiltInDisplay();
if (d == null) {
Ln.e("Could not get built-in display");
return;
}
boolean ok = SurfaceControl.setDisplayPowerMode(d, mode);
if (ok) {
Ln.i("Device screen turned " + (mode == Device.POWER_MODE_OFF ? "off" : "on"));
}
}
/**
* Disable auto-rotation (if enabled), set the screen rotation and re-enable auto-rotation (if it was enabled).
*/
public void rotateDevice() {
WindowManager wm = serviceManager.getWindowManager();
boolean accelerometerRotation = !wm.isRotationFrozen();
int currentRotation = wm.getRotation();
int newRotation = (currentRotation & 1) ^ 1; // 0->1, 1->0, 2->1, 3->0
String newRotationString = newRotation == 0 ? "portrait" : "landscape";
Ln.i("Device rotation requested: " + newRotationString);
wm.freezeRotation(newRotation);
// restore auto-rotate if necessary
if (accelerometerRotation) {
wm.thawRotation();
}
}
public ContentProvider createSettingsProvider() {
return serviceManager.getActivityManager().createSettingsProvider();
}
}
|
Remove useless exception declaration
The interface declares it can throw a RemoteException, but the
implementation never throws such exception.
|
server/src/main/java/com/genymobile/scrcpy/Device.java
|
Remove useless exception declaration
|
|
Java
|
apache-2.0
|
4da5faa723f87646ee8ffb63b5c325465588fdc0
| 0
|
marginweb/Wikipedia-noSQL-Benchmark,marginweb/Wikipedia-noSQL-Benchmark,marginweb/Wikipedia-noSQL-Benchmark
|
package implementations;
/**
* Copyright 2011 Thibault Dory
* Licensed under the GPL Version 3 license
*/
import hbase_mapreduce.MapReduceHbaseDB;
import java.io.IOException;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;
import core.BenchDB;
/**
*
* @author Thibault Dory
*
* Use the following commands in HBase shell to create the needed tables and column families
* create 'myTable', 'myColumnFamily'
* create 'result', 'resultF'
* create 'result2', 'resultF'
*/
public class hbaseDB extends BenchDB{
HTable table;
HBaseConfiguration config;
@Override
public int connectNode(String nodeAddress) {
int ret;
config = new HBaseConfiguration();
try {
table = new HTable(config, "myTable");
ret = 1;
} catch (IOException e) {
e.printStackTrace();
ret = -1;
}
return ret;
}
@Override
public String readDB(String ID) {
String ret;
//The ID is converted to a uuid for performance reasons
Get g = new Get(Bytes.toBytes(ID));
try {
Result r = table.get(g);
byte [] value = r.getValue(Bytes.toBytes("myColumnFamily"),Bytes.toBytes("value"));
ret = Bytes.toString(value);
} catch (IOException e) {
e.printStackTrace();
ret = null;
}
return ret;
}
@Override
public int updateDB(String ID, String newValue) {
return writeDB(ID, newValue);
}
@Override
public int writeDB(String ID, String Value) {
int ret = 0;
//the row is called ID and is converted into a UUID
Put p = new Put(Bytes.toBytes(ID));
try{
p.add(Bytes.toBytes("myColumnFamily"), Bytes.toBytes("value"), Bytes.toBytes(Value));
table.put(p);
ret = 1;
}catch(Exception e){
e.printStackTrace();
ret = -1;
}
return ret;
}
@Override
public void searchDB(String keyword){
String[] args = {"master"};
try {
MapReduceHbaseDB.main(args);
} catch (Exception e) {
e.printStackTrace();
}
}
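    // Illustrative usage sketch (assumed driver code, not part of this class), once the HBase shell
    // commands from the class javadoc have created 'myTable' / 'myColumnFamily':
    //   hbaseDB db = new hbaseDB();
    //   db.connectNode("master");              // opens the 'myTable' HTable
    //   db.writeDB("article_42", "wiki text"); // stored under myColumnFamily:value
    //   String text = db.readDB("article_42"); // returns "wiki text"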
}
|
src/implementations/hbaseDB.java
|
package implementations;
/**
* Copyright 2011 Thibault Dory
* Licensed under the GPL Version 3 license
*/
import hbase_mapreduce.MapReduceHbaseDB;
import hbase_mapreduce.MapReduceHbaseDB.Mapper1;
import hbase_mapreduce.MapReduceHbaseDB.Mapper2;
import hbase_mapreduce.MapReduceHbaseDB.Reducer1;
import hbase_mapreduce.MapReduceHbaseDB.Reducer2;
import java.io.IOException;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.mapreduce.*;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import cassandra_mapreduce.MapReduceCassandraDB;
import core.BenchDB;
/**
*
* @author Thibault Dory
*
* Use the following commands in HBase shell to create the needed tables and column families
* create 'myTable', 'myColumnFamily'
* create 'result', 'resultF'
* create 'result2', 'resultF'
*/
public class hbaseDB extends BenchDB{
HTable table;
HBaseConfiguration config;
@Override
public int connectNode(String nodeAddress) {
int ret;
config = new HBaseConfiguration();
try {
table = new HTable(config, "myTable");
ret = 1;
} catch (IOException e) {
e.printStackTrace();
ret = -1;
}
return ret;
}
@Override
public String readDB(String ID) {
String ret;
//The ID is converted to a uuid for performance reasons
Get g = new Get(Bytes.toBytes(ID));
try {
Result r = table.get(g);
byte [] value = r.getValue(Bytes.toBytes("myColumnFamily"),Bytes.toBytes("value"));
ret = Bytes.toString(value);
} catch (IOException e) {
e.printStackTrace();
ret = null;
}
return ret;
}
@Override
public int updateDB(String ID, String newValue) {
return writeDB(ID, newValue);
}
@Override
public int writeDB(String ID, String Value) {
int ret = 0;
//the row is called ID and is converted into a UUID
Put p = new Put(Bytes.toBytes(ID));
try{
p.add(Bytes.toBytes("myColumnFamily"), Bytes.toBytes("value"), Bytes.toBytes(Value));
table.put(p);
ret = 1;
}catch(Exception e){
e.printStackTrace();
ret = -1;
}
return ret;
}
@Override
public void searchDB(String keyword){
String[] args = {"master"};
try {
MapReduceHbaseDB.main(args);
} catch (Exception e) {
e.printStackTrace();
}
}
}
|
Import clean up
|
src/implementations/hbaseDB.java
|
Import clean up
|
|
Java
|
apache-2.0
|
1fe30edb67096aa082524b310e5911ef8201501f
| 0
|
allga/java_pft
|
package ru.stqa.ptf.mantis.tests;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import ru.lanwen.verbalregex.VerbalExpression;
import ru.stqa.ptf.mantis.model.MailMessage;
import java.io.IOException;
import java.util.List;
import static org.testng.Assert.assertTrue;
/**
* Created by Olga on 13.04.2016.
*/
public class RegistrationTests extends TestBase {
    // So that the application under test knows that mail has to be delivered to our mail server,
    // two lines must be added to the config_inc.php configuration file:
    // $g_phpMailer_method = PHPMAILER_METHOD_SMTP; - mail delivery method over the SMTP protocol
    // $g_smtp_host = 'localhost'; - mail delivery address
    // (if the web server is on a remote machine, specify the network address of the machine that runs the tests and the mail server)
    // at the start of the tests this file is uploaded over FTP to the remote machine
@BeforeMethod
    // start the mail server before each method so that old mail is discarded
public void startMailServer() {
app.mail().start();
}
@Test
public void testRegistration() throws IOException {
long now = System.currentTimeMillis();
String email = String.format("user%s@localhost.localdomain", now);
String user = String.format("user%s", now);
String password = "password";
app.registration().start(user, email);
        // expect 2 emails within 10 seconds
List<MailMessage> mailMessages = app.mail().waitForMail(2, 10000);
        // extract the confirmation link from the email
String confirmationLink = app.mail().findConfirmationLink(mailMessages, email);
        // finish the registration by following the link
app.registration().finish(confirmationLink, password);
        // verify that the user can log in
assertTrue(app.newSession().login(user, password));
}
    // stop the server in any case, even if the test failed
@AfterMethod(alwaysRun = true)
public void stopMailServer() {
app.mail().stop();
}
}
|
mantis-tests/src/test/java/ru/stqa/ptf/mantis/tests/RegistrationTests.java
|
package ru.stqa.ptf.mantis.tests;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import ru.lanwen.verbalregex.VerbalExpression;
import ru.stqa.ptf.mantis.model.MailMessage;
import java.io.IOException;
import java.util.List;
import static org.testng.Assert.assertTrue;
/**
* Created by Olga on 13.04.2016.
*/
public class RegistrationTests extends TestBase {
    // So that the application under test knows that mail has to be delivered to our mail server,
    // two lines must be added to the config_inc.php configuration file:
    // $g_phpMailer_method = PHPMAILER_METHOD_SMTP; - mail delivery method over the SMTP protocol
    // $g_smtp_host = 'localhost'; - mail delivery address
    // (if the web server is on a remote machine, specify the network address of the machine that runs the tests and the mail server)
    // at the start of the tests this file is uploaded over FTP to the remote machine
@BeforeMethod
    // start the mail server before each method so that old mail is discarded
public void startMailServer() {
app.mail().start();
}
@Test
public void testRegistration() throws IOException {
long now = System.currentTimeMillis();
String email = String.format("user%s@localhost.localdomain", now);
String user = String.format("user%s", now);
String password = "password";
app.registration().start(user, email);
        // expect 2 emails within 10 seconds
List<MailMessage> mailMessages = app.mail().waitForMail(2, 10000);
        // extract the confirmation link from the email
String confirmationLink = findConfirmationLink(mailMessages, email);
        // finish the registration by following the link
app.registration().finish(confirmationLink, password);
        // verify that the user can log in
assertTrue(app.newSession().login(user, password));
}
private String findConfirmationLink(List<MailMessage> mailMessages, String email) {
        // from the stream, extract the mail message whose recipient is the user's email
MailMessage mailMessage = mailMessages.stream().filter((m) -> m.to.equals(email)).findFirst().get();
        // to build the regular expression we use the verbalregex library dependency
        // build an expression containing "http://" followed by one or more non-space characters
VerbalExpression regex = VerbalExpression.regex().find("http://").nonSpace().oneOrMore().build();
        // pick the link out of the email with the regular expression
return regex.getText(mailMessage.text);
}
    // stop the server in any case, even if the test failed
@AfterMethod(alwaysRun = true)
public void stopMailServer() {
app.mail().stop();
}
}
|
RegistrationTests corrected
|
mantis-tests/src/test/java/ru/stqa/ptf/mantis/tests/RegistrationTests.java
|
RegistrationTests corrected
|
|
Java
|
bsd-3-clause
|
f7c2d0fd75d8f9ef4f72f740498f28eca22ed54a
| 0
|
sidbandeira/AppGestao
|
package com.example.sidnei.appgestao.pedidoCompra;
import android.content.DialogInterface;
import android.content.Intent;
import android.graphics.Color;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.v7.app.AlertDialog;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.WindowManager;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ListView;
import android.widget.Spinner;
import android.widget.TextView;
import android.widget.Toast;
import com.example.sidnei.appgestao.MainActivity;
import com.example.sidnei.appgestao.R;
import com.example.sidnei.appgestao.classeProduto.Produto;
import com.example.sidnei.appgestao.pedidoCompra.Adaptadores.AdapterItemCompra;
import com.example.sidnei.appgestao.pedidoCompra.Classes.PedidoCompra;
import com.example.sidnei.appgestao.pedidoCompra.Classes.PedidoCompraItem;
import com.example.sidnei.appgestao.pedidoCompra.Repositorio.PedidoCompraItemRepositorio;
import com.example.sidnei.appgestao.pedidoCompra.Repositorio.PedidoCompraRepositorio;
import com.example.sidnei.appgestao.unidadeNegocio.UnidadeNegocioListFragment;
import com.example.sidnei.appgestao.utilitario.JSON;
import org.json.JSONArray;
import org.json.JSONObject;
import java.util.ArrayList;
public class PedidoCompraItemActivity extends AppCompatActivity {
private JSONObject jsonobject;
private JSONArray jsonarray;
private ArrayList<String> itemlist;
private ArrayList<Produto> item;
private ArrayList<String> itemlistpedido;
private ArrayList<PedidoCompraItem> itempedido;
private ArrayList<PedidoCompraItem> itens = new ArrayList<PedidoCompraItem>();
private ArrayList<String> listaprodutos = new ArrayList<String>();
private EditText edtCusto;
private EditText edtQtde;
private TextView txtSubTotalPedido;
private TextView txtTotalItem;
private Spinner spnProduto2;
private Button btAdicionar;
private Button btSalvarPedido;
private ListView listagem;
//VARIABLES HOLDING THE PEDIDOCOMPRA (PURCHASE ORDER) INFORMATION
private Integer codfornecedor;
private String descricaofornecedor;
private String email;
private String datapedido;
private String dataentrega;
private String formapgto;
//VARIABLES USED TO CALCULATE THE VALUES OF THE SELECTED PRODUCT
private Integer codItem = 0;
private Double custo = 0.00;
private Double qtde = 0.00;
private Double total = 0.00;
private String descricaoProduto = "";
private String resultado = "";
private Double subtotalItem = 0.00;
private Double subtotalPedido = 0.00;
private AdapterItemCompra adapter;
private String gravouItens;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_pedido_compra_item);
gravouItens = "false";
final TextView txtSubTotalPedido = (TextView) findViewById(R.id.txtSubTotalPedido);
final TextView txtTotalItem = (TextView) findViewById(R.id.txtTotalItem);
final EditText edtCusto = (EditText) findViewById(R.id.edtCusto);
final EditText edtQtde = (EditText) findViewById(R.id.edtQtde);
//RECEIVES THE VALUES OF THE VARIABLES PASSED FROM PEDIDOCOMPRAACTIVITY
Intent itPedidoCompra = getIntent();
codfornecedor = Integer.parseInt(itPedidoCompra.getStringExtra("codfornecedor"));
descricaofornecedor = itPedidoCompra.getStringExtra("descricaofornecedor");
email = itPedidoCompra.getStringExtra("email");
datapedido = itPedidoCompra.getStringExtra("datapedido");
dataentrega = itPedidoCompra.getStringExtra("dataentrega");
formapgto = itPedidoCompra.getStringExtra("formapgto");
//COMMAND TO SUPPRESS THE KEYBOARD WHEN THE SCREEN OPENS
this.getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_HIDDEN);
// RETRIEVES THE LISTVIEW DECLARED IN THE XML SO THE ADAPTER CAN BE SET
final ListView listagem = (ListView) findViewById(R.id.lstProdutos);
// RETRIEVES THE ADD BUTTON DECLARED IN THE XML
Button btAdicionar = (Button) findViewById(R.id.btAdicionar);
Button btSalvarPedido = (Button) findViewById(R.id.btSalvarPedido);
//CREATES THE ADAPTER
adapter = new AdapterItemCompra(this, itens);
//SETS THE ADAPTER
listagem.setAdapter(adapter);
listagem.setCacheColorHint(Color.TRANSPARENT);
// DOWNLOADS THE JSON FILE ASYNCHRONOUSLY
new DownloadJSON().execute();
//LISTENER THAT UPDATES THE TOTAL WHEN THE QUANTITY FIELD GAINS OR LOSES FOCUS.
edtQtde.setOnFocusChangeListener(new View.OnFocusChangeListener() {
@Override
public void onFocusChange(View v, boolean hasFocus) {
//CHECKS THE FOCUS STATE
if(hasFocus){ //SET FOCUS
custo = Double.parseDouble(edtCusto.getText().toString());
total = custo * qtde;
resultado = String.format("%.3f", total);
resultado = resultado.replace(",", ".");
txtTotalItem.setText(resultado);
}else { //LOST FOCUS
qtde = Double.parseDouble(edtQtde.getText().toString());
total = custo * qtde;
resultado = String.format("%.3f", total);
resultado = resultado.replace(",", ".");
txtTotalItem.setText(resultado);
}
}
});
// HANDLES THE CLICK ACTION OF THE ADD BUTTON
btAdicionar.setOnClickListener(new View.OnClickListener() {
public void onClick(View view) {
//CHECKS WHETHER A PRODUCT WAS SELECTED
if(descricaoProduto.contains("Selecione")) {
Toast.makeText(PedidoCompraItemActivity.this,"Selecione um produto!",Toast.LENGTH_SHORT).show();
}else{
String[] partes = descricaoProduto.split("-");
codItem = Integer.parseInt(partes[0]);
custo = Double.parseDouble(edtCusto.getText().toString());
qtde = Double.parseDouble(edtQtde.getText().toString());
total = custo * qtde;
resultado = String.format("%.3f", total);
resultado = resultado.replace(",", ".");
subtotalPedido += total;
//FORMATS THE VALUE TO 3 DECIMAL PLACES
String tot = String.format("%.3f",subtotalPedido);
txtSubTotalPedido.setText(tot);
PedidoCompraItem item = new PedidoCompraItem();
item.setDescricaoItem(descricaoProduto);
item.setIdItem(codItem);
item.setPrecoCusto(custo);
item.setQtdeItem(qtde);
item.setTotalItem(Double.parseDouble(resultado));
itens.add(item);
adapter.notifyDataSetChanged();
limpaTela();
}
}
});
listagem.setOnItemClickListener(new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> parent, View view, final int position, long id) {
AlertDialog.Builder alertDialogBuilder = new AlertDialog.Builder(PedidoCompraItemActivity.this);
// SETS A TITLE FOR THE DIALOG
alertDialogBuilder.setTitle("Excluir");
// SETS A MESSAGE FOR THE DIALOG
alertDialogBuilder
.setMessage("Confirma exclusão do item?")
.setCancelable(false)
.setPositiveButton("Sim",new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog,int id) {
final TextView txtTotalPedido = (TextView) findViewById(R.id.txtSubTotalPedido);
String temp = txtTotalPedido.getText().toString();
temp = temp.replace(",", ".");
Double totPedido = Double.parseDouble(temp.toString());
String result = "";
subtotalPedido = totPedido;
subtotalPedido -= itens.get(position).getTotalItem();
result = String.format("%.3f", subtotalPedido);
result = result.replace(",", ".");
itens.remove(position);
adapter.notifyDataSetChanged();
txtSubTotalPedido.setText(result);
limpaTela();
}
})
.setNegativeButton("Não",new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog,int id) {
dialog.cancel();
}
});
// CREATES THE DIALOG
AlertDialog alertDialog = alertDialogBuilder.create();
// SHOWS THE DIALOG
alertDialog.show();
}
});
}
protected void onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState){
View ListView = inflater.inflate(R.layout.item_listagem_pedido, null);
}
public void btSalvarPedido(View view) {
//CHECKS WHETHER ANY ITEM WAS ADDED TO THE LIST
if(itens.size()> 0) {
//SAVES THE ORDER TABLE
PedidoCompraRepositorio pedRep = new PedidoCompraRepositorio(this);
PedidoCompra pedido = new PedidoCompra();
pedido.codEmpresa = MainActivity.codEmpresa;
pedido.codUnNegocio = UnidadeNegocioListFragment.codUnidade;
pedido.idFornecedor = codfornecedor;
pedido.email = email;
pedido.dtPedido = datapedido;
pedido.dtEntrega = dataentrega;
pedido.formapgto = formapgto;
String tot = "";
tot = String.format("%.3f", subtotalPedido);
tot = tot.replace(",", ".");
pedido.totalPedido = Double.parseDouble(tot);
pedRep.salvar(pedido);
if (pedido.get_id() > 0) {
//SAVE THE ORDER ITEMS
for (int i = 0; i < itens.size(); i++) {
PedidoCompraItemRepositorio itemRep = new PedidoCompraItemRepositorio(this);
PedidoCompraItem pedItem = new PedidoCompraItem();
pedItem.idCompra = pedido.get_id();
pedItem.descricaoItem = itens.get(i).descricaoItem;
pedItem.idItem = itens.get(i).idItem;
pedItem.qtdeItem = itens.get(i).qtdeItem;
pedItem.precoCusto = itens.get(i).precoCusto;
pedItem.totalItem = itens.get(i).totalItem;
itemRep.salvar(pedItem);
}
}
limpaTela();
gravouItens = "true";
Toast.makeText(this,"Pedido salvo com sucesso!",Toast.LENGTH_LONG).show();
onBackPressed();
}else{
Toast.makeText(this,"Não foi adicionado itens ao pedido!",Toast.LENGTH_LONG).show();
}
}
@Override
public void onBackPressed() {
Intent it = new Intent();
it.putExtra("PARAM_ACTIVITY2",gravouItens);
setResult(1, it);
super.onBackPressed();
}
// ASYNC TASK THAT DOWNLOADS THE JSON FILE
private class DownloadJSON extends AsyncTask<Void, Void, Void> {
@Override
protected Void doInBackground(Void... params) {
// LIST HOLDING Produto OBJECTS
item = new ArrayList<Produto>();
// CREATES THE LIST OF PRODUCTS THAT FEEDS THE SPINNER
itemlist = new ArrayList<String>();
// CALLS THE JSON CLASS WITH THE URL TO DOWNLOAD THE FILE CONTAINING THE PRODUCTS IN JSON FORMAT
//jsonobject = JSON.getJSONfromURL("http://10.0.2.2:81/ws_sgestao/Json/ProdutoWS.json");
jsonobject = JSON.getJSONfromURL("http://sgestao.hol.es/Json/ProdutoWS.json");
try {
// ADDS AN ITEM TO THE PRODUCT LIST TO SERVE AS THE SPINNER HINT
Produto prod0 = new Produto();
prod0.set_id(0);
prod0.setProdutoDescricao("Selecione um Produto...");
prod0.setProdutoPrecovenda(0.00);
prod0.setProdutoPrecoCusto(0.00);
item.add(prod0);
// FILLS THE SPINNER WITH THE HINT ITEM
itemlist.add("Selecione um produto ...");
// GETS THE PRODUCTS NODE
jsonarray = jsonobject.getJSONArray("produtos");
for (int i = 0; i < jsonarray.length(); i++) {
jsonobject = jsonarray.getJSONObject(i);
Produto prod = new Produto();
prod.set_id(jsonobject.optInt("idproduto"));
prod.setProdutoDescricao(jsonobject.optString("produtodescricao"));
prod.setProdutoPrecovenda(jsonobject.optDouble("produtoprecovenda"));
prod.setProdutoPrecoCusto(jsonobject.optDouble("produtoprecocusto"));
item.add(prod);
// FILLS THE SPINNER WITH THE PRODUCT ID AND DESCRIPTION
itemlist.add(jsonobject.optString("idproduto")+ "- " + jsonobject.optString("produtodescricao"));
}
} catch (Exception e) {
Log.e("Error", e.getMessage());
e.printStackTrace();
}
return null;
}
@Override
protected void onPostExecute(Void args) {
try {
final Spinner spnProduto2 = (Spinner) findViewById(R.id.spnProduto2);
final EditText edtCusto = (EditText) findViewById(R.id.edtCusto);
final EditText edtQtde = (EditText) findViewById(R.id.edtQtde);
final TextView txtTotalItem = (TextView) findViewById(R.id.txtTotalItem);
// SPINNER ADAPTER
spnProduto2.setAdapter(new ArrayAdapter<String>(PedidoCompraItemActivity.this,
android.R.layout.simple_dropdown_item_1line, itemlist));
// RUNS THE ACTION WHEN A SPINNER ITEM IS SELECTED
spnProduto2.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
@Override
public void onItemSelected(AdapterView<?> arg0, View arg1, int position, long arg3) {
descricaoProduto = itemlist.get(position).toString();
// SETS FOCUS ON THE SPINNER WHEN ENTERING THE SCREEN.
arg0.post(new Runnable() {
@Override
public void run() {spnProduto2.requestFocusFromTouch();}
});
//FEEDS THE VARIABLES USED TO CALCULATE THE PRODUCT TOTAL
custo = Double.parseDouble(item.get(position).get_produtoPrecoCusto().toString());
qtde = Double.parseDouble("1.00");
total = custo * qtde;
resultado = String.format("%.3f", total);
resultado = resultado.replace(",", ".");
subtotalItem += total;
//SETS THE VALUES IN THE EDITTEXT FIELDS
edtCusto.setText(custo.toString());
edtQtde.setText(qtde.toString());
txtTotalItem.setText(subtotalItem.toString());
}
@Override
public void onNothingSelected(AdapterView<?> arg0) {
}
});
}catch (Exception e) {
Log.e("Error", e.getMessage());
e.printStackTrace();
}
}
}
// METHOD RESPONSIBLE FOR CLEARING THE SCREEN DATA.
public void limpaTela(){
final EditText edtCusto = (EditText) findViewById(R.id.edtCusto);
final EditText edtQtde = (EditText) findViewById(R.id.edtQtde);
final TextView txtTotalItem = (TextView) findViewById(R.id.txtTotalItem);
Spinner spnProduto2 = (Spinner) findViewById(R.id.spnProduto2);
spnProduto2.setSelection(0);
edtCusto.setText("0.00");
edtQtde.setText("1.00");
txtTotalItem.setText("0.00");
//txtSubTotalPedido.setText("0.00");
subtotalItem = 0.00;
}
}
|
app/src/main/java/com/example/sidnei/appgestao/pedidoCompra/PedidoCompraItemActivity.java
|
package com.example.sidnei.appgestao.pedidoCompra;
import android.content.DialogInterface;
import android.content.Intent;
import android.graphics.Color;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.v7.app.AlertDialog;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.WindowManager;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ListView;
import android.widget.Spinner;
import android.widget.TextView;
import android.widget.Toast;
import com.example.sidnei.appgestao.MainActivity;
import com.example.sidnei.appgestao.R;
import com.example.sidnei.appgestao.classeProduto.Produto;
import com.example.sidnei.appgestao.pedidoCompra.Adaptadores.AdapterItemCompra;
import com.example.sidnei.appgestao.pedidoCompra.Classes.PedidoCompra;
import com.example.sidnei.appgestao.pedidoCompra.Classes.PedidoCompraItem;
import com.example.sidnei.appgestao.pedidoCompra.Repositorio.PedidoCompraItemRepositorio;
import com.example.sidnei.appgestao.pedidoCompra.Repositorio.PedidoCompraRepositorio;
import com.example.sidnei.appgestao.unidadeNegocio.UnidadeNegocioListFragment;
import com.example.sidnei.appgestao.utilitario.JSON;
import org.json.JSONArray;
import org.json.JSONObject;
import java.util.ArrayList;
public class PedidoCompraItemActivity extends AppCompatActivity {
private JSONObject jsonobject;
private JSONArray jsonarray;
private ArrayList<String> itemlist;
private ArrayList<Produto> item;
private ArrayList<String> itemlistpedido;
private ArrayList<PedidoCompraItem> itempedido;
private ArrayList<PedidoCompraItem> itens = new ArrayList<PedidoCompraItem>();
private ArrayList<String> listaprodutos = new ArrayList<String>();
private EditText edtCusto;
private EditText edtQtde;
private TextView txtSubTotalPedido;
private TextView txtTotalItem;
private Spinner spnProduto2;
private Button btAdicionar;
private Button btSalvarPedido;
private ListView listagem;
//VARIABLES HOLDING THE PEDIDOCOMPRA (PURCHASE ORDER) INFORMATION
private Integer codfornecedor;
private String descricaofornecedor;
private String email;
private String datapedido;
private String dataentrega;
private String formapgto;
//VARIABLES USED TO CALCULATE THE VALUES OF THE SELECTED PRODUCT
private Integer codItem = 0;
private Double custo = 0.00;
private Double qtde = 0.00;
private Double total = 0.00;
private String descricaoProduto = "";
private String resultado = "";
private Double subtotalItem = 0.00;
private Double subtotalPedido = 0.00;
private AdapterItemCompra adapter;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_pedido_compra_item);
final TextView txtSubTotalPedido = (TextView) findViewById(R.id.txtSubTotalPedido);
final TextView txtTotalItem = (TextView) findViewById(R.id.txtTotalItem);
final EditText edtCusto = (EditText) findViewById(R.id.edtCusto);
final EditText edtQtde = (EditText) findViewById(R.id.edtQtde);
//RECEIVES THE VALUES OF THE VARIABLES PASSED FROM PEDIDOCOMPRAACTIVITY
Intent itPedidoCompra = getIntent();
codfornecedor = Integer.parseInt(itPedidoCompra.getStringExtra("codfornecedor"));
descricaofornecedor = itPedidoCompra.getStringExtra("descricaofornecedor");
email = itPedidoCompra.getStringExtra("email");
datapedido = itPedidoCompra.getStringExtra("datapedido");
dataentrega = itPedidoCompra.getStringExtra("dataprevisao");
formapgto = itPedidoCompra.getStringExtra("formapgto");
//COMMAND TO SUPPRESS THE KEYBOARD WHEN THE SCREEN OPENS
this.getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_HIDDEN);
// RETRIEVES THE LISTVIEW DECLARED IN THE XML SO THE ADAPTER CAN BE SET
final ListView listagem = (ListView) findViewById(R.id.lstProdutos);
// RETRIEVES THE ADD BUTTON DECLARED IN THE XML
Button btAdicionar = (Button) findViewById(R.id.btAdicionar);
Button btSalvarPedido = (Button) findViewById(R.id.btSalvarPedido);
//CREATES THE ADAPTER
adapter = new AdapterItemCompra(this, itens);
//SETS THE ADAPTER
listagem.setAdapter(adapter);
listagem.setCacheColorHint(Color.TRANSPARENT);
// DOWNLOADS THE JSON FILE ASYNCHRONOUSLY
new DownloadJSON().execute();
//LISTENER THAT UPDATES THE TOTAL WHEN THE QUANTITY FIELD GAINS OR LOSES FOCUS.
edtQtde.setOnFocusChangeListener(new View.OnFocusChangeListener() {
@Override
public void onFocusChange(View v, boolean hasFocus) {
//CHECKS THE FOCUS STATE
if(hasFocus){ //SET FOCUS
custo = Double.parseDouble(edtCusto.getText().toString());
total = custo * qtde;
resultado = String.format("%.3f", total);
resultado = resultado.replace(",", ".");
txtTotalItem.setText(resultado);
}else { //LOST FOCUS
qtde = Double.parseDouble(edtQtde.getText().toString());
total = custo * qtde;
resultado = String.format("%.3f", total);
resultado = resultado.replace(",", ".");
txtTotalItem.setText(resultado);
}
}
});
// HANDLES THE CLICK ACTION OF THE ADD BUTTON
btAdicionar.setOnClickListener(new View.OnClickListener() {
public void onClick(View view) {
//CHECKS WHETHER A PRODUCT WAS SELECTED
if(descricaoProduto.contains("Selecione")) {
Toast.makeText(PedidoCompraItemActivity.this,"Selecione um produto!",Toast.LENGTH_SHORT).show();
}else{
String[] partes = descricaoProduto.split("-");
codItem = Integer.parseInt(partes[0]);
custo = Double.parseDouble(edtCusto.getText().toString());
qtde = Double.parseDouble(edtQtde.getText().toString());
total = custo * qtde;
resultado = String.format("%.3f", total);
resultado = resultado.replace(",", ".");
subtotalPedido += total;
//FORMATS THE VALUE TO 3 DECIMAL PLACES
String tot = String.format("%.3f",subtotalPedido);
txtSubTotalPedido.setText(tot);
PedidoCompraItem item = new PedidoCompraItem();
item.setDescricaoItem(descricaoProduto);
item.setIdItem(codItem);
item.setPrecoCusto(custo);
item.setQtdeItem(qtde);
item.setTotalItem(Double.parseDouble(resultado));
itens.add(item);
adapter.notifyDataSetChanged();
limpaTela();
}
}
});
listagem.setOnItemClickListener(new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> parent, View view, final int position, long id) {
AlertDialog.Builder alertDialogBuilder = new AlertDialog.Builder(PedidoCompraItemActivity.this);
// SETS A TITLE FOR THE DIALOG
alertDialogBuilder.setTitle("Excluir");
// SETS A MESSAGE FOR THE DIALOG
alertDialogBuilder
.setMessage("Confirma exclusão do item?")
.setCancelable(false)
.setPositiveButton("Sim",new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog,int id) {
final TextView txtTotalPedido = (TextView) findViewById(R.id.txtSubTotalPedido);
String temp = txtTotalPedido.getText().toString();
temp = temp.replace(",", ".");
Double totPedido = Double.parseDouble(temp.toString());
String result = "";
subtotalPedido = totPedido;
subtotalPedido -= itens.get(position).getTotalItem();
result = String.format("%.3f", subtotalPedido);
result = result.replace(",", ".");
itens.remove(position);
adapter.notifyDataSetChanged();
txtSubTotalPedido.setText(result);
limpaTela();
}
})
.setNegativeButton("Não",new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog,int id) {
dialog.cancel();
}
});
// CREATES THE DIALOG
AlertDialog alertDialog = alertDialogBuilder.create();
// SHOWS THE DIALOG
alertDialog.show();
}
});
}
protected void onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState){
View ListView = inflater.inflate(R.layout.item_listagem_pedido, null);
}
public void btSalvarPedido(View view) {
//Saves the order
PedidoCompraRepositorio pedRep = new PedidoCompraRepositorio(this);
PedidoCompra pedido = new PedidoCompra();
pedido.codEmpresa = MainActivity.codEmpresa;
pedido.codUnNegocio = UnidadeNegocioListFragment.codUnidade;
pedido.idFornecedor = codfornecedor;
pedido.email = email;
pedido.dtPedido = datapedido;
pedido.dtEntrega = dataentrega;
pedido.formapgto = formapgto;
pedido.totalPedido = subtotalPedido;
pedRep.salvar(pedido);
if (pedido.get_id() > 0){
//SAVE THE ORDER ITEMS
for (int i = 0; i < itens.size(); i++) {
PedidoCompraItemRepositorio itemRep = new PedidoCompraItemRepositorio(this);
PedidoCompraItem pedItem = new PedidoCompraItem();
pedItem.idCompra = pedido.get_id();
pedItem.descricaoItem = itens.get(i).descricaoItem;
pedItem.idItem = itens.get(i).idItem;
pedItem.qtdeItem = itens.get(i).qtdeItem;
pedItem.precoCusto = itens.get(i).precoCusto;
pedItem.totalItem = itens.get(i).totalItem;
itemRep.salvar(pedItem);
}
}
limpaTela();
Toast.makeText(this,"Pedido salvo com sucesso!",Toast.LENGTH_LONG).show();
// TODO: IMPLEMENT THE RETURN TO THE INITIAL ORDER SCREEN
}
// ASYNC TASK THAT DOWNLOADS THE JSON FILE
private class DownloadJSON extends AsyncTask<Void, Void, Void> {
@Override
protected Void doInBackground(Void... params) {
// LIST HOLDING Produto OBJECTS
item = new ArrayList<Produto>();
// CREATES THE LIST OF PRODUCTS THAT FEEDS THE SPINNER
itemlist = new ArrayList<String>();
// CALLS THE JSON CLASS WITH THE URL TO DOWNLOAD THE FILE CONTAINING THE PRODUCTS IN JSON FORMAT
//jsonobject = JSON.getJSONfromURL("http://10.0.2.2:81/ws_sgestao/Json/ProdutoWS.json");
jsonobject = JSON.getJSONfromURL("http://sgestao.hol.es/Json/ProdutoWS.json");
try {
// ADDS AN ITEM TO THE PRODUCT LIST TO SERVE AS THE SPINNER HINT
Produto prod0 = new Produto();
prod0.set_id(0);
prod0.setProdutoDescricao("Selecione um Produto...");
prod0.setProdutoPrecovenda(0.00);
prod0.setProdutoPrecoCusto(0.00);
item.add(prod0);
// FILLS THE SPINNER WITH THE HINT ITEM
itemlist.add("Selecione um produto ...");
// GETS THE PRODUCTS NODE
jsonarray = jsonobject.getJSONArray("produtos");
for (int i = 0; i < jsonarray.length(); i++) {
jsonobject = jsonarray.getJSONObject(i);
Produto prod = new Produto();
prod.set_id(jsonobject.optInt("idproduto"));
prod.setProdutoDescricao(jsonobject.optString("produtodescricao"));
prod.setProdutoPrecovenda(jsonobject.optDouble("produtoprecovenda"));
prod.setProdutoPrecoCusto(jsonobject.optDouble("produtoprecocusto"));
item.add(prod);
// FILLS THE SPINNER WITH THE PRODUCT ID AND DESCRIPTION
itemlist.add(jsonobject.optString("idproduto")+ "- " + jsonobject.optString("produtodescricao"));
}
} catch (Exception e) {
Log.e("Error", e.getMessage());
e.printStackTrace();
}
return null;
}
@Override
protected void onPostExecute(Void args) {
try {
final Spinner spnProduto2 = (Spinner) findViewById(R.id.spnProduto2);
final EditText edtCusto = (EditText) findViewById(R.id.edtCusto);
final EditText edtQtde = (EditText) findViewById(R.id.edtQtde);
final TextView txtTotalItem = (TextView) findViewById(R.id.txtTotalItem);
// SPINNER ADAPTER
spnProduto2.setAdapter(new ArrayAdapter<String>(PedidoCompraItemActivity.this,
android.R.layout.simple_dropdown_item_1line, itemlist));
// RUNS THE ACTION WHEN A SPINNER ITEM IS SELECTED
spnProduto2.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
@Override
public void onItemSelected(AdapterView<?> arg0, View arg1, int position, long arg3) {
descricaoProduto = itemlist.get(position).toString();
// SETS FOCUS ON THE SPINNER WHEN ENTERING THE SCREEN.
arg0.post(new Runnable() {
@Override
public void run() {spnProduto2.requestFocusFromTouch();}
});
//FEEDS THE VARIABLES USED TO CALCULATE THE PRODUCT TOTAL
custo = Double.parseDouble(item.get(position).get_produtoPrecoCusto().toString());
qtde = Double.parseDouble("1.00");
total = custo * qtde;
resultado = String.format("%.3f", total);
resultado = resultado.replace(",", ".");
subtotalItem += total;
//SETS THE VALUES IN THE EDITTEXT FIELDS
edtCusto.setText(custo.toString());
edtQtde.setText(qtde.toString());
txtTotalItem.setText(subtotalItem.toString());
}
@Override
public void onNothingSelected(AdapterView<?> arg0) {
}
});
}catch (Exception e) {
Log.e("Error", e.getMessage());
e.printStackTrace();
}
}
}
// METHOD RESPONSIBLE FOR CLEARING THE SCREEN DATA.
public void limpaTela(){
final EditText edtCusto = (EditText) findViewById(R.id.edtCusto);
final EditText edtQtde = (EditText) findViewById(R.id.edtQtde);
final TextView txtTotalItem = (TextView) findViewById(R.id.txtTotalItem);
Spinner spnProduto2 = (Spinner) findViewById(R.id.spnProduto2);
spnProduto2.setSelection(0);
edtCusto.setText("0.00");
edtQtde.setText("1.00");
txtTotalItem.setText("0.00");
subtotalItem = 0.00;
}
}
|
Adjustments to saving the purchase order.
|
app/src/main/java/com/example/sidnei/appgestao/pedidoCompra/PedidoCompraItemActivity.java
|
Adjustments to saving the purchase order.
|
|
Java
|
isc
|
9362182dd6ce6b7e459409c725190a429e4d4810
| 0
|
io7m/jspatial,io7m/jspatial
|
/*
* Copyright © 2017 <code@io7m.com> http://io7m.com
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
* SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
* IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
package com.io7m.jspatial.examples.swing;
import io.reactivex.Observable;
import io.reactivex.subjects.PublishSubject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JMenu;
import javax.swing.JMenuBar;
import javax.swing.JMenuItem;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JSplitPane;
import javax.swing.JTabbedPane;
import javax.swing.JTextArea;
import java.awt.BorderLayout;
import java.awt.Container;
import java.awt.Dimension;
import java.awt.Font;
import java.time.Instant;
/**
* Viewer window.
*/
final class QuadTreeWindow extends JFrame
{
private static final Logger LOG;
static {
LOG = LoggerFactory.getLogger(QuadTreeWindow.class);
}
QuadTreeWindow()
{
super("Quad Tree Viewer");
this.setPreferredSize(new Dimension(1024, 768));
this.setJMenuBar(makeMenu());
final PublishSubject<LogMessage> messages =
PublishSubject.create();
final QuadTreeControls controls =
new QuadTreeControls();
final QuadTreeCanvas canvas =
new QuadTreeCanvas(controls.events(), messages);
final JScrollPane controls_scroll =
new JScrollPane(controls);
final JScrollPane canvas_scroll =
new JScrollPane(canvas);
final JSplitPane split = new JSplitPane(
JSplitPane.HORIZONTAL_SPLIT, canvas_scroll, controls_scroll);
final StatusBar status =
new StatusBar(messages);
final LogPane log =
new LogPane(messages);
final JTabbedPane tabs = new JTabbedPane();
tabs.addTab("Quad Tree", split);
tabs.addTab("Log", log);
Thread.setDefaultUncaughtExceptionHandler(
(t, e) -> {
LOG.error("uncaught exception: ", e);
messages.onNext(
LogMessage.of(LogMessageType.Severity.ERROR, e.getMessage()));
});
final Container pane = this.getContentPane();
pane.add(tabs, BorderLayout.CENTER);
pane.add(status, BorderLayout.PAGE_END);
messages.onNext(LogMessage.of(
LogMessageType.Severity.INFO,
"Quad tree viewer started."));
this.pack();
split.setDividerLocation(0.6);
}
private static JMenuBar makeMenu()
{
final JMenuItem m_file_quit = new JMenuItem("Quit");
m_file_quit.addActionListener(e -> System.exit(0));
final JMenu m_file = new JMenu("File");
m_file.add(m_file_quit);
final JMenuBar bar = new JMenuBar();
bar.add(m_file);
return bar;
}
private static final class StatusBar extends JPanel
{
private final JLabel text;
StatusBar(final Observable<LogMessage> in_messages)
{
this.text = new JLabel();
this.add(this.text);
in_messages.subscribe(this::onMessage);
}
private void onMessage(final LogMessageType m)
{
switch (m.severity()) {
case DEBUG:
break;
case INFO:
case ERROR:
this.text.setText(m.message());
break;
}
}
}
private static final class LogPane extends JPanel
{
private final JTextArea text;
private final JScrollPane scroll;
LogPane(final Observable<LogMessage> in_messages)
{
this.setLayout(new BorderLayout());
this.text = new JTextArea();
this.text.setFont(Font.decode("Monospaced 10"));
this.scroll = new JScrollPane(this.text);
this.add(this.scroll, BorderLayout.CENTER);
in_messages.subscribe(this::onMessage);
}
private void onMessage(final LogMessageType m)
{
final StringBuilder sb = new StringBuilder(128);
sb.append(Instant.now());
sb.append(": ");
switch (m.severity()) {
case DEBUG: {
sb.append("debug: ");
break;
}
case INFO: {
sb.append("info: ");
break;
}
case ERROR: {
sb.append("error: ");
break;
}
}
sb.append(m.message());
sb.append(System.lineSeparator());
this.text.append(sb.toString());
}
}
}
|
com.io7m.jspatial.examples.swing/src/main/java/com/io7m/jspatial/examples/swing/QuadTreeWindow.java
|
/*
* Copyright © 2017 <code@io7m.com> http://io7m.com
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
* SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
* IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
package com.io7m.jspatial.examples.swing;
import io.reactivex.Observable;
import io.reactivex.subjects.PublishSubject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JMenu;
import javax.swing.JMenuBar;
import javax.swing.JMenuItem;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JSplitPane;
import javax.swing.JTabbedPane;
import javax.swing.JTextArea;
import java.awt.BorderLayout;
import java.awt.Container;
import java.awt.Dimension;
import java.awt.Font;
import java.time.Instant;
/**
* Viewer window.
*/
final class QuadTreeWindow extends JFrame
{
private static final Logger LOG;
static {
LOG = LoggerFactory.getLogger(QuadTreeWindow.class);
}
QuadTreeWindow()
{
super("Quad Tree Viewer");
this.setPreferredSize(new Dimension(1024, 768));
this.setJMenuBar(QuadTreeWindow.makeMenu());
final PublishSubject<LogMessage> messages =
PublishSubject.create();
final QuadTreeControls controls =
new QuadTreeControls();
final QuadTreeCanvas canvas =
new QuadTreeCanvas(controls.events(), messages);
final JScrollPane controls_scroll =
new JScrollPane(controls);
final JScrollPane canvas_scroll =
new JScrollPane(canvas);
final JSplitPane split = new JSplitPane(
JSplitPane.HORIZONTAL_SPLIT, canvas_scroll, controls_scroll);
final StatusBar status =
new StatusBar(messages);
final LogPane log =
new LogPane(messages);
final JTabbedPane tabs = new JTabbedPane();
tabs.addTab("Quad Tree", split);
tabs.addTab("Log", log);
Thread.setDefaultUncaughtExceptionHandler(
(t, e) -> {
QuadTreeWindow.LOG.error("uncaught exception: ", e);
messages.onNext(
LogMessage.of(LogMessageType.Severity.ERROR, e.getMessage()));
});
final Container pane = this.getContentPane();
pane.add(tabs, BorderLayout.CENTER);
pane.add(status, BorderLayout.PAGE_END);
messages.onNext(LogMessage.of(
LogMessageType.Severity.INFO,
"Quad tree viewer started."));
this.pack();
split.setDividerLocation(0.6);
}
private static JMenuBar makeMenu()
{
final JMenuItem m_file_quit = new JMenuItem("Quit");
m_file_quit.addActionListener(e -> System.exit(0));
final JMenu m_file = new JMenu("File");
m_file.add(m_file_quit);
final JMenuBar bar = new JMenuBar();
bar.add(m_file);
return bar;
}
private static final class StatusBar extends JPanel
{
private final JLabel text;
StatusBar(final Observable<LogMessage> in_messages)
{
this.text = new JLabel();
this.add(this.text);
in_messages.subscribe(this::onMessage);
}
private void onMessage(final LogMessageType m)
{
switch (m.severity()) {
case DEBUG:
break;
case INFO:
case ERROR:
this.text.setText(m.message());
break;
}
}
}
private static final class LogPane extends JPanel
{
private final JTextArea text;
private final JScrollPane scroll;
LogPane(final Observable<LogMessage> in_messages)
{
this.setLayout(new BorderLayout());
this.text = new JTextArea();
this.text.setFont(Font.decode("Monospaced 10"));
this.scroll = new JScrollPane(this.text);
this.add(this.scroll, BorderLayout.CENTER);
in_messages.subscribe(this::onMessage);
}
private void onMessage(final LogMessageType m)
{
final StringBuilder sb = new StringBuilder(128);
sb.append(Instant.now());
sb.append(": ");
switch (m.severity()) {
case DEBUG: {
sb.append("debug: ");
break;
}
case INFO: {
sb.append("info: ");
break;
}
case ERROR: {
sb.append("error: ");
break;
}
}
sb.append(m.message());
sb.append(System.lineSeparator());
this.text.append(sb.toString());
}
}
}
|
Remove unnecessarily qualified static access
|
com.io7m.jspatial.examples.swing/src/main/java/com/io7m/jspatial/examples/swing/QuadTreeWindow.java
|
Remove unnecessarily qualified static access
|
|
Java
|
mit
|
25164294d603b17f25d0fa3a6e9156f076d27579
| 0
|
vkazhdan/moskito,esmakula/moskito,anotheria/moskito,anotheria/moskito,vkazhdan/moskito,esmakula/moskito,StetsiukRoman/moskito,StetsiukRoman/moskito,vkazhdan/moskito,anotheria/moskito,StetsiukRoman/moskito,esmakula/moskito,StetsiukRoman/moskito,esmakula/moskito,vkazhdan/moskito,anotheria/moskito
|
package net.anotheria.moskito.core.config.dashboards;
import org.configureme.annotations.Configure;
import org.configureme.annotations.ConfigureMe;
import java.io.Serializable;
import java.util.Arrays;
/**
* Configuration holder for a single chart in a dashboard.
*/
@ConfigureMe
public class ChartConfig implements Serializable{
/**
* Chart caption.
*/
@Configure
private String caption;
/**
* Referenced accumulators.
*/
@Configure
private String[] accumulators;
public String[] getAccumulators() {
return accumulators;
}
public void setAccumulators(String[] accumulators) {
this.accumulators = accumulators;
}
public String getCaption() {
return caption;
}
public void setCaption(String caption) {
this.caption = caption;
}
@Override
public String toString() {
return "ChartConfig{" +
"accumulators=" + Arrays.toString(accumulators) +
", caption='" + caption + '\'' +
'}';
}
}
|
moskito-core/src/main/java/net/anotheria/moskito/core/config/dashboards/ChartConfig.java
|
package net.anotheria.moskito.core.config.dashboards;
import org.configureme.annotations.Configure;
import org.configureme.annotations.ConfigureMe;
import java.io.Serializable;
import java.util.Arrays;
@ConfigureMe
public class ChartConfig implements Serializable{
@Configure
private String caption;
@Configure
private String[] accumulators;
public String[] getAccumulators() {
return accumulators;
}
public void setAccumulators(String[] accumulators) {
this.accumulators = accumulators;
}
public String getCaption() {
return caption;
}
public void setCaption(String caption) {
this.caption = caption;
}
@Override
public String toString() {
return "ChartConfig{" +
"accumulators=" + Arrays.toString(accumulators) +
", caption='" + caption + '\'' +
'}';
}
}
|
checkstyle
|
moskito-core/src/main/java/net/anotheria/moskito/core/config/dashboards/ChartConfig.java
|
checkstyle
|