answer
stringlengths 17
10.2M
|
|---|
package com.fincatto.nfe310.webservices;
import com.fincatto.nfe310.NFeConfig;
import com.fincatto.nfe310.classes.NFAutorizador31;
import com.fincatto.nfe310.classes.NFModelo;
import com.fincatto.nfe310.classes.NFUnidadeFederativa;
import com.fincatto.nfe310.classes.lote.consulta.NFLoteConsulta;
import com.fincatto.nfe310.classes.lote.consulta.NFLoteConsultaRetorno;
import com.fincatto.nfe310.transformers.NFRegistryMatcher;
import com.fincatto.nfe310.webservices.gerado.NfeRetAutorizacaoStub;
import com.fincatto.nfe310.webservices.gerado.NfeRetAutorizacaoStub.NfeRetAutorizacaoLoteResult;
import org.apache.axiom.om.OMElement;
import org.apache.axiom.om.util.AXIOMUtil;
import org.simpleframework.xml.core.Persister;
import org.simpleframework.xml.stream.Format;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.math.BigDecimal;
import java.rmi.RemoteException;
/**
 * Web-service client that queries the processing result of a previously
 * submitted NF-e lot (RetAutorizacao), for both the NFe and NFCe models.
 */
class WSLoteConsulta {
    private static final Logger LOGGER = LoggerFactory.getLogger(WSLoteConsulta.class);
    private final NFeConfig config;

    WSLoteConsulta(final NFeConfig config) {
        this.config = config;
    }

    /**
     * Queries SEFAZ for the status of the lot identified by the given receipt number.
     *
     * @param numeroRecibo receipt number returned when the lot was submitted
     * @param modelo       fiscal document model (NFe or NFCe); selects the endpoint
     * @return the parsed lot-consultation response
     * @throws Exception if the SOAP call or the XML (de)serialization fails
     */
    NFLoteConsultaRetorno consultaLote(final String numeroRecibo, final NFModelo modelo) throws Exception {
        final String consultaXml = this.gerarDadosConsulta(numeroRecibo).toString();
        final OMElement requestElement = AXIOMUtil.stringToOM(consultaXml);
        WSLoteConsulta.LOGGER.debug(requestElement.toString());
        final OMElement responseElement = this.efetuaConsulta(requestElement, this.config.getCUF(), modelo);
        WSLoteConsulta.LOGGER.debug(responseElement.toString());
        // Format(0): serialize without indentation.
        final Persister persister = new Persister(new NFRegistryMatcher(), new Format(0));
        return persister.read(NFLoteConsultaRetorno.class, responseElement.toString());
    }

    private OMElement efetuaConsulta(final OMElement omElement, final NFUnidadeFederativa uf, final NFModelo modelo) throws RemoteException {
        // SOAP header: federative-unit IBGE code plus the NF-e layout version.
        final NfeRetAutorizacaoStub.NfeCabecMsg cabecalho = new NfeRetAutorizacaoStub.NfeCabecMsg();
        cabecalho.setCUF(uf.getCodigoIbge());
        cabecalho.setVersaoDados(NFeConfig.VERSAO_NFE);
        final NfeRetAutorizacaoStub.NfeCabecMsgE cabecalhoEnvelope = new NfeRetAutorizacaoStub.NfeCabecMsgE();
        cabecalhoEnvelope.setNfeCabecMsg(cabecalho);

        // SOAP body: the consultation XML built by gerarDadosConsulta().
        final NfeRetAutorizacaoStub.NfeDadosMsg dadosMsg = new NfeRetAutorizacaoStub.NfeDadosMsg();
        dadosMsg.setExtraElement(omElement);

        // Resolve the endpoint for the configured emission type / UF / environment.
        final NFAutorizador31 autorizador = NFAutorizador31.valueOfTipoEmissao(this.config.getTipoEmissao(), this.config.getCUF());
        final String urlWebService;
        if (NFModelo.NFCE.equals(modelo)) {
            urlWebService = autorizador.getNfceRetAutorizacao(this.config.getAmbiente());
        } else {
            urlWebService = autorizador.getNfeRetAutorizacao(this.config.getAmbiente());
        }
        if (urlWebService == null) {
            throw new IllegalArgumentException("Nao foi possivel encontrar URL para RetAutorizacao " + modelo.name() + ", autorizador " + autorizador.name());
        }
        final NfeRetAutorizacaoLoteResult resultado = new NfeRetAutorizacaoStub(urlWebService).nfeRetAutorizacaoLote(dadosMsg, cabecalhoEnvelope);
        return resultado.getExtraElement();
    }

    /** Builds the lot-consultation payload for the given receipt number. */
    private NFLoteConsulta gerarDadosConsulta(final String numeroRecibo) {
        final NFLoteConsulta consulta = new NFLoteConsulta();
        consulta.setRecibo(numeroRecibo);
        consulta.setAmbiente(this.config.getAmbiente());
        consulta.setVersao(new BigDecimal(NFeConfig.VERSAO_NFE));
        return consulta;
    }
}
|
package com.github.wolf480pl.log4j2_to_jul;
import java.io.Serializable;
import java.util.logging.Level;
import java.util.logging.LogRecord;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.core.Filter;
import org.apache.logging.log4j.core.Layout;
import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.appender.AbstractAppender;
import org.apache.logging.log4j.core.config.plugins.Plugin;
import org.apache.logging.log4j.core.config.plugins.PluginAttribute;
import org.apache.logging.log4j.core.config.plugins.PluginElement;
import org.apache.logging.log4j.core.config.plugins.PluginFactory;
import org.apache.logging.log4j.core.layout.PatternLayout;
import org.apache.logging.log4j.spi.AbstractLogger;
import org.apache.logging.log4j.status.StatusLogger;
/**
 * A log4j2 appender that forwards every {@link LogEvent} to a
 * {@code java.util.logging} (JUL) logger obtained from a {@link JULManager}.
 */
@Plugin(name = "JUL", category = "Core", elementType = "appender", printObject = true)
public final class JULAppender extends AbstractAppender {
    protected static final Logger LOGGER = StatusLogger.getLogger();
    // FQCN used by AbstractLogger-based callers; events carrying a different
    // FQCN had their source class set explicitly (see append()).
    protected static final String DEFAULT_FCQN = AbstractLogger.class.getName();
    private final JULManager manager;

    private JULAppender(String name, Layout<? extends Serializable> layout, Filter filter, JULManager manager, boolean ignoreExceptions) {
        super(name, filter, layout, ignoreExceptions);
        this.manager = manager;
    }

    /**
     * Translates the log4j event into a JUL {@link LogRecord} and publishes it
     * on the managed JUL logger, preserving throwable, timestamp and source.
     */
    @Override
    public void append(LogEvent event) {
        Level level = levelToJUL(event.getLevel());
        LogRecord record = new LogRecord(level, event.getMessage().getFormattedMessage());
        java.util.logging.Logger jul = this.manager.getJUL();
        record.setThrown(event.getThrown());
        record.setMillis(event.getMillis());
        // Qualify the logger name with the JUL logger's own name as a prefix.
        record.setLoggerName(jul.getName() + "." + event.getLoggerName());
        StackTraceElement source = event.getSource();
        if (source != null) {
            record.setSourceClassName(source.getClassName());
            record.setSourceMethodName(source.getMethodName());
        } else if (!event.getFQCN().equals(DEFAULT_FCQN)) {
            // No stack trace available; fall back to the caller's FQCN when it
            // is not the generic AbstractLogger one.
            record.setSourceClassName(event.getFQCN());
        }
        jul.log(record);
    }

    /**
     * Maps a log4j level onto the closest JUL {@link Level}.
     *
     * @param lvl the log4j level of the event
     * @return the corresponding JUL level; never null
     */
    private Level levelToJUL(org.apache.logging.log4j.Level lvl) {
        switch (lvl) {
        case OFF:
            return Level.OFF;
        case FATAL:
        case ERROR:
            return Level.SEVERE;
        case WARN:
            return Level.WARNING;
        case INFO:
            return Level.INFO;
        case DEBUG:
            return Level.FINE;
        case TRACE:
            return Level.FINER;
        case ALL:
            return Level.ALL;
        }
        // BUG FIX: previously returned null here, which would make the
        // LogRecord constructor in append() throw a NullPointerException for
        // any unmapped level. Fall back to INFO instead.
        return Level.INFO;
    }

    /**
     * Plugin factory called by the log4j2 configuration system.
     *
     * @param name   required appender name
     * @param ignore "true" to ignore exceptions thrown while appending
     * @param layout optional layout; defaults to a PatternLayout
     * @param filter optional filter
     * @return the configured appender, or null if the configuration is invalid
     */
    @PluginFactory
    public static JULAppender createAppender(@PluginAttribute("name") String name, @PluginAttribute("ignoreExceptions") String ignore,
            @PluginElement("Layout") Layout<? extends Serializable> layout,
            @PluginElement("Filters") Filter filter) {
        boolean ignoreExceptions = Boolean.parseBoolean(ignore);
        if (name == null) {
            LOGGER.error("No name provided for JULAppender");
            return null;
        }
        JULManager manager = JULManager.getJULManager(name);
        if (manager == null) {
            return null;
        }
        if (layout == null) {
            layout = PatternLayout.createLayout(null, null, null, null, null);
        }
        return new JULAppender(name, layout, filter, manager, ignoreExceptions);
    }
}
|
package com.jaamsim.input;
import java.util.ArrayList;
import com.jaamsim.basicsim.Entity;
import com.jaamsim.units.DimensionlessUnit;
import com.jaamsim.units.Unit;
/**
* AttributeDefinitionListInput is an object for parsing inputs consisting of a list of
* Attribute definitions using the syntax:
* Entity AttributeDefinitionList { { AttibuteName1 Value1 Unit1 } { AttibuteName2 Value2 Unit2 } ... }
* @author Harry King
*/
public class AttributeDefinitionListInput extends ListInput<ArrayList<AttributeHandle>> {
    // Entity that owns the attributes; fixed at construction (now final).
    private final Entity ent;

    public AttributeDefinitionListInput(Entity e, String key, String cat, ArrayList<AttributeHandle> def) {
        super(key, cat, def);
        ent = e;
    }

    /**
     * Parses a list of attribute definitions of the form
     * { Name Value [Unit] } { Name Value [Unit] } ...
     *
     * @param kw the keyword input split into inner-brace groups
     * @throws InputErrorException if a name clashes with an existing output,
     *         the value is not a number, or the element count is wrong
     */
    @Override
    public void parse(KeywordIndex kw) throws InputErrorException {
        // Divide up the inputs by the inner braces
        ArrayList<KeywordIndex> subArgs = kw.getSubArgs();
        ArrayList<AttributeHandle> temp = new ArrayList<>(subArgs.size());
        // Parse the inputs within each inner brace
        for (int i = 0; i < subArgs.size(); i++) {
            KeywordIndex subArg = subArgs.get(i);
            Input.assertCount(subArg, 2, 3);
            try {
                // Parse the attribute name
                String name = subArg.getArg(0);
                if (OutputHandle.hasOutput(ent.getClass(), name)) {
                    throw new InputErrorException("Attribute name is the same as existing output name: %s", name);
                }

                // Parse the optional unit type; default is dimensionless
                double factor = 1.0;
                Class<? extends Unit> unitType = DimensionlessUnit.class;
                if (subArg.numArgs() == 3) {
                    Unit unit = Input.parseUnit(subArg.getArg(2));
                    unitType = unit.getClass();
                    factor = unit.getConversionFactorToSI();
                }

                // Parse the initial value and convert it to SI units
                double val;
                try {
                    // parseDouble avoids the needless boxing of Double.valueOf
                    val = factor * Double.parseDouble(subArg.getArg(1));
                } catch (Exception e) {
                    throw new InputErrorException(INP_ERR_DOUBLE, subArg.getArg(1));
                }

                // Reuse an existing handle for this name if present
                AttributeHandle h = (AttributeHandle) ent.getOutputHandle(name);
                if (h == null)
                    h = new AttributeHandle(ent, name);
                h.setUnitType(unitType);
                h.setInitialValue(val);
                h.setValue(val);
                temp.add(h);
            } catch (InputErrorException e) {
                // Wrap with the element index so the user can locate the error
                throw new InputErrorException(INP_ERR_ELEMENT, i, e.getMessage());
            }
        }

        // Save the data for each attribute
        value = temp;
    }

    /**
     * Deep-copies the attribute handles from another input so that this
     * input's entity gets its own independent handles.
     */
    @Override
    public void copyFrom(Input<?> in) {
        super.copyFrom(in);
        value = new ArrayList<>();
        @SuppressWarnings("unchecked")
        ArrayList<AttributeHandle> inValue = (ArrayList<AttributeHandle>) (in.value);
        // Guard against a source input whose value was never set; the
        // original code would throw a NullPointerException here.
        if (inValue == null)
            return;
        for (AttributeHandle h : inValue) {
            AttributeHandle hNew = new AttributeHandle(ent, h.getName());
            hNew.setUnitType(h.getUnitType());
            hNew.setInitialValue(h.getInitialValue());
            hNew.setValue(h.getValueAsDouble(0.0d, 0.0d));
            value.add(hNew);
        }
    }

    /** @return the number of parsed attribute definitions, or 0 if none. */
    @Override
    public int getListSize() {
        if (value == null)
            return 0;
        else
            return value.size();
    }

    /** @return the default value rendered in input syntax, or "" if empty. */
    @Override
    public String getDefaultString() {
        if (defValue == null || defValue.isEmpty()) return "";
        return this.getInputString(defValue);
    }

    /** Renders a handle list back into { Name Value Unit } input syntax. */
    private String getInputString(ArrayList<AttributeHandle> handleList) {
        StringBuilder tmp = new StringBuilder();
        for (int i = 0; i < handleList.size(); i++) {
            if (i > 0) tmp.append(SEPARATOR);
            AttributeHandle h = handleList.get(i);
            tmp.append("{ ");
            tmp.append(h.getName());
            tmp.append(SEPARATOR);
            double val = h.getInitialValue();
            String unitString = Unit.getSIUnit(h.getUnitType());
            // Check for a preferred unit
            Unit unit = Unit.getPreferredUnit(h.getUnitType());
            if (unit != null) {
                unitString = unit.toString();
                val = h.getValueAsDouble(0.0d, 0.0d, unit);
            }
            tmp.append(val);
            // Print the unit unless it is dimensionless
            if (h.getUnitType() != DimensionlessUnit.class) {
                tmp.append(SEPARATOR);
                tmp.append(unitString);
            }
            tmp.append(" }");
        }
        return tmp.toString();
    }
}
|
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// all copies or substantial portions of the Software.
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package com.microsoft.graph.http;
import com.microsoft.graph.core.ClientException;
import com.microsoft.graph.core.IBaseClient;
import com.microsoft.graph.options.FunctionOption;
import com.microsoft.graph.options.HeaderOption;
import com.microsoft.graph.options.Option;
import com.microsoft.graph.options.QueryOption;
import java.net.URL;
import java.util.List;
/**
* A request against a collection
*
* @param <T1> the raw response class returned by the service
* @param <T2> the class of the collection page
*/
/**
 * A request against a collection. Delegates all HTTP plumbing to an internal
 * {@link BaseRequest} and exposes typed send/post helpers.
 *
 * @param <T1> the raw response class returned by the service
 * @param <T2> the class of the collection page
 */
public abstract class BaseCollectionRequest<T1, T2> implements IHttpRequest {

    /** The base request for this collection request. */
    private final BaseRequest baseRequest;

    /** The class for the response. */
    private final Class<T1> responseClass;

    /** The class for the collection page. */
    private final Class<T2> collectionPageClass;

    /**
     * Create the collection request
     *
     * @param requestUrl the URL to make the request against
     * @param client the client which can issue the request
     * @param options the options for this request
     * @param responseClass the class for the response
     * @param collectionPageClass the class for the collection page
     */
    public BaseCollectionRequest(final String requestUrl,
            final IBaseClient client,
            final List<? extends Option> options,
            final Class<T1> responseClass,
            final Class<T2> collectionPageClass) {
        this.responseClass = responseClass;
        this.collectionPageClass = collectionPageClass;
        // Anonymous subclass because BaseRequest is abstract.
        baseRequest = new BaseRequest(requestUrl, client, options, responseClass) {
        };
    }

    /**
     * Sends this request as an HTTP GET.
     *
     * @return the response object
     * @throws ClientException an exception occurs if there was an error while the request was sent
     */
    protected T1 send() throws ClientException {
        baseRequest.setHttpMethod(HttpMethod.GET);
        return baseRequest.getClient().getHttpProvider().send(this, responseClass, /* serialization object */ null);
    }

    /**
     * Sends this request as an HTTP POST with the given body.
     *
     * <p>Note: the result type {@code T1} is the class-level type parameter;
     * the previous javadoc incorrectly documented it as a method-level type
     * parameter, and the previous code carried a redundant cast (compare
     * {@link #send()}, which returns the provider result directly).
     *
     * @param serializedObject the object to serialize as the body
     * @param <BodyType> the type of the serialized body
     * @return the response object
     * @throws ClientException an exception occurs if there was an error while the request was sent
     */
    protected <BodyType> T1 post(final BodyType serializedObject) throws ClientException {
        baseRequest.setHttpMethod(HttpMethod.POST);
        return baseRequest.getClient().getHttpProvider().send(this, responseClass, serializedObject);
    }

    /**
     * Gets the request URL
     *
     * @return the request URL
     */
    @Override
    public URL getRequestUrl() {
        return baseRequest.getRequestUrl();
    }

    /**
     * Gets the HTTP method
     *
     * @return the HTTP method
     */
    @Override
    public HttpMethod getHttpMethod() {
        return baseRequest.getHttpMethod();
    }

    /**
     * Gets the headers
     *
     * @return the headers
     */
    @Override
    public List<HeaderOption> getHeaders() {
        return baseRequest.getHeaders();
    }

    /**
     * Adds a header to this request
     *
     * @param header the name of the header
     * @param value the value of the header
     */
    @Override
    public void addHeader(final String header, final String value) {
        baseRequest.addHeader(header, value);
    }

    /**
     * Sets useCaches parameter to cache the response
     *
     * @param useCaches the value of useCaches
     */
    @Override
    public void setUseCaches(boolean useCaches) {
        baseRequest.setUseCaches(useCaches);
    }

    /**
     * Gets useCaches parameter
     *
     * @return the value of useCaches
     */
    @Override
    public boolean getUseCaches() {
        return baseRequest.getUseCaches();
    }

    /**
     * Gets the full list of options for this request
     *
     * @return the full list of options for this request
     */
    public List<Option> getOptions() {
        return baseRequest.getOptions();
    }

    /**
     * Adds a query option
     *
     * @param option the query option to add
     */
    public void addQueryOption(final QueryOption option) {
        baseRequest.getQueryOptions().add(option);
    }

    /**
     * Adds a function option
     *
     * @param option the function option to add
     */
    public void addFunctionOption(final FunctionOption option) {
        baseRequest.getFunctionOptions().add(option);
    }

    /**
     * Gets the base request for this collection request
     *
     * @return the base request for this collection request
     */
    protected BaseRequest getBaseRequest() {
        return baseRequest;
    }

    /**
     * Gets the class for the collection page
     *
     * @return the class for the collection page
     */
    public Class<T2> getCollectionPageClass() {
        return collectionPageClass;
    }
}
|
package com.ociweb.gl.impl.stage;
import java.util.ArrayList;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.ociweb.gl.api.Behavior;
import com.ociweb.gl.api.HTTPRequestReader;
import com.ociweb.gl.api.HTTPResponseListener;
import com.ociweb.gl.api.HTTPResponseReader;
import com.ociweb.gl.api.ListenerFilter;
import com.ociweb.gl.api.MsgCommandChannel;
import com.ociweb.gl.api.PubSubListener;
import com.ociweb.gl.api.RestListener;
import com.ociweb.gl.api.ShutdownListener;
import com.ociweb.gl.api.StartupListener;
import com.ociweb.gl.api.StateChangeListener;
import com.ociweb.gl.api.TimeListener;
import com.ociweb.gl.impl.BuilderImpl;
import com.ociweb.gl.impl.ChildClassScanner;
import com.ociweb.gl.impl.HTTPResponseListenerBase;
import com.ociweb.gl.impl.PayloadReader;
import com.ociweb.gl.impl.PubSubListenerBase;
import com.ociweb.gl.impl.RestListenerBase;
import com.ociweb.gl.impl.schema.MessageSubscription;
import com.ociweb.gl.impl.schema.TrafficOrderSchema;
import com.ociweb.pronghorn.network.ClientCoordinator;
import com.ociweb.pronghorn.network.config.HTTPContentType;
import com.ociweb.pronghorn.network.config.HTTPRevision;
import com.ociweb.pronghorn.network.config.HTTPSpecification;
import com.ociweb.pronghorn.network.config.HTTPVerb;
import com.ociweb.pronghorn.network.config.HTTPVerbDefaults;
import com.ociweb.pronghorn.network.schema.HTTPRequestSchema;
import com.ociweb.pronghorn.network.schema.NetResponseSchema;
import com.ociweb.pronghorn.pipe.DataInputBlobReader;
import com.ociweb.pronghorn.pipe.Pipe;
import com.ociweb.pronghorn.pipe.PipeUTF8MutableCharSquence;
import com.ociweb.pronghorn.pipe.util.hash.IntHashTable;
import com.ociweb.pronghorn.stage.PronghornStage;
import com.ociweb.pronghorn.stage.scheduling.GraphManager;
import com.ociweb.pronghorn.util.TrieParser;
import com.ociweb.pronghorn.util.TrieParserReader;
/**
 * Reactor stage that pulls messages off this behavior's input pipes and
 * dispatches them to the wrapped listener object (PubSub, REST, HTTP-response,
 * time and state-change callbacks). One stage instance wraps one Behavior.
 */
public class ReactiveListenerStage<H extends BuilderImpl> extends PronghornStage implements ListenerFilter {
    // Precomputed message sizes used to confirm low-level pipe reads.
    private static final int SIZE_OF_MSG_STATECHANGE = Pipe.sizeOf(MessageSubscription.instance, MessageSubscription.MSG_STATECHANGED_71);
    private static final int SIZE_OF_MSG_PUBLISH = Pipe.sizeOf(MessageSubscription.instance, MessageSubscription.MSG_PUBLISH_103);
    protected final Object listener;              // the user Behavior driven by this stage
    protected final TimeListener[] timeListeners; // non-empty only when listener is a TimeListener
    protected final Pipe<?>[] inputPipes;
    protected final Pipe<?>[] outputPipes;
    protected long timeTrigger;  // absolute ms time of the next scheduled time event
    protected long timeRate;     // ms between time events; 0 disables them
    protected H builder;
    private static final Logger logger = LoggerFactory.getLogger(ReactiveListenerStage.class);
    protected boolean startupCompleted;   // set true at the end of startup()
    protected boolean shutdownCompleted;  // set true at the end of shutdown()
    //all non shutdown listening reactors will be shutdown only after the listeners have finished.
    protected static AtomicInteger liveShutdownListeners = new AtomicInteger();
    protected static AtomicInteger totalLiveReactors = new AtomicInteger();
    protected static AtomicBoolean shutdownRequsted = new AtomicBoolean(false);
    protected static Runnable lastCall; // run on its own thread once the final reactor shuts down
    //only used for direct method dispatch upon subscription topic arrival
    private TrieParser methodLookup;       // topic -> method index; lazily created by addSubscription
    private TrieParserReader methodReader; // reader paired with methodLookup
    private CallableMethod[] methods;      // dispatch targets, indexed by topic id
    private boolean restRoutesDefined = false; // must be true before startup when listener is a RestListener
    protected int[] oversampledAnalogValues;
    private static final int MAX_PORTS = 10;
    // Shared stateless operator table built once per class load.
    public static final ReactiveOperators operators = reactiveOperators();
    protected final Enum[] states;           // ordinal-indexed state set from the builder
    protected boolean timeEvents = false;    // see setTimeEventSchedule
    //Listener Filters
    // Bit sets (64 ordinals per long) restricting which state transitions are delivered.
    private long[] includedToStates;
    private long[] includedFromStates;
    private long[] excludedToStates;
    private long[] excludedFromStates;
    private Number stageRate;                // scheduler rate nota, in ns; may be null
    protected final GraphManager graphManager;
    protected int timeProcessWindow;         // ms window within which we busy-wait for a time event
    private PipeUTF8MutableCharSquence mutableTopic = new PipeUTF8MutableCharSquence();
    private PayloadReader payloadReader;
    private HTTPSpecification httpSpec;
    private IntHashTable headerToPositionTable; //for HTTPClient
    private TrieParser headerTrieParser; //for HTTPClient
    private final ClientCoordinator ccm;     // required for HTTP client responses
    protected ReactiveManagerPipeConsumer consumer;
    protected static final long MS_to_NS = 1_000_000;
    private int timeIteration = 0;           // monotonically increasing time-event counter
    private int parallelInstance;            // -1 when not running as a parallel track
    private final ArrayList<ReactiveManagerPipeConsumer> consumers;
    ///NOTE: keep all the work here to a minimum, we should just
    //      take data off pipes and hand off to the application
    //      the thread here is the applications thread; if
    //      much work needs to be done it must be done elsewhere
/**
 * Creates the reactor stage for one behavior.
 *
 * @param graphManager     graph this stage belongs to
 * @param listener         the user behavior; must not be null
 * @param inputPipes       pipes delivering messages to the behavior
 * @param outputPipes      pipes the behavior publishes to
 * @param consumers        additional pipe consumers processed each run()
 * @param builder          builder supplying states, routes and coordinator
 * @param parallelInstance parallel track index, or -1 when not parallel
 */
public ReactiveListenerStage(GraphManager graphManager, Behavior listener,
        Pipe<?>[] inputPipes, Pipe<?>[] outputPipes,
        ArrayList<ReactiveManagerPipeConsumer> consumers,
        H builder, int parallelInstance) {
    super(graphManager, inputPipes, outputPipes);
    this.listener = listener;
    assert(null!=listener) : "Behavior must be defined";
    this.parallelInstance = parallelInstance;
    this.consumers = consumers;
    this.inputPipes = inputPipes;
    this.outputPipes = outputPipes;
    this.builder = builder;
    this.states = builder.getStates();
    this.graphManager = graphManager;
    this.ccm = builder.getClientCoordinator();
    // Track live reactors so the last one to shut down can run lastCall.
    int totalCount = totalLiveReactors.incrementAndGet();
    assert(totalCount>=0);
    if (listener instanceof ShutdownListener) {
        // Shutdown listeners are counted so non-listeners wait for them (see run()).
        int shudownListenrCount = liveShutdownListeners.incrementAndGet();
        assert(shudownListenrCount>=0);
    }
    //TODO: add child object here as well?
    if (listener instanceof TimeListener) {
        timeListeners = new TimeListener[]{(TimeListener)listener};
    } else {
        timeListeners = new TimeListener[0];
    }
}
// NOTE(review): this table is held in the static final field 'operators', so it is
// built once per class load — the old TODO claiming it was created per stage
// appears stale; confirm before acting on it.
/**
 * Builds the shared dispatch table mapping listener base types to the
 * pipe-consuming routine that handles their schema.
 */
private static ReactiveOperators reactiveOperators() {
    return new ReactiveOperators()
        // PubSub messages -> consumePubSubMessage
        .addOperator(PubSubListenerBase.class,
            MessageSubscription.instance,
            new ReactiveOperator() {
                @Override
                public void apply(Object target, Pipe input, ReactiveListenerStage r) {
                    r.consumePubSubMessage(target, input);
                }
            })
        // HTTP client responses -> consumeNetResponse
        .addOperator(HTTPResponseListenerBase.class,
            NetResponseSchema.instance,
            new ReactiveOperator() {
                @Override
                public void apply(Object target, Pipe input, ReactiveListenerStage r) {
                    r.consumeNetResponse((HTTPResponseListener)target, input);
                }
            })
        // REST requests -> consumeRestRequest
        .addOperator(RestListenerBase.class,
            HTTPRequestSchema.instance,
            new ReactiveOperator() {
                @Override
                public void apply(Object target, Pipe input, ReactiveListenerStage r) {
                    r.consumeRestRequest((RestListener)target, input);
                }
            });
}
/** @return the unique behavior id the builder assigned to this listener. */
public int getId() {
    final Behavior behavior = (Behavior) listener;
    return builder.behaviorId(behavior);
}
/** @return true once any caller has requested a system-wide shutdown. */
public static boolean isShutdownRequested() {
    return shutdownRequsted.get();
}
/**
 * Requests a system-wide shutdown of all reactors.
 *
 * @param shutdownRunnable run on its own thread after the last reactor
 *        shuts down (see shutdown()); stored before the flag is raised so
 *        it is visible to whoever observes the flag.
 */
public static void requestSystemShutdown(Runnable shutdownRunnable) {
    lastCall = shutdownRunnable;
    shutdownRequsted.set(true);
    //logger.info("shutdown requested");
}
// Extra per-instance detail (e.g. subscriptions) appended to toString().
private String toStringDetails = "\n";
/** @return behavior simple name plus stage details for diagnostics. */
public String toString() {
    return (null==listener ? "Unknown Behavior" :
        listener.getClass().getSimpleName())+"\n"+
        super.toString()+toStringDetails;
}
/**
 * Schedules periodic time events for a TimeListener behavior.
 *
 * @param rate  milliseconds between events; zero disables time events
 * @param start absolute time (ms) of the first trigger
 */
public final void setTimeEventSchedule(long rate, long start) {
    this.timeRate = rate;
    this.timeTrigger = start;
    final boolean enabled = (rate != 0) && (listener instanceof TimeListener);
    this.timeEvents = enabled;
}
/**
 * One-time initialization: builds the pipe consumer, validates REST route
 * wiring, prepares HTTP-client header parsing, computes the scheduling
 * window, and finally invokes the behavior's own StartupListener hook.
 */
@Override
public void startup() {
    //ALL operators have been added to operators so it can be used to create consumers as needed
    consumer = new ReactiveManagerPipeConsumer(listener, operators, inputPipes);
    if (listener instanceof RestListener) {
        // Fail fast: a RestListener with no routes would silently receive nothing.
        if (!restRoutesDefined) {
            throw new UnsupportedOperationException("a RestListener requires a call to includeRoutes() first to define which routes it consumes.");
        }
    }
    httpSpec = HTTPSpecification.defaultSpec();
    ///HTTPClient support: header lookup structures shared by consumeNetResponse.
    TrieParserReader parserReader = new TrieParserReader(2, true);
    headerToPositionTable = httpSpec.headerTable(parserReader);
    headerTrieParser = httpSpec.headerParser();
    // Convert the scheduler rate (ns) into the ms window used by processTimeEvents.
    stageRate = (Number)GraphManager.getNota(graphManager, this.stageId, GraphManager.SCHEDULE_RATE, null);
    timeProcessWindow = (null==stageRate? 0 : (int)(stageRate.longValue()/MS_to_NS));
    //TODO: the transducers need to be listed here as startup listeners.
    //Do last so we complete all the initializations first
    if (listener instanceof StartupListener) {
        ((StartupListener)listener).startup();
    }
    startupCompleted=true;
}
/**
 * Main scheduling entry point. Handles the cooperative shutdown protocol
 * first (ShutdownListeners may veto; non-listeners wait for them), then
 * fires any due time events and drains all pipe consumers.
 */
@Override
public void run() {
    if (shutdownRequsted.get()) {
        if (!shutdownCompleted) {
            if (listener instanceof ShutdownListener) {
                // The behavior may veto shutdown; only proceed when it accepts.
                if (((ShutdownListener)listener).acceptShutdown()) {
                    int remaining = liveShutdownListeners.decrementAndGet();
                    assert(remaining>=0);
                    requestShutdown();
                    return;
                }
                //else continue with normal run processing
            } else {
                //this one is not a listener so we must wait for all the listeners to close first
                if (0 == liveShutdownListeners.get()) {
                    requestShutdown();
                    return;
                }
                //else continue with normal run processing.
            }
        } else {
            assert(shutdownCompleted);
            assert(false) : "run should not have been called if this stage was shut down.";
            return;
        }
    }
    if (timeEvents) {
        processTimeEvents(timeListeners, timeTrigger);
    }
    // Drain this stage's own consumer plus any externally registered ones.
    consumer.process(this);
    int j = consumers.size();
    while(--j>=0) {
        consumers.get(j).process(this);
    }
}
/**
 * Publishes EOF on every output pipe and, if this was the last live
 * reactor, runs the stored lastCall hook on a fresh thread.
 */
@Override
public void shutdown() {
    assert(!shutdownCompleted) : "already shut down why was this called a second time?";
    int i = outputPipes.length;
    while (--i>=0) {
        Pipe<?> output = outputPipes[i];
        // Block until the pipe has room for the EOF marker, then publish it.
        Pipe.spinBlockForRoom(output, Pipe.EOF_SIZE);
        Pipe.publishEOF(output);
    }
    if (totalLiveReactors.decrementAndGet()==0) {
        //ready for full system shutdown.
        if (null!=lastCall) {
            // Run on its own thread so this stage's thread can finish cleanly.
            new Thread(lastCall).start();
        }
    }
    shutdownCompleted = true;
}
/**
 * Drains REST request messages from the pipe and delivers each to the
 * RestListener. If the listener returns false the tail is reset so the
 * same message is redelivered on the next run.
 * NOTE: the Pipe.take* calls below must stay in exactly this order — they
 * consume fields sequentially from the low-level pipe.
 */
final void consumeRestRequest(RestListener listener, Pipe<HTTPRequestSchema> p) {
    while (Pipe.hasContentToRead(p)) {
        Pipe.markTail(p); // so we can resetTail if the listener declines the message
        int msgIdx = Pipe.takeMsgIdx(p);
        if (HTTPRequestSchema.MSG_RESTREQUEST_300==msgIdx) {
            long connectionId = Pipe.takeLong(p);
            int sequenceNo = Pipe.takeInt(p);
            // Route and verb are packed into a single int.
            int routeVerb = Pipe.takeInt(p);
            int routeId = routeVerb>>>HTTPVerb.BITS;
            int verbId = HTTPVerb.MASK & routeVerb;
            HTTPRequestReader reader = (HTTPRequestReader)Pipe.inputStream(p);
            reader.openLowLevelAPIField(); //NOTE: this will take meta then take len
            // Route-specific parsers for path extraction and headers.
            reader.setParseDetails( builder.routeExtractionParser(routeId),
                    builder.routeHeaderToPositionTable(routeId),
                    builder.routeExtractionParserIndexCount(routeId),
                    builder.routeHeaderTrieParser(routeId)
                    );
            // Parallel track index and HTTP revision are packed into one int.
            int parallelRevision = Pipe.takeInt(p);
            int parallelIdx = parallelRevision >>> HTTPRevision.BITS;
            int revision = HTTPRevision.MASK & parallelRevision;
            reader.setRevisionId(revision);
            reader.setRequestContext(Pipe.takeInt(p));
            reader.setRouteId(routeId);
            //both these values are required in order to ensure the right sequence order once processed.
            long sequenceCode = (((long)parallelIdx)<<32) | ((long)sequenceNo);
            reader.setConnectionId(connectionId, sequenceCode);
            //assign verbs as strings...
            reader.setVerb((HTTPVerbDefaults)httpSpec.verbs[verbId]);
            if (!listener.restRequest(reader)) {
                Pipe.resetTail(p);
                return;//continue later and repeat this same value.
            }
            reader.setParseDetails(null,null,0,null);//just to be safe.
        } else {
            logger.error("unrecognized message on {} ",p);
            throw new UnsupportedOperationException("unexpected message "+msgIdx);
        }
        Pipe.confirmLowLevelRead(p, Pipe.sizeOf(p,msgIdx));
        Pipe.releaseReadLock(p);
    }
}
/**
 * Drains HTTP client response messages (response / continuation / closed)
 * and delivers each to the HTTPResponseListener. A false return from the
 * listener resets the tail so the message is retried on the next run.
 * NOTE: Pipe.take* order is significant and must match the schema layout.
 */
final void consumeNetResponse(HTTPResponseListener listener, Pipe<NetResponseSchema> p) {
    assert(null!=ccm) : "must define coordinator";
    while (Pipe.hasContentToRead(p)) {
        Pipe.markTail(p);
        int msgIdx = Pipe.takeMsgIdx(p);
        //logger.info("response from HTTP request. Type is {} ",msgIdx);
        switch (msgIdx) {
            case NetResponseSchema.MSG_RESPONSE_101:
                long ccId1 = Pipe.takeLong(p);
                //ClientConnection cc = (ClientConnection)ccm.get(ccId1);
                HTTPResponseReader reader = (HTTPResponseReader)Pipe.inputStream(p);
                reader.openLowLevelAPIField();
                final short statusId = reader.readShort();
                reader.setParseDetails(headerToPositionTable, headerTrieParser);
                //end of the big mess
                // NOTE(review): typeId is hard-coded to 0, so the first entry of
                // httpSpec.contentTypes is always reported — confirm intended.
                int typeId = 0;
                if (!listener.responseHTTP( statusId,
                                            (HTTPContentType)httpSpec.contentTypes[typeId],
                                            reader)) {
                    Pipe.resetTail(p);
                    return;//continue later and repeat this same value.
                }
                //TODO: application layer can not know that the response is complete or we will have a continuation...
                break;
            case NetResponseSchema.MSG_CONTINUATION_102:
                long fieldConnectionId = Pipe.takeLong(p);
                HTTPResponseReader continuation = (HTTPResponseReader)Pipe.inputStream(p);
                continuation.openLowLevelAPIField();
                // Continuations carry status 0 and no content type.
                if (!listener.responseHTTP((short)0,(HTTPContentType)null,continuation)) {
                    Pipe.resetTail(p);
                    return;//continue later and repeat this same value.
                }
                break;
            case NetResponseSchema.MSG_CLOSED_10:
                HTTPResponseReader hostReader = (HTTPResponseReader)Pipe.inputStream(p);
                hostReader.openLowLevelAPIField();
                int port = Pipe.takeInt(p);//the caller does not care which port we were on.
                // Connection closed is signalled with status -1.
                if (!listener.responseHTTP((short)-1,null,hostReader)) {
                    Pipe.resetTail(p);
                    return;//continue later and repeat this same value.
                }
                break;
            default:
                throw new UnsupportedOperationException("Unknown id: "+msgIdx);
        }
        Pipe.confirmLowLevelRead(p, Pipe.sizeOf(p,msgIdx));
        Pipe.releaseReadLock(p);
    }
}
/**
 * Drains PubSub messages: publishes are routed either to a registered
 * topic method (methods[] dispatch) or to the generic PubSubListener;
 * state changes are filtered through the include/exclude ordinal bit sets.
 * A false return from any callback resets the tail for redelivery.
 */
final void consumePubSubMessage(Object listener, Pipe<MessageSubscription> p) {
    while (Pipe.hasContentToRead(p)) {
        Pipe.markTail(p);
        final int msgIdx = Pipe.takeMsgIdx(p);
        switch (msgIdx) {
            case MessageSubscription.MSG_PUBLISH_103:
                // NOTE(review): the method names here look swapped relative to the
                // variable names (meta = takeRingByteLen, len = takeRingByteMetaData);
                // the two sequential reads still consume the meta then the length
                // field in pipe order — confirm against the Pipe API before changing.
                final int meta = Pipe.takeRingByteLen(p);
                final int len = Pipe.takeRingByteMetaData(p);
                final int pos = Pipe.convertToPosition(meta, p);
                mutableTopic.setToField(p, meta, len);
                DataInputBlobReader<MessageSubscription> reader = Pipe.inputStream(p);
                reader.openLowLevelAPIField();
                int dispatch;
                // Prefer direct method dispatch when a topic method was registered.
                if ((null==methodReader) || ((dispatch=methodLookup(p, len, pos))<0)) {
                    if (! ((PubSubListener)listener).message(mutableTopic,reader)) {
                        Pipe.resetTail(p);
                        return;//continue later and repeat this same value.
                    }
                } else {
                    if (! methods[dispatch].method(this, mutableTopic, reader)) {
                        Pipe.resetTail(p);
                        return;//continue later and repeat this same value.
                    }
                }
                Pipe.confirmLowLevelRead(p, SIZE_OF_MSG_PUBLISH);
                break;
            case MessageSubscription.MSG_STATECHANGED_71:
                int oldOrdinal = Pipe.takeInt(p);
                int newOrdinal = Pipe.takeInt(p);
                assert(oldOrdinal != newOrdinal) : "Stage change must actualt change the state!";
                // Deliver only transitions passing all four ordinal filters.
                if (isIncluded(newOrdinal, includedToStates) && isIncluded(oldOrdinal, includedFromStates) &&
                    isNotExcluded(newOrdinal, excludedToStates) && isNotExcluded(oldOrdinal, excludedFromStates) ) {
                    if (!((StateChangeListener)listener).stateChange(states[oldOrdinal], states[newOrdinal])) {
                        Pipe.resetTail(p);
                        return;//continue later and repeat this same value.
                    }
                }
                Pipe.confirmLowLevelRead(p, SIZE_OF_MSG_STATECHANGE);
                break;
            case -1:
                // EOF marker: shut this stage down.
                requestShutdown();
                Pipe.confirmLowLevelRead(p, Pipe.EOF_SIZE);
                Pipe.releaseReadLock(p);
                return;
            default:
                throw new UnsupportedOperationException("Unknown id: "+msgIdx);
        }
        Pipe.releaseReadLock(p);
    }
}
/**
 * Looks up the dispatch-method index for the topic bytes currently in the
 * pipe's blob at [pos, pos+len); returns a negative value when no method
 * was registered for that topic.
 */
private final int methodLookup(Pipe<MessageSubscription> p, final int len, final int pos) {
    return (int)TrieParserReader.query(methodReader, methodLookup,
            Pipe.blob(p), pos, len, Pipe.blobMask(p));
}
/**
 * Fires the scheduled time event when the trigger is near: returns early if
 * the trigger is outside the processing window, otherwise sleeps most of
 * the remaining time, spin-yields up to the exact trigger, then invokes
 * each TimeListener and advances the trigger by timeRate.
 */
protected final void processTimeEvents(TimeListener[] listener, long trigger) {
    long msRemaining = (trigger-builder.currentTimeMillis());
    if (msRemaining > timeProcessWindow) {
        //if its not near, leave
        return;
    }
    if (msRemaining>1) {
        // Coarse wait: sleep all but the last millisecond.
        try {
            Thread.sleep(msRemaining-1);
        } catch (InterruptedException e) {
            // NOTE(review): interrupt status is swallowed here — the loop below
            // still enforces the trigger time; consider re-interrupting.
        }
    }
    // Fine wait: busy-yield until the trigger time is reached.
    while (builder.currentTimeMillis() < trigger) {
        Thread.yield();
    }
    int iteration = timeIteration++;
    //all Internal Objects will get these sequentially
    for(int i = 0; i<listener.length; i++) {
        listener[i].timeEvent(trigger, iteration);
    }
    timeTrigger += timeRate;
}
/**
 * Tests the exclusion bit set (64 ordinals per long word).
 * A null bit set excludes nothing.
 */
protected final boolean isNotExcluded(int newOrdinal, long[] excluded) {
    if (null == excluded) {
        return true;
    }
    final long word = excluded[newOrdinal >> 6];
    final long bit = 1L << (newOrdinal & 0x3F);
    return (word & bit) == 0;
}
/**
 * Tests the inclusion bit set (64 ordinals per long word).
 * A null bit set includes everything.
 */
protected final boolean isIncluded(int newOrdinal, long[] included) {
    if (null == included) {
        return true;
    }
    final long word = included[newOrdinal >> 6];
    final long bit = 1L << (newOrdinal & 0x3F);
    return (word & bit) != 0;
}
/**
 * Returns false only when {@code port} is identical (reference equality)
 * to an element of {@code excluded}; a null array excludes nothing.
 */
protected final <T> boolean isNotExcluded(T port, T[] excluded) {
    if (null != excluded) {
        for (int idx = excluded.length - 1; idx >= 0; idx--) {
            if (excluded[idx] == port) {
                return false;
            }
        }
    }
    return true;
}
/**
 * Returns false only when {@code a} appears in {@code excluded};
 * a null array excludes nothing.
 */
protected final boolean isNotExcluded(int a, int[] excluded) {
    if (null != excluded) {
        for (int idx = excluded.length - 1; idx >= 0; idx--) {
            if (excluded[idx] == a) {
                return false;
            }
        }
    }
    return true;
}
/**
 * Returns true when {@code port} is identical (reference equality) to an
 * element of {@code included}; a null array includes everything.
 */
protected final <T> boolean isIncluded(T port, T[] included) {
    if (null == included) {
        return true;
    }
    for (int idx = included.length - 1; idx >= 0; idx--) {
        if (included[idx] == port) {
            return true;
        }
    }
    return false;
}
/**
 * Returns true when {@code a} appears in {@code included};
 * a null array includes everything.
 */
protected final boolean isIncluded(int a, int[] included) {
    if (null == included) {
        return true;
    }
    for (int idx = included.length - 1; idx >= 0; idx--) {
        if (included[idx] == a) {
            return true;
        }
    }
    return false;
}
/**
 * Maps every defined route (or the catch-all when none exist) onto this
 * stage's single HTTPRequestSchema input pipe.
 *
 * @return this filter, for chaining
 * @throws UnsupportedOperationException when the behavior is not a RestListener
 */
@Override
public final ListenerFilter includeAllRoutes() {
    restRoutesDefined = true;
    if (listener instanceof RestListener) {
        int count = 0;
        int i = inputPipes.length;
        while (--i>=0) {
            //we only expect to find a single request pipe
            if (Pipe.isForSchema(inputPipes[i], HTTPRequestSchema.class)) {
                int routes = builder.routerConfig().routesCount();
                // Use the explicit parallel track when set, otherwise the pipe ordinal.
                int p = parallelInstance==-1?count:parallelInstance;
                assert(routes>=0);
                //for catch all
                if (routes==0) {
                    routes=1;
                }
                while (--routes>=0) {
                    builder.appendPipeMapping((Pipe<HTTPRequestSchema>) inputPipes[i], routes, p);
                }
                count++;
            }
        }
        return this;
    } else {
        throw new UnsupportedOperationException("The Listener must be an instance of "+RestListener.class.getSimpleName()+" in order to call this method.");
    }
}
/**
 * Subscribes this listener's HTTP request pipe(s) to the specific routes given.
 * {@code restRoutesDefined} is only flipped once at least one route is mapped
 * onto a request pipe (preserved from the original).
 *
 * @param routeIds route identifiers to map onto this listener.
 * @return this filter, for chaining.
 * @throws UnsupportedOperationException when the listener is not a RestListener.
 */
@Override
public final ListenerFilter includeRoutes(int... routeIds) {
	if (!(listener instanceof RestListener)) {
		throw new UnsupportedOperationException("The Listener must be an instance of "+RestListener.class.getSimpleName()+" in order to call this method.");
	}
	int count = 0;
	// Walk pipes in reverse, matching the original iteration order.
	for (int i = inputPipes.length - 1; i >= 0; i--) {
		//we only expect to find a single request pipe
		if (!Pipe.isForSchema(inputPipes[i], HTTPRequestSchema.class)) {
			continue;
		}
		final int parallelId = (parallelInstance == -1) ? count : parallelInstance;
		for (int x = routeIds.length - 1; x >= 0; x--) {
			restRoutesDefined = true;
			builder.appendPipeMapping((Pipe<HTTPRequestSchema>) inputPipes[i], routeIds[x], parallelId);
		}
		count++;
	}
	return this;
}
/**
 * Registers a topic with a method to be invoked when that topic arrives.
 * Must be called before startup; the topic/method tables are immutable at runtime.
 *
 * @param topic  topic to subscribe to.
 * @param method method dispatched for messages on {@code topic}.
 * @return this filter, for chaining.
 * @throws UnsupportedOperationException when called after startup has completed.
 */
public final <T extends Behavior> ListenerFilter addSubscription(
		CharSequence topic,
		CallableMethod<T> method) {

	// Lazily create the dispatch structures on the first registration.
	if (null == methods) {
		methodLookup = new TrieParser(16,1,false,false,false);
		methodReader = new TrieParserReader(0, true);
		methods = new CallableMethod[0];
	}

	if (!startupCompleted && listener instanceof PubSubListener) {
		builder.addStartupSubscription(topic, System.identityHashCode(listener));
		toStringDetails = toStringDetails+"sub:'"+topic+"'\n";
	} else {
		if (startupCompleted) {
			throw new UnsupportedOperationException("Method dispatch subscriptions may not be modified at runtime.");
		}
	}

	// The topic is mapped to the next free slot index...
	int id = methods.length;
	methodLookup.setUTF8Value(topic,id);

	//grow the array of methods to be called
	CallableMethod[] newArray = new CallableMethod[id+1];
	System.arraycopy(methods, 0, newArray, 0, id);
	// FIX: was newArray[0] = method, which overwrote the first registered
	// method and left slot `id` null (NPE when dispatching later topics).
	newArray[id] = method;
	methods = newArray;

	return this;
}
/**
 * Registers a startup subscription for the given topic.
 *
 * @param topic topic the listener should receive.
 * @return this filter, for chaining.
 * @throws UnsupportedOperationException after startup, or when the listener
 *         is not a PubSubListener.
 */
@Override
public final ListenerFilter addSubscription(CharSequence topic) {
	if (!startupCompleted && listener instanceof PubSubListener) {
		builder.addStartupSubscription(topic, System.identityHashCode(listener));
		toStringDetails = toStringDetails+"sub:'"+topic+"'\n";
		return this;
	}
	if (startupCompleted) {
		throw new UnsupportedOperationException("Call addSubscription on CommandChanel to modify subscriptions at runtime.");
	}
	throw new UnsupportedOperationException("The Listener must be an instance of "+PubSubListener.class.getSimpleName()+" in order to call this method.");
}
/**
 * Restricts state-change events to transitions INTO the given states.
 *
 * @param state states of interest.
 * @return this filter, for chaining.
 * @throws UnsupportedOperationException after startup, or when the listener
 *         is not a StateChangeListener.
 */
@Override
public final <E extends Enum<E>> ListenerFilter includeStateChangeTo(E ... state) {
	if (startupCompleted) {
		throw new UnsupportedOperationException("ListenerFilters may only be set before startup is called. Eg. the filters can not be changed at runtime.");
	}
	if (!(listener instanceof StateChangeListener)) {
		throw new UnsupportedOperationException("The Listener must be an instance of "+StateChangeListener.class.getSimpleName()+" in order to call this method.");
	}
	includedToStates = buildMaskArray(state);
	return this;
}
/**
 * Excludes state-change events for transitions INTO the given states.
 *
 * @param state states to filter out.
 * @return this filter, for chaining.
 * @throws UnsupportedOperationException after startup, or when the listener
 *         is not a StateChangeListener.
 */
@Override
public final <E extends Enum<E>> ListenerFilter excludeStateChangeTo(E ... state) {
	if (startupCompleted) {
		throw new UnsupportedOperationException("ListenerFilters may only be set before startup is called. Eg. the filters can not be changed at runtime.");
	}
	if (!(listener instanceof StateChangeListener)) {
		throw new UnsupportedOperationException("The Listener must be an instance of "+StateChangeListener.class.getSimpleName()+" in order to call this method.");
	}
	excludedToStates = buildMaskArray(state);
	return this;
}
/**
 * Convenience: registers interest in transitions both into and out of
 * the given states.
 *
 * @param state states of interest.
 * @return this filter, for chaining.
 */
@Override
public final <E extends Enum<E>> ListenerFilter includeStateChangeToAndFrom(E ... state) {
	includeStateChangeTo(state);
	return includeStateChangeFrom(state);
}
/**
 * Restricts state-change events to transitions OUT OF the given states.
 *
 * @param state states of interest.
 * @return this filter, for chaining.
 * @throws UnsupportedOperationException after startup, or when the listener
 *         is not a StateChangeListener.
 */
@Override
public final <E extends Enum<E>> ListenerFilter includeStateChangeFrom(E ... state) {
	if (startupCompleted) {
		throw new UnsupportedOperationException("ListenerFilters may only be set before startup is called. Eg. the filters can not be changed at runtime.");
	}
	if (!(listener instanceof StateChangeListener)) {
		throw new UnsupportedOperationException("The Listener must be an instance of "+StateChangeListener.class.getSimpleName()+" in order to call this method.");
	}
	includedFromStates = buildMaskArray(state);
	return this;
}
/**
 * Excludes state-change events for transitions OUT OF the given states.
 *
 * @param state states to filter out.
 * @return this filter, for chaining.
 * @throws UnsupportedOperationException after startup, or when the listener
 *         is not a StateChangeListener.
 */
@Override
public final <E extends Enum<E>> ListenerFilter excludeStateChangeFrom(E ... state) {
	if (startupCompleted) {
		throw new UnsupportedOperationException("ListenerFilters may only be set before startup is called. Eg. the filters can not be changed at runtime.");
	}
	if (!(listener instanceof StateChangeListener)) {
		throw new UnsupportedOperationException("The Listener must be an instance of "+StateChangeListener.class.getSimpleName()+" in order to call this method.");
	}
	excludedFromStates = buildMaskArray(state);
	return this;
}
/**
 * Packs the ordinals of the given enum values into a long[] bit-mask,
 * one bit per ordinal (bit {@code ordinal & 0x3F} of word {@code ordinal >> 6}).
 * The array length matches the original sizing exactly (one word beyond the
 * minimum), since callers index it by arbitrary ordinals.
 *
 * @param state enum values whose ordinal bits should be set.
 * @return packed bit-mask.
 */
private final <E extends Enum<E>> long[] buildMaskArray(E[] state) {
	final int maxOrdinal = findMaxOrdinal(state);
	final int fullWords = maxOrdinal >> 6;
	final int remainder = maxOrdinal & 0x3F;
	final long[] mask = new long[fullWords + (remainder == 0 ? 0 : 1) + 1];
	for (E value : state) {
		final int ordinal = value.ordinal();
		mask[ordinal >> 6] |= 1L << (ordinal & 0x3F);
	}
	return mask;
}
/**
 * Finds the largest ordinal among the given enum values.
 *
 * @param state values to scan; may be empty.
 * @return the largest ordinal, or -1 when {@code state} is empty.
 */
private final <E extends Enum<E>> int findMaxOrdinal(E[] state) {
	int max = -1;
	for (E value : state) {
		max = Math.max(max, value.ordinal());
	}
	return max;
}
//used for looking up the features used by this TrafficOrder goPipe
private CommandChannelWithMatchingPipe ccmwp = new CommandChannelWithMatchingPipe();

/**
 * Returns the feature flags of the command channel that owns the given
 * TrafficOrder goPipe, by scanning the listener's fields for a matching
 * MsgCommandChannel.
 *
 * NOTE(review): {@code ccmwp} is a reused mutable visitor — this method is
 * presumably not safe for concurrent callers; confirm threading assumptions.
 *
 * @param pipe the TrafficOrder pipe to match against the listener's channels.
 * @return the matched channel's feature bits.
 */
public int getFeatures(Pipe<TrafficOrderSchema> pipe) {
	ccmwp.init(pipe);
	ChildClassScanner.visitUsedByClass(listener, ccmwp, MsgCommandChannel.class);
	return ccmwp.features();
}
}
|
package com.ociweb.pronghorn.image;
import com.ociweb.pronghorn.image.schema.ImageSchema;
import com.ociweb.pronghorn.pipe.Pipe;
import com.ociweb.pronghorn.pipe.PipeReader;
import com.ociweb.pronghorn.pipe.PipeWriter;
import com.ociweb.pronghorn.stage.PronghornStage;
import com.ociweb.pronghorn.stage.scheduling.GraphManager;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
/**
* Image processing stage which accepts an input image and outputs four
* image pipes at a downscaled resolution:
*
* - A R color channel pipe containing {@code outputWidth} x {@code outputHeight} bytes.
* - A G color channel pipe containing {@code outputWidth} x {@code outputHeight} bytes.
* - A B color channel pipe containing {@code outputWidth} x {@code outputHeight} bytes.
* - A Monochrome color channel pipe containing {@code outputWidth} x {@code outputHeight} bytes
* where each byte is the average of the R, G, and B bytes at that pixel.
*
* @author Brandon Sanders [brandon@alicorn.io]
*/
public class ImageDownscaleStage extends PronghornStage {

    // Pipes and output resolution.
    private final Pipe<ImageSchema> input;
    private final Pipe<ImageSchema>[] outputs;
    private final int outputHeight;
    private final int outputWidth;

    // Scratch buffer used to read and verify the incoming frame encoding.
    private final ByteBuffer encodingBytes = ByteBuffer.wrap(new byte[32]);

    // Input pipe information (captured from the most recent FrameStart).
    private int imageFrameWidth = -1;
    private int imageFrameHeight = -1;
    // Source rows summed into the current output row so far.
    private int imageFrameRowsReceived = 0;
    // Raw RGB24 bytes of the most recently read source row.
    private byte[] imageFrameRowBytes = null;
    // Running sums for one output row; three ints (R, G, B) per output column.
    private int[] imageFrameRowBytesDownsampled = null;
    // Finished per-channel output rows, one byte per output column.
    private final byte[] imageFrameRowBytesR;
    private final byte[] imageFrameRowBytesG;
    private final byte[] imageFrameRowBytesB;
    private final byte[] imageFrameRowBytesMono;

    // Pipe indices and encodings.
    public static final int R_OUTPUT_IDX = 0;
    public static final int G_OUTPUT_IDX = 1;
    public static final int B_OUTPUT_IDX = 2;
    public static final int MONO_OUTPUT_IDX = 3;
    public static final byte[] INPUT_ENCODING = "RGB24".getBytes(StandardCharsets.US_ASCII);
    public static final byte[] R_OUTPUT_ENCODING = "R8".getBytes(StandardCharsets.US_ASCII);
    public static final byte[] G_OUTPUT_ENCODING = "G8".getBytes(StandardCharsets.US_ASCII);
    public static final byte[] B_OUTPUT_ENCODING = "B8".getBytes(StandardCharsets.US_ASCII);
    public static final byte[] MONO_OUTPUT_ENCODING = "MONO8".getBytes(StandardCharsets.US_ASCII);

    /**
     * Creates a downscaling stage.
     *
     * @param graphManager graph this stage belongs to.
     * @param input        source pipe carrying RGB24 frames.
     * @param outputs      exactly four output pipes, indexed by
     *                     {@link #R_OUTPUT_IDX}, {@link #G_OUTPUT_IDX},
     *                     {@link #B_OUTPUT_IDX} and {@link #MONO_OUTPUT_IDX}.
     * @param outputWidth  downscaled width; the source width must be an exact multiple.
     * @param outputHeight downscaled height; the source height must be an exact multiple.
     */
    public ImageDownscaleStage(GraphManager graphManager, Pipe<ImageSchema> input, Pipe<ImageSchema>[] outputs, int outputWidth, int outputHeight) {
        super(graphManager, input, outputs);

        // Validate and assign pipes.
        assert outputs.length == 4 : "Image downscaling stage expects R, G, B, and Monochrome output pipes.";
        this.input = input;
        this.outputs = outputs;

        // Assign configurations.
        this.outputHeight = outputHeight;
        this.outputWidth = outputWidth;

        // Setup frame row buffers.
        this.imageFrameRowBytesR = new byte[outputWidth];
        this.imageFrameRowBytesG = new byte[outputWidth];
        this.imageFrameRowBytesB = new byte[outputWidth];
        this.imageFrameRowBytesMono = new byte[outputWidth];
    }

    /**
     * Drains the input pipe. FrameStart fragments record the source geometry
     * and are forwarded to all four outputs with the downscaled geometry and
     * per-channel encodings. FrameChunk fragments (one source row each) are
     * box-averaged: columns are summed into {@code imageFrameRowBytesDownsampled},
     * and once enough source rows have been accumulated, one output row per
     * channel is published.
     *
     * NOTE(review): if a try-write fails the row is silently dropped for that
     * output (see TODO below) — back-pressure is not yet handled.
     */
    @Override
    public void run() {
        while (PipeReader.tryReadFragment(input)) {
            int msgIdx = PipeReader.getMsgIdx(input);
            switch(msgIdx) { //TODO: Brandon, change to use if msdIdx==MSG_FRAMECHUNK_2 this is teh most common first checking. then else ==MSG_FRAMESTART_1
                case ImageSchema.MSG_FRAMESTART_1:

                    // Extract message start data.
                    imageFrameWidth = PipeReader.readInt(input, ImageSchema.MSG_FRAMESTART_1_FIELD_WIDTH_101);
                    imageFrameHeight = PipeReader.readInt(input, ImageSchema.MSG_FRAMESTART_1_FIELD_HEIGHT_201);

                    // Ensure source resolution is evenly divisible by target resolution.
                    assert imageFrameWidth % outputWidth == 0 &&
                           imageFrameHeight % outputHeight == 0 : "Source resolution must be evenly divisible by target resolution.";

                    // Extract and verify encoding (asserts only; no-op in production).
                    encodingBytes.position(0);
                    encodingBytes.limit(encodingBytes.capacity());
                    PipeReader.readBytes(input, ImageSchema.MSG_FRAMESTART_1_FIELD_ENCODING_601, encodingBytes);
                    assert encodingBytes.position() == INPUT_ENCODING.length;
                    for (int i = 0; i < encodingBytes.position(); i++) { //TODO: Brandon: put this loop inside assert method...
                        assert encodingBytes.get(i) == INPUT_ENCODING[i];
                    }

                    // Write frame start to outputs.
                    for (int i = 0; i < outputs.length; i++) {
                        if (PipeWriter.tryWriteFragment(outputs[i], ImageSchema.MSG_FRAMESTART_1)) {

                            // Write basic data.
                            PipeWriter.writeInt(outputs[i], ImageSchema.MSG_FRAMESTART_1_FIELD_WIDTH_101, outputWidth);
                            PipeWriter.writeInt(outputs[i], ImageSchema.MSG_FRAMESTART_1_FIELD_HEIGHT_201, outputHeight);
                            PipeWriter.writeLong(outputs[i], ImageSchema.MSG_FRAMESTART_1_FIELD_TIMESTAMP_301, System.currentTimeMillis());
                            PipeWriter.writeInt(outputs[i], ImageSchema.MSG_FRAMESTART_1_FIELD_FRAMEBYTES_401, outputWidth * outputHeight);
                            PipeWriter.writeInt(outputs[i], ImageSchema.MSG_FRAMESTART_1_FIELD_BITSPERPIXEL_501, 8);

                            // Write encoding.
                            //TODO: Brandon: remove this switch, put the R_OUTPUT_ENCODING in array of 4 and look them up with encoding[i]
                            switch (i) {
                                case R_OUTPUT_IDX:
                                    PipeWriter.writeBytes(outputs[i], ImageSchema.MSG_FRAMESTART_1_FIELD_ENCODING_601, R_OUTPUT_ENCODING);
                                    break;
                                case G_OUTPUT_IDX:
                                    PipeWriter.writeBytes(outputs[i], ImageSchema.MSG_FRAMESTART_1_FIELD_ENCODING_601, G_OUTPUT_ENCODING);
                                    break;
                                case B_OUTPUT_IDX:
                                    PipeWriter.writeBytes(outputs[i], ImageSchema.MSG_FRAMESTART_1_FIELD_ENCODING_601, B_OUTPUT_ENCODING);
                                    break;
                                case MONO_OUTPUT_IDX:
                                    PipeWriter.writeBytes(outputs[i], ImageSchema.MSG_FRAMESTART_1_FIELD_ENCODING_601, MONO_OUTPUT_ENCODING);
                                    break;
                            }

                            PipeWriter.publishWrites(outputs[i]);
                        }
                    }

                    break;
                case ImageSchema.MSG_FRAMECHUNK_2:

                    // Calculate working frame sizes.
                    int inputFrameColumnsPerOutputColumn = imageFrameWidth / outputWidth; //TODO: Brandon: these invariants do not change per row and should be computed once be frame
                    int inputFrameRowsPerOutputFrameRow = imageFrameHeight / outputHeight; //TODO: Brandon: these invariants do not change per row and should be computed once be frame

                    // Determine row length.
                    int rowLength = PipeReader.readBytesLength(input, ImageSchema.MSG_FRAMECHUNK_2_FIELD_ROWBYTES_102);

                    // Prepare arrays if not already ready.
                    //TODO: Brandon: would be better to do at top of frame to avoid this conditional onevery row
                    if (imageFrameRowBytes == null || imageFrameRowBytes.length != rowLength) {
                        imageFrameRowBytes = new byte[rowLength];
                        imageFrameRowBytesDownsampled = new int[outputWidth * 3];
                    }

                    // Read bytes into array.
                    PipeReader.readBytes(input, ImageSchema.MSG_FRAMECHUNK_2_FIELD_ROWBYTES_102, imageFrameRowBytes, 0);

                    // Downsample frame width: sum each horizontal group of source
                    // pixels into one (R, G, B) int triple per output column.
                    int i = 0;
                    int k = 0;
                    for (int j = 0; j < imageFrameRowBytes.length; j += 3) {

                        // Add bytes to sum.
                        imageFrameRowBytesDownsampled[i] += imageFrameRowBytes[j] & 0xFF;
                        imageFrameRowBytesDownsampled[i + 1] += imageFrameRowBytes[j + 1] & 0xFF;
                        imageFrameRowBytesDownsampled[i + 2] += imageFrameRowBytes[j + 2] & 0xFF;
                        k++;

                        // If we have summed enough pixels for one cell, reset and progress to next cell.
                        if (k >= inputFrameColumnsPerOutputColumn) {
                            i += 3;
                            k = 0;
                        }
                    }

                    assert i == imageFrameRowBytesDownsampled.length;

                    // If we've summed enough frames to downsample height, generate an output frame.
                    imageFrameRowsReceived++;
                    if (imageFrameRowsReceived >= inputFrameRowsPerOutputFrameRow) {
                        imageFrameRowsReceived = 0;

                        // Divide image frames by total pixels per cell.
                        int inputPixelsPerOutputPixel = inputFrameColumnsPerOutputColumn * inputFrameRowsPerOutputFrameRow;
                        for (i = 0; i < imageFrameRowBytesDownsampled.length; i += 3) {
                            imageFrameRowBytesDownsampled[i] = imageFrameRowBytesDownsampled[i] / inputPixelsPerOutputPixel;
                            imageFrameRowBytesDownsampled[i + 1] = imageFrameRowBytesDownsampled[i + 1] / inputPixelsPerOutputPixel;
                            imageFrameRowBytesDownsampled[i + 2] = imageFrameRowBytesDownsampled[i + 2] / inputPixelsPerOutputPixel;
                        }

                        // Extract RGB and Mono channels.
                        i = 0;
                        for (int j = 0; j < imageFrameRowBytesDownsampled.length; j += 3) {
                            assert imageFrameRowBytesDownsampled[j] <= 255;
                            assert imageFrameRowBytesDownsampled[j + 1] <= 255;
                            assert imageFrameRowBytesDownsampled[j + 2] <= 255;

                            // Extract RGB channels.
                            imageFrameRowBytesR[i] = (byte) imageFrameRowBytesDownsampled[j];
                            imageFrameRowBytesG[i] = (byte) imageFrameRowBytesDownsampled[j + 1];
                            imageFrameRowBytesB[i] = (byte) imageFrameRowBytesDownsampled[j + 2];

                            // Average bytes into mono channel.
                            int temp = 0;
                            temp += imageFrameRowBytesDownsampled[j];
                            temp += imageFrameRowBytesDownsampled[j + 1];
                            temp += imageFrameRowBytesDownsampled[j + 2];
                            temp = temp / 3;
                            imageFrameRowBytesMono[i] = (byte) temp;

                            // Progress counter.
                            i++;
                        }

                        // Clear downsample bytes.
                        Arrays.fill(imageFrameRowBytesDownsampled, 0);

                        // Send channels to clients.
                        // TODO: Refactor so that if a try-write fails, the row will be written during the next time slice.
                        for (i = 0; i < outputs.length; i++) {
                            if (PipeWriter.tryWriteFragment(outputs[i], ImageSchema.MSG_FRAMECHUNK_2)) {
                                switch (i) { //TODO: Brandon: remove switch and lookup imageFrameRowBytesR from an array of 4 from xxxx[i] aproach.
                                    case R_OUTPUT_IDX:
                                        PipeWriter.writeBytes(outputs[i],
                                                ImageSchema.MSG_FRAMECHUNK_2_FIELD_ROWBYTES_102,
                                                imageFrameRowBytesR, 0, imageFrameRowBytesR.length);
                                        break;
                                    case G_OUTPUT_IDX:
                                        PipeWriter.writeBytes(outputs[i],
                                                ImageSchema.MSG_FRAMECHUNK_2_FIELD_ROWBYTES_102,
                                                imageFrameRowBytesG, 0, imageFrameRowBytesG.length);
                                        break;
                                    case B_OUTPUT_IDX:
                                        PipeWriter.writeBytes(outputs[i],
                                                ImageSchema.MSG_FRAMECHUNK_2_FIELD_ROWBYTES_102,
                                                imageFrameRowBytesB, 0, imageFrameRowBytesB.length);
                                        break;
                                    case MONO_OUTPUT_IDX:
                                        PipeWriter.writeBytes(outputs[i],
                                                ImageSchema.MSG_FRAMECHUNK_2_FIELD_ROWBYTES_102,
                                                imageFrameRowBytesMono, 0, imageFrameRowBytesMono.length);
                                        break;
                                }

                                PipeWriter.publishWrites(outputs[i]);
                            }
                        }
                    }

                    break;
            }

            PipeReader.releaseReadLock(input);
        }
    }
}
|
package com.paulhammant.buildradiator;
import com.paulhammant.buildradiator.model.*;
import org.jooby.Err;
import org.jooby.Jooby;
import org.jooby.Request;
import org.jooby.Response;
import org.jooby.json.Jackson;
@SuppressWarnings({"unchecked", "rawtypes"})
public class BuildRadiatorApp extends Jooby {
public static final String DEMO_RADIATOR_CODE = "ueeusvcipmtsb755uq";
public static final String NO_UPDATES = "NO-UPDATES";
protected final RadiatorStore radiatorStore;
{
String gae_appId = System.getenv("GCLOUD_PROJECT");
use(new Jackson());
if (gae_appId != null) {
radiatorStore = new RadiatorStore.BackedByGoogleCloudDataStore();
} else {
radiatorStore = new RadiatorStore();
}
before((req, rsp) -> {
try {
if (gae_appId != null && req.header("X-Forwarded-Proto").value().equals("http")) {
rsp.redirect("https://" + req.hostname() + req.path());
}
} catch (Throwable throwable) {
rsp.send(req.path()+ " (before) " + throwable.getMessage());
}
});
use("/r")
.get("/:radiatorCode/", this::getRadiatorByCode) // used by radiator.html
.post("/:radiatorCode/stepPassed", this::stepPassed)
.post("/:radiatorCode/stepPassedAndStartStep", this::stepPassedAndStartStep)
.post("/:radiatorCode/stepFailed", this::stepFailed)
.post("/:radiatorCode/startStep", this::startStep)
.post("/:radiatorCode/buildCancelled", this::buildCancelled)
.post("/create", this::createRadiator);
get("/_ah/health", () -> {
return "yup, am healthy, Google App Engine";
});
// Routes /_ah/start and /_ah/stop - not enabled on Flex containers
assets("/", "index.html");
assets("/robots.txt", "robots.txt");
assets("/moment.min.js", "moment.min.js");
assets("/moment-duration-format.js", "moment-duration-format.js");
assets("/vue.min.js", "vue.min.js");
serveRadiatorPage();
err(RadiatorDoesntExist.class, (req, rsp, err) -> {
rsp.status(200);
nothingHere(req, rsp);
});
err(BuildRadiatorException.class, (req, rsp, err) -> {
rsp.status(200);
rsp.send(err.getCause().getMessage());
});
err(Err.Missing.class, (req, rsp, err) -> {
rsp.status(200);
String message = err.getMessage();
rsp.send(message.substring(message.indexOf(":")+2));
});
err((req, rsp, err) -> {
rsp.status(200);
nothingHere(req, rsp);
});
err(404, (req, rsp, err) -> {
System.out.println(req.route() + " page missing from " + req.ip());
rsp.status(404);
rsp.send("");
});
err(405, (req, rsp, err) -> {
System.out.println(req.route() + " blocked from " + req.ip() + ", type:" + req.type());
rsp.status(404);
rsp.send("");
});
onStart(this::starterData);
}
protected void serveRadiatorPage() {
assets("/r/", "radiator.html");
// assets("/well-known/acme-challenge/xxx", "well-known/acme-challenge/xxx");
// assets("/.well-known/acme-challenge/xxx", "well-known/acme-challenge/xxx");
}
protected void getRadiatorByCode(Request req, Response rsp) throws Throwable {
String lastUpdated = req.header("lastUpdated").value("");
if (!lastUpdated.equals("")) {
lastUpdated = new Long(lastUpdated).toString(); // ensure is a number
}
String radiatorCode = getRadiatorCodeButVerifyParamFirst(req);
Radiator radiator = getResultsStore().get(radiatorCode, req.ip());
if (lastUpdated.equals("" + radiator.lastUpdated)) {
rsp.status(204);
return;
}
rsp.status(200).type("application/json").send(radiator.withoutSecret());
}
protected void startStep(Request req, Response rsp) throws Throwable {
rsp.type("text/plain");
String build = getBuildIdButVerifyParamFirst(req);
String step = getStepButVerifyParamFirst(req);
String radiatorCode = getRadiatorCodeButVerifyParamFirst(req);
String secret = getRadiatorSecretButVerifyParamFirst(req);
getResultsStore().get(radiatorCode, req.ip()).verifySecret(secret).startStep(build, step);
rsp.send("OK");
}
protected void stepPassed(Request req, Response rsp) throws Throwable {
rsp.type("text/plain");
String radiatorCode = getRadiatorCodeButVerifyParamFirst(req);
String build = getBuildIdButVerifyParamFirst(req);
String step = getStepButVerifyParamFirst(req);
String secret = getRadiatorSecretButVerifyParamFirst(req);
getResultsStore().get(radiatorCode, req.ip()).verifySecret(secret).stepPassed(build, step);
rsp.send("OK");
}
protected void stepPassedAndStartStep(Request req, Response rsp) throws Throwable {
rsp.type("text/plain");
String radiatorCode = getRadiatorCodeButVerifyParamFirst(req);
String build = getBuildIdButVerifyParamFirst(req);
String step = getStepButVerifyParamFirst(req);
String pStep = getPreviousStepButVerifyParamFirst(req);
String secret = getRadiatorSecretButVerifyParamFirst(req);
Radiator radiator = getResultsStore().get(radiatorCode, req.ip()).verifySecret(secret);
radiator.stepPassed(build, pStep);
radiator.startStep(build, step);
rsp.send("OK");
}
private String getRadiatorCodeButVerifyParamFirst(Request req) {
return getParamStringAndVerify(req, "radiatorCode", 21);
}
private String getBuildIdButVerifyParamFirst(Request req) {
return getParamStringAndVerify(req, "build", 12);
}
private String getRadiatorSecretButVerifyParamFirst(Request req) {
return getParamStringAndVerify(req, "secret", 12);
}
private String getStepButVerifyParamFirst(Request req) {
return getParamStringAndVerify(req, "step", 21);
}
private String getPreviousStepButVerifyParamFirst(Request req) {
return getParamStringAndVerify(req, "pStep", 21);
}
private String getParamStringAndVerify(Request req, String name, int len) {
return verifyNotTooLong(name, len, req.param(name).value());
}
private String verifyNotTooLong(String name, int len, String val) {
if (val.length() > len) {
throw new TooLong(name);
}
return val;
}
protected void stepFailed(Request req, Response rsp) throws Throwable {
rsp.type("text/plain");
String step = getStepButVerifyParamFirst(req);
String build = getBuildIdButVerifyParamFirst(req);
String radiatorCode = getRadiatorCodeButVerifyParamFirst(req);
String secret = getRadiatorSecretButVerifyParamFirst(req);
getResultsStore().get(radiatorCode, req.ip()).verifySecret(secret).stepFailed(build, step);
rsp.send("OK");
}
protected void buildCancelled(Request req, Response rsp) throws Throwable {
rsp.type("text/plain");
String radiatorCode = getRadiatorCodeButVerifyParamFirst(req);
String secret = getRadiatorSecretButVerifyParamFirst(req);
getResultsStore().get(radiatorCode, req.ip()).verifySecret(secret).cancel(req.param("build").value());
rsp.send("OK");
}
protected void createRadiator(Request req, Response rsp) throws Throwable {
rsp.type("text/plain");
String[] stepNames = req.param("stepNames").value()
.replace("_", " ").split(",");
for (String stepName : stepNames) {
verifyNotTooLong("a stepName", 21, stepName);
}
String[] ips = req.param("ips").value("").split(",");
if (ips.length == 1 && ips[0].equals("")) {
ips = new String[0];
}
rsp.type("application/json");
rsp.send(getResultsStore().createRadiator(require(RandomGenerator.class), stepNames)
.withIpAccessRestrictedToThese(ips).codeAndSecretOnly());
}
protected void nothingHere(Request req, Response rsp) throws Throwable {
rsp.type("application/json").send(new ErrorMessage().withEgressIpAddress(req.ip()));
}
public static class ErrorMessage {
public ErrorMessage() {
message = "nothing here";
}
public final String message;
public String egressIpAddress = "";
public ErrorMessage withEgressIpAddress(String ip) {
egressIpAddress = ip;
return this;
}
}
protected RadiatorStore getResultsStore() {
return radiatorStore;
}
private void starterData() {
RadiatorStore store = getResultsStore();
// c = compile
// u = unit tests
// i = integration tests
// f = functional tests
// p = package
// Recreate the demo radiator each boot of the stack.
Radiator radiator = store.createRadiator(new RandomGenerator() {
protected String generateRadiatorCode() {
return DEMO_RADIATOR_CODE;
}
@Override
protected String generateSecret() {
return NO_UPDATES;
}
}, "c", "u", "i", "f", "p");
radiator.startStep("111", "c");
radiator.stepPassed("111", "c");
radiator.builds.get(0).steps.get(0).dur = 31230;
radiator.startStep("111", "u");
radiator.stepPassed("111", "u");
radiator.builds.get(0).steps.get(1).dur = 46610;
radiator.startStep("111", "i");
radiator.stepPassed("111", "i");
radiator.builds.get(0).steps.get(2).dur = 120000;
radiator.startStep("111", "f");
radiator.stepPassed("111", "f");
radiator.builds.get(0).steps.get(3).dur = 180020;
radiator.startStep("111", "p");
radiator.stepPassed("111", "p");
radiator.builds.get(0).steps.get(4).dur = 22200;
radiator.builds.get(0).dur = 185000;
radiator.startStep("112", "c");
radiator.stepPassed("112", "c");
radiator.builds.get(0).steps.get(0).dur = 33300;
radiator.startStep("112", "u");
radiator.stepPassed("112", "u");
radiator.builds.get(0).steps.get(1).dur = 45500;
radiator.startStep("112", "i");
radiator.stepPassed("112", "i");
radiator.builds.get(0).steps.get(2).dur = 123400;
radiator.startStep("112", "f");
radiator.stepPassed("112", "f");
radiator.builds.get(0).steps.get(3).dur = 173000;
radiator.startStep("112", "p");
radiator.stepPassed("112", "p");
radiator.builds.get(0).steps.get(4).dur = 21100;
radiator.builds.get(0).dur = 215300;
radiator.startStep("113", "c");
radiator.stepPassed("113", "c");
radiator.builds.get(0).steps.get(0).dur = 31000;
radiator.startStep("113", "u");
radiator.stepFailed("113", "u");
radiator.builds.get(0).steps.get(1).dur = 42600;
radiator.builds.get(0).dur = 68300;
radiator.startStep("114", "c");
radiator.stepPassed("114", "c");
radiator.builds.get(0).steps.get(0).dur = 800;
radiator.startStep("114", "u");
radiator.stepPassed("114", "u");
radiator.builds.get(0).steps.get(1).dur = 42600;
radiator.startStep("114", "i");
radiator.builds.get(0).dur = 43300;
}
}
|
package com.redhat.ceylon.compiler.js;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Set;
import com.redhat.ceylon.cmr.api.RepositoryManager;
import com.redhat.ceylon.cmr.api.ModuleQuery;
import com.redhat.ceylon.cmr.api.ModuleVersionDetails;
import com.redhat.ceylon.cmr.ceylon.RepoUsingTool;
import com.redhat.ceylon.common.ModuleUtil;
import com.redhat.ceylon.common.Versions;
import com.redhat.ceylon.common.config.DefaultToolOptions;
import com.redhat.ceylon.common.tool.Argument;
import com.redhat.ceylon.common.tool.Description;
import com.redhat.ceylon.common.tool.Option;
import com.redhat.ceylon.common.tool.OptionArgument;
import com.redhat.ceylon.common.tool.RemainingSections;
import com.redhat.ceylon.common.tool.Rest;
import com.redhat.ceylon.common.tool.Summary;
/**
 * `ceylon test-js` command-line tool: resolves the test-runner module and the
 * modules under test, then delegates execution to {@link CeylonRunJsTool}.
 */
@Summary("Executes tests")
@Description(
        "Executes tests in specified `<modules>`. " +
        "The `<modules>` arguments are the names of the modules to test with an optional version.")
@RemainingSections(
        "## Configuration file" +
        "\n\n" +
        "The test-js tool accepts the following option from the Ceylon configuration file: " +
        "`testtool.compile` " +
        "(the equivalent option on the command line always has precedence)." +
        // FIX: the "## EXAMPLE" heading was fused onto the previous sentence;
        // markdown headings need a blank line before them.
        "\n\n" +
        "## EXAMPLE" +
        "\n\n" +
        "The following would execute tests in the `com.example.foobar` module:" +
        "\n\n" +
        "    ceylon test-js com.example.foobar/1.0.0")
public class CeylonTestJsTool extends RepoUsingTool {

    private static final String TEST_MODULE_NAME = "com.redhat.ceylon.testjs";
    private static final String TEST_RUN_FUNCTION = "com.redhat.ceylon.testjs.run";

    private List<String> moduleNameOptVersionList;
    private List<String> testList;
    private List<String> argumentList;
    private String version;       // version of the test-runner module, not of the modules under test
    private String compileFlags;
    private String nodeExe;
    private boolean debug = true;
    private boolean tap = false;

    public CeylonTestJsTool() {
        super(CeylonRunJsMessages.RESOURCE_BUNDLE);
    }

    @Override
    public void initialize() throws Exception {
        // noop
    }

    @Argument(argumentName = "modules", multiplicity = "+")
    public void setModules(List<String> moduleNameOptVersionList) {
        this.moduleNameOptVersionList = moduleNameOptVersionList;
    }

    @OptionArgument(longName = "test", argumentName = "test")
    @Description("Specifies which tests will be run.")
    public void setTests(List<String> testList) {
        this.testList = testList;
    }

    @Option
    @OptionArgument(argumentName = "flags")
    @Description("Determines if and how compilation should be handled. Allowed flags include: `never`, `once`, `force`, `check`.")
    public void setCompile(String compile) {
        this.compileFlags = compile;
    }

    @OptionArgument(argumentName = "version")
    @Description("Specifies which version of the test module to use, defaults to " + Versions.CEYLON_VERSION_NUMBER + ".")
    public void setVersion(String version) {
        this.version = version;
    }

    @OptionArgument(argumentName = "node-exe")
    @Description("The path to the node.js executable. Will be searched in standard locations if not specified.")
    public void setNodeExe(String nodeExe) {
        this.nodeExe = nodeExe;
    }

    @OptionArgument(argumentName = "debug")
    @Description("Shows more detailed output in case of errors.")
    public void setDebug(boolean debug) {
        this.debug = debug;
    }

    @Option(longName = "tap")
    @Description("Enables the Test Anything Protocol v13.")
    public void setTap(boolean tap) {
        this.tap = tap;
    }

    @Rest
    public void setArgs(List<String> argumentList) {
        this.argumentList = argumentList;
    }

    /**
     * Resolves the test-runner version (latest available, falling back to the
     * current Ceylon version), builds the argument list for the runner
     * ({@code __module}, {@code __test}, {@code __tap} markers plus pass-through
     * args), then configures and runs a {@link CeylonRunJsTool}.
     */
    @Override
    public void run() throws Exception {
        final List<String> args = new ArrayList<String>();
        final List<String> moduleAndVersionList = new ArrayList<String>();

        // Pick the newest published test-runner module unless --version was given.
        if (version == null) {
            Collection<ModuleVersionDetails> versions = getModuleVersions(
                    getRepositoryManager(),
                    TEST_MODULE_NAME,
                    null,
                    ModuleQuery.Type.JS,
                    Versions.JS_BINARY_MAJOR_VERSION,
                    Versions.JS_BINARY_MINOR_VERSION);
            if (versions == null || versions.isEmpty()) {
                version = Versions.CEYLON_VERSION_NUMBER;
            } else {
                ModuleVersionDetails mdv = versions.toArray(new ModuleVersionDetails[] {})[versions.size() - 1];
                version = mdv.getVersion();
            }
        }

        if (moduleNameOptVersionList != null) {
            for (String moduleNameOptVersion : moduleNameOptVersionList) {
                String moduleAndVersion = resolveModuleAndVersion(moduleNameOptVersion);
                if (moduleAndVersion == null) {
                    // Resolution already printed suggestions; abort quietly.
                    return;
                }
                args.add("__module");
                args.add(moduleAndVersion);
                moduleAndVersionList.add(moduleAndVersion);
            }
        }

        if (testList != null) {
            for (String test : testList) {
                args.add("__test");
                args.add(test);
            }
        }

        if (argumentList != null) {
            args.addAll(argumentList);
        }

        // Compile policy: config file value, else "never"; bare --compile means "once".
        if (compileFlags == null) {
            compileFlags = DefaultToolOptions.getTestToolCompileFlags();
            if (compileFlags.isEmpty()) {
                compileFlags = COMPILE_NEVER;
            }
        } else if (compileFlags.isEmpty()) {
            compileFlags = COMPILE_ONCE;
        }

        if (tap) {
            args.add("__tap");
        }

        CeylonRunJsTool ceylonRunJsTool = new CeylonRunJsTool() {
            @Override
            protected void customizeDependencies(Set<File> localRepos, RepositoryManager repoman) throws IOException {
                // Make each module under test (and its dependencies) visible to node.
                for (String moduleAndVersion : moduleAndVersionList) {
                    String modName = ModuleUtil.moduleName(moduleAndVersion);
                    String modVersion = ModuleUtil.moduleVersion(moduleAndVersion);
                    File artifact = getArtifact(modName, modVersion, repoman);
                    localRepos.add(getRepoDir(modName, artifact));
                    loadDependencies(localRepos, repoman, artifact);
                }
            }
        };
        ceylonRunJsTool.setModuleVersion(TEST_MODULE_NAME + "/" + version);
        ceylonRunJsTool.setRun(TEST_RUN_FUNCTION);
        ceylonRunJsTool.setArgs(args);
        ceylonRunJsTool.setRepository(repo);
        ceylonRunJsTool.setSystemRepository(systemRepo);
        ceylonRunJsTool.setCacheRepository(cacheRepo);
        ceylonRunJsTool.setMavenOverrides(mavenOverrides);
        ceylonRunJsTool.setNoDefRepos(noDefRepos);
        ceylonRunJsTool.setOffline(offline);
        ceylonRunJsTool.setVerbose(verbose);
        ceylonRunJsTool.setNodeExe(nodeExe);
        ceylonRunJsTool.setDebug(debug);
        ceylonRunJsTool.setDefine(defines);
        ceylonRunJsTool.setCompile(compileFlags);
        ceylonRunJsTool.setCwd(cwd);
        ceylonRunJsTool.run();
    }

    /**
     * Resolves "name" or "name/version" to a concrete "name/version",
     * or null when no usable version exists (suggestions already shown).
     */
    private String resolveModuleAndVersion(String moduleNameOptVersion) throws IOException {
        String modName = ModuleUtil.moduleName(moduleNameOptVersion);
        String modVersion = ModuleUtil.moduleVersion(moduleNameOptVersion);

        modVersion = checkModuleVersionsOrShowSuggestions(
                getRepositoryManager(),
                modName,
                modVersion,
                ModuleQuery.Type.JS,
                Versions.JS_BINARY_MAJOR_VERSION,
                Versions.JS_BINARY_MINOR_VERSION,
                compileFlags);

        return modVersion != null ? modName + "/" + modVersion : null;
    }
}
|
package com.rox.emu.p6502.dbg.ui.component;
import java.awt.*;
/**
 * A {@link ByteBox} that renders a status register: every bit cell shows the
 * flag's identifying character, drawn black when the bit is set and light
 * gray when it is clear.
 */
public class FlagByteBox extends ByteBox {
    /** One identifying character per bit, in the same order as the bit values. */
    private final char[] flagIDs;

    public FlagByteBox(String byteName, int initialValue, char[] flagIDs) {
        super(byteName, initialValue);
        this.flagIDs = flagIDs;
    }

    @Override
    protected void paintBits(Graphics g, int startX, int startY, char[] bitValues) {
        for (int bit = 0; bit < 8; bit++) {
            // Highlight set flags; dim the rest.
            final Color flagColor = (bitValues[bit] == '1') ? Color.black : Color.lightGray;
            g.setColor(flagColor);
            paintBit(g, startX + (bit * bitSize), startY, flagIDs[bit]);
        }
    }

    @Override
    protected void paintBit(Graphics g, int startX, int startY, char val){
        final int padding = 5;
        // Draw the flag character in whatever colour the caller selected...
        g.setFont(new Font("Courier New", Font.PLAIN, bitFontSize));
        g.drawString(String.valueOf(val), startX + padding, startY + (bitSize - padding));
        // ...then outline the bit cell in light gray.
        g.setColor(Color.lightGray);
        g.drawRect(startX, startY, bitSize, bitSize);
    }
}
|
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// copies or substantial portions of the Software.
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package com.snakybo.torch.util.callback;
import com.snakybo.torch.object.Component;
import java.util.HashSet;
import java.util.Set;
public final class TorchCallbacks
{
    /**
     * <p>
     * Callback handler.
     * </p>
     *
     * <p>
     * Keeps track of registered callbacks.
     * </p>
     *
     * @param <T> The callback type.
     */
    public static final class Callback<T>
    {
        // Backing store for registered listeners; duplicates are ignored.
        private final Set<T> callbacks;

        private Callback()
        {
            callbacks = new HashSet<>();
        }

        /**
         * <p>
         * Add a listener to the callback, the {@code callback} will receive events
         * for as long as it remains subscribed.
         * </p>
         *
         * <p>
         * <strong>Note:</strong> If the {@code callback} is a method in a {@link Component},
         * it will receive callbacks even after the {@code Component} has been destroyed.
         * <strong>Make sure to un-subscribe from the callback.</strong>
         * </p>
         *
         * @param callback The callback method.
         */
        public final void addListener(T callback)
        {
            callbacks.add(callback);
        }

        /**
         * <p>
         * Remove a listener from the callback.
         * </p>
         *
         * @param callback The callback method.
         */
        public final void removeListener(T callback)
        {
            callbacks.remove(callback);
        }

        /**
         * <p>
         * Remove all listeners from the callback.
         * </p>
         */
        public final void removeAllListeners()
        {
            callbacks.clear();
        }

        /**
         * <p>
         * Get the currently registered callbacks.
         * </p>
         *
         * <p>
         * A defensive snapshot is returned so listeners may subscribe or
         * un-subscribe while the callbacks are being dispatched without causing
         * a {@link java.util.ConcurrentModificationException}. (The previous
         * implementation returned the live set, which failed exactly when a
         * listener followed the class's own advice and un-subscribed from
         * within its callback.)
         * </p>
         *
         * @return A snapshot of the registered callbacks.
         */
        public final Iterable<T> getCallbacks()
        {
            return new HashSet<>(callbacks);
        }
    }

    /**
     * <p>
     * The character pressed callback, this event will be fired when any character has been pressed while the
     * window has focus.
     * </p>
     */
    public static final Callback<ICharPressedCallback> onCharPressed = new Callback<>();

    /**
     * <p>
     * The cursor enter/exit callback, this event will be fired when the cursor enters or exits the
     * window.
     * </p>
     */
    public static final Callback<ICursorEnterCallback> onCursorEnter = new Callback<>();

    /**
     * <p>
     * The window focus callback, this event will be fired when the window either gains, or loses focus.
     * </p>
     */
    public static final Callback<IWindowFocusCallback> onWindowFocus = new Callback<>();

    /**
     * <p>
     * The window iconify/restore callback, this event will be fired when the window has been iconified, or restored.
     * </p>
     */
    public static final Callback<IWindowIconifyCallback> onWindowIconify = new Callback<>();

    private TorchCallbacks()
    {
        // Static holder class; never instantiated.
        throw new AssertionError();
    }
}
|
package com.vitalize.atg.dynadmin;
import atg.adapter.gsa.GSAAdminServlet;
import atg.adapter.gsa.GSARepository;
import atg.nucleus.Nucleus;
import atg.nucleus.logging.ApplicationLogging;
import javax.servlet.ServletException;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.transaction.TransactionManager;
import java.io.IOException;
import java.util.Scanner;
/**
* Puts some helper buttons around that RQL query box
*/
public class SwoleGSAAdminServlet extends GSAAdminServlet {

    /** Repository being administered; used to list its item descriptor names. */
    private transient final GSARepository repo;

    //The strategy used here is to sniff for the output we want to inject our stuff right before
    private static final String RQL_TEXT_AREA_MARKUP = "<p><textarea rows=\"12\" cols=\"80\" name=\"xmltext\">";
    private static final String WIDER_TALLER_RQL_TEXT_AREA_MARKUP = "<p><textarea rows=\"20\" cols=\"160\" name=\"xmltext\">";

    private static final String[] RQL_ACTION_TYPES = {
        "query-items",
        "add-item",
        "remove-item",
        "update-item"
    };

    //TODO: make this an external script?
    private static final String RQL_TOOLBAR_SCRIPT = loadToolbarScript();

    /**
     * Reads the toolbar JavaScript from the classpath. Uses try-with-resources so
     * the Scanner (and the underlying resource stream) is closed; the previous
     * inline initializer leaked both.
     *
     * @return the full contents of SwoleGSAAdminServlet.js
     */
    private static String loadToolbarScript() {
        try (Scanner scanner = new Scanner(
                GSAAdminServlet.class.getClassLoader().getResourceAsStream("com/vitalize/atg/dynadmin/SwoleGSAAdminServlet.js"),
                "UTF-8"
        )) {
            // "\\A" delimiter slurps the whole stream in one token.
            return scanner.useDelimiter("\\A").next();
        }
    }

    public SwoleGSAAdminServlet(GSARepository pService, ApplicationLogging pLogger, Nucleus pNucleus, TransactionManager pTransactionManager) {
        super(pService, pLogger, pNucleus, pTransactionManager);
        this.repo = pService;
    }

    /**
     * Writes the RQL helper toolbar markup: the toolbar script, a docs link, and
     * selects for the repository's item descriptors and the supported RQL actions.
     *
     * @param o the response output stream
     * @throws IOException if writing fails
     */
    private void outputRQLToolbar(ServletOutputStream o) throws IOException {
        o.println("<script>");
        o.println(RQL_TOOLBAR_SCRIPT);
        o.println("</script>");
        o.println("<div>");
        o.println("<a href=\"https://docs.oracle.com/cd/E24152_01/Platform.10-1/ATGRepositoryGuide/html/s0305rqloverview01.html\" target=\"_blank\">Y U FORGET RQL?</a>");
        o.println("</div>");
        o.println("<div>");
        o.println("<select id=\"RQL_ITEM_TYPE\" style=\"width: 20%\">");
        for(String d : repo.getItemDescriptorNames()){
            o.println("<option>" + d + "</option>");
        }
        o.println("</select>");
        o.println("<select id=\"RQL_ACTION_TYPE\" style=\"width: 10%\">");
        for(String action : RQL_ACTION_TYPES){
            o.println("<option>" + action + "</option>");
        }
        o.println("</select>");
        o.println("<button onclick=\"rqlAdd();return false;\">Add</button>");
        o.println("<button onclick=\"rqlClear();return false;\">Clear</button>");
        o.println("<input type=\"submit\" value=\"Go\"/>");
        o.println("<br/>");
        o.println("</div>");
    }

    /**
     * Overrides the printAdmin and injects and RQL toolbar above the RQL box. Also makes the RQL box a little bigger by default.
     * @param req
     * @param res
     * @param out
     * @throws ServletException
     * @throws IOException
     */
    @Override
    protected void printAdmin(
        HttpServletRequest req,
        HttpServletResponse res,
        final ServletOutputStream out
    ) throws ServletException, IOException {
        printAdminInternal(
            req,
            res,
            // Intercept every println; when the stock RQL textarea line comes
            // through, emit the toolbar plus a larger textarea instead.
            new DelegatingServletOutputStream(out){
                @Override
                public void println(String s) throws IOException {
                    if(RQL_TEXT_AREA_MARKUP.equals(s)) {
                        outputRQLToolbar(out);
                        //While we're at it...make that a bit bigger
                        out.println(WIDER_TALLER_RQL_TEXT_AREA_MARKUP);
                    } else {
                        //otherwise just pass it on through
                        out.println(s);
                    }
                }
            }
        );
    }

    /**
     * This is just here for testing so we can mock up the super interface
     * @param req
     * @param res
     * @param out
     * @throws ServletException
     * @throws IOException
     */
    protected void printAdminInternal(
        HttpServletRequest req,
        HttpServletResponse res,
        ServletOutputStream out
    ) throws ServletException, IOException {
        super.printAdmin(
            req,
            res,
            out
        );
    }

    /**
     * Injects jQuery and select2 assets before the standard style block.
     */
    @Override
    protected void insertStyle(
        HttpServletRequest req,
        HttpServletResponse res,
        ServletOutputStream out
    ) throws ServletException, IOException {
        //add jquery
        out.println("<script src=\"https://code.jquery.com/jquery-1.12.4.min.js\" integrity=\"sha256-ZosEbRLbNQzLpnKIkEdrPv7lOy9C27hHQ+Xp8a4MxAQ=\" crossorigin=\"anonymous\"></script>");
        //add select2
        out.println("<link href=\"https://cdnjs.cloudflare.com/ajax/libs/select2/4.0.3/css/select2.min.css\" rel=\"stylesheet\" />");
        out.println("<script src=\"https://cdnjs.cloudflare.com/ajax/libs/select2/4.0.3/js/select2.min.js\"></script>");
        super.insertStyle(req, res, out);
    }
}
|
package de.otto.rx.composer.providers;
import com.damnhandy.uri.template.UriTemplate;
import com.google.common.collect.ImmutableList;
import de.otto.rx.composer.client.ServiceClient;
import de.otto.rx.composer.content.Content;
import de.otto.rx.composer.content.IndexedContent;
import de.otto.rx.composer.content.Parameters;
import de.otto.rx.composer.content.Position;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import rx.Observable;
import java.util.function.Predicate;
import static com.google.common.base.Preconditions.checkNotNull;
import static de.otto.rx.composer.content.ContentMatcher.contentMatcher;
import static java.util.Comparator.comparingInt;
import static rx.Observable.just;
public final class ContentProviders {

    private static final Logger LOG = LoggerFactory.getLogger(ContentProviders.class);

    // Static-factory holder; never instantiated.
    private ContentProviders() {}

    /**
     * Creates a {@link HttpContentProvider} that is using the specified {@code serviceClient} to fetch the content
     * from the specified {@code url}.
     *
     * @param serviceClient ServiceClient used to get content
     * @param url the URL of the requested service
     * @param accept media type of the accepted content.
     * @return ContentProvider
     */
    public static ContentProvider contentFrom(final ServiceClient serviceClient,
                                              final String url,
                                              final String accept) {
        return new HttpContentProvider(serviceClient, url, accept, null);
    }

    /**
     * Creates a {@link HttpContentProvider} that is using the specified {@code serviceClient} to fetch the content
     * from the specified {@code url}, falling back to the content returned by the {@code fallback} provider, if
     * the primary content is not available.
     *
     * @param serviceClient ServiceClient used to get content
     * @param url the URL of the requested service
     * @param accept media type of the accepted content.
     * @param fallback ContentProvider used as a fallback, if execution is failing with an exception of HTTP server error.
     * @return ContentProvider
     */
    public static ContentProvider contentFrom(final ServiceClient serviceClient,
                                              final String url,
                                              final String accept,
                                              final ContentProvider fallback) {
        return new HttpContentProvider(serviceClient, url, accept, fallback);
    }

    /**
     * Creates a {@link HttpContentProvider} that is using the specified {@code serviceClient} to fetch the content
     * from an URL that is created from {@code uriTemplate} and {@link Parameters}.
     *
     * @param serviceClient ServiceClient used to get content
     * @param uriTemplate the URI template used to create the service URL. The {@link de.otto.rx.composer.content.Parameters}
     *                    param of {@link ContentProvider#getContent(Position, Parameters)} is used to fill the
     *                    template variables.
     * @param accept media type of the accepted content.
     * @return ContentProvider
     */
    public static ContentProvider contentFrom(final ServiceClient serviceClient,
                                              final UriTemplate uriTemplate,
                                              final String accept) {
        return new HttpContentProvider(serviceClient, uriTemplate, accept, null);
    }

    /**
     * Creates a {@link HttpContentProvider} that is using the specified {@code serviceClient} to fetch the content
     * from an URL that is created from {@code uriTemplate} and {@link Parameters}.
     *
     * @param serviceClient ServiceClient used to get content
     * @param uriTemplate the URI template used to create the service URL. The {@link de.otto.rx.composer.content.Parameters}
     *                    param of {@link ContentProvider#getContent(Position, Parameters)} is used to fill the
     *                    template variables.
     * @param accept media type of the accepted content.
     * @param fallback ContentProvider used as a fallback, if execution is failing with an exception of HTTP server error.
     * @return ContentProvider
     */
    public static ContentProvider contentFrom(final ServiceClient serviceClient,
                                              final UriTemplate uriTemplate,
                                              final String accept,
                                              final ContentProvider fallback) {
        return new HttpContentProvider(serviceClient, uriTemplate, accept, fallback);
    }

    /**
     * Returns the given provider unchanged; exists purely so fallback wiring
     * reads fluently at the call site.
     *
     * @param fallbackProvider the provider to use as a fallback
     * @return ContentProvider
     */
    public static ContentProvider fallbackTo(final ContentProvider fallbackProvider) {
        return fallbackProvider;
    }

    /**
     * Creates a ContentProvider that ignores position and parameters and always
     * returns the given observable.
     *
     * @param observable the observable content used as a fallback
     * @return ContentProvider
     */
    public static ContentProvider fallbackTo(final Observable<Content> observable) {
        return (position, parameters) -> observable;
    }

    /**
     * Creates a ContentProvider that always emits the given fixed content.
     *
     * @param fallbackContent the static fallback content
     * @return ContentProvider
     */
    public static ContentProvider fallbackTo(final Content fallbackContent) {
        return (position, parameters) -> just(fallbackContent);
    }

    /**
     * Returns the given provider unchanged; exists purely for readable DSL-style
     * call sites.
     *
     * @param contentProvider the single provider for a position
     * @return ContentProvider
     */
    public static ContentProvider withSingle(final ContentProvider contentProvider) {
        return contentProvider;
    }

    /**
     * Fetch the {@link Content#isAvailable() available and non-empty} content from the quickest-responding
     * ContentProviders.
     * <p>
     * This method can be used to implement a "Fan Out and Quickest Wins" pattern, if there
     * are multiple possible providers and 'best performance' is most important.
     * </p>
     *
     * @param contentProviders the ContentProviders.
     * @return QuickestWinsContentProvider
     */
    public static ContentProvider withQuickest(final ImmutableList<ContentProvider> contentProviders) {
        // Null-check kept consistent with the varargs withAll() factory.
        checkNotNull(contentProviders, "Parameter must not be null");
        return new QuickestWinsContentProvider(
                contentProviders,
                contentMatcher(Content::isAvailable, "No content available"));
    }

    /**
     * Fetch the first {@link Content#isAvailable() available and non-empty} content from the given ContentProviders.
     * <p>
     * Use this, if you need a prioritized list of providers, where you would prefer the content from the first
     * provider over content coming from the second, and so on.
     * </p>
     *
     * @param contentProviders list of content providers, where the first entries are more important than following entries.
     * @return OneOfManyContentProvider
     */
    public static ContentProvider withFirst(final ImmutableList<ContentProvider> contentProviders) {
        checkNotNull(contentProviders, "Parameter must not be null");
        return new SelectingContentProvider(
                contentProviders,
                contentMatcher(Content::isAvailable, "No content available"),
                comparingInt(IndexedContent::getIndex),
                1);
    }

    /**
     * Fetch the first {@link Content#isAvailable() available and non-empty} content from the given ContentProviders
     * that is matching the specified predicate.
     * <p>
     * Use this, if you need a prioritized list of providers, where you would prefer the content from the first
     * provider over content coming from the second, and so on.
     * </p>
     *
     * @param predicate the predicate used to match the contents
     * @param contentProviders list of content providers, where the first entries are more important than following entries.
     * @return OneOfManyContentProvider
     */
    public static ContentProvider withFirstMatching(final Predicate<Content> predicate,
                                                    final ImmutableList<ContentProvider> contentProviders) {
        checkNotNull(contentProviders, "Parameter must not be null");
        return new SelectingContentProvider(
                contentProviders,
                contentMatcher(predicate.and(Content::isAvailable), "No content available"),
                comparingInt(IndexedContent::getIndex),
                1);
    }

    /**
     * Fetch contents from all the given providers for a single position. The {@link Content} returned by this
     * provider is a composite Content, consisting of all single Contents from the providers in the same order as
     * specified.
     *
     * @param contentProviders the providers used to generate the composite Content. The ordering of the content
     *                         providers is used to order the composite contents.
     * @return ContentProvider
     */
    public static ContentProvider withAll(final ContentProvider... contentProviders) {
        checkNotNull(contentProviders, "Parameter must not be null");
        return withAll(ImmutableList.copyOf(contentProviders));
    }

    /**
     * Fetch contents from all the given providers for a single position. The {@link Content} returned by this
     * provider is a composite Content, consisting of all single Contents from the providers in the same order as
     * specified.
     *
     * @param contentProviders the providers used to generate the composite Content. The ordering of the content
     *                         providers is used to order the composite contents.
     * @return ContentProvider
     */
    public static ContentProvider withAll(final ImmutableList<ContentProvider> contentProviders) {
        checkNotNull(contentProviders, "Parameter must not be null");
        return new SelectingContentProvider(
                contentProviders,
                contentMatcher(Content::isAvailable, "No content available"),
                comparingInt(IndexedContent::getIndex),
                contentProviders.size());
    }

    /**
     * Fetch all contents matching the given predicate from all the given providers for a single position.
     * The {@link Content} returned by this provider is a composite Content, consisting of all single Contents
     * from the providers in the same order as specified.
     *
     * @param predicate the predicate used to match the contents
     * @param contentProviders the providers used to generate the composite Content. The ordering of the content
     *                         providers is used to order the composite contents.
     * @return ContentProvider
     */
    public static ContentProvider withAllMatching(final Predicate<Content> predicate, final ImmutableList<ContentProvider> contentProviders) {
        checkNotNull(contentProviders, "Parameter must not be null");
        return new SelectingContentProvider(
                contentProviders,
                contentMatcher(predicate.and(Content::isAvailable), "No content available"),
                comparingInt(IndexedContent::getIndex),
                contentProviders.size());
    }
}
|
package edu.hm.hafner.analysis.parser;
import java.util.Optional;
import java.util.stream.Stream;
import org.json.JSONArray;
import org.json.JSONObject;
import org.json.JSONTokener;
import edu.hm.hafner.analysis.Issue;
import edu.hm.hafner.analysis.IssueBuilder;
import edu.hm.hafner.analysis.IssueParser;
import edu.hm.hafner.analysis.ParsingCanceledException;
import edu.hm.hafner.analysis.ParsingException;
import edu.hm.hafner.analysis.ReaderFactory;
import edu.hm.hafner.analysis.Report;
import edu.hm.hafner.analysis.Severity;
/**
* A parser for {@code rustc} compiler messages in the JSON format emitted by {@code cargo check --message-format
* json}.
*
* @author Gary Tierney
*/
public class CargoCheckParser extends IssueParser {
    private static final long serialVersionUID = 7953467739178377581L;

    /** The {@link #REASON} associated with messages that have code analysis information. */
    private static final String ANALYSIS_MESSAGE_REASON = "compiler-message";

    /** Top-level key indicating the reason for a message to be emitted, we only care about compiler-message. */
    private static final String REASON = "reason";

    /** Top-level key containing the code analysis message. */
    private static final String MESSAGE = "message";

    /** Key for {@code message.code}, an object containing the message category. */
    private static final String MESSAGE_CODE = "code";

    /** Key for {@code message.code.code}, a string representation of the message category. */
    private static final String MESSAGE_CODE_CATEGORY = "code";

    /** Key for {@code message.rendered}, the rendered string representation of the message. */
    private static final String MESSAGE_RENDERED = "message";

    /** Key for {@code message.level}, the string representation of the message severity. */
    private static final String MESSAGE_LEVEL = "level";

    /** Key for {@code message.spans}, an array of message location information. */
    private static final String MESSAGE_SPANS = "spans";

    /** Key for {@code message.spans.is_primary}, a boolean indicating if this is the primary error location". */
    private static final String MESSAGE_SPAN_IS_PRIMARY = "is_primary";

    /** Key for {@code message.spans.file_name}, a relative path to the file the message was emitted for. */
    private static final String MESSAGE_SPAN_FILE_NAME = "file_name";

    /** Key for {@code message.spans.line_start}, the line number where the associated code starts. */
    private static final String MESSAGE_SPAN_LINE_START = "line_start";

    /** Key for {@code message.spans.line_end}, the line number where the associated code ends. */
    private static final String MESSAGE_SPAN_LINE_END = "line_end";

    /** Key for {@code message.spans.column_start}, the column number where the associated code starts. */
    private static final String MESSAGE_SPAN_COLUMN_START = "column_start";

    /** Key for {@code message.spans.column_end}, the column number where the associated code ends. */
    private static final String MESSAGE_SPAN_COLUMN_END = "column_end";

    @Override
    public Report parse(final ReaderFactory readerFactory) throws ParsingException, ParsingCanceledException {
        Report report = new Report();
        // Each line of cargo's JSON output is a standalone event object.
        try (Stream<String> lines = readerFactory.readStream()) {
            lines.forEach(line -> {
                JSONObject event = (JSONObject) new JSONTokener(line).nextValue();
                extractIssue(event).ifPresent(report::add);
            });
        }
        return report;
    }

    /**
     * Extract the compiler message from a cargo event if any is present.
     *
     * @param object
     *         A cargo event that may contain a compiler message.
     *
     * @return a built {@link Issue} object if any was present.
     */
    private Optional<Issue> extractIssue(final JSONObject object) {
        // Only "compiler-message" events carry analysis information.
        if (!ANALYSIS_MESSAGE_REASON.equals(object.getString(REASON))) {
            return Optional.empty();
        }
        JSONObject message = object.getJSONObject(MESSAGE);
        // Messages without a code object have no category and are skipped.
        if (message.isNull(MESSAGE_CODE)) {
            return Optional.empty();
        }
        String category = message.getJSONObject(MESSAGE_CODE).getString(MESSAGE_CODE_CATEGORY);
        String renderedMessage = message.getString(MESSAGE_RENDERED);
        Severity severity = Severity.guessFromString(message.getString(MESSAGE_LEVEL));
        return parseDetails(message)
                .map(details -> new IssueBuilder()
                        .setFileName(details.fileName)
                        .setLineStart(details.lineStart)
                        .setLineEnd(details.lineEnd)
                        .setColumnStart(details.columnStart)
                        .setColumnEnd(details.columnEnd)
                        .setCategory(category)
                        .setMessage(renderedMessage)
                        .setSeverity(severity)
                        .build());
    }

    /**
     * Finds the first primary span and converts it into location details.
     *
     * @param message the compiler message containing the spans array
     * @return location details of the primary span, if one exists
     */
    private Optional<CompilerMessageDetails> parseDetails(final JSONObject message) {
        JSONArray spans = message.getJSONArray(MESSAGE_SPANS);
        for (int index = 0; index < spans.length(); index++) {
            JSONObject span = spans.getJSONObject(index);
            if (!span.getBoolean(MESSAGE_SPAN_IS_PRIMARY)) {
                continue;
            }
            return Optional.of(new CompilerMessageDetails(
                    span.getString(MESSAGE_SPAN_FILE_NAME),
                    span.getInt(MESSAGE_SPAN_LINE_START),
                    span.getInt(MESSAGE_SPAN_LINE_END),
                    span.getInt(MESSAGE_SPAN_COLUMN_START),
                    span.getInt(MESSAGE_SPAN_COLUMN_END)));
        }
        return Optional.empty();
    }

    /**
     * A simplified representation of a primary {@code span} object in the {@code message.spans} an array.
     */
    private static final class CompilerMessageDetails {
        private final String fileName;
        private final int lineStart;
        private final int lineEnd;
        private final int columnStart;
        private final int columnEnd;

        CompilerMessageDetails(final String fileName, final int lineStart, final int lineEnd, final int columnStart,
                final int columnEnd) {
            this.fileName = fileName;
            this.lineStart = lineStart;
            this.lineEnd = lineEnd;
            this.columnStart = columnStart;
            this.columnEnd = columnEnd;
        }
    }
}
|
package edu.ucar.unidata.rosetta.dsg;
import edu.ucar.unidata.rosetta.domain.AsciiFile;
import edu.ucar.unidata.rosetta.dsg.util.DateTimeBluePrint;
import org.apache.commons.io.FilenameUtils;
import org.apache.log4j.Logger;
import ucar.ma2.*;
import ucar.nc2.Attribute;
import ucar.nc2.Dimension;
import ucar.nc2.NetcdfFileWriter;
import ucar.nc2.Variable;
import java.io.File;
import java.io.IOException;
import java.util.*;
/**
* Handle writing out dsg netcdf file data.
*/
public abstract class NetcdfFileManager {
// Shared class logger for all DSG file-manager subclasses.
protected static Logger logger = Logger.getLogger(NetcdfFileManager.class);
// CF "cf_role" attribute value for this DSG type (e.g. timeseries_id).
private String myCfRole;
// The CF discrete-sampling-geometry feature type this manager handles.
private String myDsgType;
// Per-variable metadata, keyed by "<variableName>Metadata".
private Map<String, HashMap> variableMetadataMap;
// Maps internal column keys ("variableName1") to user-supplied names ("time").
private Map<String,String> variableNameMap;
private Map<String, String> platformMetadataMap;
private Map<String, String> generalMetadataMap;
// Miscellaneous extra info carried over from the AsciiFile description.
private Map<String, String> otherInfo;
public Map<String, String> getOtherInfo() {return this.otherInfo;}
public void setOtherInfo(Map<String, String> otherInfo) {this.otherInfo = otherInfo;}
// Counts occurrences of each output variable name, used to de-duplicate
// names by appending "_<count>" (see createNcfVariable).
private HashMap<String, Integer> nameCounts;
// Value of the CF "coordinates" attribute.
private String coordAttr;
private List<String> usedVarNames = new ArrayList<String>();
private List<String> allVarNames = new ArrayList<String>();
// Dimension/shape attribute string for non-coordinate variables.
private String shapeAttr;
// NOTE(review): this field is also used as scratch space inside
// findCoordAndNonCoordVars(), where it is re-pointed at per-type lists.
private List<String> coordVarList;
private DateTimeBluePrint dateTimeBluePrint;
// ---------------------------------------------------------------------------
// Plain accessors for the conversion state above. Bodies are trivial
// field reads/writes; comments are added only where a name is non-obvious.
// ---------------------------------------------------------------------------
public String getMyDsgType() {
    return myDsgType;
}
public void setMyDsgType(String myDsgType) {
    this.myDsgType = myDsgType;
}
public String getMyCfRole() {
    return myCfRole;
}
public void setMyCfRole(String cfRole) {
    this.myCfRole = cfRole;
}
public DateTimeBluePrint getDateTimeBluePrint() {
    return dateTimeBluePrint;
}
public void setDateTimeBluePrint(DateTimeBluePrint dateTimeBluePrint) {
    this.dateTimeBluePrint = dateTimeBluePrint;
}
// Coordinate variables grouped by coordinate-variable type
// (e.g. "lat" -> [variableName2, ...]); populated by findCoordAndNonCoordVars().
private HashMap<String, ArrayList<String>> coordVars;
public HashMap<String, Integer> getNameCounts() {
    return nameCounts;
}
public void setNameCounts(HashMap<String, Integer> nameCounts) {
    this.nameCounts = nameCounts;
}
public String getCoordAttr() {
    return coordAttr;
}
public void setCoordAttr(String coordAttr) {
    this.coordAttr = coordAttr;
}
public List<String> getUsedVarNames() {
    return usedVarNames;
}
public void setUsedVarNames(List<String> usedVarNames) {
    this.usedVarNames = usedVarNames;
}
public List<String> getAllVarNames() {
    return allVarNames;
}
public void setAllVarNames(List<String> allVarNames) {
    this.allVarNames = allVarNames;
}
public String getShapeAttr() {
    return shapeAttr;
}
public void setShapeAttr(String shapeAttr) {
    this.shapeAttr = shapeAttr;
}
public ArrayList<String> getBuildTimeTriggers() {
    return buildTimeTriggers;
}
public void setBuildTimeTriggers(ArrayList<String> buildTimeTriggers) {
    this.buildTimeTriggers = buildTimeTriggers;
}
// Special time-variable types ("fullDateTime", "dateOnly", "timeOnly") that
// require a constructed time variable; filled in by init().
private ArrayList<String> buildTimeTriggers;
public String getRelTimeVarName() {
    return relTimeVarName;
}
public void setRelTimeVarName(String relTimeVarName) {
    this.relTimeVarName = relTimeVarName;
}
// User-supplied name of the relative-time variable ("days since ..."), if any.
private String relTimeVarName;
// Keys of variables that are not coordinate variables.
private List<String> nonCoordVarList;
public HashMap<String, ArrayList<String>> getCoordVars() {
    return coordVars;
}
public void setCoordVars(HashMap<String, ArrayList<String>> coordVars) {
    this.coordVars = coordVars;
}
public List<String> getNonCoordVarList() {
    return nonCoordVarList;
}
public void setNonCoordVarList(List<String> nonCoordVarList) {
    this.nonCoordVarList = nonCoordVarList;
}
public List<String> getCoordVarList() {
    return coordVarList;
}
public void setCoordVarList(List<String> coordVarList) {
    this.coordVarList = coordVarList;
}
public Map<String, HashMap> getVariableMetadataMap() {
    return variableMetadataMap;
}
public void setVariableMetadataMap(Map<String, HashMap> variableMetadataMap) {
    this.variableMetadataMap = variableMetadataMap;
}
public Map<String, String> getVariableNameMap() {
    return variableNameMap;
}
public void setVariableNameMap(Map<String, String> variableNameMap) {
    this.variableNameMap = variableNameMap;
}
public Map<String, String> getPlatformMetadataMap() {
    return platformMetadataMap;
}
public void setPlatformMetadataMap(Map<String, String> platformMetadataMap) {
    this.platformMetadataMap = platformMetadataMap;
}
public Map<String, String> getGeneralMetadataMap() {
    return generalMetadataMap;
}
public void setGeneralMetadataMap(Map<String, String> generalMetadataMap) {
    this.generalMetadataMap = generalMetadataMap;
}
/**
 * Returns whether this manager handles the requested DSG feature type.
 *
 * @param reqType the requested CF feature type (set from the user's input)
 * @return true if this manager's DSG type equals {@code reqType}
 */
public boolean isMine(String reqType) {
    // Direct comparison replaces the previous mutable boolean-flag pattern.
    return getMyDsgType().equals(reqType);
}
/**
 * Initializes per-conversion state from the parsed {@code AsciiFile}: name
 * bookkeeping, the metadata maps, the special time-variable triggers, and
 * the DSG type this manager will write.
 *
 * @param file the user-described ASCII file being converted
 */
protected void init(AsciiFile file) {
    setUsedVarNames(new ArrayList<String>());
    setAllVarNames(new ArrayList<String>());
    setVariableNameMap(file.getVariableNameMap());
    setVariableMetadataMap(file.getVariableMetadataMap());
    setPlatformMetadataMap(file.getPlatformMetadataMap());
    setGeneralMetadataMap(file.getGeneralMetadataMap());
    setOtherInfo(file.getOtherInfo());
    // create a list of the various types of time variables a user
    // can supply. These need to be carefully handled in the code!
    // these can be found in the addContentToDialog() function in
    // src/main/webapp/resources/js/SlickGrid/custom/variableSpecification.js
    // Note that "relTime" isn't included, as a relative time (e.g. days since 1970)
    // can be handled as any other variable (i.e. we do not need to build
    // anything special to handle this).
    String[] specialTimeNames = {"fullDateTime", "dateOnly", "timeOnly"};
    setBuildTimeTriggers(new ArrayList<String>());
    ArrayList<String> btc = getBuildTimeTriggers();
    btc.addAll(Arrays.asList(specialTimeNames));
    setBuildTimeTriggers(btc);
    setMyDsgType(file.getCfType());
    setNameCounts(new HashMap<String, Integer>());
}
/**
 * Writes the supplied Rosetta JSON string into the pre-declared "Rosetta"
 * character variable of the output file.
 *
 * @param ncFileWriter writer for the target netCDF file; assumed to already
 *                     define a "Rosetta" char variable — TODO confirm callers
 *                     guarantee this (a missing variable would NPE on write)
 * @param rosettaJson  the Rosetta JSON to embed
 * @return the same writer, for call chaining
 */
protected NetcdfFileWriter writeRosettaInfo(NetcdfFileWriter ncFileWriter, String rosettaJson) {
    Variable theVar = ncFileWriter.findVariable("Rosetta");
    ArrayChar sa = new ArrayChar.D1(rosettaJson.length());
    char[] rosettaJsonCharArray = rosettaJson.toCharArray();
    for (int i = 0; i < rosettaJsonCharArray.length; i++) {
        sa.setChar(i, rosettaJsonCharArray[i]);
    }
    try {
        ncFileWriter.write(theVar, sa);
    } catch (IOException | InvalidRangeException e) {
        // Log through the class logger instead of printStackTrace(); the write
        // is best-effort and the writer is still returned so the rest of the
        // file can be produced (preserves the original swallow-and-continue
        // behavior, but with a proper log record).
        logger.error("Unable to write Rosetta variable: " + e.getMessage(), e);
    }
    return ncFileWriter;
}
/**
 * Adds the global CF attributes (Conventions, featureType) to the output file.
 *
 * @param ncFileWriter writer for the target netCDF file
 * @return the same writer, for call chaining
 */
protected NetcdfFileWriter addCfAttributes(NetcdfFileWriter ncFileWriter){
    final Attribute conventions = new Attribute("Conventions", "CF-1.6");
    final Attribute featureType = new Attribute("featureType", getMyDsgType());
    ncFileWriter.addGroupAttribute(null, conventions);
    ncFileWriter.addGroupAttribute(null, featureType);
    return ncFileWriter;
}
/**
 * Partitions the user-selected variables into coordinate and non-coordinate
 * variables, populating {@code coordVars}, {@code coordVarList} and
 * {@code nonCoordVarList} as a side effect.
 *
 * @return true if the user supplied a relative-time variable (e.g. "days
 *         since yyyy-mm-dd"), in which case no time variable needs building
 */
protected Boolean findCoordAndNonCoordVars() {
    Set<String> variableNameKeys = getVariableNameMap().keySet();
    Iterator<String> variableNameKeysIterator = variableNameKeys.iterator();
    String key, value;
    Map<String, String> variableMetadata;
    // check to see if user supplied a relTime (i.e. days since yyyy-mm-dd) If so,
    // then we do not need to construct a time variable. If not, then we need to create
    // a time dimension.
    Boolean hasRelTime = false;
    nonCoordVarList = new ArrayList<String>();
    coordVarList = new ArrayList<String>();
    coordVars = new HashMap<String, ArrayList<String>>();
    while (variableNameKeysIterator.hasNext()) {
        // key = "variableName1"
        // value = "time"
        key = variableNameKeysIterator.next();
        value = getVariableNameMap().get(key);
        if (!value.equals("Do Not Use")) {
            variableMetadata = getVariableMetadataMap().get(key + "Metadata");
            // check if variable is a coordinate variable!
            if (variableMetadata.containsKey("_coordinateVariable")) {
                String coordVarType = variableMetadata.get("_coordinateVariableType");
                // "coordinate" variables whose type is NOT one of the special
                // build-time triggers (fullDateTime/dateOnly/timeOnly) are
                // grouped by their type; everything else is non-coordinate.
                if(variableMetadata.get("_coordinateVariable").equals("coordinate") && (!getBuildTimeTriggers().contains(coordVarType))){
                    if (coordVarType.toLowerCase().equals("reltime")) {
                        hasRelTime = true;
                        relTimeVarName = value;
                    }
                    // NOTE(review): the coordVarList FIELD is re-pointed at the
                    // per-type list here and mutated in place; the statement
                    // order below is load-bearing.
                    coordVarList = coordVars.get(coordVarType);
                    if (coordVarList == null) {
                        coordVarList = new ArrayList<String>();
                    }
                    coordVarList.add(key);
                    coordVars.put(coordVarType, (ArrayList<String>) coordVarList);
                } else {
                    nonCoordVarList.add(key);
                }
            } else {
                nonCoordVarList.add(key);
            }
        }
    }
    return hasRelTime;
}
/**
 * Scans the given variables for a "_coordinateVariableType" attribute and
 * groups the variable names by that type (e.g. "dateOnly" -> [varA, varB]).
 *
 * @param ncFileWriter        writer whose variables are inspected
 * @param ncFileVariableNames names of the variables to check; names without a
 *                            matching variable are skipped
 * @return map of coordinate-variable type to the variable names carrying it
 */
protected HashMap<String, ArrayList<String>> extractTimeRelatedVars(NetcdfFileWriter ncFileWriter, List<String> ncFileVariableNames) {
    HashMap<String, ArrayList<String>> timeRelatedVars = new HashMap<String, ArrayList<String>>();
    for (String varName : ncFileVariableNames) {
        Variable theVar = ncFileWriter.findVariable(varName);
        if (theVar == null) {
            continue;
        }
        Attribute triggerAttr = theVar.findAttributeIgnoreCase("_coordinateVariableType");
        if (triggerAttr != null) {
            // Reuse the attribute already fetched instead of a second
            // findAttributeIgnoreCase lookup for the same name.
            String timeType = triggerAttr.getStringValue(0);
            ArrayList<String> varsOfType = timeRelatedVars.get(timeType);
            if (varsOfType == null) {
                varsOfType = new ArrayList<>();
                timeRelatedVars.put(timeType, varsOfType);
            }
            varsOfType.add(varName);
        }
    }
    return timeRelatedVars;
}
/**
 * Creates a netCDF variable for every coordinate variable discovered by
 * findCoordAndNonCoordVars(), across all coordinate types.
 *
 * @param ncFileWriter   writer for the target netCDF file
 * @param parsedDataFile the parsed ASCII data (rows of column values)
 * @return the same writer, for call chaining
 */
protected NetcdfFileWriter findAndCreateCoordVars(NetcdfFileWriter ncFileWriter, List<List<String>> parsedDataFile) {
    for (String coordType : getCoordVars().keySet()) {
        for (String storageKey : getCoordVars().get(coordType)) {
            ncFileWriter = createNcfVariable(ncFileWriter, storageKey, parsedDataFile);
        }
    }
    return ncFileWriter;
}
/**
 * Creates a netCDF variable for every non-coordinate variable discovered by
 * findCoordAndNonCoordVars().
 *
 * @param ncFileWriter   writer for the target netCDF file
 * @param parsedDataFile the parsed ASCII data (rows of column values)
 * @return the same writer, for call chaining
 */
protected NetcdfFileWriter findAndCreateNonCoordVars(NetcdfFileWriter ncFileWriter, List<List<String>> parsedDataFile) {
    for (String storageKey : getNonCoordVarList()) {
        ncFileWriter = createNcfVariable(ncFileWriter, storageKey, parsedDataFile);
    }
    return ncFileWriter;
}
/**
 * Convenience overload: creates a variable with no explicit coordinate type.
 *
 * @param ncFileWriter      writer for the target netCDF file
 * @param sessionStorageKey internal column key (e.g. "variableName1")
 * @param outerList         the parsed ASCII data (rows of column values)
 * @return the same writer, for call chaining
 */
protected NetcdfFileWriter createNcfVariable(NetcdfFileWriter ncFileWriter, String sessionStorageKey, List<List<String>> outerList) {
    // Delegate to the four-argument variant with a null coordinate type.
    return createNcfVariable(ncFileWriter, sessionStorageKey, outerList, null);
}
/**
 * Creates one netCDF variable (plus its attributes) for the column identified by
 * sessionStorageKey. Handles name sanitizing/de-duplication, coordinate-variable
 * shape selection, CHAR vs numeric variable creation, and metadata attributes.
 *
 * @param ncFileWriter      writer in define mode
 * @param sessionStorageKey front-end key of the form "variableName<col>"
 * @param outerList         parsed data rows (first row sizes CHAR variables)
 * @param coordType         coordinate type this variable belongs to, or null
 * @return the writer, with the new variable defined
 */
protected NetcdfFileWriter createNcfVariable(NetcdfFileWriter ncFileWriter, String sessionStorageKey, List<List<String>> outerList, String coordType) {
    // Sanitize the user-supplied name and de-duplicate repeats with a "_<count>" suffix.
    String userName = getVariableNameMap().get(sessionStorageKey);
    String varName = userName.replace(" ", "_");
    if (!nameCounts.containsKey(varName)) {
        usedVarNames.add(varName);
        nameCounts.put(varName, 1);
    } else {
        Integer newCount = nameCounts.get(varName) + 1;
        nameCounts.put(varName, newCount);
        varName = varName + "_" + newCount.toString();
    }
    allVarNames.add(varName);
    HashMap variableMetadata = getVariableMetadataMap().get(sessionStorageKey + "Metadata");
    Object coordVarTypeOb = variableMetadata.get("_coordinateVariableType");
    String coordVarType = (coordVarTypeOb != null) ? coordVarTypeOb.toString() : "";
    String name = varName;
    // FIX: the original computed shapeAttr.replace(" ", ",") and immediately
    // overwrote it with shapeAttr; the dead first assignment has been removed.
    String shape = shapeAttr;
    // A coordinate variable is one-dimensional along its own coordinate dimension.
    if (getCoordVars().containsKey(coordVarType)
            && getCoordVars().get(coordVarType).contains(sessionStorageKey)) {
        shape = coordVarType;
    }
    String type = (String) variableMetadata.get("dataType");
    DataType ncType = null;
    if (type.equals("Text")) {
        ncType = DataType.CHAR;
    } else if (type.equals("Integer")) {
        ncType = DataType.INT;
    } else if (type.equals("Float")) {
        ncType = DataType.FLOAT;
    }
    Variable theVar;
    if (ncType == DataType.CHAR) {
        // CHAR variables need a string-length dimension, sized from the first data row.
        int colNum = Integer.parseInt(sessionStorageKey.replace("variableName", ""));
        int charLen = outerList.get(0).get(colNum).length();
        ArrayList<Dimension> dims = new ArrayList<>();
        // FIX: split on commas OR whitespace -- shapeAttr is space separated, so the
        // original split(",") failed to tokenize multi-dimension shapes.
        for (String dimStr : shape.split("[,\\s]+")) {
            dims.add(ncFileWriter.renameDimension(null, dimStr, dimStr));
        }
        theVar = ncFileWriter.addStringVariable(null, name, dims, charLen);
    } else {
        theVar = ncFileWriter.addVariable(null, name, ncType, shape);
    }
    // Preserve the original name if we had to sanitize or de-duplicate it.
    if (!userName.equals(varName)) {
        ncFileWriter.addVariableAttribute(theVar, new Attribute("_userSuppliedName", userName));
    }
    // Copy the per-variable metadata entries onto the variable as attributes.
    Set<String> variableMetadataKeys = variableMetadata.keySet();
    for (String metadataKey : variableMetadataKeys) {
        String metadataValue = variableMetadata.get(metadataKey).toString();
        switch (metadataKey) {
            case "dataType" :
                // already consumed above when choosing the netCDF data type
                break;
            case "missing_value" :
            case "_FillValue" :
            case "valid_min" :
            case "valid_max" :
                // TODO: valid range?
                // TODO: flag_values?
                // These attributes are written with the variable's own numeric type;
                // CHAR variables fall back to STRING attributes.
                String[] arrayString = {metadataValue};
                DataType varType = ncType;
                if (varType == DataType.CHAR)
                    varType = DataType.STRING;
                Array array = Array.makeArray(varType, arrayString);
                ncFileWriter.addVariableAttribute(theVar, new Attribute(metadataKey, array));
                break;
            default :
                ncFileWriter.addVariableAttribute(theVar, new Attribute(metadataKey, metadataValue));
                break;
        }
    }
    // Remember which data-file column this variable came from (used by writeUserVarData).
    String columnId = sessionStorageKey.replace("variableName", "");
    ncFileWriter.addVariableAttribute(theVar, new Attribute("_columnId", columnId));
    if (getNonCoordVarList().contains(sessionStorageKey)) {
        // we can have coord like variables (time only, date only, etc) that are not
        // stored as coordinate variables, but they don't need the coordAttr. To filter
        // these out, check to see if coordVarType is "", which is the default.
        // FIX: use isEmpty() instead of the reference comparison (coordVarType == ""),
        // which only matched the interned literal assigned above.
        if (coordVarType.isEmpty()) {
            ncFileWriter.addVariableAttribute(theVar, new Attribute("coordinates", coordAttr));
        }
    }
    if (coordType != null && getBuildTimeTriggers().contains(coordType)) {
        ncFileWriter.addVariableAttribute(theVar, new Attribute("timeRelatedVariable", "true"));
    }
    return ncFileWriter;
}
/**
 * Adds a "dateTime" dimension (one entry per data row) and appends its name to the
 * space-separated shape attribute.
 *
 * @param ncFileWriter      writer in define mode
 * @param parseFileDataSize number of data rows
 * @return the writer, with the dimension added
 */
protected NetcdfFileWriter createTimeDimension(NetcdfFileWriter ncFileWriter, int parseFileDataSize) {
    ncFileWriter.addDimension(null, "dateTime", parseFileDataSize);
    String currentShape = getShapeAttr();
    setShapeAttr(currentShape.equals("") ? "dateTime" : currentShape + " dateTime");
    return ncFileWriter;
}
/**
 * Defines a scalar string variable named "Rosetta" that carries the front-end
 * sessionStorage JSON verbatim, for provenance.
 */
protected NetcdfFileWriter createRosettaInfo(NetcdfFileWriter ncFileWriter, String rosettaJson) {
    //todo get version info from war file, similar to ServerInfoBean.java
    Variable rosettaVar = ncFileWriter.addStringVariable(null, "Rosetta",
            new ArrayList<Dimension>(), rosettaJson.length());
    rosettaVar.addAttribute(new Attribute("long_name", "Rosetta front-end sessionStorage JSON String"));
    rosettaVar.addAttribute(new Attribute("version", "0.2"));
    return ncFileWriter;
}
/**
 * Ensures the download directory exists, creating it (and any parents) if needed.
 *
 * @param downloadDirPath directory that must exist after this call
 * @throws IOException if the directory is missing and cannot be created
 */
protected void checkDownloadDir(String downloadDirPath) throws IOException {
    File downloadTarget = new File(downloadDirPath);
    if (!downloadTarget.exists()) {
        if (!downloadTarget.mkdirs()) {
            throw new IOException("Unable to create download directory " + downloadTarget.getAbsolutePath());
        }
        // FIX: log only after mkdirs succeeded; the original logged "created" before
        // attempting creation, even when creation subsequently failed.
        logger.warn("created download path");
    }
}
/**
 * Builds the complete netCDF-3 header for the output file: dimensions, coordinate and
 * data variables, global and platform metadata, and the Rosetta bookkeeping variable.
 * Finishes by calling create(), so the returned writer has left define mode and is
 * ready for data writes. The step order below matters: dimensions must exist before
 * variables, and variables before the dateTime blueprint inspects them.
 *
 * @param file          uploaded ASCII file wrapper (supplies the session JSON)
 * @param ncFilePath    full path of the netCDF file to create
 * @param parseFileData parsed data rows; the row count sizes the dimensions
 * @param hasRelTime    true if a relative-time coordinate variable was found; when
 *                      false, a "dateTime" dimension and assembled time variables
 *                      are created instead
 * @return the writer, out of define mode
 * @throws IOException           on file creation problems
 * @throws InvalidRangeException propagated from variable creation
 */
protected NetcdfFileWriter createNetcdfFileHeader(AsciiFile file, String ncFilePath, List<List<String>> parseFileData, Boolean hasRelTime) throws IOException, InvalidRangeException {
// create ncFileWriter
NetcdfFileWriter ncFileWriter = NetcdfFileWriter.createNew(NetcdfFileWriter.Version.netcdf3, ncFilePath);
// create dimensions based on the coordVars found above
logger.warn("create dimensions in ncml/n");
ncFileWriter = createDimensions(ncFileWriter, parseFileData);
// now, if relTime was not supplied as a coordVarType, then we know we will need to build a time variable
// later. Let's create a "time" dimension that will go with our (to-be-created-later) time variable
if (!hasRelTime) {
ncFileWriter = createTimeDimension(ncFileWriter, parseFileData.size());
}
// create a coordinate attribute based on CF1.6 DSG
createCoordinateAttr(hasRelTime);
// CF specific attribute elements
ncFileWriter = addCfAttributes(ncFileWriter);
// global metadata
for (Map.Entry<String, String> entry : getGeneralMetadataMap().entrySet()) {
if (entry.getValue() != null) {
ncFileWriter.addGroupAttribute(null, new Attribute(entry.getKey(), entry.getValue()));
}
}
// platform metadata
// Latitude
if (getPlatformMetadataMap().containsKey("latitude")) {
ncFileWriter = createCoordVarsFromPlatform(ncFileWriter, "latitude");
}
// Longitude
if (getPlatformMetadataMap().containsKey("longitude")) {
ncFileWriter = createCoordVarsFromPlatform(ncFileWriter, "longitude");
}
// Altitude
if (getPlatformMetadataMap().containsKey("altitude")) {
ncFileWriter = createCoordVarsFromPlatform(ncFileWriter, "altitude");
}
// Platform ID
if (getPlatformMetadataMap().containsKey("platformName")) {
ncFileWriter = createPlatformInfo(ncFileWriter, file);
}
// look at coordVars and create the appropriate coordinate variables
ncFileWriter = findAndCreateCoordVars(ncFileWriter, parseFileData);
// create variables for the non-coordinate variables
ncFileWriter = findAndCreateNonCoordVars(ncFileWriter, parseFileData);
// Add rosetta specific info
ncFileWriter = createRosettaInfo(ncFileWriter, file.getJsonStrSessionStorage());
// check to se if we need to construct new dateTime variables
HashMap<String, ArrayList<String>> timeRelatedVars = extractTimeRelatedVars(ncFileWriter, getUsedVarNames());
if (!hasRelTime) {
// blueprint assembles split date/time columns into a single dateTime variable
dateTimeBluePrint = new DateTimeBluePrint(timeRelatedVars, ncFileWriter);
if (!dateTimeBluePrint.isEmpty()) {
ncFileWriter = dateTimeBluePrint.createNewVars(ncFileWriter);
}
}
// leave define mode; the header is now frozen and data can be written
ncFileWriter.create();
return ncFileWriter;
}
/**
 * Creates one dimension per (non time-trigger) coordinate type and accumulates the
 * space-separated shape attribute as it goes.
 *
 * @param ncFileWriter  writer in define mode
 * @param parseFileData parsed data rows; the row count sizes each dimension
 * @return the writer, with the dimensions added
 */
protected NetcdfFileWriter createDimensions(NetcdfFileWriter ncFileWriter, List<List<String>> parseFileData) {
    // we only want one time related dimension. So, if relTime is supplied, then
    // we only want to use it; if time spans multiple cols, then we will create
    // a "time" dimension, and assemble the "time" coordinate variable later
    setShapeAttr("");
    for (String coordType : getCoordVars().keySet()) {
        // don't create a dimension for the partial time variables that will need to
        // be assembled.
        if (getBuildTimeTriggers().contains(coordType)) {
            continue;
        }
        // coordAttr is the attribute that defines the coord system for variables
        String currentShape = getShapeAttr();
        setShapeAttr(currentShape.equals("") ? coordType : currentShape + " " + coordType);
        // one addDimension call per variable of this coord type (mirrors original behavior)
        for (String ignoredVarName : getCoordVars().get(coordType)) {
            ncFileWriter.addDimension(null, coordType, parseFileData.size());
        }
    }
    return ncFileWriter;
}
/**
 * Writes the parsed data values for every user variable into the (already created)
 * netCDF file. Variables are matched to their source data column via the "_columnId"
 * attribute added in createNcfVariable; variables without it are skipped.
 *
 * @param outerList    parsed data rows (outer = rows, inner = columns)
 * @param ncFileWriter writer out of define mode
 * @return the writer, with data written
 * @throws IOException           on write failure
 * @throws InvalidRangeException if a value array does not fit the variable
 */
protected NetcdfFileWriter writeUserVarData(List<List<String>> outerList, NetcdfFileWriter ncFileWriter) throws IOException, InvalidRangeException {
    for (String var : getAllVarNames()) {
        Variable theVar = ncFileWriter.findVariable(var);
        if (theVar == null) {
            continue;
        }
        Attribute columnIdAttr = theVar.findAttributeIgnoreCase("_columnId");
        if (columnIdAttr == null) {
            continue;
        }
        // FIX: removed unused locals (varName, len) from the original.
        DataType dt = theVar.getDataType();
        int varIndex = Integer.parseInt(columnIdAttr.getStringValue());
        if (dt.equals(DataType.FLOAT)) {
            ArrayFloat.D1 vals = new ArrayFloat.D1(outerList.size());
            int i = 0;
            for (List<String> innerList : outerList) {
                vals.set(i, Float.parseFloat(innerList.get(varIndex)));
                i++;
            }
            ncFileWriter.write(theVar, vals);
        } else if (dt.equals(DataType.INT)) {
            ArrayInt.D1 vals = new ArrayInt.D1(outerList.size());
            int i = 0;
            for (List<String> innerList : outerList) {
                vals.set(i, Integer.parseInt(innerList.get(varIndex)));
                i++;
            }
            ncFileWriter.write(theVar, vals);
        } else if (dt.equals(DataType.CHAR)) {
            // CHAR variables are 2-D: (row, string-length)
            assert theVar.getRank() == 2;
            int elementLength = theVar.getDimension(1).getLength();
            ArrayChar.D2 vals = new ArrayChar.D2(outerList.size(), elementLength);
            int i = 0;
            for (List<String> innerList : outerList) {
                vals.setString(i, innerList.get(varIndex));
                i++;
            }
            ncFileWriter.write(theVar, vals);
        }
    }
    return ncFileWriter;
}
/** Writes the platform coordinate value (called per key: "latitude"/"longitude"/"altitude") after define mode ends. */
protected abstract NetcdfFileWriter writeCoordVarsFromPlatform(NetcdfFileWriter ncFileWriter, String name) throws IOException, InvalidRangeException;
/** Defines the platform coordinate variable (called per key: "latitude"/"longitude"/"altitude") during header creation. */
protected abstract NetcdfFileWriter createCoordVarsFromPlatform(NetcdfFileWriter ncFileWriter, String name) throws IOException, InvalidRangeException;
/** Defines the platform-ID variable during header creation (only when "platformName" metadata is present). */
protected abstract NetcdfFileWriter createPlatformInfo(NetcdfFileWriter ncFileWriter, AsciiFile file);
/** Writes the platform-ID value after define mode ends (only when "platformName" metadata is present). */
protected abstract NetcdfFileWriter writePlatformInfo(NetcdfFileWriter ncFileWriter, AsciiFile file);
/** Builds the CF-1.6 DSG "coordinates" attribute string used for non-coordinate variables. */
protected abstract void createCoordinateAttr(boolean hasRelTime);
/**
 * Returns the known discrete-sampling-geometry writers, in probing order.
 */
public List<NetcdfFileManager> asciiToDsg() {
    List<NetcdfFileManager> dsgWriters = new ArrayList<>();
    dsgWriters.add(new SingleStationTimeSeries());
    dsgWriters.add(new SingleStationTrajectory());
    return dsgWriters;
}
/**
 * End-to-end conversion of the parsed ASCII data into a netCDF file inside
 * downloadDirPath. Builds the header, writes platform values, user data, the
 * Rosetta JSON, and any assembled dateTime variables, then closes the file.
 *
 * @return the path of the created file, or null on any failure (failure is logged)
 */
public String createNetcdfFile(AsciiFile file, List<List<String>> parseFileData, String downloadDirPath) throws IOException {
    try {
        String ncFilePath = downloadDirPath + File.separator + FilenameUtils.removeExtension(file.getFileName()) + ".nc";
        logger.warn("create ncFilePath: " + ncFilePath);
        // make sure downloadDir exists and, if not, create it
        checkDownloadDir(downloadDirPath);
        init(file);
        // look for coordinate and non-coordinate variables
        Boolean hasRelTime = findCoordAndNonCoordVars();
        // write the header of the netCDF file (returns NetcdfFileWriter with define mode False, i.e. ready
        // to write data values.
        NetcdfFileWriter ncFileWriter = createNetcdfFileHeader(file, ncFilePath, parseFileData, hasRelTime);
        // no longer in define mode...now need to actually write out the data!
        // Latitude
        if (getPlatformMetadataMap().containsKey("latitude")) {
            ncFileWriter = writeCoordVarsFromPlatform(ncFileWriter, "latitude");
        }
        // Longitude
        if (getPlatformMetadataMap().containsKey("longitude")) {
            ncFileWriter = writeCoordVarsFromPlatform(ncFileWriter, "longitude");
        }
        // Altitude
        if (getPlatformMetadataMap().containsKey("altitude")) {
            ncFileWriter = writeCoordVarsFromPlatform(ncFileWriter, "altitude");
        }
        // Platform ID
        if (getPlatformMetadataMap().containsKey("platformName")) {
            ncFileWriter = writePlatformInfo(ncFileWriter, file);
        }
        ncFileWriter = writeRosettaInfo(ncFileWriter, file.getJsonStrSessionStorage());
        // must write user data before any new dateTime variables!
        ncFileWriter = writeUserVarData(parseFileData, ncFileWriter);
        if (getDateTimeBluePrint() != null && !getDateTimeBluePrint().isEmpty()) {
            ncFileWriter = getDateTimeBluePrint().writeNewVariables(ncFileWriter);
        }
        ncFileWriter.close();
        if (new File(ncFilePath).exists()) {
            return ncFilePath;
        }
        // FIX: added the missing space before "was not created".
        logger.error("Error! the netcdf file " + ncFilePath + " was not created.");
        return null;
    } catch (Exception e) {
        // FIX: the original logged e.getMessage() and e.getStackTrace() separately;
        // the latter printed only the array's toString(). Log the throwable itself
        // so the full stack trace is preserved.
        logger.error("Failed to create netCDF file", e);
        return null;
    }
}
}
|
package fr.noogotte.useful_commands.command;
import java.util.List;
import org.bukkit.ChatColor;
import org.bukkit.command.CommandSender;
import org.bukkit.entity.Player;
import fr.aumgn.bukkitutils.command.Command;
import fr.aumgn.bukkitutils.command.NestedCommands;
import fr.aumgn.bukkitutils.command.args.CommandArgs;
import fr.aumgn.bukkitutils.util.Util;
import fr.noogotte.useful_commands.component.AfkComponent;
@NestedCommands(name = "useful")
public class AfkCommand extends UsefulCommands {
    /** Component tracking which players are currently AFK. */
    private final AfkComponent afkcomponent;

    public AfkCommand(AfkComponent afkcomponent) {
        this.afkcomponent = afkcomponent;
    }

    /**
     * Toggles the AFK state of the targeted players (the sender by default, or another
     * player with the "useful.afk.other" permission): updates display/list names,
     * broadcasts the change, and notifies the sender when toggling someone else.
     */
    @Command(name = "afk", min = 0, max = 1)
    public void toggleAfk(CommandSender sender, CommandArgs args) {
        List<Player> targets = args.getPlayers(0)
                .match(sender, "useful.afk.other");
        for (Player target : targets) {
            if (!afkcomponent.isAfk(target)) {
                afkcomponent.addPlayer(target);
                target.setDisplayName("(AFK)" + target.getName());
                target.setPlayerListName(ChatColor.ITALIC + target.getName());
                Util.broadcast(ChatColor.GOLD + target.getName()
                        + ChatColor.GREEN + " est maintenant en AFK.");
                target.sendMessage(ChatColor.GREEN + "Vous êtes AFK tapez"
                        + ChatColor.GOLD + " /afk " + ChatColor.GREEN
                        + "pour en resortir");
            } else {
                // FIX: was "else if (afkcomponent.isAfk(target))" -- the condition is the
                // exact negation of the branch above, so a plain else avoids the redundant call.
                target.setDisplayName(target.getName());
                target.setPlayerListName(target.getName());
                afkcomponent.removePlayer(target);
                Util.broadcast(ChatColor.GOLD + target.getName()
                        + ChatColor.GREEN + " n'est plus en AFK.");
                target.sendMessage(ChatColor.GREEN + "Vous n'êtes plus en AFK.");
            }
            if (!sender.equals(target)) {
                sender.sendMessage(ChatColor.GREEN + "Vous avez mis "
                        + ChatColor.GOLD + target.getName() + ChatColor.GREEN
                        + " en Afk");
            }
        }
    }
}
|
package io.domisum.lib.auxiliumlib.util;
import io.domisum.lib.auxiliumlib.annotations.API;
import io.domisum.lib.auxiliumlib.display.DurationDisplay;
import lombok.AccessLevel;
import lombok.RequiredArgsConstructor;
import java.time.Duration;
@API
@RequiredArgsConstructor(access = AccessLevel.PRIVATE)
public final class ValidationUtil
{

	// COMMON
	/**
	 * Ensures the given object is not null.
	 *
	 * @throws IllegalArgumentException if {@code object} is null
	 */
	@API
	public static void notNull(Object object, String variableName)
	{
		if(object == null)
			throw new IllegalArgumentException(variableName+" can't be null");
	}


	// STRING
	/**
	 * Ensures the string is neither null, empty, nor whitespace-only.
	 * FIX: the original only rejected the empty string, so {@code notBlank(" ", ...)}
	 * passed despite the method name promising a blank check.
	 *
	 * @throws IllegalArgumentException if {@code string} is null, empty, or blank
	 */
	@API
	public static void notBlank(String string, String variableName)
	{
		notNull(string, variableName);
		if(string.isEmpty())
			throw new IllegalArgumentException(variableName+" can't be empty");
		if(string.trim().isEmpty())
			throw new IllegalArgumentException(variableName+" can't be blank");
	}


	// COMPARISON
	/**
	 * Ensures the number is strictly greater than zero.
	 */
	@API
	public static void greaterZero(double number, String variableName)
	{
		if(number <= 0)
			throw new IllegalArgumentException(variableName+" has to be greater than zero, but was "+number);
	}


	// DURATION
	/**
	 * Ensures duration {@code a} is strictly greater than duration {@code b}.
	 * FIX: the original only threw when {@code b} was strictly greater than {@code a},
	 * silently accepting {@code a == b} despite the strict "greater than" contract.
	 */
	@API
	public static void greaterThan(Duration a, Duration b, String aName, String bName)
	{
		if(a.compareTo(b) <= 0)
			throw new IllegalArgumentException(
				aName+" ("+DurationDisplay.display(a)+") has to be greater than "+bName+" ("+DurationDisplay.display(b)
				+"), but wasn't");
	}


	// SPECIAL VALUES
	/**
	 * Ensures the port is a valid TCP/UDP port number (1..65535).
	 */
	@API
	public static void validatePortInRange(int port, String portName)
	{
		final int MAX_PORT_VALUE = 65535;
		inIntervalInclIncl(1, MAX_PORT_VALUE, port, portName);
	}


	// INTERVAL INT
	/** Ensures {@code value} lies in [start, end]. */
	@API
	public static void inIntervalInclIncl(int start, int end, int value, String variableName)
	{
		inInterval(start, true, end, true, value, variableName);
	}

	/** Ensures {@code value} lies in ]start, end]. */
	@API
	public static void inIntervalExclIncl(int start, int end, int value, String variableName)
	{
		inInterval(start, false, end, true, value, variableName);
	}

	/** Ensures {@code value} lies in ]start, end[. */
	@API
	public static void inIntervalExclExcl(int start, int end, int value, String variableName)
	{
		inInterval(start, false, end, false, value, variableName);
	}

	private static void inInterval(int start, boolean startInclusive, int end, boolean endInclusive, int value, String variableName)
	{
		if(end < start)
			throw new IllegalArgumentException("start and end are the wrong way around");
		if((value < start) || (value > end))
			throw new IllegalArgumentException(variableName+" has to be in interval "+
				displayInterval(start, startInclusive, end, endInclusive)+", but was "+value);
		if((value == start) && !startInclusive)
			throw new IllegalArgumentException(variableName+" has to be in interval "+
				displayInterval(start, startInclusive, end, endInclusive)+", but was "+value+" (is equal to start and therefore excluded)");
		if((value == end) && !endInclusive)
			throw new IllegalArgumentException(variableName+" has to be in interval "+
				displayInterval(start, startInclusive, end, endInclusive)+", but was "+value+" (is equal to end and therefore excluded)");
	}

	// Renders an interval with French-style brackets: "]" excludes, "[" includes the bound.
	private static String displayInterval(int start, boolean startInclusive, int end, boolean endInclusive)
	{
		String startBracket = startInclusive ? "[" : "]";
		String endBracket = endInclusive ? "]" : "[";
		return startBracket+start+" to "+end+endBracket;
	}


	// INTERVAL DOUBLE
	/** Ensures {@code value} lies in [start, end]. */
	@API
	public static void inIntervalInclIncl(double start, double end, double value, String variableName)
	{
		inInterval(start, true, end, true, value, variableName);
	}

	/** Ensures {@code value} lies in ]start, end]. */
	@API
	public static void inIntervalExclIncl(double start, double end, double value, String variableName)
	{
		inInterval(start, false, end, true, value, variableName);
	}

	/** Ensures {@code value} lies in ]start, end[. */
	@API
	public static void inIntervalExclExcl(double start, double end, double value, String variableName)
	{
		inInterval(start, false, end, false, value, variableName);
	}

	private static void inInterval(double start, boolean startInclusive, double end, boolean endInclusive, double value, String variableName)
	{
		if(end < start)
			throw new IllegalArgumentException("start and end are the wrong way around");
		if((value < start) || (value > end))
			throw new IllegalArgumentException(variableName+" has to be in interval "+
				displayInterval(start, startInclusive, end, endInclusive)+", but was "+value);
		if((value == start) && !startInclusive)
			throw new IllegalArgumentException(variableName+" has to be in interval "+
				displayInterval(start, startInclusive, end, endInclusive)+", but was "+value+" (is equal to start and therefore excluded)");
		if((value == end) && !endInclusive)
			throw new IllegalArgumentException(variableName+" has to be in interval "+
				displayInterval(start, startInclusive, end, endInclusive)+", but was "+value+" (is equal to end and therefore excluded)");
	}

	// Renders an interval with French-style brackets: "]" excludes, "[" includes the bound.
	private static String displayInterval(double start, boolean startInclusive, double end, boolean endInclusive)
	{
		String startBracket = startInclusive ? "[" : "]";
		String endBracket = endInclusive ? "]" : "[";
		return startBracket+start+" to "+end+endBracket;
	}

}
|
package io.github.aquerr.eaglefactions.logic;
import io.github.aquerr.eaglefactions.EagleFactions;
import io.github.aquerr.eaglefactions.PluginInfo;
import org.spongepowered.api.Sponge;
import org.spongepowered.api.entity.living.player.Player;
import org.spongepowered.api.event.Listener;
import org.spongepowered.api.event.command.SendCommandEvent;
import org.spongepowered.api.event.filter.cause.Root;
import org.spongepowered.api.scheduler.Task;
import org.spongepowered.api.text.Text;
import org.spongepowered.api.text.format.TextColors;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
public class PVPLogger
{
    // FIX: always initialized -- the original left these null unless the PVP logger
    // was enabled in the config, so every accessor NPE'd when it was disabled.
    private final Map<UUID, Integer> _attackedPlayers = new HashMap<>();
    private final boolean _isActive;
    private int _blockTime;
    private List<String> _blockedCommandsDuringFight = new ArrayList<>();

    public PVPLogger()
    {
        _isActive = MainLogic.isPVPLoggerActive();
        if (_isActive)
        {
            _blockTime = MainLogic.getPVPLoggerTime();
            _blockedCommandsDuringFight = MainLogic.getBlockedCommandsDuringFight();
        }
    }

    /** Whether the PVP logger is enabled in the plugin configuration. */
    public boolean isActive()
    {
        return _isActive;
    }

    /** Number of seconds a player stays blocked after being attacked. */
    public int getBlockTime()
    {
        return _blockTime;
    }

    /**
     * Returns true if the given command should be rejected because the player is
     * currently in combat. A configured command of "*" blocks everything; otherwise
     * commands are matched case-insensitively, ignoring a leading '/'.
     */
    public boolean shouldBlockCommand(Player player, String usedCommand)
    {
        if (!isPlayerBlocked(player))
        {
            return false;
        }
        // FIX: guard charAt(0) against empty strings (the original threw
        // StringIndexOutOfBoundsException on an empty command).
        if (!usedCommand.isEmpty() && usedCommand.charAt(0) == '/')
        {
            usedCommand = usedCommand.substring(1);
        }
        usedCommand = usedCommand.toLowerCase();
        for (String blockedCommand : _blockedCommandsDuringFight)
        {
            if (!blockedCommand.isEmpty() && blockedCommand.charAt(0) == '/')
            {
                blockedCommand = blockedCommand.substring(1);
            }
            if (blockedCommand.equals("*") || usedCommand.equals(blockedCommand) || usedCommand.startsWith(blockedCommand))
            {
                return true;
            }
        }
        return false;
    }

    /**
     * Marks the player as in-combat: resets their countdown if already tracked,
     * otherwise starts tracking and schedules a once-per-second countdown task.
     */
    public void addOrUpdatePlayer(Player player)
    {
        // FIX: no-op when the logger is disabled (the original NPE'd on the null map).
        if (!_isActive)
        {
            return;
        }
        //Update player's time if it already in a list.
        if (_attackedPlayers.containsKey(player.getUniqueId()))
        {
            _attackedPlayers.replace(player.getUniqueId(), getBlockTime());
        }
        else
        {
            _attackedPlayers.put(player.getUniqueId(), getBlockTime());
            player.sendMessage(Text.of(PluginInfo.PluginPrefix, TextColors.RED, "PVPLogger has turned on! You will die if you disconnect in " + getBlockTime() + "s!"));
            Task.Builder taskBuilder = Sponge.getScheduler().createTaskBuilder();
            taskBuilder.interval(1, TimeUnit.SECONDS).execute(new Consumer<Task>()
            {
                @Override
                public void accept(Task task)
                {
                    if (_attackedPlayers.containsKey(player.getUniqueId()))
                    {
                        int seconds = _attackedPlayers.get(player.getUniqueId());
                        if (seconds <= 0)
                        {
                            player.sendMessage(Text.of(PluginInfo.PluginPrefix, TextColors.GREEN, "PVPLogger has turned off for you! You can now disconnect safely."));
                            task.cancel();
                        }
                        else
                        {
                            _attackedPlayers.replace(player.getUniqueId(), seconds, seconds - 1);
                        }
                    }
                    else
                    {
                        task.cancel();
                    }
                }
            }).submit(EagleFactions.getEagleFactions());
        }
    }

    /** Returns true if the player is currently tracked as in-combat. */
    public boolean isPlayerBlocked(Player player)
    {
        return _attackedPlayers.containsKey(player.getUniqueId());
    }

    /** Stops tracking the player (no-op if not tracked). */
    public void removePlayer(Player player)
    {
        _attackedPlayers.remove(player.getUniqueId());
    }

    /** Remaining combat-block seconds for the player, or 0 if not tracked. */
    public int getPlayerBlockTime(Player player)
    {
        return _attackedPlayers.getOrDefault(player.getUniqueId(), 0);
    }
}
|
package io.github.mzmine.datamodel.data.types;
import java.lang.reflect.ParameterizedType;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import io.github.mzmine.datamodel.RawDataFile;
import io.github.mzmine.datamodel.data.ModularDataModel;
import io.github.mzmine.datamodel.data.ModularFeatureListRow;
import io.github.mzmine.datamodel.data.types.fx.DataTypeCellFactory;
import io.github.mzmine.datamodel.data.types.fx.DataTypeCellValueFactory;
import io.github.mzmine.datamodel.data.types.fx.EditableDataTypeCellFactory;
import io.github.mzmine.datamodel.data.types.modifiers.EditableColumnType;
import io.github.mzmine.datamodel.data.types.modifiers.NullColumnType;
import io.github.mzmine.datamodel.data.types.modifiers.SubColumnsFactory;
import javafx.scene.control.TreeTableColumn;
/**
* Class of data types: Provides formatters. Should only be added to one {@link ModularDataModel}
*
* @author Robin Schmid (robinschmid@uni-muenster.de)
*
* @param <T>
*/
public abstract class DataType<T> {
  protected Logger logger = Logger.getLogger(this.getClass().getName());
  // The data model this type is registered to; a type should only belong to one model.
  protected ModularDataModel model;

  public DataType() {}

  /**
   * The current data model (should only be added to one data model). Data model registers itself.
   *
   * @return the model this type is registered to, or null if not yet registered
   */
  public ModularDataModel getDataModel() {
    return model;
  }

  /**
   * Set current data model - should only be added to one data model. Data model registers itself.
   *
   * @param model the owning data model
   */
  public void setDataModel(ModularDataModel model) {
    this.model = model;
  }

  /**
   * A formatted string representation of the value
   *
   * @return the formatted representation of the value (or an empty String)
   */
  @Nonnull
  public String getFormattedString(T value) {
    if (value != null)
      return value.toString();
    else
      return "";
  }

  /**
   * The header string (name) of this data type
   *
   * @return the column header
   */
  @Nonnull
  public abstract String getHeaderString();

  /**
   * Creates a TreeTableColumn or null if the value is not represented in a column. A
   * {@link SubColumnsFactory} DataType can also add multiple sub columns to the main column
   * generated by this class.
   *
   * @param raw null if this is a FeatureListRow column. For Feature columns: the raw data file
   *        specifies the feature.
   *
   * @return the TreeTableColumn or null if this DataType.value is not represented in a column
   */
  @Nullable
  public TreeTableColumn<ModularFeatureListRow, T> createColumn(final @Nullable RawDataFile raw) {
    if (this instanceof NullColumnType)
      return null;
    // create column
    TreeTableColumn<ModularFeatureListRow, T> col = new TreeTableColumn<>(getHeaderString());
    if (this instanceof SubColumnsFactory) {
      col.setSortable(false);
      // add sub columns (no value factory needed for parent column)
      List<TreeTableColumn<ModularFeatureListRow, ?>> children =
          ((SubColumnsFactory) this).createSubColumns(raw);
      col.getColumns().addAll(children);
      return col;
    } else {
      col.setSortable(true);
      // define observable
      col.setCellValueFactory(new DataTypeCellValueFactory<>(raw, this));
      // value representation
      if (this instanceof EditableColumnType) {
        col.setCellFactory(new EditableDataTypeCellFactory<>(raw, this));
        col.setEditable(true);
        col.setOnEditCommit(event -> {
          Object data = event.getNewValue();
          if (data != null) {
            // raw == null targets the row itself; otherwise the feature of that raw file
            if (raw == null)
              event.getRowValue().getValue().set(this, data);
            else
              event.getRowValue().getValue().getFeatures().get(raw).set(this, data);
          }
        });
      } else
        col.setCellFactory(new DataTypeCellFactory<>(raw, this));
    }
    return col;
  }

  /**
   * Checks whether the value is null or an exact instance of this type's template class T
   * (resolved via reflection on the generic superclass).
   *
   * @return true for null, for a matching class, or when reflection fails (fail open)
   */
  public boolean checkValidValue(Object value) {
    if (value == null)
      return true;
    try {
      // get class of template T (value class)
      // FIX: use Class<?> instead of the raw Class type.
      Class<?> valueClass = (Class<?>) ((ParameterizedType) getClass().getGenericSuperclass())
          .getActualTypeArguments()[0];
      return value.getClass().equals(valueClass);
    } catch (Exception e) {
      logger.log(Level.WARNING, "Cannot reflect template class (value class)", e);
      // the check system is broken so better return true
      return true;
    }
  }

  /**
   * Casts the value to T if it passes {@link #checkValidValue(Object)}, otherwise null.
   */
  @SuppressWarnings("unchecked") // guarded by checkValidValue's reflective class check
  public T cast(Object value) {
    if (checkValidValue(value)) {
      return (T) value;
    }
    return null;
  }

  // TODO dirty hack to make this a "singleton"
  @Override
  public boolean equals(Object obj) {
    return obj != null && obj.getClass().equals(this.getClass());
  }

  @Override
  public int hashCode() {
    return getClass().hashCode();
  }
}
|
package net.blay09.mods.eirairc.handler;
import net.blay09.mods.eirairc.EiraIRC;
import net.blay09.mods.eirairc.api.IRCChannel;
import net.blay09.mods.eirairc.api.IRCConnection;
import net.blay09.mods.eirairc.api.IRCContext;
import net.blay09.mods.eirairc.api.IRCUser;
import net.blay09.mods.eirairc.api.bot.BotProfile;
import net.blay09.mods.eirairc.api.bot.IRCBot;
import net.blay09.mods.eirairc.api.event.RelayChat;
import net.blay09.mods.eirairc.command.base.IRCCommandHandler;
import net.blay09.mods.eirairc.config.ChannelConfig;
import net.blay09.mods.eirairc.config.CompatibilityConfig;
import net.blay09.mods.eirairc.config.DisplayConfig;
import net.blay09.mods.eirairc.config.ServerConfig;
import net.blay09.mods.eirairc.irc.IRCConnectionImpl;
import net.blay09.mods.eirairc.util.ConfigHelper;
import net.blay09.mods.eirairc.util.IRCFormatting;
import net.blay09.mods.eirairc.util.MessageFormat;
import net.blay09.mods.eirairc.util.Utils;
import net.minecraft.client.Minecraft;
import net.minecraft.command.ICommandSender;
import net.minecraft.command.server.CommandBroadcast;
import net.minecraft.command.server.CommandEmote;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.server.MinecraftServer;
import net.minecraft.util.ChatComponentText;
import net.minecraft.util.ChatComponentTranslation;
import net.minecraft.util.EnumChatFormatting;
import net.minecraft.util.IChatComponent;
import net.minecraftforge.event.CommandEvent;
import net.minecraftforge.event.ServerChatEvent;
import net.minecraftforge.event.entity.living.LivingDeathEvent;
import net.minecraftforge.event.entity.player.PlayerEvent;
import cpw.mods.fml.common.eventhandler.EventPriority;
import cpw.mods.fml.common.eventhandler.SubscribeEvent;
import cpw.mods.fml.common.gameevent.PlayerEvent.PlayerLoggedInEvent;
import cpw.mods.fml.common.gameevent.PlayerEvent.PlayerLoggedOutEvent;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
public class MCEventHandler {
@SubscribeEvent
public void onPlayerLogin(PlayerLoggedInEvent event) {
    // Relay the join message to every channel configured for join/leave relaying,
    // and send the player each channel's topic and (optionally) its user list.
    String joinMessage = Utils.getLocalizedMessage("irc.display.mc.joinMsg", Utils.getNickIRC(event.player));
    for (IRCConnection connection : EiraIRC.instance.getConnections()) {
        IRCBot bot = connection.getBot();
        ServerConfig serverConfig = ConfigHelper.getServerConfig(connection);
        for (IRCChannel channel : connection.getChannels()) {
            if (!bot.isReadOnly(channel) && bot.getBoolean(channel, BotProfile.KEY_RELAYMCJOINLEAVE, false)) {
                channel.message(joinMessage);
            }
            if (channel.getTopic() != null) {
                Utils.sendLocalizedMessage(event.player, "irc.display.irc.topic", channel.getName(), channel.getTopic());
            }
            ChannelConfig channelConfig = serverConfig.getChannelConfig(channel);
            if (channelConfig.isAutoWho()) {
                Utils.sendUserList(event.player, connection, channel);
            }
        }
    }
}
@SubscribeEvent
public void onServerCommand(CommandEvent event) {
    // Mirror /me emotes and /say broadcasts to the connected IRC channels.
    if (event.command instanceof CommandEmote) {
        if (!(event.sender instanceof EntityPlayer)) {
            return;
        }
        String emote = Utils.joinStrings(event.parameters, " ").trim();
        if (emote.isEmpty()) {
            return;
        }
        String mcAlias = Utils.getNickGame((EntityPlayer) event.sender);
        IChatComponent chatComponent = new ChatComponentText("* " + mcAlias + " " + emote);
        if (DisplayConfig.emoteColor != null) {
            chatComponent.getChatStyle().setColor(Utils.getColorFormatting(DisplayConfig.emoteColor));
        }
        Utils.addMessageToChat(chatComponent);
        if (!MinecraftServer.getServer().isSinglePlayer()) {
            relayChatServer(event.sender, emote, true, false, null);
        }
        event.setCanceled(true);
    } else if (event.command instanceof CommandBroadcast) {
        String broadcastText = Utils.joinStrings(event.parameters, " ");
        for (IRCConnection connection : EiraIRC.instance.getConnections()) {
            IRCBot bot = connection.getBot();
            for (IRCChannel channel : connection.getChannels()) {
                String ircMessage = MessageFormat.formatMessage(ConfigHelper.getDisplayFormat(bot.getDisplayFormat(channel)).ircBroadcastMessage, channel, event.sender, broadcastText, MessageFormat.Target.IRC, MessageFormat.Mode.Message);
                if (!bot.isReadOnly(channel) && bot.getBoolean(channel, BotProfile.KEY_RELAYBROADCASTS, true)) {
                    channel.message(ircMessage);
                }
            }
        }
    }
}
@SideOnly(Side.CLIENT)
public boolean onClientChat(String text) {
if(text.startsWith("/")) {
if(text.startsWith("/me") && text.length() > 3) {
return onClientEmote(text.substring(3));
}
return false;
}
EntityPlayer sender = Minecraft.getMinecraft().thePlayer;
if(EiraIRC.instance.getConnectionCount() > 0 && IRCCommandHandler.onChatCommand(sender, text, false)) {
return true;
}
if(CompatibilityConfig.clientBridge) {
relayChatClient(text, false, false, null, true);
return false;
}
String chatTarget = EiraIRC.instance.getChatSessionHandler().getChatTarget();
if(chatTarget == null) {
return false;
}
String[] target = chatTarget.split("/");
IRCConnection connection = EiraIRC.instance.getConnection(target[0]);
if(connection != null) {
IRCBot bot = connection.getBot();
IRCContext context;
IChatComponent chatComponent;
if(target[1].startsWith("
IRCChannel targetChannel = connection.getChannel(target[1]);
if(targetChannel == null) {
return true;
}
context = targetChannel;
chatComponent = MessageFormat.formatChatComponent(ConfigHelper.getDisplayFormat(bot.getDisplayFormat(targetChannel)).mcSendChannelMessage, context, sender, text, MessageFormat.Target.IRC, MessageFormat.Mode.Message);
} else {
IRCUser targetUser = connection.getUser(target[1]);
if(targetUser == null) {
return true;
}
context = targetUser;
chatComponent = MessageFormat.formatChatComponent(ConfigHelper.getDisplayFormat(bot.getDisplayFormat(targetUser)).mcSendPrivateMessage, context, sender, text, MessageFormat.Target.IRC, MessageFormat.Mode.Message);
}
relayChatClient(text, false, false, context, false);
Utils.addMessageToChat(chatComponent);
}
return true;
}
@SideOnly(Side.CLIENT)
public boolean onClientEmote(String text) {
EntityPlayer sender = Minecraft.getMinecraft().thePlayer;
if(CompatibilityConfig.clientBridge) {
relayChatClient(text, true, false, null, true);
return false;
}
String chatTarget = EiraIRC.instance.getChatSessionHandler().getChatTarget();
if(chatTarget == null) {
return false;
}
String[] target = chatTarget.split("/");
IRCConnection connection = EiraIRC.instance.getConnection(target[0]);
if(connection != null) {
IRCBot bot = connection.getBot();
IRCContext context;
EnumChatFormatting emoteColor;
IChatComponent chatComponent;
if(target[1].startsWith("
IRCChannel targetChannel = connection.getChannel(target[1]);
if(targetChannel == null) {
return true;
}
context = targetChannel;
emoteColor = Utils.getColorFormatting(ConfigHelper.getEmoteColor(targetChannel));
chatComponent = MessageFormat.formatChatComponent(ConfigHelper.getDisplayFormat(bot.getDisplayFormat(targetChannel)).mcSendChannelEmote, context, sender, text, MessageFormat.Target.IRC, MessageFormat.Mode.Emote);
} else {
IRCUser targetUser = connection.getUser(target[1]);
if(targetUser == null) {
return true;
}
context = targetUser;
emoteColor = Utils.getColorFormatting(ConfigHelper.getEmoteColor(targetUser));
chatComponent = MessageFormat.formatChatComponent(ConfigHelper.getDisplayFormat(bot.getDisplayFormat(targetUser)).mcSendPrivateEmote, context, sender, text, MessageFormat.Target.IRC, MessageFormat.Mode.Emote);
}
relayChatClient(text, true, false, context, false);
if(emoteColor != null) {
chatComponent.getChatStyle().setColor(emoteColor);
}
Utils.addMessageToChat(chatComponent);
}
return true;
}
@SubscribeEvent
public void onServerChat(ServerChatEvent event) {
    // Re-wrap the vanilla chat line so the sender's name carries its configured color.
    IChatComponent nameComponent = event.player.func_145748_c_();
    nameComponent.getChatStyle().setColor(Utils.getColorFormattingForPlayer(event.player));
    event.component = new ChatComponentTranslation("chat.type.text", nameComponent, event.message);
    if (MinecraftServer.getServer().isSinglePlayer()) {
        return; // nothing to relay in single-player
    }
    if (IRCCommandHandler.onChatCommand(event.player, event.message, true)) {
        // The message was an IRC command: swallow it so it never shows in chat.
        event.setCanceled(true);
        return;
    }
    relayChatServer(event.player, event.message, false, false, null);
}
/**
 * Event hook: forwards a client-side {@link RelayChat} event to the relay
 * logic, using bridge mode when {@code CompatibilityConfig.clientBridge} is set.
 */
@SubscribeEvent
@SideOnly(Side.CLIENT)
public void relayChatClient(RelayChat event) {
    relayChatClient(event.message, event.isEmote, event.isNotice, event.target, CompatibilityConfig.clientBridge);
}
private void relayChatClient(String message, boolean isEmote, boolean isNotice, IRCContext target, boolean clientBridge) {
if(target != null) {
IRCConnection connection = target.getConnection();
IRCBot bot = connection.getBot();
if(!bot.isReadOnly(target)) {
String ircMessage = message;
if(isEmote) {
ircMessage = IRCConnectionImpl.EMOTE_START + ircMessage + IRCConnectionImpl.EMOTE_END;
}
if(isNotice) {
target.notice(ircMessage);
} else {
target.message(ircMessage);
}
}
} else {
if(clientBridge) {
String ircMessage = message;
if(isEmote) {
ircMessage = IRCConnectionImpl.EMOTE_START + ircMessage + IRCConnectionImpl.EMOTE_END;
}
if(!CompatibilityConfig.clientBridgeMessageToken.isEmpty()) {
ircMessage = ircMessage + " " + CompatibilityConfig.clientBridgeMessageToken;
}
for(IRCConnection connection : EiraIRC.instance.getConnections()) {
IRCBot bot = connection.getBot();
for(IRCChannel channel : connection.getChannels()) {
if(!bot.isReadOnly(channel)) {
if(isNotice) {
channel.notice(ircMessage);
} else {
channel.message(ircMessage);
}
}
}
}
} else {
String chatTarget = EiraIRC.instance.getChatSessionHandler().getChatTarget();
if(chatTarget == null) {
return;
}
String[] targetArr = chatTarget.split("/");
IRCConnection connection = EiraIRC.instance.getConnection(targetArr[0]);
if(connection != null) {
IRCBot bot = connection.getBot();
IRCContext context;
if(targetArr[1].startsWith("
IRCChannel targetChannel = connection.getChannel(targetArr[1]);
if(targetChannel == null) {
return;
}
context = targetChannel;
} else {
IRCUser targetUser = connection.getUser(targetArr[1]);
if (targetUser == null) {
return;
}
context = targetUser;
}
if(!bot.isReadOnly(context)) {
String ircMessage = message;
if(isEmote) {
ircMessage = IRCConnectionImpl.EMOTE_START + ircMessage + IRCConnectionImpl.EMOTE_END;
}
if(isNotice) {
context.notice(ircMessage);
} else {
context.message(ircMessage);
}
}
}
}
}
}
/**
 * Event hook: forwards a server-side {@link RelayChat} event to the relay logic.
 */
@SubscribeEvent
@SideOnly(Side.SERVER)
public void relayChatServer(RelayChat event) {
    relayChatServer(event.sender, event.message, event.isEmote, event.isNotice, event.target);
}
/**
 * Formats and relays a server-side chat message, emote, or notice to IRC.
 *
 * @param sender   the command sender (player or console) the message is attributed to.
 * @param message  the raw chat text.
 * @param isEmote  whether the text is an emote (formatted and wrapped in emote markers).
 * @param isNotice whether to send as an IRC NOTICE instead of PRIVMSG.
 * @param target   explicit channel/user target, or null to broadcast to every
 *                 writable channel on every connection.
 */
private void relayChatServer(ICommandSender sender, String message, boolean isEmote, boolean isNotice, IRCContext target) {
    if (target != null) {
        IRCConnection connection = target.getConnection();
        IRCBot bot = connection.getBot();
        if (!bot.isReadOnly(target)) {
            String format = MessageFormat.getMessageFormat(bot, target, isEmote);
            String ircMessage = MessageFormat.formatMessage(format, target, sender, message, MessageFormat.Target.IRC, (isEmote ? MessageFormat.Mode.Emote : MessageFormat.Mode.Message));
            if (isEmote) {
                ircMessage = IRCConnectionImpl.EMOTE_START + ircMessage + IRCConnectionImpl.EMOTE_END;
            }
            if (isNotice) {
                target.notice(ircMessage);
            } else {
                target.message(ircMessage);
            }
        }
    } else {
        // Broadcast: send to every channel on every connection.
        for (IRCConnection connection : EiraIRC.instance.getConnections()) {
            IRCBot bot = connection.getBot();
            for (IRCChannel channel : connection.getChannels()) {
                // Check read-only first so we don't format a message that
                // the channel would never receive anyway.
                if (bot.isReadOnly(channel)) {
                    continue;
                }
                String format = MessageFormat.getMessageFormat(bot, channel, isEmote);
                String ircMessage = MessageFormat.formatMessage(format, channel, sender, message, MessageFormat.Target.IRC, (isEmote ? MessageFormat.Mode.Emote : MessageFormat.Mode.Message));
                if (isEmote) {
                    ircMessage = IRCConnectionImpl.EMOTE_START + ircMessage + IRCConnectionImpl.EMOTE_END;
                }
                if (isNotice) {
                    channel.notice(ircMessage);
                } else {
                    channel.message(ircMessage);
                }
            }
        }
    }
}
@SubscribeEvent
public void onPlayerDeath(LivingDeathEvent event) {
    // Relays player death messages (with the player's IRC nick substituted)
    // to every channel that opted into death-message relaying.
    if (event.entityLiving instanceof EntityPlayer) {
        String name = Utils.getNickIRC((EntityPlayer) event.entityLiving);
        String ircMessage = event.entityLiving.func_110142_aN().func_151521_b().getUnformattedText();
        // Literal replace() instead of replaceAll(): replaceAll treats its
        // arguments as a regex pattern and a '$'-replacement string, which
        // would misbehave on names containing special characters.
        ircMessage = ircMessage.replace(event.entityLiving.getCommandSenderName(), name);
        ircMessage = IRCFormatting.toIRC(ircMessage, !DisplayConfig.enableIRCColors);
        for (IRCConnection connection : EiraIRC.instance.getConnections()) {
            IRCBot bot = connection.getBot();
            for (IRCChannel channel : connection.getChannels()) {
                if (!bot.isReadOnly(channel) && bot.getBoolean(channel, BotProfile.KEY_RELAYDEATHMESSAGES, false)) {
                    channel.message(ircMessage);
                }
            }
        }
    }
}
// Highest priority so the configured nickname is what other handlers and
// mods see as the player's display name.
@SubscribeEvent(priority = EventPriority.HIGHEST)
public void onPlayerNameFormat(PlayerEvent.NameFormat event) {
    event.displayname = Utils.getNickGame(event.entityPlayer);
}
@SubscribeEvent
public void onPlayerLogout(PlayerLoggedOutEvent event) {
    // Announce the disconnect to every channel that opted into join/leave relaying.
    String ircNick = Utils.getNickIRC(event.player);
    String partMessage = Utils.getLocalizedMessage("irc.display.mc.partMsg", ircNick);
    for (IRCConnection connection : EiraIRC.instance.getConnections()) {
        IRCBot bot = connection.getBot();
        for (IRCChannel channel : connection.getChannels()) {
            boolean writable = !bot.isReadOnly(channel);
            if (writable && bot.getBoolean(channel, BotProfile.KEY_RELAYMCJOINLEAVE, false)) {
                channel.message(partMessage);
            }
        }
    }
}
public void onPlayerNickChange(String oldNick, String newNick) {
    // Show the rename locally, then relay it to every writable channel.
    String announcement = Utils.getLocalizedMessage("irc.display.mc.nickChange", oldNick, newNick);
    Utils.addMessageToChat(announcement);
    for (IRCConnection connection : EiraIRC.instance.getConnections()) {
        IRCBot bot = connection.getBot();
        for (IRCChannel channel : connection.getChannels()) {
            if (bot.isReadOnly(channel)) {
                continue;
            }
            channel.message(announcement);
        }
    }
}
}
|
package net.darkhax.bookshelf.handler;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import com.google.gson.stream.JsonReader;
import net.darkhax.bookshelf.lib.Constants;
import net.darkhax.bookshelf.lib.util.RenderUtils;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.util.ResourceLocation;
import net.minecraft.util.text.translation.I18n;
/**
 * This class is used to handle my supporters data. This class is not intended for other mod
 * authors to access in their code. If you want, you can copy and modify this code to add
 * support for your own supporters and rewards.
 */
public class SupporterHandler {

    /** Fallback cape texture used while (or if) a custom cape cannot be downloaded. */
    private static final ResourceLocation MISSING_CAPE = new ResourceLocation("bookshelf", "textures/entity/player/missing_cape.png");

    /** Fallback elytra texture used while (or if) a custom elytra cannot be downloaded. */
    private static final ResourceLocation MISSING_ELYTRA = new ResourceLocation("bookshelf", "textures/entity/player/missing_elytra.png");

    /** All supporter entries parsed from the remote manifest. */
    private static final List<SupporterData> DATA = new ArrayList<SupporterData>();

    /** Location of the JSON manifest listing all supporters. */
    private static final String SUPPORTER_URL = "https://raw.githubusercontent.com/Darkhax-Minecraft/Bookshelf/master/supporters.json";

    /**
     * Downloads and parses the supporter manifest, appending every entry to the
     * supporter list. Unknown JSON fields are skipped so newer manifests do not
     * break older clients. Failures are logged; entries read before a failure
     * remain in the list.
     */
    public static void readSupporterData () {

        try {
            // Reads the json file and makes it usable.
            final HttpURLConnection connection = (HttpURLConnection) new URL(SUPPORTER_URL).openConnection();

            // try-with-resources guarantees the reader (and underlying stream)
            // is closed even when parsing fails part-way through. The manifest
            // is UTF-8 encoded; state the charset explicitly rather than
            // relying on the platform default.
            try (final JsonReader reader = new JsonReader(new InputStreamReader(connection.getInputStream(), "UTF-8"))) {

                reader.beginObject();

                // Starts a loop through all of the player entries.
                while (reader.hasNext()) {

                    // Gets the name of the next player entry and skips it.
                    reader.nextName();

                    UUID playerID = null;
                    String type = null;
                    boolean confirmed = false;
                    boolean wantsHead = true;
                    boolean wantsWawla = false;
                    String elytraTexture = null;
                    String capeTexture = null;

                    // Opens up the player entry object
                    reader.beginObject();

                    // Loops through the values in the newly opened object
                    while (reader.hasNext()) {

                        final String name = reader.nextName();

                        switch (name) {
                            case "playerID":
                                playerID = UUID.fromString(reader.nextString());
                                break;
                            case "type":
                                type = reader.nextString();
                                break;
                            case "confirmed":
                                confirmed = reader.nextBoolean();
                                break;
                            case "wantHead":
                                wantsHead = reader.nextBoolean();
                                break;
                            case "wantWawla":
                                wantsWawla = reader.nextBoolean();
                                break;
                            case "elytraTexture":
                                elytraTexture = reader.nextString();
                                break;
                            case "capeTexture":
                                capeTexture = reader.nextString();
                                break;
                            default:
                                // Skips values we don't care about so they don't break us.
                                reader.skipValue();
                        }
                    }

                    // Adds the data that was read to the list.
                    DATA.add(new SupporterData(playerID, type, confirmed, wantsHead, wantsWawla, elytraTexture, capeTexture));

                    // Ends the current player object, allowing us to read the next player object.
                    reader.endObject();
                }

                // Exits out of the root json object.
                reader.endObject();
            }
        }

        // MalformedURLException is a subclass of IOException, so a single
        // handler covers both the bad-URL and network/parse failure cases.
        catch (final IOException e) {

            Constants.LOG.error("Could not access supporter data. " + e.getMessage());
        }
    }

    /**
     * Checks if a player is a valid supporter.
     *
     * @param player The player to check for.
     * @return Whether or not the player is a supporter.
     */
    public static boolean isSupporter (EntityPlayer player) {

        // Delegates to getSupporterData so the lookup logic lives in one place.
        return getSupporterData(player) != null;
    }

    /**
     * Attempts to get the supporter data for a player.
     *
     * @param player The player to get data for.
     * @return The supporter data, or null if the player is not a supporter.
     */
    public static SupporterData getSupporterData (EntityPlayer player) {

        for (final SupporterData supporter : DATA) {
            if (supporter.getPlayerID().equals(player.getUniqueID())) {
                return supporter;
            }
        }

        return null;
    }

    /**
     * Gets an array containing all entries in the list of supporters.
     *
     * @return An array of all supporter entries.
     */
    public static SupporterData[] getSupporters () {

        return DATA.toArray(new SupporterData[0]);
    }

    public static class SupporterData {

        /** The unique identifier for the supporter player. */
        private final UUID playerID;

        /** The type of supporter. Currently supports dev, patreon and contributor. */
        private final String type;

        /** Whether or not this supporter has completed a payment. */
        private final boolean confirmed;

        /** Whether or not this supporter wants their head to show up in the creative tab. */
        private final boolean wantsHead;

        /** Whether or not this supporter wants additional tooltip info when using Wawla on them. */
        private final boolean wantsWawla;

        /** The URL for the custom Elytra texture. */
        private final String elytraTexture;

        /** The URL for the cape texture for the supporter. */
        private final String capeTexture;

        protected SupporterData(UUID playerID, String type, boolean confirmed, boolean wantsHead, boolean wantsWawla, String elytraTexture, String capeTexture) {

            this.playerID = playerID;
            this.type = type;
            this.confirmed = confirmed;
            this.wantsHead = wantsHead;
            this.wantsWawla = wantsWawla;
            this.elytraTexture = elytraTexture;
            this.capeTexture = capeTexture;
        }

        /**
         * Gets the supporter's unique identifier.
         *
         * @return The supporter's unique identifier.
         */
        public UUID getPlayerID () {

            return this.playerID;
        }

        /**
         * Checks if the supporter has completed a payment.
         *
         * @return Whether or not the supporter has completed a payment.
         */
        public boolean isConfirmed () {

            return this.confirmed;
        }

        /**
         * Checks if the supporter wants their head in the skull creative tab.
         *
         * @return Whether or not the supporter wants their head in the creative tab.
         */
        public boolean wantsHead () {

            return this.wantsHead;
        }

        /**
         * Checks if the supporter wants a supporter tooltip when using Wawla.
         *
         * @return Whether or not the supporter wants the tooltip.
         */
        public boolean wantsWawla () {

            return this.wantsWawla;
        }

        /**
         * Gets the URL for the custom Cape texture for the supporter.
         *
         * @return A url that points to the custom Cape texture that the supporter wants.
         */
        public String getCapeTextureURL () {

            return this.capeTexture;
        }

        /**
         * Gets the URL for the custom Elytra texture for the supporter.
         *
         * @return A url that points to the custom Elytra texture that the supporter wants.
         */
        public String getElytraTextureURL () {

            return this.elytraTexture;
        }

        /**
         * Gets the custom Cape texture for the supporter.
         *
         * @return A ResourceLocation that points to the custom Cape texture that the supporter
         *         wants, or null when no cape texture was configured.
         */
        public ResourceLocation getCapeTexture () {

            return this.capeTexture != null && !this.capeTexture.isEmpty() ? RenderUtils.downloadResourceLocation(this.capeTexture, new ResourceLocation("bookshelf", "cape/" + this.playerID.toString()), MISSING_CAPE, null) : null;
        }

        /**
         * Gets the custom Elytra texture for the supporter.
         *
         * @return A ResourceLocation that points to the custom Elytra texture that the
         *         supporter wants, or null when no elytra texture was configured.
         */
        public ResourceLocation getElytraTexture () {

            return this.elytraTexture != null && !this.elytraTexture.isEmpty() ? RenderUtils.downloadResourceLocation(this.elytraTexture, new ResourceLocation("bookshelf", "elytra/" + this.getPlayerID().toString()), MISSING_ELYTRA, null) : null;
        }

        /**
         * Gets the type of supporter. Currently dev, contributor, patreon and other.
         *
         * @return The type of supporter.
         */
        public String getType () {

            return this.type;
        }

        /**
         * Gets the localized name, including colors and other formatting.
         *
         * @return The localized display name for the supporter type.
         */
        public String getLocalizedType () {

            return I18n.translateToLocal("supporters." + this.type);
        }
    }
}
|
package net.floodlightcontroller.flowcache;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import net.floodlightcontroller.core.IFloodlightProviderService;
import net.floodlightcontroller.core.INetMapStorage;
import net.floodlightcontroller.core.INetMapTopologyObjects.IFlowEntry;
import net.floodlightcontroller.core.INetMapTopologyObjects.IFlowPath;
import net.floodlightcontroller.core.INetMapTopologyObjects.IPortObject;
import net.floodlightcontroller.core.INetMapTopologyObjects.ISwitchObject;
import net.floodlightcontroller.core.INetMapTopologyService.ITopoRouteService;
import net.floodlightcontroller.core.IOFSwitch;
import net.floodlightcontroller.core.module.FloodlightModuleContext;
import net.floodlightcontroller.core.module.FloodlightModuleException;
import net.floodlightcontroller.core.module.IFloodlightModule;
import net.floodlightcontroller.core.module.IFloodlightService;
import net.floodlightcontroller.flowcache.web.FlowWebRoutable;
import net.floodlightcontroller.restserver.IRestApiService;
import net.floodlightcontroller.util.CallerId;
import net.floodlightcontroller.util.DataPath;
import net.floodlightcontroller.util.DataPathEndpoints;
import net.floodlightcontroller.util.Dpid;
import net.floodlightcontroller.util.FlowEntry;
import net.floodlightcontroller.util.FlowEntryAction;
import net.floodlightcontroller.util.FlowEntryId;
import net.floodlightcontroller.util.FlowEntryMatch;
import net.floodlightcontroller.util.FlowEntrySwitchState;
import net.floodlightcontroller.util.FlowEntryUserState;
import net.floodlightcontroller.util.FlowId;
import net.floodlightcontroller.util.FlowPath;
import net.floodlightcontroller.util.IPv4Net;
import net.floodlightcontroller.util.MACAddress;
import net.floodlightcontroller.util.OFMessageDamper;
import net.floodlightcontroller.util.Port;
import net.floodlightcontroller.util.SwitchPort;
import net.onrc.onos.util.GraphDBConnection;
import net.onrc.onos.util.GraphDBConnection.Transaction;
import org.openflow.protocol.OFFlowMod;
import org.openflow.protocol.OFMatch;
import org.openflow.protocol.OFPacketOut;
import org.openflow.protocol.OFPort;
import org.openflow.protocol.OFType;
import org.openflow.protocol.action.OFAction;
import org.openflow.protocol.action.OFActionOutput;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class FlowManager implements IFloodlightModule, IFlowService, INetMapStorage {

    /** Connection to the Network MAP graph database. */
    public GraphDBConnection conn;

    protected IRestApiService restApi;
    protected volatile IFloodlightProviderService floodlightProvider;
    protected volatile ITopoRouteService topoRouteService;
    protected FloodlightModuleContext context;
    protected OFMessageDamper messageDamper;

    // TODO: Values copied from elsewhere (class LearningSwitch).
    // The local copy should go away!
    protected static final int OFMESSAGE_DAMPER_CAPACITY = 50000; // TODO: find sweet spot
    protected static final int OFMESSAGE_DAMPER_TIMEOUT = 250;
    public static final short FLOWMOD_DEFAULT_IDLE_TIMEOUT = 0; // infinity
    public static final short FLOWMOD_DEFAULT_HARD_TIMEOUT = 0; // infinite
    public static final short PRIORITY_DEFAULT = 100;

    // Flow Entry ID generation state: random 32-bit prefix plus a
    // sequential 32-bit suffix (see getNextFlowEntryId()).
    private static Random randomGenerator = new Random();
    private static int nextFlowEntryIdPrefix = 0;
    private static int nextFlowEntryIdSuffix = 0;
    private static long nextFlowEntryId = 0; // NOTE(review): appears unused in this part of the file
    private static long measurementFlowId = 100000;
    private static String measurementFlowIdStr = "0x186a0"; // 100000
    private long modifiedMeasurementFlowTime = 0;

    /** The logger. */
    private static Logger log = LoggerFactory.getLogger(FlowManager.class);

    // The periodic task(s); started in startUp(), defined below.
    private ScheduledExecutorService mapReaderScheduler;
    private ScheduledExecutorService shortestPathReconcileScheduler;
// Periodic task: scans all Flow Entries in the Network MAP and pushes the
// ones owned by this controller (state FE_SWITCH_NOT_UPDATED) into the local
// switches. Entries marked FE_USER_DELETE are uninstalled first and then
// removed from the MAP; all remaining work is committed in one transaction.
final Runnable mapReader = new Runnable() {
    public void run() {
        long startTime = System.nanoTime();
        int counterAllFlowEntries = 0;
        int counterMyNotUpdatedFlowEntries = 0;
        if (floodlightProvider == null) {
            log.debug("FloodlightProvider service not found!");
            return;
        }
        Map<Long, IOFSwitch> mySwitches =
            floodlightProvider.getSwitches();
        LinkedList<IFlowEntry> addFlowEntries =
            new LinkedList<IFlowEntry>();
        LinkedList<IFlowEntry> deleteFlowEntries =
            new LinkedList<IFlowEntry>();
        // Fetch all Flow Entries and select only my Flow Entries
        // that need to be updated into the switches.
        // NOTE(review): this flag is never set below (the code that set it
        // is commented out), so the measurement branch at the end is dead.
        boolean processed_measurement_flow = false;
        Iterable<IFlowEntry> allFlowEntries =
            conn.utils().getAllFlowEntries(conn);
        for (IFlowEntry flowEntryObj : allFlowEntries) {
            counterAllFlowEntries++;
            String switchState = flowEntryObj.getSwitchState();
            if ((switchState == null) ||
                (! switchState.equals("FE_SWITCH_NOT_UPDATED"))) {
                continue; // Ignore the entry: nothing to do
            }
            String dpidStr = flowEntryObj.getSwitchDpid();
            if (dpidStr == null)
                continue;
            Dpid dpid = new Dpid(dpidStr);
            IOFSwitch mySwitch = mySwitches.get(dpid.value());
            if (mySwitch == null)
                continue; // Ignore the entry: not my switch
            IFlowPath flowObj =
                conn.utils().getFlowPathByFlowEntry(conn, flowEntryObj);
            if (flowObj == null)
                continue; // Should NOT happen
            if (flowObj.getFlowId() == null)
                continue; // Invalid entry
            // NOTE: For now we process the DELETE before the ADD
            // to cover the more common scenario.
            // TODO: This is error prone and needs to be fixed!
            String userState = flowEntryObj.getUserState();
            if (userState == null)
                continue;
            if (userState.equals("FE_USER_DELETE")) {
                // An entry that needs to be deleted: uninstall it from the
                // switch now; it is purged from the MAP further below.
                deleteFlowEntries.add(flowEntryObj);
                installFlowEntry(mySwitch, flowObj, flowEntryObj);
            } else {
                addFlowEntries.add(flowEntryObj);
            }
            counterMyNotUpdatedFlowEntries++;
            // Code for measurement purpose
            // TODO: Commented-out for now
            /*
            {
                if (flowObj.getFlowId().equals(measurementFlowIdStr)) {
                    processed_measurement_flow = true;
                }
            }
            */
        }
        // Process the Flow Entries that need to be added
        for (IFlowEntry flowEntryObj : addFlowEntries) {
            IFlowPath flowObj =
                conn.utils().getFlowPathByFlowEntry(conn,
                                                    flowEntryObj);
            if (flowObj == null)
                continue; // Should NOT happen
            if (flowObj.getFlowId() == null)
                continue; // Invalid entry
            Dpid dpid = new Dpid(flowEntryObj.getSwitchDpid());
            IOFSwitch mySwitch = mySwitches.get(dpid.value());
            if (mySwitch == null)
                continue; // Shouldn't happen
            installFlowEntry(mySwitch, flowObj, flowEntryObj);
        }
        // Delete all Flow Entries marked for deletion from the
        // Network MAP.
        // TODO: We should use the OpenFlow Barrier mechanism
        // to check for errors, and delete the Flow Entries after the
        // Barrier message is received.
        while (! deleteFlowEntries.isEmpty()) {
            IFlowEntry flowEntryObj = deleteFlowEntries.poll();
            IFlowPath flowObj =
                conn.utils().getFlowPathByFlowEntry(conn, flowEntryObj);
            if (flowObj == null) {
                log.debug("Did not find FlowPath to be deleted");
                continue;
            }
            flowObj.removeFlowEntry(flowEntryObj);
            conn.utils().removeFlowEntry(conn, flowEntryObj);
        }
        conn.endTx(Transaction.COMMIT);
        if (processed_measurement_flow) {
            long estimatedTime =
                System.nanoTime() - modifiedMeasurementFlowTime;
            String logMsg = "MEASUREMENT: Pushed Flow delay: " +
                (double)estimatedTime / 1000000000 + " sec";
            log.debug(logMsg);
        }
        long estimatedTime = System.nanoTime() - startTime;
        double rate = 0.0;
        if (estimatedTime > 0)
            rate = ((double)counterAllFlowEntries * 1000000000) / estimatedTime;
        String logMsg = "MEASUREMENT: Processed AllFlowEntries: " +
            counterAllFlowEntries + " MyNotUpdatedFlowEntries: " +
            counterMyNotUpdatedFlowEntries + " in " +
            (double)estimatedTime / 1000000000 + " sec: " +
            rate + " paths/s";
        log.debug(logMsg);
    }
};
// Periodic task: recomputes the shortest path for every Flow Path whose
// source switch belongs to this controller and reconciles the stored path
// when the topology-derived path differs. Also completes cleanup of
// user-deleted flows once all of their Flow Entries are gone.
final Runnable shortestPathReconcile = new Runnable() {
    public void run() {
        long startTime = System.nanoTime();
        int counterAllFlowPaths = 0;
        int counterMyFlowPaths = 0;
        if (floodlightProvider == null) {
            log.debug("FloodlightProvider service not found!");
            return;
        }
        Map<Long, IOFSwitch> mySwitches =
            floodlightProvider.getSwitches();
        LinkedList<IFlowPath> deleteFlows = new LinkedList<IFlowPath>();
        // NOTE(review): this flag is never set in this method, so the
        // measurement branch near the end is dead.
        boolean processed_measurement_flow = false;
        // Fetch and recompute the Shortest Path for those
        // Flow Paths this controller is responsible for.
        topoRouteService.prepareShortestPathTopo();
        Iterable<IFlowPath> allFlowPaths = conn.utils().getAllFlowPaths(conn);
        for (IFlowPath flowPathObj : allFlowPaths) {
            counterAllFlowPaths++;
            if (flowPathObj == null)
                continue;
            String srcDpidStr = flowPathObj.getSrcSwitch();
            if (srcDpidStr == null)
                continue;
            Dpid srcDpid = new Dpid(srcDpidStr);
            // Use the source DPID as a heuristic to decide
            // which controller is responsible for maintaining the
            // shortest path.
            // NOTE: This heuristic is error-prone: if the switch
            // goes away and no controller is responsible for that
            // switch, then the original Flow Path is not cleaned-up
            IOFSwitch mySwitch = mySwitches.get(srcDpid.value());
            if (mySwitch == null)
                continue; // Ignore: not my responsibility
            // Test the Data Path Summary string
            String dataPathSummaryStr = flowPathObj.getDataPathSummary();
            if (dataPathSummaryStr == null)
                continue; // Could be invalid entry?
            if (dataPathSummaryStr.isEmpty())
                continue; // No need to maintain this flow
            // Test whether we need to complete the Flow cleanup,
            // if the Flow has been deleted by the user.
            String flowUserState = flowPathObj.getUserState();
            if ((flowUserState != null)
                && flowUserState.equals("FE_USER_DELETE")) {
                Iterable<IFlowEntry> flowEntries = flowPathObj.getFlowEntries();
                // Detect an empty Iterable without a size() method.
                boolean empty = true; // TODO: an ugly hack
                for (IFlowEntry flowEntryObj : flowEntries) {
                    empty = false;
                    break;
                }
                if (empty)
                    deleteFlows.add(flowPathObj);
            }
            // Fetch the fields needed to recompute the shortest path
            Short srcPortShort = flowPathObj.getSrcPort();
            String dstDpidStr = flowPathObj.getDstSwitch();
            Short dstPortShort = flowPathObj.getDstPort();
            if ((srcPortShort == null) ||
                (dstDpidStr == null) ||
                (dstPortShort == null)) {
                continue;
            }
            Port srcPort = new Port(srcPortShort);
            Dpid dstDpid = new Dpid(dstDpidStr);
            Port dstPort = new Port(dstPortShort);
            SwitchPort srcSwitchPort = new SwitchPort(srcDpid, srcPort);
            SwitchPort dstSwitchPort = new SwitchPort(dstDpid, dstPort);
            counterMyFlowPaths++;
            // NOTE: Using here the regular getShortestPath() method
            // won't work here, because that method calls internally
            // "conn.endTx(Transaction.COMMIT)", and that will
            // invalidate all handlers to the Titan database.
            // If we want to experiment with calling here
            // getShortestPath(), we need to refactor that code
            // to avoid closing the transaction.
            DataPath dataPath =
                topoRouteService.getTopoShortestPath(srcSwitchPort,
                                                     dstSwitchPort);
            if (dataPath == null) {
                // We need the DataPath to compare the paths
                dataPath = new DataPath();
                dataPath.setSrcPort(srcSwitchPort);
                dataPath.setDstPort(dstSwitchPort);
            }
            String newDataPathSummaryStr = dataPath.dataPathSummary();
            if (dataPathSummaryStr.equals(newDataPathSummaryStr))
                continue; // Nothing changed
            reconcileFlow(flowPathObj, dataPath);
        }
        // Delete all leftover Flows marked for deletion from the
        // Network MAP.
        while (! deleteFlows.isEmpty()) {
            IFlowPath flowPathObj = deleteFlows.poll();
            conn.utils().removeFlowPath(conn, flowPathObj);
        }
        topoRouteService.dropShortestPathTopo();
        conn.endTx(Transaction.COMMIT);
        if (processed_measurement_flow) {
            long estimatedTime =
                System.nanoTime() - modifiedMeasurementFlowTime;
            String logMsg = "MEASUREMENT: Pushed Flow delay: " +
                (double)estimatedTime / 1000000000 + " sec";
            log.debug(logMsg);
        }
        long estimatedTime = System.nanoTime() - startTime;
        double rate = 0.0;
        if (estimatedTime > 0)
            rate = ((double)counterAllFlowPaths * 1000000000) / estimatedTime;
        String logMsg = "MEASUREMENT: Processed AllFlowPaths: " +
            counterAllFlowPaths + " MyFlowPaths: " +
            counterMyFlowPaths + " in " +
            (double)estimatedTime / 1000000000 + " sec: " +
            rate + " paths/s";
        log.debug(logMsg);
    }
};
//final ScheduledFuture<?> mapReaderHandle =
//mapReaderScheduler.scheduleAtFixedRate(mapReader, 3, 3, TimeUnit.SECONDS);
//final ScheduledFuture<?> shortestPathReconcileHandle =
//shortestPathReconcileScheduler.scheduleAtFixedRate(shortestPathReconcile, 3, 3, TimeUnit.SECONDS);
/**
 * Opens the Network MAP graph database connection.
 *
 * @param conf path to the graph database configuration file.
 */
@Override
public void init(String conf) {
    conn = GraphDBConnection.getInstance(conf);
}
// NOTE(review): finalizers are unreliable (they may never run); an explicit
// shutdown hook would be safer. Kept as-is to preserve existing behavior.
public void finalize() {
    close();
}
/**
 * Closes the Network MAP graph database connection.
 */
@Override
public void close() {
    conn.close();
}
@Override
public Collection<Class<? extends IFloodlightService>> getModuleServices() {
    // This module exposes exactly one service: the Flow service.
    Collection<Class<? extends IFloodlightService>> services =
        new ArrayList<Class<? extends IFloodlightService>>(1);
    services.add(IFlowService.class);
    return services;
}
@Override
public Map<Class<? extends IFloodlightService>, IFloodlightService>
    getServiceImpls() {
    // This instance implements the single service it exposes.
    Map<Class<? extends IFloodlightService>, IFloodlightService> impls =
        new HashMap<Class<? extends IFloodlightService>, IFloodlightService>();
    impls.put(IFlowService.class, this);
    return impls;
}
@Override
public Collection<Class<? extends IFloodlightService>>
    getModuleDependencies() {
    // Services that must be available before this module initializes.
    Collection<Class<? extends IFloodlightService>> dependencies =
        new ArrayList<Class<? extends IFloodlightService>>();
    dependencies.add(IFloodlightProviderService.class);
    dependencies.add(ITopoRouteService.class);
    dependencies.add(IRestApiService.class);
    return dependencies;
}
/**
 * Initializes the module: resolves service dependencies, creates the
 * OpenFlow message damper, opens the Network MAP connection, and creates
 * (but does not yet start) the periodic task schedulers.
 *
 * @param context the Floodlight module context used for service lookup.
 * @throws FloodlightModuleException if module initialization fails.
 */
@Override
public void init(FloodlightModuleContext context)
    throws FloodlightModuleException {
    this.context = context;
    floodlightProvider = context.getServiceImpl(IFloodlightProviderService.class);
    topoRouteService = context.getServiceImpl(ITopoRouteService.class);
    restApi = context.getServiceImpl(IRestApiService.class);
    messageDamper = new OFMessageDamper(OFMESSAGE_DAMPER_CAPACITY,
                                        EnumSet.of(OFType.FLOW_MOD),
                                        OFMESSAGE_DAMPER_TIMEOUT);
    // TODO: An ugly hack!
    // The graph database configuration path is hard-coded here.
    String conf = "/tmp/cassandra.titan";
    this.init(conf);
    mapReaderScheduler = Executors.newScheduledThreadPool(1);
    shortestPathReconcileScheduler = Executors.newScheduledThreadPool(1);
}
/**
 * Allocates the next Flow Entry ID.
 *
 * NOTE: For now the higher 32 bits are random and the lower 32 bits are
 * sequential; a new random prefix is drawn whenever the suffix wraps.
 * In the future, we need a better allocation mechanism.
 */
private long getNextFlowEntryId() {
    boolean suffixExhausted =
        (nextFlowEntryIdSuffix & 0xffffffffL) == 0xffffffffL;
    if (suffixExhausted) {
        // Restart the sequential counter under a fresh random prefix.
        nextFlowEntryIdPrefix = randomGenerator.nextInt();
        nextFlowEntryIdSuffix = 0;
    } else {
        nextFlowEntryIdSuffix++;
    }
    long prefixBits = ((long) nextFlowEntryIdPrefix) << 32;
    long suffixBits = nextFlowEntryIdSuffix & 0xffffffffL;
    return prefixBits | suffixBits;
}
/**
 * Starts the module: registers the REST API routes, seeds the Flow Entry
 * ID generator, and starts the two periodic background tasks.
 *
 * @param context the Floodlight module context.
 */
@Override
public void startUp(FloodlightModuleContext context) {
    restApi.addRestletRoutable(new FlowWebRoutable());
    // Initialize the Flow Entry ID generator
    nextFlowEntryIdPrefix = randomGenerator.nextInt();
    mapReaderScheduler.scheduleAtFixedRate(
        mapReader, 3, 3, TimeUnit.SECONDS);
    shortestPathReconcileScheduler.scheduleAtFixedRate(
        shortestPathReconcile, 3, 3, TimeUnit.SECONDS);
}
/**
 * Add a flow.
 *
 * Internally, ONOS will automatically register the installer for
 * receiving Flow Path Notifications for that path.
 *
 * @param flowPath the Flow Path to install.
 * @param flowId the return-by-reference Flow ID as assigned internally.
 * @param dataPathSummaryStr the data path summary string if the added
 * flow will be maintained internally, otherwise null.
 * @return true on success, otherwise false.
 */
@Override
public boolean addFlow(FlowPath flowPath, FlowId flowId,
                       String dataPathSummaryStr) {
    /*
     * TODO: Commented-out for now
    if (flowPath.flowId().value() == measurementFlowId) {
        modifiedMeasurementFlowTime = System.nanoTime();
    }
    */
    IFlowPath flowObj = null;
    boolean found = false;
    try {
        // Reuse an existing FlowPath vertex with this Flow ID if one
        // exists; otherwise create a fresh one.
        if ((flowObj = conn.utils().searchFlowPath(conn, flowPath.flowId()))
            != null) {
            log.debug("Adding FlowPath with FlowId {}: found existing FlowPath",
                      flowPath.flowId().toString());
            found = true;
        } else {
            flowObj = conn.utils().newFlowPath(conn);
            log.debug("Adding FlowPath with FlowId {}: creating new FlowPath",
                      flowPath.flowId().toString());
        }
    } catch (Exception e) {
        // TODO: handle exceptions
        // NOTE(review): execution falls through after the rollback; the
        // flowObj == null check below is what actually aborts the call.
        conn.endTx(Transaction.ROLLBACK);
        log.error(":addFlow FlowId:{} failed",
                  flowPath.flowId().toString());
    }
    if (flowObj == null) {
        log.error(":addFlow FlowId:{} failed: Flow object not created",
                  flowPath.flowId().toString());
        conn.endTx(Transaction.ROLLBACK);
        return false;
    }
    // Set the Flow key:
    // - flowId
    flowObj.setFlowId(flowPath.flowId().toString());
    flowObj.setType("flow");
    // Set the Flow attributes:
    // - flowPath.installerId()
    // - flowPath.dataPath().srcPort()
    // - flowPath.dataPath().dstPort()
    // - flowPath.matchEthernetFrameType()
    // - flowPath.matchSrcIPv4Net()
    // - flowPath.matchDstIPv4Net()
    // - flowPath.matchSrcMac()
    // - flowPath.matchDstMac()
    flowObj.setInstallerId(flowPath.installerId().toString());
    flowObj.setSrcSwitch(flowPath.dataPath().srcPort().dpid().toString());
    flowObj.setSrcPort(flowPath.dataPath().srcPort().port().value());
    flowObj.setDstSwitch(flowPath.dataPath().dstPort().dpid().toString());
    flowObj.setDstPort(flowPath.dataPath().dstPort().port().value());
    // Only persist the match fields that are actually set on this flow.
    if (flowPath.flowEntryMatch().matchEthernetFrameType()) {
        flowObj.setMatchEthernetFrameType(flowPath.flowEntryMatch().ethernetFrameType());
    }
    if (flowPath.flowEntryMatch().matchSrcIPv4Net()) {
        flowObj.setMatchSrcIPv4Net(flowPath.flowEntryMatch().srcIPv4Net().toString());
    }
    if (flowPath.flowEntryMatch().matchDstIPv4Net()) {
        flowObj.setMatchDstIPv4Net(flowPath.flowEntryMatch().dstIPv4Net().toString());
    }
    if (flowPath.flowEntryMatch().matchSrcMac()) {
        flowObj.setMatchSrcMac(flowPath.flowEntryMatch().srcMac().toString());
    }
    if (flowPath.flowEntryMatch().matchDstMac()) {
        flowObj.setMatchDstMac(flowPath.flowEntryMatch().dstMac().toString());
    }
    if (dataPathSummaryStr != null) {
        flowObj.setDataPathSummary(dataPathSummaryStr);
    } else {
        flowObj.setDataPathSummary("");
    }
    if (found)
        flowObj.setUserState("FE_USER_MODIFY");
    else
        flowObj.setUserState("FE_USER_ADD");
    // Flow edges:
    // HeadFE
    // Flow Entries:
    // flowPath.dataPath().flowEntries()
    for (FlowEntry flowEntry : flowPath.dataPath().flowEntries()) {
        // Abort the whole add if any Flow Entry fails to persist.
        if (addFlowEntry(flowObj, flowEntry) == null) {
            conn.endTx(Transaction.ROLLBACK);
            return false;
        }
    }
    conn.endTx(Transaction.COMMIT);
    // TODO: We need a proper Flow ID allocation mechanism.
    flowId.setValue(flowPath.flowId().value());
    return true;
}
/**
 * Add a flow entry to the Network MAP.
 *
 * If the Flow Entry has no ID (or ID 0) a fresh one is assigned first.
 * An existing database object with the same ID is reused (user state
 * FE_USER_MODIFY); otherwise a new one is created (FE_USER_ADD).
 *
 * @param flowObj the corresponding Flow Path object for the Flow Entry.
 * @param flowEntry the Flow Entry to install.
 * @return the added Flow Entry object on success, otherwise null.
 */
private IFlowEntry addFlowEntry(IFlowPath flowObj, FlowEntry flowEntry) {
    // Assign the FlowEntry ID if it was not provided by the caller.
    if ((flowEntry.flowEntryId() == null) ||
        (flowEntry.flowEntryId().value() == 0)) {
        long id = getNextFlowEntryId();
        flowEntry.setFlowEntryId(new FlowEntryId(id));
    }

    //
    // Fetch an existing FlowEntry object, or create a new one.
    //
    IFlowEntry flowEntryObj = null;
    boolean found = false;
    try {
        if ((flowEntryObj =
             conn.utils().searchFlowEntry(conn, flowEntry.flowEntryId())) != null) {
            log.debug("Adding FlowEntry with FlowEntryId {}: found existing FlowEntry",
                      flowEntry.flowEntryId().toString());
            found = true;
        } else {
            flowEntryObj = conn.utils().newFlowEntry(conn);
            log.debug("Adding FlowEntry with FlowEntryId {}: creating new FlowEntry",
                      flowEntry.flowEntryId().toString());
        }
    } catch (Exception e) {
        // Include the exception so the stack trace is not lost.
        log.error(":addFlow FlowEntryId:{} failed",
                  flowEntry.flowEntryId().toString(), e);
        return null;
    }
    if (flowEntryObj == null) {
        log.error(":addFlow FlowEntryId:{} failed: FlowEntry object not created",
                  flowEntry.flowEntryId().toString());
        return null;
    }

    // Set the Flow Entry key:
    // - flowEntry.flowEntryId()
    flowEntryObj.setFlowEntryId(flowEntry.flowEntryId().toString());
    flowEntryObj.setType("flow_entry");

    //
    // Set the Flow Entry edges and attributes: switch, in-port,
    // out-port, match conditions and actions.
    // NOTE(review): searchSwitch()/searchPort() results are used
    // without null checks -- presumably the topology is guaranteed to
    // contain them by this point; confirm.
    //
    ISwitchObject sw =
        conn.utils().searchSwitch(conn, flowEntry.dpid().toString());
    flowEntryObj.setSwitchDpid(flowEntry.dpid().toString());
    flowEntryObj.setSwitch(sw);
    if (flowEntry.flowEntryMatch().matchInPort()) {
        IPortObject inport =
            conn.utils().searchPort(conn, flowEntry.dpid().toString(),
                                    flowEntry.flowEntryMatch().inPort().value());
        flowEntryObj.setMatchInPort(flowEntry.flowEntryMatch().inPort().value());
        flowEntryObj.setInPort(inport);
    }
    if (flowEntry.flowEntryMatch().matchEthernetFrameType()) {
        flowEntryObj.setMatchEthernetFrameType(flowEntry.flowEntryMatch().ethernetFrameType());
    }
    if (flowEntry.flowEntryMatch().matchSrcIPv4Net()) {
        flowEntryObj.setMatchSrcIPv4Net(flowEntry.flowEntryMatch().srcIPv4Net().toString());
    }
    if (flowEntry.flowEntryMatch().matchDstIPv4Net()) {
        flowEntryObj.setMatchDstIPv4Net(flowEntry.flowEntryMatch().dstIPv4Net().toString());
    }
    if (flowEntry.flowEntryMatch().matchSrcMac()) {
        flowEntryObj.setMatchSrcMac(flowEntry.flowEntryMatch().srcMac().toString());
    }
    if (flowEntry.flowEntryMatch().matchDstMac()) {
        flowEntryObj.setMatchDstMac(flowEntry.flowEntryMatch().dstMac().toString());
    }
    for (FlowEntryAction fa : flowEntry.flowEntryActions()) {
        if (fa.actionOutput() != null) {
            IPortObject outport =
                conn.utils().searchPort(conn,
                                        flowEntry.dpid().toString(),
                                        fa.actionOutput().port().value());
            flowEntryObj.setActionOutput(fa.actionOutput().port().value());
            flowEntryObj.setOutPort(outport);
        }
    }
    // TODO: Hacks with hard-coded state names!
    if (found)
        flowEntryObj.setUserState("FE_USER_MODIFY");
    else
        flowEntryObj.setUserState("FE_USER_ADD");
    flowEntryObj.setSwitchState("FE_SWITCH_NOT_UPDATED");
    // TODO: Take care of the FlowEntryErrorState.

    // Link the Flow Entry to its Flow (only for newly created objects).
    if (! found) {
        flowObj.addFlowEntry(flowEntryObj);
        flowEntryObj.setFlow(flowObj);
    }
    return flowEntryObj;
}
/**
 * Delete a previously added flow.
 *
 * The flow and its entries are only marked for deletion
 * (FE_USER_DELETE); the switches remove each individual entry later.
 * A flow with no entries is removed from the database immediately.
 *
 * @param flowId the Flow ID of the flow to delete.
 * @return true on success, otherwise false.
 */
@Override
public boolean deleteFlow(FlowId flowId) {
    /*
     * TODO: Commented-out for now
    if (flowId.value() == measurementFlowId) {
        modifiedMeasurementFlowTime = System.nanoTime();
    }
    */

    IFlowPath flowObj = null;
    try {
        if ((flowObj = conn.utils().searchFlowPath(conn, flowId))
            != null) {
            log.debug("Deleting FlowPath with FlowId {}: found existing FlowPath",
                      flowId.toString());
        } else {
            log.debug("Deleting FlowPath with FlowId {}: FlowPath not found",
                      flowId.toString());
        }
    } catch (Exception e) {
        // The transaction has been rolled back: return now, so we don't
        // fall through and COMMIT a transaction that was just aborted.
        conn.endTx(Transaction.ROLLBACK);
        log.error(":deleteFlow FlowId:{} failed", flowId.toString(), e);
        return false;
    }
    if (flowObj == null) {
        conn.endTx(Transaction.COMMIT);
        return true;            // OK: No such flow
    }

    //
    // Find and mark for deletion all Flow Entries, and the Flow itself.
    //
    flowObj.setUserState("FE_USER_DELETE");
    Iterable<IFlowEntry> flowEntries = flowObj.getFlowEntries();
    boolean empty = true;       // TODO: an ugly hack
    for (IFlowEntry flowEntryObj : flowEntries) {
        empty = false;
        flowEntryObj.setUserState("FE_USER_DELETE");
        flowEntryObj.setSwitchState("FE_SWITCH_NOT_UPDATED");
    }
    // Remove from the database empty flows
    if (empty)
        conn.utils().removeFlowPath(conn, flowObj);
    conn.endTx(Transaction.COMMIT);
    return true;
}
/**
 * Clear the state for a previously added flow.
 *
 * Unlike deleteFlow(), this removes the Flow Entries and the Flow
 * itself from the database right away, without waiting for the
 * switches to confirm removal.
 *
 * @param flowId the Flow ID of the flow to clear.
 * @return true on success, otherwise false.
 */
@Override
public boolean clearFlow(FlowId flowId) {
    IFlowPath flowObj = null;
    try {
        if ((flowObj = conn.utils().searchFlowPath(conn, flowId))
            != null) {
            log.debug("Clearing FlowPath with FlowId {}: found existing FlowPath",
                      flowId.toString());
        } else {
            log.debug("Clearing FlowPath with FlowId {}: FlowPath not found",
                      flowId.toString());
        }
    } catch (Exception e) {
        // The transaction has been rolled back: return now, so we don't
        // fall through and COMMIT a transaction that was just aborted.
        conn.endTx(Transaction.ROLLBACK);
        log.error(":clearFlow FlowId:{} failed", flowId.toString(), e);
        return false;
    }
    if (flowObj == null) {
        conn.endTx(Transaction.COMMIT);
        return true;            // OK: No such flow
    }

    // Remove all Flow Entries
    Iterable<IFlowEntry> flowEntries = flowObj.getFlowEntries();
    for (IFlowEntry flowEntryObj : flowEntries) {
        flowObj.removeFlowEntry(flowEntryObj);
        conn.utils().removeFlowEntry(conn, flowEntryObj);
    }
    // Remove the Flow itself
    conn.utils().removeFlowPath(conn, flowObj);
    conn.endTx(Transaction.COMMIT);
    return true;
}
/**
 * Get a previously added flow.
 *
 * @param flowId the Flow ID of the flow to get.
 * @return the Flow Path if found, otherwise null.
 */
@Override
public FlowPath getFlow(FlowId flowId) {
    IFlowPath flowObj = null;
    try {
        if ((flowObj = conn.utils().searchFlowPath(conn, flowId))
            != null) {
            log.debug("Get FlowPath with FlowId {}: found existing FlowPath",
                      flowId.toString());
        } else {
            log.debug("Get FlowPath with FlowId {}: FlowPath not found",
                      flowId.toString());
        }
    } catch (Exception e) {
        // The transaction has been rolled back: return now, so we don't
        // fall through and COMMIT a transaction that was just aborted.
        conn.endTx(Transaction.ROLLBACK);
        log.error(":getFlow FlowId:{} failed", flowId.toString(), e);
        return null;
    }
    if (flowObj == null) {
        conn.endTx(Transaction.COMMIT);
        return null;            // Flow not found
    }

    // Extract the Flow state
    FlowPath flowPath = extractFlowPath(flowObj);
    conn.endTx(Transaction.COMMIT);
    return flowPath;
}
/**
 * Get all previously added flows by a specific installer for a given
 * data path endpoints.
 *
 * @param installerId the Caller ID of the installer of the flow to get.
 * @param dataPathEndpoints the data path endpoints of the flow to get.
 * @return the Flow Paths if found, otherwise null.
 */
@Override
public ArrayList<FlowPath> getAllFlows(CallerId installerId,
                                       DataPathEndpoints dataPathEndpoints) {
    // TODO: The implementation below is not optimal:
    // We fetch all flows, and then return only the subset that match
    // the query conditions.
    // We should use the appropriate Titan/Gremlin query to filter-out
    // the flows as appropriate.
    ArrayList<FlowPath> matchedFlows = new ArrayList<FlowPath>();
    ArrayList<FlowPath> everyFlow = getAllFlows();
    if (everyFlow == null) {
        log.debug("Get FlowPaths for installerId{} and dataPathEndpoints{}: no FlowPaths found", installerId, dataPathEndpoints);
        return matchedFlows;
    }

    // TODO: String-based comparison is sub-optimal.
    // We are using it for now to save us the extra work of
    // implementing the "equals()" and "hashCode()" methods.
    // Hoist the comparison keys out of the loop.
    String wantedInstaller = installerId.toString();
    String wantedSrcPort = dataPathEndpoints.srcPort().toString();
    String wantedDstPort = dataPathEndpoints.dstPort().toString();
    for (FlowPath candidate : everyFlow) {
        boolean sameInstaller =
            candidate.installerId().toString().equals(wantedInstaller);
        boolean sameSrc =
            candidate.dataPath().srcPort().toString().equals(wantedSrcPort);
        boolean sameDst =
            candidate.dataPath().dstPort().toString().equals(wantedDstPort);
        if (sameInstaller && sameSrc && sameDst) {
            matchedFlows.add(candidate);
        }
    }

    if (matchedFlows.isEmpty()) {
        log.debug("Get FlowPaths for installerId{} and dataPathEndpoints{}: no FlowPaths found", installerId, dataPathEndpoints);
    } else {
        log.debug("Get FlowPaths for installerId{} and dataPathEndpoints{}: FlowPaths are found", installerId, dataPathEndpoints);
    }
    return matchedFlows;
}
/**
 * Get all installed flows by all installers for given data path endpoints.
 *
 * @param dataPathEndpoints the data path endpoints of the flows to get.
 * @return the Flow Paths if found, otherwise null.
 */
@Override
public ArrayList<FlowPath> getAllFlows(DataPathEndpoints dataPathEndpoints) {
    // TODO: The implementation below is not optimal:
    // We fetch all flows, and then return only the subset that match
    // the query conditions.
    // We should use the appropriate Titan/Gremlin query to filter-out
    // the flows as appropriate.
    ArrayList<FlowPath> matchedFlows = new ArrayList<FlowPath>();
    ArrayList<FlowPath> everyFlow = getAllFlows();
    if (everyFlow == null) {
        log.debug("Get FlowPaths for dataPathEndpoints{}: no FlowPaths found", dataPathEndpoints);
        return matchedFlows;
    }

    // TODO: String-based comparison is sub-optimal.
    // We are using it for now to save us the extra work of
    // implementing the "equals()" and "hashCode()" methods.
    // Hoist the comparison keys out of the loop.
    String wantedSrcPort = dataPathEndpoints.srcPort().toString();
    String wantedDstPort = dataPathEndpoints.dstPort().toString();
    for (FlowPath candidate : everyFlow) {
        if (candidate.dataPath().srcPort().toString().equals(wantedSrcPort)
            && candidate.dataPath().dstPort().toString().equals(wantedDstPort)) {
            matchedFlows.add(candidate);
        }
    }

    if (matchedFlows.isEmpty()) {
        log.debug("Get FlowPaths for dataPathEndpoints{}: no FlowPaths found", dataPathEndpoints);
    } else {
        log.debug("Get FlowPaths for dataPathEndpoints{}: FlowPaths are found", dataPathEndpoints);
    }
    return matchedFlows;
}
/**
 * Get summary of all installed flows by all installers in a given range.
 *
 * NOTE: The flowId/maxFlows windowing is not implemented yet; the
 * current implementation returns all flows (without their flow
 * entries), sorted by Flow ID.
 *
 * @param flowId the Flow ID to start the summary from (currently unused).
 * @param maxFlows the maximum number of flows to be returned
 * (currently unused).
 * @return the Flow Paths if found, otherwise null.
 */
@Override
public ArrayList<IFlowPath> getAllFlowsSummary(FlowId flowId, int maxFlows) {
    // TODO: The implementation below is not optimal:
    // We fetch all flows, and then return only the subset that match
    // the query conditions.
    // We should use the appropriate Titan/Gremlin query to filter-out
    // the flows as appropriate.
    ArrayList<IFlowPath> flowPathsWithoutFlowEntries =
        getAllFlowsWithoutFlowEntries();

    // Sort by Flow ID. Use an explicit three-way comparison instead of
    // subtraction: "first - second" on longs can overflow and invert
    // the ordering for values of opposite sign.
    Collections.sort(flowPathsWithoutFlowEntries,
        new Comparator<IFlowPath>() {
            @Override
            public int compare(IFlowPath first, IFlowPath second) {
                long firstId = new FlowId(first.getFlowId()).value();
                long secondId = new FlowId(second.getFlowId()).value();
                if (firstId < secondId)
                    return -1;
                if (firstId > secondId)
                    return 1;
                return 0;
            }
        }
    );
    return flowPathsWithoutFlowEntries;
}
/**
 * Get all installed flows by all installers.
 *
 * @return the Flow Paths if found, otherwise null.
 */
@Override
public ArrayList<FlowPath> getAllFlows() {
    Iterable<IFlowPath> flowPathsObj = null;
    ArrayList<FlowPath> flowPaths = new ArrayList<FlowPath>();
    try {
        if ((flowPathsObj = conn.utils().getAllFlowPaths(conn)) != null) {
            log.debug("Get all FlowPaths: found FlowPaths");
        } else {
            log.debug("Get all FlowPaths: no FlowPaths found");
        }
    } catch (Exception e) {
        // The transaction has been rolled back: return now, so we don't
        // fall through and COMMIT a transaction that was just aborted.
        conn.endTx(Transaction.ROLLBACK);
        log.error(":getAllFlowPaths failed", e);
        return flowPaths;
    }
    if ((flowPathsObj == null) || (! flowPathsObj.iterator().hasNext())) {
        conn.endTx(Transaction.COMMIT);
        return flowPaths;       // No Flows found
    }

    for (IFlowPath flowObj : flowPathsObj) {
        // Extract the Flow state; skip bogus/partial database objects
        FlowPath flowPath = extractFlowPath(flowObj);
        if (flowPath != null)
            flowPaths.add(flowPath);
    }
    conn.endTx(Transaction.COMMIT);
    return flowPaths;
}
/**
 * Get all installed flows by all installers, without extracting
 * their Flow Entries. The raw database objects are returned.
 *
 * @return the Flow Path objects if found, otherwise an empty list.
 */
public ArrayList<IFlowPath> getAllFlowsWithoutFlowEntries() {
    Iterable<IFlowPath> flowPathsObj = null;
    ArrayList<IFlowPath> flowPathsObjArray = new ArrayList<IFlowPath>();
    try {
        if ((flowPathsObj = conn.utils().getAllFlowPaths(conn)) != null) {
            log.debug("Get all FlowPaths: found FlowPaths");
        } else {
            log.debug("Get all FlowPaths: no FlowPaths found");
        }
    } catch (Exception e) {
        // TODO: handle exceptions
        conn.endTx(Transaction.ROLLBACK);
        log.error(":getAllFlowPaths failed", e);
    }
    if ((flowPathsObj == null) || (! flowPathsObj.iterator().hasNext())) {
        return flowPathsObjArray;       // No Flows found
    }

    for (IFlowPath flowObj : flowPathsObj) {
        flowPathsObjArray.add(flowObj);
    }
    // NOTE(review): the transaction is deliberately left open on the
    // success path (the original endTx() call was commented out) --
    // presumably because the returned IFlowPath objects are lazily
    // backed by the database. TODO: confirm and close it if possible.
    return flowPathsObjArray;
}
/**
 * Extract Flow Path State from a Titan Database Object @ref IFlowPath.
 *
 * @param flowObj the object to extract the Flow Path State from.
 * @return the extracted Flow Path State, or null if any required
 * property is missing from the database object.
 */
private FlowPath extractFlowPath(IFlowPath flowObj) {
    //
    // Extract the Flow state. The getters return null when the
    // property is missing (see the null checks below).
    //
    String flowIdStr = flowObj.getFlowId();
    String installerIdStr = flowObj.getInstallerId();
    String srcSwitchStr = flowObj.getSrcSwitch();
    Short srcPortShort = flowObj.getSrcPort();
    String dstSwitchStr = flowObj.getDstSwitch();
    Short dstPortShort = flowObj.getDstPort();

    if ((flowIdStr == null) ||
        (installerIdStr == null) ||
        (srcSwitchStr == null) ||
        (srcPortShort == null) ||
        (dstSwitchStr == null) ||
        (dstPortShort == null)) {
        // TODO: A work-around, because of some bogus database objects
        return null;
    }

    // Rebuild the key and endpoint fields
    FlowPath flowPath = new FlowPath();
    flowPath.setFlowId(new FlowId(flowIdStr));
    flowPath.setInstallerId(new CallerId(installerIdStr));
    flowPath.dataPath().srcPort().setDpid(new Dpid(srcSwitchStr));
    flowPath.dataPath().srcPort().setPort(new Port(srcPortShort));
    flowPath.dataPath().dstPort().setDpid(new Dpid(dstSwitchStr));
    flowPath.dataPath().dstPort().setPort(new Port(dstPortShort));

    //
    // Extract the match conditions common for all Flow Entries.
    // Each condition is optional: enabled only if stored.
    //
    {
        FlowEntryMatch match = new FlowEntryMatch();
        Short matchEthernetFrameType = flowObj.getMatchEthernetFrameType();
        if (matchEthernetFrameType != null)
            match.enableEthernetFrameType(matchEthernetFrameType);
        String matchSrcIPv4Net = flowObj.getMatchSrcIPv4Net();
        if (matchSrcIPv4Net != null)
            match.enableSrcIPv4Net(new IPv4Net(matchSrcIPv4Net));
        String matchDstIPv4Net = flowObj.getMatchDstIPv4Net();
        if (matchDstIPv4Net != null)
            match.enableDstIPv4Net(new IPv4Net(matchDstIPv4Net));
        String matchSrcMac = flowObj.getMatchSrcMac();
        if (matchSrcMac != null)
            match.enableSrcMac(MACAddress.valueOf(matchSrcMac));
        String matchDstMac = flowObj.getMatchDstMac();
        if (matchDstMac != null)
            match.enableDstMac(MACAddress.valueOf(matchDstMac));
        flowPath.setFlowEntryMatch(match);
    }

    // Extract all Flow Entries; bogus entries are silently skipped
    Iterable<IFlowEntry> flowEntries = flowObj.getFlowEntries();
    for (IFlowEntry flowEntryObj : flowEntries) {
        FlowEntry flowEntry = extractFlowEntry(flowEntryObj);
        if (flowEntry == null)
            continue;
        flowPath.dataPath().flowEntries().add(flowEntry);
    }

    return flowPath;
}
/**
 * Extract Flow Entry State from a Titan Database Object @ref IFlowEntry.
 *
 * @param flowEntryObj the object to extract the Flow Entry State from.
 * @return the extracted Flow Entry State, or null if any required
 * property is missing from the database object.
 */
private FlowEntry extractFlowEntry(IFlowEntry flowEntryObj) {
    String flowEntryIdStr = flowEntryObj.getFlowEntryId();
    String switchDpidStr = flowEntryObj.getSwitchDpid();
    String userState = flowEntryObj.getUserState();
    String switchState = flowEntryObj.getSwitchState();

    if ((flowEntryIdStr == null) ||
        (switchDpidStr == null) ||
        (userState == null) ||
        (switchState == null)) {
        // TODO: A work-around, because of some bogus database objects
        return null;
    }

    FlowEntry flowEntry = new FlowEntry();
    flowEntry.setFlowEntryId(new FlowEntryId(flowEntryIdStr));
    flowEntry.setDpid(new Dpid(switchDpidStr));

    //
    // Extract the match conditions. Each condition is optional:
    // enabled only if the corresponding property is stored.
    //
    FlowEntryMatch match = new FlowEntryMatch();
    Short matchInPort = flowEntryObj.getMatchInPort();
    if (matchInPort != null)
        match.enableInPort(new Port(matchInPort));
    Short matchEthernetFrameType = flowEntryObj.getMatchEthernetFrameType();
    if (matchEthernetFrameType != null)
        match.enableEthernetFrameType(matchEthernetFrameType);
    String matchSrcIPv4Net = flowEntryObj.getMatchSrcIPv4Net();
    if (matchSrcIPv4Net != null)
        match.enableSrcIPv4Net(new IPv4Net(matchSrcIPv4Net));
    String matchDstIPv4Net = flowEntryObj.getMatchDstIPv4Net();
    if (matchDstIPv4Net != null)
        match.enableDstIPv4Net(new IPv4Net(matchDstIPv4Net));
    String matchSrcMac = flowEntryObj.getMatchSrcMac();
    if (matchSrcMac != null)
        match.enableSrcMac(MACAddress.valueOf(matchSrcMac));
    String matchDstMac = flowEntryObj.getMatchDstMac();
    if (matchDstMac != null)
        match.enableDstMac(MACAddress.valueOf(matchDstMac));
    flowEntry.setFlowEntryMatch(match);

    //
    // Extract the actions. Only a single OUTPUT action is stored.
    //
    ArrayList<FlowEntryAction> actions = new ArrayList<FlowEntryAction>();
    Short actionOutputPort = flowEntryObj.getActionOutput();
    if (actionOutputPort != null) {
        FlowEntryAction action = new FlowEntryAction();
        action.setActionOutput(new Port(actionOutputPort));
        actions.add(action);
    }
    flowEntry.setFlowEntryActions(actions);
    flowEntry.setFlowEntryUserState(FlowEntryUserState.valueOf(userState));
    flowEntry.setFlowEntrySwitchState(FlowEntrySwitchState.valueOf(switchState));
    // TODO: Take care of the FlowEntryMatch, FlowEntryAction set,
    // and FlowEntryErrorState.
    return flowEntry;
}
/**
 * Add and maintain a shortest-path flow.
 *
 * NOTE: The Flow Path argument does NOT contain flow entries.
 * The shortest-path computation itself is deferred to the Flow
 * reconciliation thread; here we only seed the Network MAP with the
 * flow's endpoints and matching conditions.
 *
 * @param flowPath the Flow Path with the endpoints and the match
 * conditions to install.
 * @return the added shortest-path flow on success, otherwise null.
 */
@Override
public FlowPath addAndMaintainShortestPathFlow(FlowPath flowPath) {
    // Build a DataPath carrying only the endpoints: that is all the
    // Network MAP needs until the reconciliation thread fills in the
    // actual path.
    DataPath endpointsOnly = new DataPath();
    endpointsOnly.setSrcPort(flowPath.dataPath().srcPort());
    endpointsOnly.setDstPort(flowPath.dataPath().dstPort());

    // Assemble the Flow Path to store
    FlowPath seededFlowPath = new FlowPath();
    seededFlowPath.setFlowId(new FlowId(flowPath.flowId().value()));
    seededFlowPath.setInstallerId(new CallerId(flowPath.installerId().value()));
    seededFlowPath.setDataPath(endpointsOnly);
    seededFlowPath.setFlowEntryMatch(new FlowEntryMatch(flowPath.flowEntryMatch()));

    FlowId assignedFlowId = new FlowId();
    String summary = endpointsOnly.dataPathSummary();
    if (! addFlow(seededFlowPath, assignedFlowId, summary)) {
        return null;
    }

    // TODO: Mark the flow for maintenance purpose
    return seededFlowPath;
}
/**
 * Reconcile a flow.
 *
 * The entries of the new data path get an in-port match and an
 * out-port OUTPUT action; the flow's old entries are marked for
 * deletion (the switches remove them later), and the new entries are
 * added. The order matters: mark-delete first, then add.
 *
 * @param flowObj the flow that needs to be reconciliated.
 * @param newDataPath the new data path to use.
 * @return true on success, otherwise false.
 */
public boolean reconcileFlow(IFlowPath flowObj, DataPath newDataPath) {
    //
    // Set the incoming port matching and the outgoing port output
    // actions for each flow entry.
    //
    for (FlowEntry flowEntry : newDataPath.flowEntries()) {
        // Set the incoming port matching
        FlowEntryMatch flowEntryMatch = new FlowEntryMatch();
        flowEntry.setFlowEntryMatch(flowEntryMatch);
        flowEntryMatch.enableInPort(flowEntry.inPort());

        // Set the outgoing port output action
        ArrayList<FlowEntryAction> flowEntryActions = flowEntry.flowEntryActions();
        if (flowEntryActions == null) {
            flowEntryActions = new ArrayList<FlowEntryAction>();
            flowEntry.setFlowEntryActions(flowEntryActions);
        }
        FlowEntryAction flowEntryAction = new FlowEntryAction();
        flowEntryAction.setActionOutput(flowEntry.outPort());
        flowEntryActions.add(flowEntryAction);
    }

    //
    // Mark the old Flow Entries for deletion, and add the new ones.
    // (The previously declared "mySwitches" and "deleteFlowEntries"
    // locals were unused and have been removed.)
    //
    Iterable<IFlowEntry> flowEntries = flowObj.getFlowEntries();
    for (IFlowEntry flowEntryObj : flowEntries) {
        flowEntryObj.setUserState("FE_USER_DELETE");
        flowEntryObj.setSwitchState("FE_SWITCH_NOT_UPDATED");
    }
    for (FlowEntry flowEntry : newDataPath.flowEntries()) {
        addFlowEntry(flowObj, flowEntry);
    }

    // Set the Data Path Summary
    String dataPathSummaryStr = newDataPath.dataPathSummary();
    flowObj.setDataPathSummary(dataPathSummaryStr);

    return true;
}
/**
 * Reconcile all flows in a set.
 *
 * NOTE: This is currently a stub: it only short-circuits on an empty
 * set and does no actual reconciliation work.
 *
 * @param flowObjSet the set of flows that need to be reconciliated.
 */
public void reconcileFlows(Iterable<IFlowPath> flowObjSet) {
    // Nothing to do for an empty set
    if (! flowObjSet.iterator().hasNext())
        return;
    // TODO: Not implemented/used yet.
}
/**
 * Install a Flow Entry on a switch.
 *
 * Builds an OpenFlow FLOW_MOD message from the Flow Entry database
 * object (falling back on the Flow-wide match conditions for any
 * condition the entry itself does not set) and writes it to the
 * switch. On a successful write the entry's switch state is set to
 * FE_SWITCH_UPDATED.
 *
 * @param mySwitch the switch to install the Flow Entry into.
 * @param flowObj the flow path object for the flow entry to install.
 * @param flowEntryObj the flow entry object to install.
 * @return true on success, otherwise false.
 */
public boolean installFlowEntry(IOFSwitch mySwitch, IFlowPath flowObj,
                                IFlowEntry flowEntryObj) {
    // Refuse entries with missing key properties
    String flowEntryIdStr = flowEntryObj.getFlowEntryId();
    if (flowEntryIdStr == null)
        return false;
    FlowEntryId flowEntryId = new FlowEntryId(flowEntryIdStr);
    String userState = flowEntryObj.getUserState();
    if (userState == null)
        return false;

    //
    // Create the Open Flow Flow Modification Entry to push
    //
    OFFlowMod fm = (OFFlowMod) floodlightProvider.getOFMessageFactory()
        .getMessage(OFType.FLOW_MOD);
    long cookie = flowEntryId.value();

    // Map the user state to the FLOW_MOD command
    short flowModCommand = OFFlowMod.OFPFC_ADD;
    if (userState.equals("FE_USER_ADD")) {
        flowModCommand = OFFlowMod.OFPFC_ADD;
    } else if (userState.equals("FE_USER_MODIFY")) {
        flowModCommand = OFFlowMod.OFPFC_MODIFY_STRICT;
    } else if (userState.equals("FE_USER_DELETE")) {
        flowModCommand = OFFlowMod.OFPFC_DELETE_STRICT;
    } else {
        // Unknown user state. Ignore the entry
        log.debug("Flow Entry ignored (FlowEntryId = {}): unknown user state {}",
                  flowEntryId.toString(), userState);
        return false;
    }

    //
    // Fetch the match conditions.
    //
    // NOTE: The Flow matching conditions common for all Flow Entries are
    // used ONLY if a Flow Entry does NOT have the corresponding matching
    // condition set.
    //
    OFMatch match = new OFMatch();
    match.setWildcards(OFMatch.OFPFW_ALL);

    // Match the Incoming Port
    Short matchInPort = flowEntryObj.getMatchInPort();
    if (matchInPort != null) {
        match.setInputPort(matchInPort);
        match.setWildcards(match.getWildcards() & ~OFMatch.OFPFW_IN_PORT);
    }

    // Match the Ethernet Frame Type
    Short matchEthernetFrameType = flowEntryObj.getMatchEthernetFrameType();
    if (matchEthernetFrameType == null)
        matchEthernetFrameType = flowObj.getMatchEthernetFrameType();
    if (matchEthernetFrameType != null) {
        match.setDataLayerType(matchEthernetFrameType);
        match.setWildcards(match.getWildcards() & ~OFMatch.OFPFW_DL_TYPE);
    }

    // Match the Source IPv4 Network prefix
    String matchSrcIPv4Net = flowEntryObj.getMatchSrcIPv4Net();
    if (matchSrcIPv4Net == null)
        matchSrcIPv4Net = flowObj.getMatchSrcIPv4Net();
    if (matchSrcIPv4Net != null) {
        match.setFromCIDR(matchSrcIPv4Net, OFMatch.STR_NW_SRC);
    }

    // Match the Destination IPv4 Network prefix
    String matchDstIPv4Net = flowEntryObj.getMatchDstIPv4Net();
    if (matchDstIPv4Net == null)
        matchDstIPv4Net = flowObj.getMatchDstIPv4Net();
    if (matchDstIPv4Net != null) {
        match.setFromCIDR(matchDstIPv4Net, OFMatch.STR_NW_DST);
    }

    // Match the Source MAC address
    String matchSrcMac = flowEntryObj.getMatchSrcMac();
    if (matchSrcMac == null)
        matchSrcMac = flowObj.getMatchSrcMac();
    if (matchSrcMac != null) {
        match.setDataLayerSource(matchSrcMac);
        match.setWildcards(match.getWildcards() & ~OFMatch.OFPFW_DL_SRC);
    }

    // Match the Destination MAC address
    String matchDstMac = flowEntryObj.getMatchDstMac();
    if (matchDstMac == null)
        matchDstMac = flowObj.getMatchDstMac();
    if (matchDstMac != null) {
        match.setDataLayerDestination(matchDstMac);
        match.setWildcards(match.getWildcards() & ~OFMatch.OFPFW_DL_DST);
    }

    //
    // Fetch the actions
    // TODO: For now we support only the "OUTPUT" actions.
    //
    List<OFAction> actions = new ArrayList<OFAction>();
    Short actionOutputPort = flowEntryObj.getActionOutput();
    if (actionOutputPort != null) {
        OFActionOutput action = new OFActionOutput();
        // XXX: The max length is hard-coded for now
        action.setMaxLength((short)0xffff);
        action.setPort(actionOutputPort);
        actions.add(action);
    }

    // NOTE(review): the message length always includes one
    // OFActionOutput even when the actions list is empty -- confirm
    // this is intended (or harmless for this OpenFlow stack).
    fm.setIdleTimeout(FLOWMOD_DEFAULT_IDLE_TIMEOUT)
        .setHardTimeout(FLOWMOD_DEFAULT_HARD_TIMEOUT)
        .setPriority(PRIORITY_DEFAULT)
        .setBufferId(OFPacketOut.BUFFER_ID_NONE)
        .setCookie(cookie)
        .setCommand(flowModCommand)
        .setMatch(match)
        .setActions(actions)
        .setLengthU(OFFlowMod.MINIMUM_LENGTH+OFActionOutput.MINIMUM_LENGTH);
    // For DELETE commands, restrict deletion to entries with this out-port
    fm.setOutPort(OFPort.OFPP_NONE.getValue());
    if ((flowModCommand == OFFlowMod.OFPFC_DELETE) ||
        (flowModCommand == OFFlowMod.OFPFC_DELETE_STRICT)) {
        if (actionOutputPort != null)
            fm.setOutPort(actionOutputPort);
    }

    // TODO: Set the following flag
    // fm.setFlags(OFFlowMod.OFPFF_SEND_FLOW_REM);
    // See method ForwardingBase::pushRoute()

    //
    // Write the message to the switch
    //
    log.debug("MEASUREMENT: Installing flow entry " + userState +
              " into switch DPID: " +
              mySwitch.getStringId() +
              " flowEntryId: " + flowEntryId.toString() +
              " srcMac: " + matchSrcMac + " dstMac: " + matchDstMac +
              " inPort: " + matchInPort + " outPort: " + actionOutputPort
              );
    try {
        messageDamper.write(mySwitch, fm, null);
        mySwitch.flush();
        // TODO: We should use the OpenFlow Barrier mechanism
        // to check for errors, and update the SwitchState
        // for a flow entry after the Barrier message is
        // is received.
        flowEntryObj.setSwitchState("FE_SWITCH_UPDATED");
    } catch (IOException e) {
        log.error("Failure writing flow mod from network map", e);
        return false;
    }
    return true;
}
/**
* Install a Flow Entry on a switch.
*
* @param mySwitch the switch to install the Flow Entry into.
* @param flowPath the flow path for the flow entry to install.
* @param flowEntry the flow entry to install.
* @return true on success, otherwise false.
*/
public boolean installFlowEntry(IOFSwitch mySwitch, FlowPath flowPath,
FlowEntry flowEntry) {
// Create the OpenFlow Flow Modification Entry to push
OFFlowMod fm = (OFFlowMod) floodlightProvider.getOFMessageFactory()
.getMessage(OFType.FLOW_MOD);
long cookie = flowEntry.flowEntryId().value();
short flowModCommand = OFFlowMod.OFPFC_ADD;
if (flowEntry.flowEntryUserState() == FlowEntryUserState.FE_USER_ADD) {
flowModCommand = OFFlowMod.OFPFC_ADD;
} else if (flowEntry.flowEntryUserState() == FlowEntryUserState.FE_USER_MODIFY) {
flowModCommand = OFFlowMod.OFPFC_MODIFY_STRICT;
} else if (flowEntry.flowEntryUserState() == FlowEntryUserState.FE_USER_DELETE) {
flowModCommand = OFFlowMod.OFPFC_DELETE_STRICT;
} else {
// Unknown user state. Ignore the entry
log.debug("Flow Entry ignored (FlowEntryId = {}): unknown user state {}",
flowEntry.flowEntryId().toString(),
flowEntry.flowEntryUserState());
return false;
}
// Fetch the match conditions.
// NOTE: The Flow matching conditions common for all Flow Entries are
// used ONLY if a Flow Entry does NOT have the corresponding matching
// condition set.
OFMatch match = new OFMatch();
match.setWildcards(OFMatch.OFPFW_ALL);
FlowEntryMatch flowPathMatch = flowPath.flowEntryMatch();
FlowEntryMatch flowEntryMatch = flowEntry.flowEntryMatch();
// Match the Incoming Port
Port matchInPort = flowEntryMatch.inPort();
if (matchInPort != null) {
match.setInputPort(matchInPort.value());
match.setWildcards(match.getWildcards() & ~OFMatch.OFPFW_IN_PORT);
}
// Match the Ethernet Frame Type
Short matchEthernetFrameType = flowEntryMatch.ethernetFrameType();
if ((matchEthernetFrameType == null) && (flowPathMatch != null)) {
matchEthernetFrameType = flowPathMatch.ethernetFrameType();
}
if (matchEthernetFrameType != null) {
match.setDataLayerType(matchEthernetFrameType);
match.setWildcards(match.getWildcards() & ~OFMatch.OFPFW_DL_TYPE);
}
// Match the Source IPv4 Network prefix
IPv4Net matchSrcIPv4Net = flowEntryMatch.srcIPv4Net();
if ((matchSrcIPv4Net == null) && (flowPathMatch != null)) {
matchSrcIPv4Net = flowPathMatch.srcIPv4Net();
}
if (matchSrcIPv4Net != null) {
match.setFromCIDR(matchSrcIPv4Net.toString(), OFMatch.STR_NW_SRC);
}
// Natch the Destination IPv4 Network prefix
IPv4Net matchDstIPv4Net = flowEntryMatch.dstIPv4Net();
if ((matchDstIPv4Net == null) && (flowPathMatch != null)) {
matchDstIPv4Net = flowPathMatch.dstIPv4Net();
}
if (matchDstIPv4Net != null) {
match.setFromCIDR(matchDstIPv4Net.toString(), OFMatch.STR_NW_DST);
}
// Match the Source MAC address
MACAddress matchSrcMac = flowEntryMatch.srcMac();
if ((matchSrcMac == null) && (flowPathMatch != null)) {
matchSrcMac = flowPathMatch.srcMac();
}
if (matchSrcMac != null) {
match.setDataLayerSource(matchSrcMac.toString());
match.setWildcards(match.getWildcards() & ~OFMatch.OFPFW_DL_SRC);
}
// Match the Destination MAC address
MACAddress matchDstMac = flowEntryMatch.dstMac();
if ((matchDstMac == null) && (flowPathMatch != null)) {
matchDstMac = flowPathMatch.dstMac();
}
if (matchDstMac != null) {
match.setDataLayerDestination(matchDstMac.toString());
match.setWildcards(match.getWildcards() & ~OFMatch.OFPFW_DL_DST);
}
// Fetch the actions
// TODO: For now we support only the "OUTPUT" actions.
fm.setOutPort(OFPort.OFPP_NONE.getValue());
List<OFAction> actions = new ArrayList<OFAction>();
ArrayList<FlowEntryAction> flowEntryActions =
flowEntry.flowEntryActions();
for (FlowEntryAction flowEntryAction : flowEntryActions) {
FlowEntryAction.ActionOutput actionOutput =
flowEntryAction.actionOutput();
if (actionOutput != null) {
short actionOutputPort = actionOutput.port().value();
OFActionOutput action = new OFActionOutput();
// XXX: The max length is hard-coded for now
action.setMaxLength((short)0xffff);
action.setPort(actionOutputPort);
actions.add(action);
if ((flowModCommand == OFFlowMod.OFPFC_DELETE) ||
(flowModCommand == OFFlowMod.OFPFC_DELETE_STRICT)) {
fm.setOutPort(actionOutputPort);
}
}
}
fm.setIdleTimeout(FLOWMOD_DEFAULT_IDLE_TIMEOUT)
.setHardTimeout(FLOWMOD_DEFAULT_HARD_TIMEOUT)
.setPriority(PRIORITY_DEFAULT)
.setBufferId(OFPacketOut.BUFFER_ID_NONE)
.setCookie(cookie)
.setCommand(flowModCommand)
.setMatch(match)
.setActions(actions)
.setLengthU(OFFlowMod.MINIMUM_LENGTH+OFActionOutput.MINIMUM_LENGTH);
// TODO: Set the following flag
// fm.setFlags(OFFlowMod.OFPFF_SEND_FLOW_REM);
// See method ForwardingBase::pushRoute()
// Write the message to the switch
try {
messageDamper.write(mySwitch, fm, null);
mySwitch.flush();
// TODO: We should use the OpenFlow Barrier mechanism
// to check for errors, and update the SwitchState
// for a flow entry after the Barrier message is
// received.
// TODO: The FlowEntry Object in Titan should be set
// to FE_SWITCH_UPDATED.
} catch (IOException e) {
log.error("Failure writing flow mod from network map", e);
return false;
}
return true;
}
/**
 * Remove a Flow Entry from a switch.
 *
 * @param mySwitch the switch to remove the Flow Entry from.
 * @param flowPath the flow path for the flow entry to remove.
 * @param flowEntry the flow entry to remove.
 * @return true on success, otherwise false.
 */
public boolean removeFlowEntry(IOFSwitch mySwitch, FlowPath flowPath,
			       FlowEntry flowEntry) {
    // installFlowEntry() implements both installation and removal of
    // flow entries, so removal simply delegates to it.
    return installFlowEntry(mySwitch, flowPath, flowEntry);
}
/**
 * Install a Flow Entry on a remote controller.
 *
 * NOTE: this is an unimplemented stub — it performs no remote call and
 * always reports success. Callers currently cannot distinguish a real
 * installation from this no-op.
 *
 * TODO: We need it now: Jono
 * - For now it will make a REST call to the remote controller.
 * - Internally, it needs to know the name of the remote controller.
 *
 * @param flowPath the flow path for the flow entry to install.
 * @param flowEntry the flow entry to install.
 * @return true on success, otherwise false.
 */
public boolean installRemoteFlowEntry(FlowPath flowPath,
				      FlowEntry flowEntry) {
    // TODO: We need it now: Jono
    // - For now it will make a REST call to the remote controller.
    // - Internally, it needs to know the name of the remote controller.
    return true;
}
/**
 * Remove a flow entry on a remote controller.
 *
 * @param flowPath the flow path for the flow entry to remove.
 * @param flowEntry the flow entry to remove.
 * @return true on success, otherwise false.
 */
public boolean removeRemoteFlowEntry(FlowPath flowPath,
				     FlowEntry flowEntry) {
    // installRemoteFlowEntry() implements both installation and removal
    // of remote flow entries, so removal simply delegates to it.
    return installRemoteFlowEntry(flowPath, flowEntry);
}
}
|
package net.pilif0.open_desert.events;
import com.sun.istack.internal.NotNull;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
* An event multiplexer that distributes an event to a list of listeners.
 * Note: every registered listener is notified in turn; the current
 * implementation does not stop distribution when an event is consumed.
*
* @author Filip Smola
* @version 1.0
*/
public class EventMultiplexer<T extends Event> implements EventListener<T> {
    /** Listeners that receive every event passed to {@link #handle}, in registration order. */
    private final List<EventListener<T>> listeners = new ArrayList<>();

    /**
     * Hands the event to every registered listener, one after another.
     *
     * @param event the event to distribute
     */
    @Override
    public void handle(T event) {
        for (EventListener<T> listener : listeners) {
            listener.handle(event);
        }
    }

    /**
     * Registers a single listener with this multiplexer.
     *
     * @param l The listener to add
     */
    public void register(@NotNull EventListener<T> l) {
        listeners.add(l);
    }

    /**
     * Registers several listeners with this multiplexer at once.
     *
     * @param ls The listeners to add
     */
    public void register(@NotNull EventListener<T>... ls) {
        listeners.addAll(Arrays.asList(ls));
    }

    /**
     * Unregisters a single listener from this multiplexer.
     *
     * @param l The listener to remove
     */
    public void remove(@NotNull EventListener<T> l) {
        listeners.remove(l);
    }

    /**
     * Unregisters several listeners from this multiplexer at once.
     *
     * @param ls The listeners to remove
     */
    public void remove(@NotNull EventListener<T>... ls) {
        listeners.removeAll(Arrays.asList(ls));
    }
}
|
package nl.meine.scouting.solparser.writer;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Iterator;
import java.util.List;
import nl.meine.scouting.solparser.entities.Person;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.CellStyle;
import org.apache.poi.ss.usermodel.Font;
import org.apache.poi.ss.usermodel.IndexedColors;
import org.apache.poi.ss.usermodel.PatternFormatting;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.ss.usermodel.Workbook;
import org.apache.poi.ss.util.CellRangeAddress;
/**
*
* @author Meine Toonen
*/
public class ExcelWriter extends SolWriter {

    /** Style for the heading row: bold 12pt font with a medium border. */
    private CellStyle headingStyle;
    /** Style for odd data rows: thin border, default font. */
    private CellStyle normalStyle;
    /** Style for even ("zebra") data rows: thin border with a colored background. */
    private CellStyle zebraStyle;
    private FileOutputStream out = null;
    private Workbook workbook;
    /** Number of columns written per person; must match createRow()/createHeading(). */
    private static final int NUM_ATTRIBUTES_PER_PERSON = 18;
    // Highlight colors applied when comparing against the previous export.
    private final short COLOR_UPDATED = IndexedColors.YELLOW.index;
    private final short COLOR_NEW = IndexedColors.LIGHT_BLUE.index;
    private final short COLOR_OVERVLIEGER = IndexedColors.GREEN.index;
    /** The previously written workbook, used to detect new and changed members. */
    private File previous;
    /** Column index of the membership number ("lidnummer"). */
    private final static int NUM_LIDNUMMER_CELL = 0;

    public ExcelWriter( ){
    }

    /**
     * Opens the output stream, creates the workbook and cell styles, and
     * locates the previous export used for change detection.
     */
    @Override
    public void init() {
        try {
            out = new FileOutputStream(output);
            // create a new workbook
            workbook = new HSSFWorkbook();
            createStyles();
            previous = new File("data" + File.separator + "previous.xls");
        } catch (FileNotFoundException ex) {
            System.err.println("File Read Error" + ex.getLocalizedMessage());
        }
    }

    /**
     * Writes one sheet per sort key, each with a heading row and one row per
     * person, then post-processes each sheet (auto-size + change highlighting).
     */
    @Override
    public void write() {
        // Make sheet for all the persons
        // NOTE(review): the combined "Allemaal" sheet is disabled below, but
        // processUpdates() still looks for that sheet in the PREVIOUS workbook.
        // Once a run without it becomes the previous file, no comparison data
        // exists — confirm whether this sheet should be re-enabled.
        /* Sheet all = workbook.createSheet("Allemaal");
        createHeading(all);
        for (int i = 0; i < allPersons.size(); i++) {
            Person person = allPersons.get(i);
            createRow(person, all, i);
        }
        postProcessSheet(all);*/
        // create a new sheet per sort key
        for (String sortKey : sortedPersons.keySet()) {
            Sheet sheet = workbook.createSheet(sortKey);
            List<Person> personsPerEenheid = sortedPersons.get(sortKey);
            createHeading(sheet);
            for (int i = 0; i < personsPerEenheid.size(); i++) {
                createRow(personsPerEenheid.get(i), sheet, i);
            }
            postProcessSheet(sheet);
        }
    }

    /**
     * Auto-sizes every column of the sheet and highlights rows that are new
     * or changed compared to the previous export.
     */
    private void postProcessSheet(Sheet sheet) {
        // Auto-size each column to fit its content
        int numcells = sheet.getRow(0).getLastCellNum();
        for (int i = 0; i < numcells; i++) {
            sheet.autoSizeColumn(i);
        }
        processUpdates(sheet);
    }

    /**
     * Appends one data row for the given person.
     *
     * @param p the person to write
     * @param sheet the sheet to append to
     * @param index zero-based person index; the heading occupies row 0
     * @return the created row
     */
    private Row createRow(Person p, Sheet sheet, int index) {
        // Skip the heading
        index++;
        Row r = sheet.createRow(index);
        Cell[] cells = new Cell[NUM_ATTRIBUTES_PER_PERSON];
        for (int i = 0; i < NUM_ATTRIBUTES_PER_PERSON; i++) {
            Cell c = r.createCell(i);
            cells[i] = c;
            // Alternate row styles for readability (zebra striping).
            if (index % 2 == 0) {
                c.setCellStyle(zebraStyle);
            } else {
                c.setCellStyle(normalStyle);
            }
        }
        // Columns 0-9: membership number, last name, name prefix, first name,
        // initials, gender, street, house number (+suffix), postal code, city
        cells[0].setCellValue(p.getLidnummer());
        cells[1].setCellValue(p.getLid_achternaam());
        cells[2].setCellValue(p.getLid_tussenvoegsel());
        cells[3].setCellValue(p.getLid_voornaam());
        cells[4].setCellValue(p.getLid_initialen());
        cells[5].setCellValue(p.getLid_geslacht());
        cells[6].setCellValue(p.getLid_straat());
        cells[7].setCellValue(p.getLid_huisnummer() + " " + p.getLid_toevoegsel_huisnr());
        cells[8].setCellValue(p.getLid_postcode());
        cells[9].setCellValue(p.getLid_plaats());
        // Columns 10-17: phone, mobile, member e-mail, parent/guardian e-mail,
        // unit, role, date of birth, role start date.
        // (Parents' individual mobile numbers are not available in the source data.)
        cells[10].setCellValue(p.getLid_telefoon());
        cells[11].setCellValue(p.getLid_mobiel());
        cells[12].setCellValue(p.getLid_mailadres());
        cells[13].setCellValue(p.getLid_mailadres_ouder_verzorger());
        cells[14].setCellValue(p.getSpeleenheid());
        cells[15].setCellValue(p.getFunctie());
        cells[16].setCellValue(p.getLid_geboortedatum());
        cells[17].setCellValue(p.getFunctie_startdatum());
        return r;
    }

    /**
     * Writes the heading row (row 0), applies the heading style to every cell
     * and enables an auto-filter over all columns.
     */
    private void createHeading(Sheet sheet) {
        Row r = sheet.createRow(0);
        r.createCell(0).setCellValue("Lidnummer");
        r.createCell(1).setCellValue("Achternaam");
        r.createCell(2).setCellValue("Tussenvoegsel");
        r.createCell(3).setCellValue("Voornaam");
        r.createCell(4).setCellValue("Initialen");
        r.createCell(5).setCellValue("Geslacht");
        r.createCell(6).setCellValue("Straat");
        r.createCell(7).setCellValue("Adres");
        r.createCell(8).setCellValue("Postcode");
        r.createCell(9).setCellValue("Plaats");
        r.createCell(10).setCellValue("Telefoonnummer");
        r.createCell(11).setCellValue("Mobiel");
        r.createCell(12).setCellValue("Mail lid");
        r.createCell(13).setCellValue("Mail ouder/verzorger");
        r.createCell(14).setCellValue("Speltak");
        r.createCell(15).setCellValue("Functie");
        r.createCell(16).setCellValue("Geboortedatum");
        r.createCell(17).setCellValue("Functie startdatm");
        Iterator<Cell> it = r.cellIterator();
        while (it.hasNext()) {
            Cell c = it.next();
            c.setCellStyle(headingStyle);
        }
        sheet.setAutoFilter(new CellRangeAddress(0, 0, 0, 17));
    }

    /** Creates the heading, normal and zebra cell styles used by this writer. */
    private void createStyles() {
        headingStyle = workbook.createCellStyle();
        Font f = workbook.createFont();
        //set font 1 to 12 point type
        f.setFontHeightInPoints((short) 12);
        // make it bold
        //arial is the default font
        f.setBoldweight(Font.BOLDWEIGHT_BOLD);
        headingStyle.setFont(f);
        //set a medium border around the heading cells
        headingStyle.setBorderBottom(CellStyle.BORDER_MEDIUM);
        headingStyle.setBorderLeft(CellStyle.BORDER_MEDIUM);
        headingStyle.setBorderTop(CellStyle.BORDER_MEDIUM);
        headingStyle.setBorderRight(CellStyle.BORDER_MEDIUM);
        normalStyle = workbook.createCellStyle();
        normalStyle.setBorderBottom(CellStyle.BORDER_THIN);
        normalStyle.setBorderLeft(CellStyle.BORDER_THIN);
        normalStyle.setBorderTop(CellStyle.BORDER_THIN);
        normalStyle.setBorderRight(CellStyle.BORDER_THIN);
        Font f2 = workbook.createFont();
        normalStyle.setFont(f2);
        zebraStyle = workbook.createCellStyle();
        zebraStyle.cloneStyleFrom(normalStyle);
        zebraStyle.setFillForegroundColor(IndexedColors.DARK_YELLOW.index);
        zebraStyle.setFillPattern(PatternFormatting.SOLID_FOREGROUND);
    }

    /**
     * Writes the workbook to the output stream, closes it, and stores a copy
     * of the output as the "previous" export for the next run.
     *
     * NOTE(review): relying on finalize() for flushing output is fragile — the
     * JVM gives no guarantee it ever runs. Consider exposing an explicit
     * close()/save() method instead.
     */
    @Override
    public void finalize() throws Throwable {
        super.finalize();
        try {
            workbook.write(out);
        } catch (IOException ex) {
            System.err.println("File write Error" + ex.getLocalizedMessage());
        } finally {
            if (out != null) {
                try {
                    out.close();
                } catch (IOException ex) {
                    System.err.println("File close Error" + ex.getLocalizedMessage());
                }
            }
        }
        replacePrevious(output);
    }

    /** Returns whether a previous export exists to compare against. */
    private boolean hasPrevious() {
        return previous.exists();
    }

    /**
     * Compares every data row of the sheet with the previous export and colors
     * new or changed rows. Does nothing when no previous export exists or the
     * previous workbook does not contain the expected "Allemaal" sheet.
     */
    private void processUpdates(Sheet sheet) {
        if (hasPrevious()) {
            FileInputStream previousStream = null;
            try {
                previousStream = new FileInputStream(previous);
                //Get the workbook instance for XLS file
                HSSFWorkbook prevWorkbook = new HSSFWorkbook(previousStream);
                Sheet prevSheet = prevWorkbook.getSheet("Allemaal");
                if (prevSheet == null) {
                    // Previous workbook has no comparison sheet; nothing to do.
                    // (Previously this caused a NullPointerException.)
                    return;
                }
                for (Iterator<Row> it = sheet.rowIterator(); it.hasNext();) {
                    Row row = it.next();
                    // Row 0 is the heading; only compare data rows.
                    if (row.getRowNum() > 0) {
                        String lidnummer = row.getCell(NUM_LIDNUMMER_CELL).getStringCellValue();
                        Row previousRow = getPreviousLidRow(lidnummer, prevSheet);
                        processPersonUpdates(row, previousRow);
                    }
                }
            } catch (FileNotFoundException ex) {
                System.err.println("Could not locate file: " + ex.getLocalizedMessage());
            } catch (IOException ex) {
                System.err.println("Problems reading file: " + ex.getLocalizedMessage());
            } finally {
                try {
                    if (previousStream != null) {
                        previousStream.close();
                    }
                } catch (IOException ex) {
                    System.err.println("Problems closing file: " + ex.getLocalizedMessage());
                }
            }
        }
    }

    /**
     * Finds the row for the given membership number in the previous sheet.
     *
     * @return the matching row, or null when the member did not exist before
     */
    private Row getPreviousLidRow(String lidnummer, Sheet sheet) {
        for (Iterator<Row> it = sheet.rowIterator(); it.hasNext();) {
            Row row = it.next();
            String oldLidnummer = row.getCell(NUM_LIDNUMMER_CELL).getStringCellValue();
            if (lidnummer.equals(oldLidnummer)) {
                return row;
            }
        }
        return null;
    }

    /**
     * Colors the cells of the new row: COLOR_NEW when the member had no
     * previous row at all, COLOR_UPDATED for each cell whose value changed.
     */
    private void processPersonUpdates(Row newRow, Row oldRow) {
        for (Iterator<Cell> it = newRow.cellIterator(); it.hasNext();) {
            Cell newCell = it.next();
            if (oldRow == null) {
                updateCellColor(newCell, COLOR_NEW);
            } else {
                int colIndex = newCell.getColumnIndex();
                Cell oldCell = oldRow.getCell(colIndex);
                String newValue = newCell.getStringCellValue();
                // A missing old cell counts as a change (previously this NPE'd).
                String oldValue = (oldCell == null) ? null : oldCell.getStringCellValue();
                if (!newValue.equals(oldValue)) {
                    updateCellColor(newCell, COLOR_UPDATED);
                }
            }
        }
    }

    /**
     * Gives the cell a solid background of the requested color while keeping
     * the rest of its current style.
     */
    private void updateCellColor(Cell cell, short color) {
        CellStyle style = workbook.createCellStyle();
        style.cloneStyleFrom(cell.getCellStyle());
        style.setFillForegroundColor(color);
        style.setFillPattern(PatternFormatting.SOLID_FOREGROUND);
        cell.setCellStyle(style);
    }

    /**
     * Copies the freshly written output file to data/previous.xls so the next
     * run can diff against it. Creates the data directory when missing.
     */
    private void replacePrevious(File source) {
        InputStream in = null;
        OutputStream prevOut = null;
        try {
            File dataDir = new File("data");
            if (!dataDir.exists()) {
                dataDir.mkdir();
            }
            in = new FileInputStream(source);
            prevOut = new FileOutputStream(previous);
            // Copy the bits from instream to outstream
            byte[] buf = new byte[4096];
            int len;
            while ((len = in.read(buf)) > 0) {
                prevOut.write(buf, 0, len);
            }
        } catch (FileNotFoundException ex) {
            System.err.println("Could not locate file: " + ex.getLocalizedMessage());
        } catch (IOException ex) {
            System.err.println("Problems writing file: " + ex.getLocalizedMessage());
        } finally {
            // Close both streams even when the copy fails (the output stream
            // was previously leaked on exception).
            try {
                if (in != null) {
                    in.close();
                }
            } catch (IOException ex) {
                System.err.println("Problems closing file: " + ex.getLocalizedMessage());
            }
            try {
                if (prevOut != null) {
                    prevOut.close();
                }
            } catch (IOException ex) {
                System.err.println("Problems closing file: " + ex.getLocalizedMessage());
            }
        }
    }
}
|
package org.adridadou.ethereum.propeller;
import org.adridadou.ethereum.propeller.event.BlockInfo;
import org.adridadou.ethereum.propeller.event.EthereumEventHandler;
import org.adridadou.ethereum.propeller.exception.EthereumApiException;
import org.adridadou.ethereum.propeller.solidity.SolidityContractDetails;
import org.adridadou.ethereum.propeller.solidity.SolidityEvent;
import org.adridadou.ethereum.propeller.solidity.SolidityType;
import org.adridadou.ethereum.propeller.solidity.abi.AbiParam;
import org.adridadou.ethereum.propeller.solidity.converters.SolidityTypeGroup;
import org.adridadou.ethereum.propeller.solidity.converters.decoders.SolidityTypeDecoder;
import org.adridadou.ethereum.propeller.solidity.converters.decoders.list.CollectionDecoder;
import org.adridadou.ethereum.propeller.solidity.converters.encoders.SolidityTypeEncoder;
import org.adridadou.ethereum.propeller.solidity.converters.encoders.list.CollectionEncoder;
import org.adridadou.ethereum.propeller.values.*;
import org.apache.commons.lang.ArrayUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import rx.Observable;
import java.lang.reflect.InvocationTargetException;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.locks.ReentrantLock;
import java.util.stream.Collectors;
import static org.adridadou.ethereum.propeller.values.EthValue.wei;
class EthereumProxy {
    private static final int ADDITIONAL_GAS_FOR_CONTRACT_CREATION = 15_000;
    private static final int ADDITIONAL_GAS_DIRTY_FIX = 200_000;
    private static final Logger logger = LoggerFactory.getLogger(EthereumProxy.class);
    /** Requests waiting to be submitted by the single worker loop. */
    private final BlockingQueue<TransactionRequest> transactions = new ArrayBlockingQueue<>(10000);
    /**
     * Futures handed out to callers, completed by the worker thread once the
     * matching request has been submitted. Concurrent because it is written by
     * caller threads and read/removed by the worker (was a plain LinkedHashMap).
     */
    private final Map<TransactionRequest, CompletableFuture<EthHash>> futureMap = new ConcurrentHashMap<>();
    private final EthereumBackend ethereum;
    private final EthereumEventHandler eventHandler;
    private final EthereumConfig config;
    /** Submitted-but-unmined transaction hashes per sender; guarded by {@link #lock}. */
    private final Map<EthAddress, Set<EthHash>> pendingTransactions = new HashMap<>();
    /** Last known nonce per sender; guarded by {@link #lock}. */
    private final Map<EthAddress, Nonce> nonces = new HashMap<>();
    private final Map<SolidityTypeGroup, List<SolidityTypeEncoder>> encoders = new HashMap<>();
    private final Map<SolidityTypeGroup, List<SolidityTypeDecoder>> decoders = new HashMap<>();
    private final List<Class<? extends CollectionDecoder>> listDecoders = new ArrayList<>();
    private final List<Class<? extends CollectionEncoder>> listEncoders = new ArrayList<>();
    private final Set<Class<?>> voidClasses = new HashSet<>();
    private final ExecutorService executor = Executors.newCachedThreadPool();
    private final ReentrantLock lock = new ReentrantLock();

    EthereumProxy(EthereumBackend ethereum, EthereumEventHandler eventHandler, EthereumConfig config) {
        this.ethereum = ethereum;
        this.eventHandler = eventHandler;
        this.config = config;
        updateNonce();
        ethereum.register(eventHandler);
        processTransactions();
    }

    /** Starts the background worker that submits queued requests one at a time. */
    private void processTransactions() {
        executor.submit(() -> {
            while (!Thread.currentThread().isInterrupted()) {
                try {
                    TransactionRequest request = transactions.take();
                    this.processTransactionRequest(request);
                } catch (InterruptedException e) {
                    // Restore the interrupt flag and stop the worker (e.g. on shutdown);
                    // the flag was previously swallowed, leaving the loop un-interruptible.
                    Thread.currentThread().interrupt();
                    return;
                } catch (Throwable e) {
                    logger.warn("error while submitting a queued transaction:", e);
                }
            }
        });
    }

    /**
     * Submits one request to the backend and completes the caller's future with
     * the resulting transaction hash, or fails it when submission throws
     * (previously a failed submit left the future pending forever).
     */
    private void processTransactionRequest(TransactionRequest request) {
        // Remove first so the map cannot leak entries when submission fails.
        CompletableFuture<EthHash> future = futureMap.remove(request);
        try {
            Nonce nonce = getNonce(request.getAccount().getAddress());
            EthHash hash = ethereum.submit(request, nonce);
            increasePendingTransactionCounter(request.getAccount().getAddress(), hash);
            if (future != null) {
                future.complete(hash);
            }
        } catch (RuntimeException e) {
            if (future != null) {
                future.completeExceptionally(e);
            }
            throw e; // let the worker loop log it as well
        }
    }

    /** Registers a class whose values are treated as "void" return types. */
    EthereumProxy addVoidClass(Class<?> cls) {
        voidClasses.add(cls);
        return this;
    }

    /** Registers an encoder for the given Solidity type group. */
    EthereumProxy addEncoder(final SolidityTypeGroup typeGroup, final SolidityTypeEncoder encoder) {
        List<SolidityTypeEncoder> encoderList = encoders.computeIfAbsent(typeGroup, key -> new ArrayList<>());
        encoderList.add(encoder);
        return this;
    }

    /** Registers a decoder class used for array/list typed parameters. */
    EthereumProxy addListDecoder(final Class<? extends CollectionDecoder> decoder) {
        listDecoders.add(decoder);
        return this;
    }

    /** Registers an encoder class used for array/list typed parameters. */
    EthereumProxy addListEncoder(final Class<? extends CollectionEncoder> encoder) {
        listEncoders.add(encoder);
        return this;
    }

    /** Registers a decoder for the given Solidity type group. */
    EthereumProxy addDecoder(final SolidityTypeGroup typeGroup, final SolidityTypeDecoder decoder) {
        List<SolidityTypeDecoder> decoderList = decoders.computeIfAbsent(typeGroup, key -> new ArrayList<>());
        decoderList.add(decoder);
        return this;
    }

    /** Deploys a contract, sending the given ether value with the creation transaction. */
    CompletableFuture<EthAddress> publishWithValue(SolidityContractDetails contract, EthAccount account, EthValue value, Object... constructorArgs) {
        return createContractWithValue(contract, account, value, constructorArgs);
    }

    /** Deploys a contract with no ether value attached. */
    CompletableFuture<EthAddress> publish(SolidityContractDetails contract, EthAccount account, Object... constructorArgs) {
        return createContract(contract, account, constructorArgs);
    }

    /**
     * Returns the next usable nonce for the address: the last known on-chain
     * nonce plus the number of still-pending transactions from that sender.
     * The whole computation runs under the lock, and the lock is now released
     * in a finally block (previously an exception would leave it held forever).
     */
    Nonce getNonce(final EthAddress address) {
        lock.lock();
        try {
            nonces.computeIfAbsent(address, ethereum::getNonce);
            Integer offset = Optional.ofNullable(pendingTransactions.get(address)).map(Set::size).orElse(0);
            return nonces.get(address).add(offset);
        } finally {
            lock.unlock();
        }
    }

    /** Returns the deployed byte code at the given address. */
    SmartContractByteCode getCode(EthAddress address) {
        return ethereum.getCode(address);
    }

    /** Observes decoded events of the given definition emitted by the contract. */
    <T> Observable<T> observeEvents(SolidityEvent<T> eventDefinition, EthAddress contractAddress) {
        return observeEventsWithInfo(eventDefinition, contractAddress).map(EventInfo::getResult);
    }

    /** Observes decoded events together with the transaction hash that emitted them. */
    <T> Observable<EventInfo<T>> observeEventsWithInfo(SolidityEvent<T> eventDefinition, EthAddress contractAddress) {
        return eventHandler.observeTransactions()
                .filter(params -> params.getReceipt().map(receipt -> contractAddress.equals(receipt.receiveAddress)).orElse(false))
                .flatMap(params -> Observable.from(params.getReceipt().map(receipt -> receipt.events).get().stream().filter(eventDefinition::match)
                        .map(data -> new EventInfo<>(params.getTransactionHash(), eventDefinition.parseEvent(data, eventDefinition.getEntityClass()))).collect(Collectors.toList())));
    }

    /** Sends the creation transaction and resolves to the new contract's address. */
    private CompletableFuture<EthAddress> publishContract(EthValue ethValue, EthData data, EthAccount account) {
        return this.sendTxInternal(ethValue, data, account, EthAddress.empty())
                .thenCompose(CallDetails::getResult)
                .thenApply(receipt -> receipt.contractAddress);
    }

    /** Sends a transaction to the given address. */
    CompletableFuture<CallDetails> sendTx(EthValue value, EthData data, EthAccount account, EthAddress address) {
        return this.sendTxInternal(value, data, account, address);
    }

    /** Wraps an already-deployed contract for typed access. */
    public SmartContract getSmartContract(SolidityContractDetails details, EthAddress address, EthAccount account) {
        return new SmartContract(details, account, address, this, ethereum);
    }

    private CompletableFuture<EthAddress> createContract(SolidityContractDetails contract, EthAccount account, Object... constructorArgs) {
        return createContractWithValue(contract, account, wei(0), constructorArgs);
    }

    /**
     * Encodes the constructor arguments (if a matching constructor exists),
     * appends them to the contract binary and publishes the result.
     *
     * @throws EthereumApiException when args are given but no constructor matches
     */
    private CompletableFuture<EthAddress> createContractWithValue(SolidityContractDetails contract, EthAccount account, EthValue value, Object... constructorArgs) {
        EthData argsEncoded = new SmartContract(contract, account, EthAddress.empty(), this, ethereum).getConstructor(constructorArgs)
                .map(constructor -> constructor.encode(constructorArgs))
                .orElseGet(() -> {
                    if (constructorArgs.length > 0) {
                        throw new EthereumApiException("No constructor found with params (" + printTypes(constructorArgs) + ")");
                    }
                    return EthData.empty();
                });
        return publishContract(value, EthData.of(ArrayUtils.addAll(contract.getBinary().data, argsEncoded.data)), account);
    }

    /** Renders the argument types for error messages, e.g. "String, Integer". */
    private String printTypes(Object[] constructorArgs) {
        return Arrays.stream(constructorArgs).map(arg -> {
            if (arg == null) {
                return "null";
            } else {
                return arg.getClass().getSimpleName();
            }
        }).reduce((a, b) -> a + ", " + b).orElse("[no args]");
    }

    /**
     * Queues a request for the worker thread and returns the future that will
     * hold its transaction hash. The future is registered BEFORE the request is
     * enqueued; the previous order raced with the worker, which could take the
     * request and find no future to complete.
     */
    private CompletableFuture<EthHash> submitTransaction(TransactionRequest request) {
        CompletableFuture<EthHash> future = new CompletableFuture<>();
        futureMap.put(request, future);
        transactions.add(request);
        return future;
    }

    /** Estimates gas, builds the request and queues it once the handler is ready. */
    private CompletableFuture<CallDetails> sendTxInternal(EthValue value, EthData data, EthAccount account, EthAddress toAddress) {
        return eventHandler.ready().thenCompose((v) -> {
            GasUsage gasLimit = estimateGas(value, data, account, toAddress);
            GasPrice gasPrice = ethereum.getGasPrice();
            return submitTransaction(new TransactionRequest(account, toAddress, value, data, gasLimit, gasPrice))
                    .thenApply(txHash -> new CallDetails(this.waitForResult(txHash), txHash));
        });
    }

    /**
     * Resolves with the transaction's receipt once it is mined, or fails when
     * the transaction is dropped, errors, or is not included within the
     * configured block-wait limit. Merges four signal sources: dropped-tx
     * events, mined-block receipts, a block-count timeout, and a periodic poll.
     */
    private CompletableFuture<TransactionReceipt> waitForResult(EthHash txHash) {
        Objects.requireNonNull(txHash);
        long currentBlock = eventHandler.getCurrentBlockNumber();
        Observable<TransactionInfo> droppedTxs = eventHandler.observeTransactions()
                .filter(params -> params.getReceipt().map(receipt -> Objects.equals(receipt.hash, txHash)).orElse(false) && params.getStatus() == TransactionStatus.Dropped);
        Observable<TransactionInfo> timeoutBlock = eventHandler.observeBlocks()
                .filter(blockParams -> blockParams.blockNumber > currentBlock + config.blockWaitLimit())
                .map(params -> null); // null marks "timed out"; handled below
        Observable<TransactionInfo> blockTxs = eventHandler.observeBlocks()
                .flatMap(params -> Observable.from(params.receipts))
                .filter(receipt -> Objects.equals(receipt.hash, txHash))
                .map(this::createTransactionParameters);
        Observable<TransactionInfo> observeTx = Observable.interval(10, TimeUnit.SECONDS)
                .map(x -> getTransactionInfo(txHash))
                .filter(tx -> tx
                        .map(TransactionInfo::getStatus)
                        .map(TransactionStatus.Executed::equals).orElse(false))
                .filter(Optional::isPresent)
                .map(Optional::get);
        CompletableFuture<TransactionReceipt> futureResult = new CompletableFuture<>();
        Observable.merge(droppedTxs, blockTxs, timeoutBlock, observeTx)
                .map(params -> {
                    if (params == null) {
                        throw new EthereumApiException("the transaction has not been included in the last " + config.blockWaitLimit() + " blocks");
                    }
                    TransactionReceipt receipt = params.getReceipt().orElseThrow(() -> new EthereumApiException("no Transaction receipt found!"));
                    if (params.getStatus() == TransactionStatus.Dropped) {
                        throw new EthereumApiException("the transaction has been dropped! - " + receipt.error);
                    }
                    Optional<TransactionReceipt> result = checkForErrors(receipt);
                    return result.orElseThrow(() -> new EthereumApiException("error with the transaction " + receipt.hash + ". error:" + receipt.error));
                }).first().forEach(futureResult::complete);
        return futureResult;
    }

    /**
     * Asks the backend for a gas estimate and pads it: extra gas for contract
     * creation, plus a fixed safety margin (see ADDITIONAL_GAS_DIRTY_FIX).
     */
    private GasUsage estimateGas(EthValue value, EthData data, EthAccount account, EthAddress toAddress) {
        GasUsage gasLimit = ethereum.estimateGas(account, toAddress, value, data);
        //if it is a contract creation
        if (toAddress.isEmpty()) {
            gasLimit = gasLimit.add(ADDITIONAL_GAS_FOR_CONTRACT_CREATION);
        }
        return gasLimit.add(ADDITIONAL_GAS_DIRTY_FIX);
    }

    /** Wraps a mined receipt as an Executed TransactionInfo. */
    private TransactionInfo createTransactionParameters(TransactionReceipt receipt) {
        return new TransactionInfo(receipt.hash, receipt, TransactionStatus.Executed);
    }

    /** Returns the receipt when it reports success, empty otherwise. */
    private Optional<TransactionReceipt> checkForErrors(final TransactionReceipt receipt) {
        if (receipt.isSuccessful) {
            return Optional.of(receipt);
        } else {
            return Optional.empty();
        }
    }

    /**
     * Subscribes to dropped transactions and mined blocks to keep the
     * per-sender pending set and nonce cache in sync. All cache mutations now
     * run under the lock with a finally-protected unlock (the block handler
     * previously leaked the lock on exception, and the dropped-tx handler
     * mutated the caches without the lock at all).
     */
    private void updateNonce() {
        eventHandler.observeTransactions()
                .filter(tx -> tx.getStatus() == TransactionStatus.Dropped)
                .forEach(params -> {
                    TransactionReceipt receipt = params.getReceipt().orElseThrow(() -> new EthereumApiException("no Transaction receipt found!"));
                    EthAddress currentAddress = receipt.sender;
                    EthHash hash = receipt.hash;
                    lock.lock();
                    try {
                        Optional.ofNullable(pendingTransactions.get(currentAddress)).ifPresent(hashes -> {
                            hashes.remove(hash);
                            nonces.put(currentAddress, ethereum.getNonce(currentAddress));
                        });
                    } finally {
                        lock.unlock();
                    }
                });
        eventHandler.observeBlocks()
                .forEach(params -> {
                    lock.lock();
                    try {
                        params.receipts
                                .forEach(receipt -> Optional.ofNullable(pendingTransactions.get(receipt.sender))
                                        .ifPresent(hashes -> {
                                            hashes.remove(receipt.hash);
                                            nonces.put(receipt.sender, ethereum.getNonce(receipt.sender));
                                        }));
                    } finally {
                        lock.unlock();
                    }
                });
    }

    EthereumEventHandler events() {
        return eventHandler;
    }

    boolean addressExists(final EthAddress address) {
        return ethereum.addressExists(address);
    }

    EthValue getBalance(final EthAddress address) {
        return ethereum.getBalance(address);
    }

    /**
     * Records a freshly submitted transaction hash as pending for its sender.
     * Runs under the lock for consistency with getNonce(); the redundant
     * re-put of the set returned by computeIfAbsent was removed.
     */
    private void increasePendingTransactionCounter(EthAddress address, EthHash hash) {
        lock.lock();
        try {
            pendingTransactions
                    .computeIfAbsent(address, (key) -> Collections.synchronizedSet(new HashSet<>()))
                    .add(hash);
        } finally {
            lock.unlock();
        }
    }

    /**
     * Returns the encoders applicable to the ABI parameter, wrapping them in
     * list encoders when the parameter is an array type.
     *
     * @throws EthereumApiException for unknown types or encoder setup failures
     */
    List<SolidityTypeEncoder> getEncoders(AbiParam abiParam) {
        SolidityType type = SolidityType.find(abiParam.getType())
                .orElseThrow(() -> new EthereumApiException("unknown type " + abiParam.getType()));
        if (abiParam.isArray()) {
            return listEncoders.stream().map(cls -> {
                try {
                    if (abiParam.isDynamic()) {
                        return cls.getConstructor(List.class).newInstance(getEncoders(type, abiParam));
                    }
                    return cls.getConstructor(List.class, Integer.class).newInstance(getEncoders(type, abiParam), abiParam.getArraySize());
                } catch (InstantiationException | NoSuchMethodException | InvocationTargetException | IllegalAccessException e) {
                    throw new EthereumApiException("error while preparing list encoders", e);
                }
            }).collect(Collectors.toList());
        }
        return getEncoders(type, abiParam);
    }

    private List<SolidityTypeEncoder> getEncoders(final SolidityType type, AbiParam abiParam) {
        return Optional.ofNullable(encoders.get(SolidityTypeGroup.resolveGroup(type))).orElseThrow(() -> new EthereumApiException("no encoder found for solidity type " + abiParam.getType()));
    }

    /**
     * Returns the decoders applicable to the ABI parameter, wrapping them in
     * list decoders for array and bytes types.
     *
     * @throws EthereumApiException for unknown types or decoder setup failures
     */
    List<SolidityTypeDecoder> getDecoders(AbiParam abiParam) {
        SolidityType type = SolidityType.find(abiParam.getType())
                .orElseThrow(() -> new EthereumApiException("unknown type " + abiParam.getType()));
        SolidityTypeGroup typeGroup = SolidityTypeGroup.resolveGroup(type);
        if (abiParam.isArray() || type.equals(SolidityType.BYTES)) {
            return listDecoders.stream().map(cls -> {
                try {
                    return cls.getConstructor(List.class, Integer.class).newInstance(decoders.get(typeGroup), abiParam.getArraySize());
                } catch (InstantiationException | IllegalAccessException | NoSuchMethodException | InvocationTargetException e) {
                    // Preserve the root cause instead of dropping it.
                    throw new EthereumApiException("error while creating a List decoder", e);
                }
            }).collect(Collectors.toList());
        }
        return Optional.ofNullable(decoders.get(typeGroup))
                .orElseThrow(() -> new EthereumApiException("no decoder found for solidity type " + abiParam.getType()));
    }

    public <T> boolean isVoidType(Class<T> cls) {
        return voidClasses.contains(cls);
    }

    /** Returns the decoded events of the definition emitted in the given block. */
    public <T> List<T> getEventsAtBlock(SolidityEvent eventDefinition, EthAddress address, Class<T> cls, Long blockNumber) {
        return getEventsAtBlock(eventDefinition, address, cls, ethereum.getBlock(blockNumber));
    }

    public <T> List<T> getEventsAtBlock(SolidityEvent eventDefinition, EthAddress address, Class<T> cls, EthHash blockHash) {
        return getEventsAtBlock(eventDefinition, address, cls, ethereum.getBlock(blockHash));
    }

    private <T> List<T> getEventsAtBlock(SolidityEvent eventDefinition, EthAddress address, Class<T> cls, BlockInfo blockInfo) {
        return getEventsAtBlockWithInfo(eventDefinition, address, cls, blockInfo).stream()
                .map(EventInfo::getResult)
                .collect(Collectors.toList());
    }

    /** Like getEventsAtBlock but keeps the emitting transaction hash with each event. */
    public <T> List<EventInfo<T>> getEventsAtBlockWithInfo(SolidityEvent eventDefinition, EthAddress address, Class<T> cls, Long blockNumber) {
        return getEventsAtBlockWithInfo(eventDefinition, address, cls, ethereum.getBlock(blockNumber));
    }

    public <T> List<EventInfo<T>> getEventsAtBlockWithInfo(SolidityEvent eventDefinition, EthAddress address, Class<T> cls, EthHash blockHash) {
        return getEventsAtBlockWithInfo(eventDefinition, address, cls, ethereum.getBlock(blockHash));
    }

    private <T> List<EventInfo<T>> getEventsAtBlockWithInfo(SolidityEvent eventDefinition, EthAddress address, Class<T> cls, BlockInfo blockInfo) {
        return blockInfo.receipts.stream()
                .filter(params -> address.equals(params.receiveAddress))
                .flatMap(params -> params.events.stream())
                .filter(eventDefinition::match)
                .map(data -> new EventInfo<>(data.getTransactionHash(), (T) eventDefinition.parseEvent(data, cls))).collect(Collectors.toList());
    }

    /** Returns the decoded events of the definition emitted by one transaction. */
    public <T> List<T> getEventsAtTransaction(SolidityEvent eventDefinition, EthAddress address, Class<T> cls, EthHash transactionHash) {
        return getEventsAtTransactionWithInfo(eventDefinition, address, cls, transactionHash).stream()
                .map(EventInfo::getResult).collect(Collectors.toList());
    }

    /**
     * Like getEventsAtTransaction but keeps the transaction hash with each event.
     *
     * @throws EthereumApiException when no receipt exists for the hash
     */
    public <T> List<EventInfo<T>> getEventsAtTransactionWithInfo(SolidityEvent eventDefinition, EthAddress address, Class<T> cls, EthHash transactionHash) {
        TransactionReceipt receipt = ethereum.getTransactionInfo(transactionHash).flatMap(TransactionInfo::getReceipt).orElseThrow(() -> new EthereumApiException("no Transaction receipt found!"));
        if (address.equals(receipt.receiveAddress)) {
            return receipt.events.stream().filter(eventDefinition::match)
                    .map(data -> new EventInfo<>(data.getTransactionHash(), (T) eventDefinition.parseEvent(data, cls)))
                    .collect(Collectors.toList());
        }
        return new ArrayList<>();
    }

    public long getCurrentBlockNumber() {
        return eventHandler.getCurrentBlockNumber();
    }

    public Optional<TransactionInfo> getTransactionInfo(EthHash hash) {
        return ethereum.getTransactionInfo(hash);
    }
}
|
package org.asciidoc.intellij.formatting;
import com.intellij.formatting.Alignment;
import com.intellij.formatting.Block;
import com.intellij.formatting.Indent;
import com.intellij.formatting.Spacing;
import com.intellij.lang.ASTNode;
import com.intellij.lang.Language;
import com.intellij.psi.PsiWhiteSpace;
import com.intellij.psi.codeStyle.CodeStyleSettings;
import com.intellij.psi.formatter.WhiteSpaceFormattingStrategy;
import com.intellij.psi.formatter.WhiteSpaceFormattingStrategyFactory;
import com.intellij.psi.formatter.common.AbstractBlock;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.tree.TokenSet;
import org.asciidoc.intellij.codeStyle.AsciiDocCodeStyleSettings;
import org.asciidoc.intellij.lexer.AsciiDocTokenTypes;
import org.asciidoc.intellij.parser.AsciiDocElementTypes;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
class AsciiDocBlock extends AbstractBlock {
private boolean verse = false;
private boolean table = false;
private boolean hardbreaks = false;
private final CodeStyleSettings settings;
private final Map<String, WhiteSpaceFormattingStrategy> wssCache;
AsciiDocBlock(@NotNull ASTNode node, CodeStyleSettings settings) {
super(node, null, Alignment.createAlignment());
this.settings = settings;
this.wssCache = new HashMap<>();
}
private AsciiDocBlock(@NotNull ASTNode node, CodeStyleSettings settings, boolean verse, boolean table, boolean hardbreaks, Map<String, WhiteSpaceFormattingStrategy> wss, Alignment alignment) {
super(node, null, alignment);
this.settings = settings;
this.verse = verse;
this.table = table;
this.hardbreaks = hardbreaks;
this.wssCache = wss;
}
@Override
protected List<Block> buildChildren() {
final List<Block> result = new ArrayList<>();
if (!settings.getCustomSettings(AsciiDocCodeStyleSettings.class).FORMATTING_ENABLED) {
return result;
}
if (myNode.getPsi() instanceof org.asciidoc.intellij.psi.AsciiDocBlock) {
// if this is inside a verse, pass this information down to all children
org.asciidoc.intellij.psi.AsciiDocBlock block = (org.asciidoc.intellij.psi.AsciiDocBlock) myNode.getPsi();
if (block.getType() == org.asciidoc.intellij.psi.AsciiDocBlock.Type.VERSE) {
verse = true;
}
if (block.getType() == org.asciidoc.intellij.psi.AsciiDocBlock.Type.VERSE) {
table = true;
}
if ("%hardbreaks".equals(block.getStyle())) {
hardbreaks = true;
}
}
ASTNode child = myNode.getFirstChildNode();
while (child != null) {
if (!(child instanceof PsiWhiteSpace)) {
// every child will align with the the parent, no additional indents due to alignment
// as leading blanks in Asciidoc in a line can either change the meaning
// verse blocks with have their own alignment so that they can add spaces as needed to the beginning of the line
result.add(new AsciiDocBlock(child, settings, verse, table, hardbreaks, wssCache, verse ? Alignment.createAlignment() : getAlignment()));
} else {
Language language = ((PsiWhiteSpace) child).getLanguage();
WhiteSpaceFormattingStrategy myWhiteSpaceStrategy = wssCache.computeIfAbsent(language.getID(),
s -> WhiteSpaceFormattingStrategyFactory.getStrategy(language));
// double-check for a whitespace problem in lexer before re-formatting,
// otherwise non-whitespace characters might get lost!
CharSequence text = child.getChars();
int end = text.length();
if (myWhiteSpaceStrategy.check(text, 0, end) != end) {
StringBuilder tree = new StringBuilder(childToString(child));
ASTNode node = child.getTreeParent();
while (node != null) {
tree.insert(0, childToString(node) + " > ");
node = node.getTreeParent();
}
throw new IllegalStateException("Whitespace element contains non-whitespace-characters: '" +
replaceNewlinesForPrinting(child.getText()) + "' at offset " + child.getStartOffset() + ", tree: " + tree);
}
}
child = child.getTreeNext();
}
return result;
}
private String replaceNewlinesForPrinting(String text) {
return text.replaceAll("\\\\", "\\\\\\\\").replaceAll("\n", "\\\\n");
}
@NotNull
private String childToString(ASTNode child) {
return child.getElementType() + ":" + child.getPsi().getLanguage().getDisplayName();
}
@Nullable
@Override
public Alignment getAlignment() {
return super.getAlignment();
}
@Nullable
@Override
public Spacing getSpacing(@Nullable Block child1, @NotNull Block child2) {
if (child1 == null) {
return null;
}
// keep blank lines before and after comments as is
if (isComment(child1) || isComment(child2)) {
return Spacing.createSpacing(0, 999, 0, true, 999);
}
// keep blank lines after block start
if (isBlockStart(child1)) {
return Spacing.createSpacing(0, 999, 0, true, 999);
}
// no blank line after title and block attribute
if (isTitleInsideTitle(child1) && !isTitleInsideTitle(child2)) {
return Spacing.createSpacing(0, 0, 1, true, 0);
}
// no blank line after title and block attribute
if (!verse && !table && (isBlockAttribute(child1) || isBlockIdEnd(child1))) {
return Spacing.createSpacing(0, 0, 1, true, 0);
}
// blank line(s) before and after a heading
if (!verse && !table && (isSection(child1) && !isPartOfSameHeading(child1, child2) && !isAttributeDeclaration(child2) && !isHeader(child2))) {
int minBlankLinesAfterHeader = settings.getCustomSettings(AsciiDocCodeStyleSettings.class).BLANK_LINES_AFTER_HEADER;
int maxBlankLinesAfterHeader = settings.getCustomSettings(AsciiDocCodeStyleSettings.class).BLANK_LINES_KEEP_AFTER_HEADER;
return Spacing.createSpacing(0, 0, minBlankLinesAfterHeader + 1, true, maxBlankLinesAfterHeader);
}
if (settings.getCustomSettings(AsciiDocCodeStyleSettings.class).ONE_SENTENCE_PER_LINE) {
// ensure a new line at the end of the sentence
if (!verse && !table && !hardbreaks && isEndOfSentence(child1) && isPartOfSentence(child2)
&& !isTitleInsideTitle(child1)) {
return Spacing.createSpacing(0, 0, 1, true, 1);
}
// ensure exactly one space between parts of one sentence. Remove any newlines
if (!verse && !table && !hardbreaks && isPartOfSentence(child1) && isPartOfSentence(child2) && !hasBlankLineBetween(child1, child2)) {
// if there is a newline, create at least one space
int minSpaces = hasNewlinesBetween(child1, child2) ? 1 : 0;
return Spacing.createSpacing(minSpaces, 1, 0, false, 0);
}
}
// have one at least blank line before each bullet or enumeration,
// but not if previous line starts with one as well (special case compact single line enumerations)
/* disabled, as it only tackles single enumeration items
if (!verse && !table && ((isEnumeration(child2) || isBullet(child2)) && !lineStartsWithEnumeration(child1) && !isContinuation(child1))) {
return Spacing.createSpacing(0, 0, 2, false, 0);
}
*/
// one space after enumeration or bullet
if (isEnumeration(child1) || isBullet(child1)) {
return Spacing.createSpacing(1, 1, 0, false, 0);
}
// no space before or after separator in block attributes
if (isSeparator(child1) || isSeparator(child2)) {
return Spacing.createSpacing(0, 0, 0, false, 0);
}
// if a block starts within a cell, start a new line for the block
if (isCellStart(child1) && isBlock(child2)) {
return Spacing.createSpacing(0, 0, 1, false, 0);
}
// before and after a block have one blank line, but not with if there is an continuation ("+")
if (!table && isBlock(child2) && !isContinuation(child1) && !isBlockStart(child1)) {
return Spacing.createSpacing(0, 0, 2, false, 0);
}
if (!table && isBlock(child1) && !isContinuation(child2) && !isBlockEnd(child2) && !isCallOut(child2)) {
return Spacing.createSpacing(0, 0, 2, false, 0);
}
return Spacing.createSpacing(0, 999, 0, true, 999);
}
private boolean isPartOfSameHeading(Block child1, Block child2) {
ASTNode node1 = ((AsciiDocBlock) child2).getNode();
ASTNode node2 = ((AsciiDocBlock) child1).getNode();
node1 = getHeadingFor(node1);
node2 = getHeadingFor(node2);
return node1 != null && node1 == node2;
}
private ASTNode getHeadingFor(ASTNode node) {
do {
if (node.getElementType() == AsciiDocElementTypes.HEADING) {
break;
}
node = node.getTreeParent();
} while (node != null);
return node;
}
private boolean isCellStart(Block block) {
return block instanceof AsciiDocBlock &&
AsciiDocTokenTypes.CELLSEPARATOR.equals(((AsciiDocBlock) block).getNode().getElementType());
}
private static boolean isAttributeDeclaration(Block block) {
return block instanceof AsciiDocBlock &&
AsciiDocElementTypes.ATTRIBUTE_DECLARATION.equals(((AsciiDocBlock) block).getNode().getElementType());
}
private boolean isBlockStart(Block block) {
return block instanceof AsciiDocBlock &&
(AsciiDocTokenTypes.LISTING_BLOCK_DELIMITER.equals(((AsciiDocBlock) block).getNode().getElementType())
|| AsciiDocTokenTypes.BLOCK_DELIMITER.equals(((AsciiDocBlock) block).getNode().getElementType())
|| AsciiDocTokenTypes.PASSTRHOUGH_BLOCK_DELIMITER.equals(((AsciiDocBlock) block).getNode().getElementType())
|| AsciiDocTokenTypes.LITERAL_BLOCK_DELIMITER.equals(((AsciiDocBlock) block).getNode().getElementType())
) &&
((AsciiDocBlock) block).getNode().getTreeNext() != null;
}
private boolean isBlockEnd(Block block) {
return block instanceof AsciiDocBlock &&
(AsciiDocTokenTypes.LISTING_BLOCK_DELIMITER.equals(((AsciiDocBlock) block).getNode().getElementType())
|| AsciiDocTokenTypes.BLOCK_DELIMITER.equals(((AsciiDocBlock) block).getNode().getElementType())
|| AsciiDocTokenTypes.PASSTRHOUGH_BLOCK_DELIMITER.equals(((AsciiDocBlock) block).getNode().getElementType())
|| AsciiDocTokenTypes.LITERAL_BLOCK_DELIMITER.equals(((AsciiDocBlock) block).getNode().getElementType())
) &&
((AsciiDocBlock) block).getNode().getTreeNext() == null;
}
private boolean isSeparator(Block block) {
return block instanceof AsciiDocBlock &&
AsciiDocTokenTypes.SEPARATOR.equals(((AsciiDocBlock) block).getNode().getElementType());
}
private boolean hasBlankLineBetween(Block child1, Block child2) {
if (!(child1 instanceof AsciiDocBlock)) {
return false;
}
if (!(child2 instanceof AsciiDocBlock)) {
return false;
}
int newlines = 0;
ASTNode node = ((AsciiDocBlock) child1).getNode().getTreeNext();
while (node != null && node != ((AsciiDocBlock) child2).getNode()) {
if (node instanceof PsiWhiteSpace && "\n".equals(node.getText())) {
newlines++;
if (newlines == 2) {
return true;
}
}
if (!(node instanceof PsiWhiteSpace)) {
return false;
}
node = node.getTreeNext();
}
return false;
}
private boolean hasNewlinesBetween(Block child1, Block child2) {
if (!(child1 instanceof AsciiDocBlock)) {
return false;
}
if (!(child2 instanceof AsciiDocBlock)) {
return false;
}
ASTNode node = ((AsciiDocBlock) child1).getNode().getTreeNext();
while (node != null && node != ((AsciiDocBlock) child2).getNode()) {
if (node instanceof PsiWhiteSpace && "\n".equals(node.getText())) {
return true;
}
node = node.getTreeNext();
}
return false;
}
private boolean isComment(Block block) {
return block instanceof AsciiDocBlock &&
(AsciiDocTokenTypes.COMMENT_BLOCK_DELIMITER.equals(((AsciiDocBlock) block).getNode().getElementType())
|| AsciiDocTokenTypes.LINE_COMMENT.equals(((AsciiDocBlock) block).getNode().getElementType()));
}
private boolean isBlockAttribute(Block block) {
return block instanceof AsciiDocBlock &&
AsciiDocElementTypes.BLOCK_ATTRIBUTES.equals(((AsciiDocBlock) block).getNode().getElementType());
}
private boolean isBlockIdEnd(Block block) {
return block instanceof AsciiDocBlock &&
AsciiDocTokenTypes.BLOCKIDEND.equals(((AsciiDocBlock) block).getNode().getElementType());
}
private boolean isBlock(Block block) {
return block instanceof AsciiDocBlock &&
(AsciiDocElementTypes.BLOCK.equals(((AsciiDocBlock) block).getNode().getElementType())
|| AsciiDocElementTypes.LISTING.equals(((AsciiDocBlock) block).getNode().getElementType()));
}
@SuppressWarnings("BooleanMethodIsAlwaysInverted")
private boolean isContinuation(Block block) {
return block instanceof AsciiDocBlock &&
((AsciiDocBlock) block).getNode().getText().equals("+");
}
private boolean isEndOfSentence(Block block) {
return block instanceof AsciiDocBlock &&
AsciiDocTokenTypes.END_OF_SENTENCE.equals(((AsciiDocBlock) block).getNode().getElementType());
}
private boolean isTitleInsideTitle(Block block) {
if (block instanceof AsciiDocBlock) {
AsciiDocBlock adBlock = (AsciiDocBlock) block;
ASTNode node = adBlock.getNode();
do {
if (AsciiDocElementTypes.TITLE.equals(node.getElementType())) {
return true;
}
node = node.getTreeParent();
} while (node != null);
}
return false;
}
private static boolean isSection(Block block) {
return block instanceof AsciiDocBlock &&
(AsciiDocElementTypes.SECTION.equals(((AsciiDocBlock) block).getNode().getElementType())
|| isChildOf(AsciiDocElementTypes.HEADING, block));
}
private static boolean isHeader(Block block) {
return block instanceof AsciiDocBlock &&
AsciiDocTokenTypes.HEADER.equals(((AsciiDocBlock) block).getNode().getElementType());
}
private static boolean isChildOf(IElementType element, Block block) {
ASTNode node = ((AsciiDocBlock) block).getNode();
do {
if (node.getElementType() == element) {
return true;
}
node = node.getTreeParent();
} while (node != null);
return false;
}
private static boolean isBullet(Block block) {
return block instanceof AsciiDocBlock &&
AsciiDocTokenTypes.BULLET.equals(((AsciiDocBlock) block).getNode().getElementType());
}
private static boolean isEnumeration(Block block) {
return block instanceof AsciiDocBlock &&
AsciiDocTokenTypes.ENUMERATION.equals(((AsciiDocBlock) block).getNode().getElementType());
}
private boolean isCallOut(Block block) {
return block instanceof AsciiDocBlock &&
AsciiDocTokenTypes.CALLOUT.equals(((AsciiDocBlock) block).getNode().getElementType());
}
private static final TokenSet TEXT_SET = TokenSet.create(AsciiDocTokenTypes.TEXT, AsciiDocTokenTypes.BOLD, AsciiDocTokenTypes.BOLDITALIC,
AsciiDocTokenTypes.ITALIC, AsciiDocTokenTypes.DOUBLE_QUOTE, AsciiDocTokenTypes.SINGLE_QUOTE, AsciiDocTokenTypes.BOLD_START,
AsciiDocTokenTypes.BOLD_END, AsciiDocTokenTypes.ITALIC_START, AsciiDocTokenTypes.ITALIC_END, AsciiDocTokenTypes.LT,
AsciiDocTokenTypes.GT, AsciiDocTokenTypes.TYPOGRAPHIC_DOUBLE_QUOTE_END, AsciiDocTokenTypes.TYPOGRAPHIC_DOUBLE_QUOTE_START,
AsciiDocTokenTypes.LPAREN, AsciiDocTokenTypes.RPAREN,
AsciiDocTokenTypes.LINKTEXT, AsciiDocTokenTypes.ATTR_NAME,
AsciiDocTokenTypes.TYPOGRAPHIC_SINGLE_QUOTE_END, AsciiDocTokenTypes.TYPOGRAPHIC_SINGLE_QUOTE_START);
private static boolean isPartOfSentence(Block block) {
return block instanceof AsciiDocBlock &&
TEXT_SET.contains(((AsciiDocBlock) block).getNode().getElementType()) &&
!"::".equals(((AsciiDocBlock) block).getNode().getText()) && // should stay on a separate line as reformatting might create property list item
!"--".equals(((AsciiDocBlock) block).getNode().getText()); // should stay on a separate line as it might be part of a quote
}
@Override
public Indent getIndent() {
if (myNode.getElementType() == AsciiDocTokenTypes.ENUMERATION
|| myNode.getElementType() == AsciiDocTokenTypes.BULLET
|| myNode.getElementType() == AsciiDocTokenTypes.DESCRIPTION
|| myNode.getElementType() == AsciiDocElementTypes.LINK
|| myNode.getElementType() == AsciiDocElementTypes.ATTRIBUTE_REF) {
return Indent.getAbsoluteNoneIndent();
}
if (!verse && TEXT_SET.contains(myNode.getElementType())) {
return Indent.getAbsoluteNoneIndent();
}
ASTNode treePrev = myNode.getTreePrev();
if (treePrev instanceof PsiWhiteSpace) {
int spaces = 0;
char[] chars = ((PsiWhiteSpace) treePrev).textToCharArray();
int i = treePrev.getTextLength() - 1;
for (; i >= 0; --i) {
if (chars[i] == ' ') {
spaces++;
} else {
break;
}
}
if (i < 0 || chars[i] == '\n') {
return Indent.getSpaceIndent(spaces, true);
}
}
return Indent.getAbsoluteNoneIndent();
}
@Override
public boolean isLeaf() {
return getSubBlocks().size() == 0;
}
}
|
package org.asteriskjava.pbx.internal.core;
import java.io.IOException;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import org.apache.log4j.Logger;
import org.asteriskjava.AsteriskVersion;
import org.asteriskjava.manager.AuthenticationFailedException;
import org.asteriskjava.manager.EventTimeoutException;
import org.asteriskjava.manager.ManagerConnectionState;
import org.asteriskjava.manager.TimeoutException;
import org.asteriskjava.manager.event.AbstractChannelEvent;
import org.asteriskjava.pbx.Activity;
import org.asteriskjava.pbx.ActivityCallback;
import org.asteriskjava.pbx.ActivityStatusEnum;
import org.asteriskjava.pbx.AsteriskSettings;
import org.asteriskjava.pbx.Call;
import org.asteriskjava.pbx.Call.OperandChannel;
import org.asteriskjava.pbx.CallDirection;
import org.asteriskjava.pbx.CallerID;
import org.asteriskjava.pbx.Channel;
import org.asteriskjava.pbx.ChannelHangupListener;
import org.asteriskjava.pbx.CompletionAdaptor;
import org.asteriskjava.pbx.DTMFTone;
import org.asteriskjava.pbx.DialPlanExtension;
import org.asteriskjava.pbx.EndPoint;
import org.asteriskjava.pbx.InvalidChannelName;
import org.asteriskjava.pbx.PBX;
import org.asteriskjava.pbx.PBXException;
import org.asteriskjava.pbx.PBXFactory;
import org.asteriskjava.pbx.TechType;
import org.asteriskjava.pbx.Trunk;
import org.asteriskjava.pbx.activities.BlindTransferActivity;
import org.asteriskjava.pbx.activities.BridgeActivity;
import org.asteriskjava.pbx.activities.DialActivity;
import org.asteriskjava.pbx.activities.DialToAgiActivity;
import org.asteriskjava.pbx.activities.HoldActivity;
import org.asteriskjava.pbx.activities.JoinActivity;
import org.asteriskjava.pbx.activities.ParkActivity;
import org.asteriskjava.pbx.activities.SplitActivity;
import org.asteriskjava.pbx.agi.AgiChannelActivityHangup;
import org.asteriskjava.pbx.agi.AgiChannelActivityHold;
import org.asteriskjava.pbx.asterisk.wrap.actions.CommandAction;
import org.asteriskjava.pbx.asterisk.wrap.actions.EventGeneratingAction;
import org.asteriskjava.pbx.asterisk.wrap.actions.HangupAction;
import org.asteriskjava.pbx.asterisk.wrap.actions.ManagerAction;
import org.asteriskjava.pbx.asterisk.wrap.actions.PlayDtmfAction;
import org.asteriskjava.pbx.asterisk.wrap.actions.RedirectAction;
import org.asteriskjava.pbx.asterisk.wrap.events.ManagerEvent;
import org.asteriskjava.pbx.asterisk.wrap.events.ResponseEvents;
import org.asteriskjava.pbx.asterisk.wrap.response.CommandResponse;
import org.asteriskjava.pbx.asterisk.wrap.response.ManagerResponse;
import org.asteriskjava.pbx.internal.activity.BlindTransferActivityImpl;
import org.asteriskjava.pbx.internal.activity.BridgeActivityImpl;
import org.asteriskjava.pbx.internal.activity.DialActivityImpl;
import org.asteriskjava.pbx.internal.activity.DialToAgiActivityImpl;
import org.asteriskjava.pbx.internal.activity.HoldActivityImpl;
import org.asteriskjava.pbx.internal.activity.JoinActivityImpl;
import org.asteriskjava.pbx.internal.activity.ParkActivityImpl;
import org.asteriskjava.pbx.internal.activity.SplitActivityImpl;
import org.asteriskjava.pbx.internal.asterisk.CallerIDImpl;
import org.asteriskjava.pbx.internal.asterisk.MeetmeRoom;
import org.asteriskjava.pbx.internal.asterisk.MeetmeRoomControl;
import org.asteriskjava.pbx.internal.managerAPI.RedirectCall;
public enum AsteriskPBX implements PBX, ChannelHangupListener
{
SELF;
    private final Logger logger = Logger.getLogger(AsteriskPBX.class);
    // set in the constructor from CoherentManagerConnection.isMuteAudioSupported()
    private boolean muteSupported;
    // set in the constructor from CoherentManagerConnection.isBridgeSupported()
    private boolean bridgeSupport;
    // number of meetme rooms handed to MeetmeRoomControl.init()
    private static final int MAX_MEETME_ROOMS = 50;
    // tracks live channels; null if the constructor failed to connect
    private LiveChannelManager liveChannels;
    /**
     * Initialises the singleton: brings up the shared manager connection,
     * caches its capability flags, starts the meetme room controller and the
     * live channel manager.
     */
    AsteriskPBX()
    {
        try
        {
            CoherentManagerConnection.init();
            // capability flags are discovered once and cached for the lifetime of the enum
            this.muteSupported = CoherentManagerConnection.getInstance().isMuteAudioSupported();
            this.bridgeSupport = CoherentManagerConnection.getInstance().isBridgeSupported();
            try
            {
                MeetmeRoomControl.init(this, AsteriskPBX.MAX_MEETME_ROOMS);
            }
            catch (Throwable e)
            {
                // meetme support is treated as optional: log and continue without it
                logger.error(e, e);
            }
            this.liveChannels = new LiveChannelManager();
        }
        catch (IllegalStateException | IOException | AuthenticationFailedException | TimeoutException e1)
        {
            // NOTE(review): a failed connection leaves liveChannels null, so later
            // channel operations will NPE -- confirm this is intended.
            logger.error(e1, e1);
        }
    }
/**
* Call this method when shutting down the PBX interface to allow it to
* cleanup.
*/
@Override
public void shutdown()
{
MeetmeRoomControl.getInstance().stop();
CoherentManagerConnection.getInstance().shutDown();
}
@Override
public boolean isBridgeSupported()
{
return this.bridgeSupport;
}
@Override
public BlindTransferActivity blindTransfer(Call call, Call.OperandChannel channelToTransfer, EndPoint transferTarget,
CallerID toCallerID, boolean autoAnswer, long timeout)
{
final CompletionAdaptor<BlindTransferActivity> completion = new CompletionAdaptor<>();
final BlindTransferActivityImpl transfer = new BlindTransferActivityImpl(call, channelToTransfer, transferTarget,
toCallerID, autoAnswer, timeout, completion);
completion.waitForCompletion(timeout + 2, TimeUnit.SECONDS);
return transfer;
}
@Override
public void blindTransfer(Call call, Call.OperandChannel channelToTransfer, EndPoint transferTarget, CallerID toCallerID,
boolean autoAnswer, long timeout, ActivityCallback<BlindTransferActivity> listener)
{
new BlindTransferActivityImpl(call, channelToTransfer, transferTarget, toCallerID, autoAnswer, timeout, listener);
}
public BlindTransferActivity blindTransfer(Channel agentChannel, EndPoint transferTarget, CallerID toCallerID,
boolean autoAnswer, int timeout, ActivityCallback<BlindTransferActivity> iCallback) throws PBXException
{
return new BlindTransferActivityImpl(agentChannel, transferTarget, toCallerID, autoAnswer, timeout, iCallback);
}
    /**
     * Utility method to bridge two channels, waiting up to 10 seconds for the
     * bridge to complete.
     *
     * @param lhsChannel one of the channels to bridge
     * @param rhsChannel the other channel to bridge
     * @return the bridge activity (inspect it for success/failure)
     * @throws PBXException
     */
    public BridgeActivity bridge(final Channel lhsChannel, final Channel rhsChannel) throws PBXException
    {
        final CompletionAdaptor<BridgeActivity> completion = new CompletionAdaptor<>();
        final BridgeActivityImpl bridge = new BridgeActivityImpl(lhsChannel, rhsChannel, completion);
        completion.waitForCompletion(10, TimeUnit.SECONDS);
        return bridge;
    }
@Override
public void split(final Call callToSplit) throws PBXException
{
final CompletionAdaptor<SplitActivity> completion = new CompletionAdaptor<>();
new SplitActivityImpl(callToSplit, completion);
completion.waitForCompletion(10, TimeUnit.SECONDS);
}
@Override
public SplitActivity split(final Call callToSplit, final ActivityCallback<SplitActivity> listener)
{
return new SplitActivityImpl(callToSplit, listener);
}
/**
* Joins two calls not returning until the join completes. The join will
* complete almost immediately as it is a simple bridging of two active
* channels. Each call must only have one active channel
*/
@Override
public JoinActivity join(Call lhs, OperandChannel originatingOperand, Call rhs, OperandChannel acceptingOperand,
CallDirection direction)
{
final CompletionAdaptor<JoinActivity> completion = new CompletionAdaptor<>();
final JoinActivityImpl join = new JoinActivityImpl(lhs, originatingOperand, rhs, acceptingOperand, direction,
completion);
completion.waitForCompletion(10, TimeUnit.SECONDS);
return join;
}
    /**
     * Joins two calls asynchronously: returns immediately and reports progress
     * through the supplied listener. The join is a simple bridging of two
     * active channels; each call must only have one active channel.
     */
    @Override
    public void join(Call lhs, OperandChannel originatingOperand, Call rhs, OperandChannel acceptingOperand,
            CallDirection direction, ActivityCallback<JoinActivity> listener)
    {
        new JoinActivityImpl(lhs, originatingOperand, rhs, acceptingOperand, direction, listener);
    }
    /** Not implemented: three-way conferencing is a stub. */
    @Override
    public void conference(final Channel channelOne, final Channel channelTwo, final Channel channelThree)
    {
        // TODO Auto-generated method stub
    }
    /** Not implemented: asynchronous three-way conferencing is a stub. */
    @Override
    public void conference(final Channel channelOne, final Channel channelTwo, final Channel channelThree,
            final ActivityCallback<Activity> callback)
    {
        // TODO Auto-generated method stub
    }
    /**
     * Dials from one end point to another, blocking for up to 3 minutes until
     * the dial completes.
     */
    @Override
    public DialActivity dial(final EndPoint from, final CallerID fromCallerID, final EndPoint to, final CallerID toCallerID)
    {
        // NOTE(review): fromCallerID is accepted but never passed to the
        // activity -- confirm whether the originating caller id should apply.
        final CompletionAdaptor<DialActivity> completion = new CompletionAdaptor<>();
        final DialActivityImpl dialer = new DialActivityImpl(from, to, toCallerID, false, completion, null);
        completion.waitForCompletion(3, TimeUnit.MINUTES);
        return dialer;
    }
public DialLocalToAgiActivity dialLocalToAgi(final EndPoint from, final CallerID fromCallerID,
ActivityCallback<DialLocalToAgiActivity> callback, Map<String, String> channelVarsToSet)
{
return new DialLocalToAgiActivity(from, fromCallerID, callback, channelVarsToSet);
}
public DialActivity dial(final EndPoint from, final CallerID fromCallerID, final EndPoint to, final CallerID toCallerID,
final ActivityCallback<DialActivity> callback, Map<String, String> channelVarsToSet)
{
final DialActivityImpl dialer = new DialActivityImpl(from, to, toCallerID, false, callback, channelVarsToSet);
return dialer;
}
@Override
public void dial(final EndPoint from, final CallerID fromCallerID, final EndPoint to, final CallerID toCallerID,
final ActivityCallback<DialActivity> callback)
{
new DialActivityImpl(from, to, toCallerID, false, callback, null);
}
/**
* Convenience method to hangup the call without having to extract the
* channel yourself.
*/
public void hangup(Call call) throws PBXException
{
this.hangup(call.getOriginatingParty());
}
    /**
     * Hangs up the given channel. A hangup on a channel that is no longer live
     * is suppressed (debug-logged only). Waits up to 3 seconds for the channel
     * to stop transitioning before sending the Hangup action.
     *
     * @throws PBXException if the channel stays in transition or the action fails
     */
    @Override
    public void hangup(final Channel channel) throws PBXException
    {
        if (channel.isLive())
        {
            logger.debug("Sending hangup action for channel: " + channel); //$NON-NLS-1$
            PBX pbx = PBXFactory.getActivePBX();
            if (!pbx.waitForChannelToQuiescent(channel, 3000))
                throw new PBXException("Channel: " + channel + " cannot be retrieved as it is still in transition.");
            final HangupAction hangup = new HangupAction(channel);
            try
            {
                // mark the channel's AGI activity as hangup before sending the action
                channel.setCurrentActivityAction(new AgiChannelActivityHangup());
                CoherentManagerConnection.sendAction(hangup, 1000);
            }
            catch (IllegalArgumentException | IllegalStateException | IOException | TimeoutException e)
            {
                logger.error(e, e);
                throw new PBXException(e);
            }
        }
        else
            logger.debug("Suppressed hangup for " + channel + " as it was already hungup"); //$NON-NLS-1$ //$NON-NLS-2$
    }
@Override
public void hangup(final Channel channel, final ActivityCallback<Activity> callback)
{
throw new UnsupportedOperationException("Not yet implemented."); //$NON-NLS-1$
}
@Override
public HoldActivity hold(final Channel channel)
{
final CompletionAdaptor<HoldActivity> completion = new CompletionAdaptor<>();
HoldActivity activity = null;
try
{
activity = new HoldActivityImpl(channel, completion);
completion.waitForCompletion(10, TimeUnit.SECONDS);
}
catch (final Exception e)
{
logger.error(e, e);
}
return activity;
}
@Override
public boolean isMuteSupported()
{
return this.muteSupported;
}
@Override
public ParkActivity park(final Call call, final Channel parkChannel)
{
final CompletionAdaptor<ParkActivity> completion = new CompletionAdaptor<>();
final ParkActivity activity = new ParkActivityImpl(call, parkChannel, completion);
parkChannel.setParked(true);
completion.waitForCompletion(10, TimeUnit.SECONDS);
return activity;
}
@Override
public void park(final Call call, final Channel parkChannel, final ActivityCallback<ParkActivity> callback)
{
new ParkActivityImpl(call, parkChannel, callback);
}
    /**
     * Plays a DTMF tone on the channel. Waits up to 3 seconds for the channel
     * to stop transitioning first.
     *
     * @throws PBXException if the channel stays in transition or the action
     *             fails (the original cause is wrapped; note a PBXException
     *             thrown inside the try is itself caught and re-wrapped)
     */
    @Override
    public void sendDTMF(final Channel channel, final DTMFTone tone) throws PBXException
    {
        try
        {
            if (!waitForChannelToQuiescent(channel, 3000))
                throw new PBXException("Channel: " + channel + " cannot play dtmf as it is still in transition.");
            CoherentManagerConnection.sendAction(new PlayDtmfAction(channel, tone), 1000);
        }
        catch (final Exception e)
        {
            logger.error(e, e);
            throw new PBXException(e);
        }
    }
    /** Not implemented: asynchronous DTMF is a stub. */
    @Override
    public void sendDTMF(final Channel channel, final DTMFTone tone, final ActivityCallback<Activity> callback)
    {
        // TODO Auto-generated method stub
    }
@Override
public void transferToMusicOnHold(final Channel channel) throws PBXException
{
final RedirectCall transfer = new RedirectCall();
transfer.redirect(channel, new AgiChannelActivityHold());
}
public String getManagementContext()
{
final AsteriskSettings settings = PBXFactory.getActiveProfile();
return settings.getManagementContext();
}
@Override
public Channel getChannelByEndPoint(final EndPoint endPoint)
{
return this.liveChannels.getChannelByEndPoint(endPoint);
}
    /**
     * Hangup callback from a tracked channel: removes it from the live channel
     * list. The cause and causeText parameters are ignored here.
     */
    @Override
    public void channelHangup(Channel channel, Integer cause, String causeText)
    {
        // channels registered here are always ChannelProxy instances
        this.liveChannels.remove((ChannelProxy) channel);
    }
public DialPlanExtension getExtensionPark()
{
final AsteriskSettings settings = PBXFactory.getActiveProfile();
return this.buildDialPlanExtension(settings.getExtensionPark());
}
@Override
public EndPoint getExtensionAgi()
{
final AsteriskSettings settings = PBXFactory.getActiveProfile();
return this.buildDialPlanExtension(settings.getAgiExtension());
}
/**
* Builds an end point from a fully qualified end point name. If the
* endpoint name doesn't have a tech then it is considered invalid and null
* is returned.
*/
@Override
public EndPoint buildEndPoint(final String fullyQualifiedEndPoint)
{
EndPoint endPoint = null;
try
{
endPoint = new EndPointImpl(fullyQualifiedEndPoint);
}
catch (final IllegalArgumentException e)
{
logger.warn(e, e);
}
return endPoint;
}
/**
* Builds an end point from an end point name. If the endpoint name doesn't
* have a tech specified then the defaultTech is used.
*/
@Override
public EndPoint buildEndPoint(final TechType defaultTech, final String endPointName)
{
EndPoint endPoint = null;
try
{
if (endPointName == null || endPointName.trim().length() == 0)
endPoint = new EndPointImpl();
else
endPoint = new EndPointImpl(defaultTech, endPointName);
}
catch (final IllegalArgumentException e)
{
logger.error(e, e);
}
return endPoint;
}
@Override
public EndPoint buildEndPoint(final TechType defaultTech, final Trunk trunk, final String endPointName)
{
return new EndPointImpl(defaultTech, trunk, endPointName);
}
    /**
     * Builds a dial plan extension from the given extension string. Returns
     * null (after logging) when the extension is invalid.
     */
    public DialPlanExtension buildDialPlanExtension(final String extension)
    {
        DialPlanExtension dialPlan = null;
        try
        {
            dialPlan = new DialPlanExtension(extension);
        }
        catch (final IllegalArgumentException e)
        {
            logger.error(e, e);
        }
        return dialPlan;
    }
@Override
public CallerID buildCallerID(final String number, final String name)
{
return new CallerIDImpl(number, name);
}
/**
* Convenience method to build a call id from an event.
*
* @param event
*/
public CallerID buildCallerID(final AbstractChannelEvent event)
{
final String number = event.getCallerIdNum();
final String name = event.getCallerIdName();
return this.buildCallerID(number, name);
}
    /**
     * Returns the tracked channel for the given name, registering it if it is
     * not yet known. When a channel with the same name but a different
     * uniqueID is already tracked, the existing channel is returned and a
     * warning is logged.
     */
    public Channel registerChannel(final String channelName, final String uniqueID) throws InvalidChannelName
    {
        // lookup by cleaned name only; null matches any uniqueID
        Channel proxy = findChannel(cleanChannelName(channelName), null);
        if (proxy == null)
        {
            logger.info("Couldn't find the channel " + channelName + ", creating it");
            proxy = internalRegisterChannel(channelName, uniqueID);
        }
        else
        {
            if (uniqueID != null && !uniqueID.equals(proxy.getUniqueId()))
            {
                logger.warn(
                        "Found the channel(" + proxy.getUniqueId() + "), but with a different uniqueId (" + uniqueID + ")");
            }
        }
        liveChannels.sanityCheck();
        return proxy;
    }
    /**
     * This method is not part of the public API. <br>
     * <br>
     * Use registerChannel instead: calling this method with an incorrect or
     * stale uniqueID will cause inconsistent behaviour.
     *
     * @param channelName the raw channel name
     * @param uniqueID the Asterisk unique id, or null for unknown
     * @return the existing or newly created channel proxy
     * @throws InvalidChannelName
     */
    public Channel internalRegisterChannel(final String channelName, final String uniqueID) throws InvalidChannelName
    {
        ChannelProxy proxy = null;
        // synchronised so the find-then-add pair is atomic
        synchronized (this.liveChannels)
        {
            String localUniqueID = (uniqueID == null ? ChannelImpl.UNKNOWN_UNIQUE_ID : uniqueID);
            proxy = this.findChannel(cleanChannelName(channelName), localUniqueID);
            if (proxy == null)
            {
                proxy = new ChannelProxy(new ChannelImpl(channelName, localUniqueID));
                logger.info("Creating new Channel Proxy " + proxy);
                this.liveChannels.add(proxy);
                // track hangups so the proxy is removed from liveChannels
                proxy.addHangupListener(this);
            }
        }
        return proxy;
    }
    /**
     * Normalises a channel name for lookups: trims surrounding white space and
     * upper-cases it.
     *
     * NOTE(review): toUpperCase() uses the default locale -- confirm channel
     * names are always ASCII (otherwise Locale.ROOT would be safer).
     *
     * @param name the raw channel name
     * @return the trimmed, upper-cased name
     */
    private String cleanChannelName(final String name)
    {
        String cleanedName = name.trim().toUpperCase();
        return cleanedName;
    }
    /**
     * Returns a channel object for a channel that is in the process of hanging
     * up. Unlike registerChannel, a channel created here is NOT added to the
     * live channel list.
     */
    public Channel registerHangupChannel(String channel, String uniqueId) throws InvalidChannelName
    {
        Channel newChannel = null;
        synchronized (this.liveChannels)
        {
            newChannel = this.findChannel(channel, uniqueId);
            if (newChannel == null)
            {
                // We don't add this channel to the liveChannels as it is in the
                // process of being hung up, so we don't need to track it.
                // If we tried to track it that would likely cause a problem,
                // as the live channel manager would never be able to discard it:
                // it relies on the hangup event which is being processed right
                // now.
                newChannel = new ChannelProxy(new ChannelImpl(channel, uniqueId));
            }
        }
        return newChannel;
    }
public ChannelProxy findChannel(final String channelName, final String uniqueID)
{
return this.liveChannels.findChannel(channelName, uniqueID);
}
public MeetmeRoom acquireMeetmeRoom()
{
return MeetmeRoomControl.getInstance().findAvailableRoom();
}
/**
 * Subscribes a filtered listener to manager events on the shared connection.
 *
 * @param listener the listener to add
 */
public void addListener(FilteredManagerListener<ManagerEvent> listener)
{
    CoherentManagerConnection.getInstance().addListener(listener);
}
/**
 * Unsubscribes a previously added manager-event listener.
 *
 * @param listener the listener to remove
 */
public void removeListener(FilteredManagerListener<ManagerEvent> listener)
{
    CoherentManagerConnection.getInstance().removeListener(listener);
}
/**
 * Sends a manager action using the default 30 second timeout.
 *
 * @param theAction the action to send
 * @return the manager response
 */
public ManagerResponse sendAction(ManagerAction theAction)
        throws IllegalArgumentException, IllegalStateException, IOException, TimeoutException
{
    // Route through the timeout overload so the behaviour stays in one place.
    return sendAction(theAction, 30000);
}
/**
 * Sends a manager action and waits up to the given timeout for a response.
 *
 * @param theAction the action to send
 * @param timeout maximum time to wait, in milliseconds
 * @return the manager response
 */
public ManagerResponse sendAction(ManagerAction theAction, int timeout)
throws IllegalArgumentException, IllegalStateException, IOException, TimeoutException
{
return CoherentManagerConnection.sendAction(theAction, timeout);
}
/**
 * Sends an event-generating action using the connection's default timeout.
 *
 * @param action the action to send
 * @return the response events collected for the action
 */
public ResponseEvents sendEventGeneratingAction(EventGeneratingAction action)
        throws EventTimeoutException, IllegalArgumentException, IllegalStateException, IOException
{
    return CoherentManagerConnection.sendEventGeneratingAction(action);
}
/**
 * Sends an event-generating action, waiting up to the given timeout.
 *
 * @param action the action to send
 * @param timeout maximum time to wait, in milliseconds
 * @return the response events collected for the action
 */
public ResponseEvents sendEventGeneratingAction(EventGeneratingAction action, int timeout)
throws EventTimeoutException, IllegalArgumentException, IllegalStateException, IOException
{
return CoherentManagerConnection.sendEventGeneratingAction(action, timeout);
}
/**
 * Sets an Asterisk channel variable on the given channel.
 *
 * @param channel the channel to set the variable on
 * @param name the variable name
 * @param value the variable value
 * @throws PBXException if the variable could not be set
 */
public void setVariable(Channel channel, String name, String value) throws PBXException
{
CoherentManagerConnection.getInstance().setVariable(channel, name, value);
}
/**
 * Sends a manager action without waiting for the response (fire-and-forget).
 *
 * @param action the action to send
 */
public void sendActionNoWait(final ManagerAction action)
{
CoherentManagerConnection.sendActionNoWait(action);
}
/**
 * Reads an Asterisk channel variable from the given channel.
 *
 * @param channel the channel to query
 * @param name the variable name
 * @return the variable's value, as reported by the manager connection
 */
public String getVariable(Channel channel, String name)
{
return CoherentManagerConnection.getInstance().getVariable(channel, name);
}
/**
 * @return the version of the connected Asterisk server
 */
public AsteriskVersion getVersion()
{
return CoherentManagerConnection.getInstance().getVersion();
}
/**
 * @return true if the shared manager connection exists and is in the
 *         CONNECTED state
 */
public boolean isConnected()
{
    if (CoherentManagerConnection.managerConnection == null)
    {
        return false;
    }
    return CoherentManagerConnection.managerConnection.getState() == ManagerConnectionState.CONNECTED;
}
/**
 * @return true if the meetme (conference) application is installed on the PBX
 */
public boolean isMeetmeInstalled()
{
return MeetmeRoomControl.getInstance().isMeetmeInstalled();
}
/**
 * Checks whether the given name is a valid channel name by attempting to
 * register it; registration throws InvalidChannelName for invalid names.
 *
 * @param channelName the candidate channel name
 * @return true if the name registered cleanly
 */
@Override
public boolean isChannel(String channelName)
{
    try
    {
        internalRegisterChannel(channelName, ChannelImpl.UNKNOWN_UNIQUE_ID);
        return true;
    }
    catch (InvalidChannelName e)
    {
        // Registration rejected the name, so it is not a valid channel.
        return false;
    }
}
/**
 * Builds the SIPADDHEADER variable name. The "__" prefix (which makes the
 * header inherit to spawned channels) is applied when inheritance is asked
 * for, or when the target is not a SIP device.
 *
 * @param inherit whether the header should be inherited by child channels
 * @param targetIsSIP whether the dial target is a SIP device
 * @return the variable name, optionally "__"-prefixed
 */
static public String getSIPADDHeader(final boolean inherit, final boolean targetIsSIP)
{
    final String base = "SIPADDHEADER"; //$NON-NLS-1$
    // Equivalent to the condition (!targetIsSIP || inherit) by De Morgan.
    return (targetIsSIP && !inherit) ? base : "__" + base; //$NON-NLS-1$
}
/**
 * Waits for a set of channels to become quiescent. A quiescent channel is one
 * that is not in the middle of a name change (e.g. masquerade).
 *
 * @param channels the channels to wait on
 * @param timeout the time to wait (in milliseconds) for the channels to
 *            become quiescent
 * @return true if all channels became quiescent before the timeout expired
 */
@Override
public boolean waitForChannelsToQuiescent(List<Channel> channels, long timeout)
{
    while (timeout > 0 && !channelsAreQuiesent(channels))
    {
        try
        {
            Thread.sleep(200);
        }
        catch (InterruptedException e)
        {
            // Restore the interrupt flag so callers further up the stack can
            // observe the interruption (it was previously swallowed).
            Thread.currentThread().interrupt();
            logger.error(e, e);
        }
        timeout -= 200;
    }
    return timeout > 0;
}
/**
 * Checks whether every channel in the list is quiescent.
 * Short-circuits on the first non-quiescent channel instead of polling the
 * remainder (isQuiescent is a read-only query on each channel).
 *
 * @param channels the channels to check
 * @return true only if all channels report isQuiescent()
 */
private boolean channelsAreQuiesent(List<Channel> channels)
{
    for (Channel channel : channels)
    {
        if (!channel.isQuiescent())
        {
            return false;
        }
    }
    return true;
}
public boolean moveChannelToAgi(Channel channel) throws PBXException
{
if (!waitForChannelToQuiescent(channel, 3000))
throw new PBXException("Channel: " + channel + " cannot be transfered as it is still in transition.");
boolean isInAgi = channel.isInAgi();
if (!isInAgi)
{
final AsteriskSettings profile = PBXFactory.getActiveProfile();
channel.setCurrentActivityAction(new AgiChannelActivityHold());
final RedirectAction redirect = new RedirectAction(channel, profile.getManagementContext(), getExtensionAgi(),
1);
logger.error("Issuing redirect on channel " + channel + " to move it to the agi");
try
{
final ManagerResponse response = sendAction(redirect, 1000);
if ((response != null) && (response.getResponse().compareToIgnoreCase("success") == 0))//$NON-NLS-1$
{
int limit = 50;
while (!channel.isInAgi() && limit
{
Thread.sleep(100);
}
isInAgi = channel.isInAgi();
if (!isInAgi)
{
logger.error("Failed to move channel");
}
}
}
catch (final Exception e)
{
logger.error(e, e);
}
}
return isInAgi;
}
/**
 * Redirects a channel to the given dialplan context/extension/priority,
 * parking it on a hold activity first. Send failures are logged, not thrown.
 *
 * @param channel the channel to redirect
 * @param context the target dialplan context
 * @param exten the target extension
 * @param prio the target priority
 */
public void moveChannelTo(Channel channel, String context, String exten, int prio)
{
    final DialPlanExtension target = this.buildDialPlanExtension(exten);
    channel.setCurrentActivityAction(new AgiChannelActivityHold());
    final RedirectAction redirect = new RedirectAction(channel, context, target, prio);
    try
    {
        sendAction(redirect, 1000);
    }
    catch (final Exception e)
    {
        logger.error(e, e);
    }
}
/**
 * Single-channel convenience wrapper around waitForChannelsToQuiescent.
 *
 * @param channel the channel to wait on
 * @param timeout maximum wait in milliseconds
 * @return true if the channel became quiescent within the timeout
 */
@Override
public boolean waitForChannelToQuiescent(Channel channel, int timeout)
{
    final List<Channel> single = new LinkedList<>();
    single.add(channel);
    return waitForChannelsToQuiescent(single, timeout);
}
/**
 * Looks up a tracked channel proxy by its unique id.
 *
 * @param id the channel's unique id
 * @return the proxy, or null if no live channel has that id
 */
public ChannelProxy getProxyById(String id)
{
return liveChannels.findProxyById(id);
}
/**
 * Dials the given end point into the AGI and reports the outcome to the
 * callback.
 *
 * NOTE(review): the DialToAgiActivityImpl is constructed and then this
 * method immediately waits on the completion adaptor — nothing visible here
 * starts the activity. Either the constructor kicks it off internally, or
 * this always times out after 3 minutes; confirm against
 * DialToAgiActivityImpl before relying on it.
 *
 * @param endPoint the end point to dial
 * @param callerID the caller id to present
 * @param action the AGI hold activity the channel should land in
 * @param iCallback receives a single progress callback with the final status
 * @return the dial activity (success can be queried via isSuccess())
 */
public DialToAgiActivityImpl dialToAgi(EndPoint endPoint, CallerID callerID, AgiChannelActivityHold action,
ActivityCallback<DialToAgiActivity> iCallback)
{
final CompletionAdaptor<DialToAgiActivity> completion = new CompletionAdaptor<>();
final DialToAgiActivityImpl dialer = new DialToAgiActivityImpl(endPoint, callerID, false, completion, null, action);
// Blocks up to 3 minutes for the dial to complete.
completion.waitForCompletion(3, TimeUnit.MINUTES);
final ActivityStatusEnum status;
if (dialer.isSuccess())
{
status = ActivityStatusEnum.SUCCESS;
}
else
{
status = ActivityStatusEnum.FAILURE;
}
iCallback.progress(dialer, status, status.getDefaultMessage());
return dialer;
}
/**
 * Installs the AGI entry point into the management context: priority 1 runs
 * the activity AGI, priority 2 waits briefly, priority 3 loops back to 1.
 *
 * @return null on success, otherwise the localized error message
 */
public String createAgiEntryPoint() throws IOException, AuthenticationFailedException, TimeoutException
{
    try
    {
        final AsteriskPBX pbx = (AsteriskPBX) PBXFactory.getActivePBX();
        final AsteriskSettings profile = PBXFactory.getActiveProfile();
        final String agiExten = profile.getAgiExtension();
        final String context = profile.getManagementContext();
        pbx.addAsteriskExtension(agiExten, 1, "AGI(agi://127.0.0.1/activityAgi), into " + context);
        pbx.addAsteriskExtension(agiExten, 2, "wait(0.5), into " + context);
        pbx.addAsteriskExtension(agiExten, 3, "goto(" + agiExten + ",1), into " + context);
    }
    catch (Exception e)
    {
        logger.error(e);
        return e.getLocalizedMessage();
    }
    return null;
}
/**
 * Adds a dialplan extension via the Asterisk CLI command
 * "dialplan add extension".
 *
 * @param extNumber the extension number to add
 * @param priority the dialplan priority
 * @param command the application/command to run at that priority
 * @return "OK" if Asterisk confirmed the extension was added
 * @throws Exception if Asterisk did not acknowledge the command
 */
public String addAsteriskExtension(String extNumber, int priority, String command) throws Exception
{
    String ext = "dialplan add extension " + extNumber + "," + priority + "," + command;
    CommandAction action = new CommandAction(ext);
    CommandResponse response = (CommandResponse) sendAction(action, 30000);
    List<String> line = response.getResult();
    // Guard against an empty result list (line.get(0) threw) and an answer
    // shorter than the expected prefix (substring threw); regionMatches is a
    // bounds-safe, case-insensitive prefix comparison.
    String answer = line.isEmpty() ? "" : line.get(0);
    String tmp = "Extension '" + extNumber + "," + priority + ",";
    if (answer.regionMatches(true, 0, tmp, 0, tmp.length()))
        return "OK";
    throw new Exception("InitiateAction.AddExtentionFailed" + ext);
}
/**
 * Wraps a raw trunk string in a {@code Trunk} value object.
 *
 * @param trunk the trunk in its string representation
 * @return a Trunk whose string form is exactly the given value
 */
@Override
public Trunk buildTrunk(final String trunk)
{
    return new Trunk()
    {
        @Override
        public String getTrunkAsString()
        {
            return trunk;
        }
    };
}
}
|
package org.graphwalker.io.factory;
/**
 * Unchecked exception signalling that a YEd graph file could not be parsed.
 */
public class YEdParsingException extends RuntimeException {

    /**
     * @param message a description of the parsing failure
     */
    public YEdParsingException(String message) {
        super(message);
    }

    /**
     * @param throwable the underlying cause of the parsing failure
     */
    public YEdParsingException(Throwable throwable) {
        super(throwable);
    }
}
|
package org.jenkinsci.plugins.todos;
import java.io.Serializable;
import java.util.Map;
import org.jenkinsci.plugins.todos.model.TodosReport;
/**
* Display the report summary and top-level details.
*
* @author Michal Turek
*/
/**
 * Display the report summary and top-level details.
 *
 * @author Michal Turek
 */
public class TodosReportSummary implements Serializable {
    /** Serial version UID. */
    private static final long serialVersionUID = 0;

    /**
     * Generate the report summary.
     *
     * @param report
     *            current report
     * @param previous
     *            previous report
     * @return a string with the summary; empty if report is null
     */
    public static String createReportSummary(TodosReport report,
            TodosReport previous) {
        StringBuilder sb = new StringBuilder();
        if (report != null) {
            sb.append("<a href=\"").append(TodosBuildAction.URL_NAME)
                    .append("\">");
            sb.append(report.getCommentsCount());
            if (previous != null) {
                printDifference(report.getCommentsCount(),
                        previous.getCommentsCount(), sb);
            }
            sb.append(" ").append(Messages.Todos_ReportSummary_Comments())
                    .append("</a> ");
            sb.append(Messages.Todos_ReportSummary_in()).append(" ");
            sb.append(report.getFiles().size());
            if (previous != null) {
                printDifference(report.getFiles().size(),
                        previous.getFiles().size(), sb);
            }
            sb.append(" ").append(Messages.Todos_ReportSummary_Files())
                    .append(".");
        }
        return sb.toString();
    }

    /**
     * Build summary details, one list item per pattern.
     *
     * @param report
     *            current report
     * @param previous
     *            previous report
     * @return a string with the summary details; empty if report is null
     */
    public static String createReportSummaryDetails(TodosReport report,
            TodosReport previous) {
        StringBuilder sb = new StringBuilder();
        if (report != null) {
            for (Map.Entry<String, Integer> entry : report
                    .getPatternsToCountMapping().entrySet()) {
                String pattern = entry.getKey();
                sb.append("<li>");
                sb.append("<a href=\"").append(TodosBuildAction.URL_NAME);
                // TODO: constants, was /languageResult/
                sb.append("/patternResult/").append(
                        HtmlUtils.encodeUrl(pattern));
                sb.append("\">").append(HtmlUtils.encodeText(pattern, true));
                sb.append("</a>: ").append(entry.getValue());
                if (previous != null) {
                    printDifference(entry.getValue(),
                            previous.getCommentsWithPatternCount(pattern), sb);
                }
                sb.append(" ").append(Messages.Todos_ReportSummary_Comments());
                sb.append(" ").append(Messages.Todos_ReportSummary_in())
                        .append(" ");
                sb.append(report.getFilesWithPattern(pattern).size());
                if (previous != null) {
                    printDifference(
                            report.getFilesWithPattern(pattern).size(),
                            previous.getFilesWithPattern(pattern).size(), sb);
                }
                sb.append(" ").append(Messages.Todos_ReportSummary_Files());
                sb.append(".</li>");
            }
        }
        return sb.toString();
    }

    /**
     * Print the formatted difference of two integers, e.g. " (+3)" or " (-2)";
     * prints nothing when the values are equal.
     *
     * @param current
     *            current value
     * @param previous
     *            previous value
     * @param builder
     *            string builder for output
     */
    private static void printDifference(int current, int previous,
            StringBuilder builder) {
        int delta = current - previous;
        if (delta != 0) {
            // The minus sign is part of a negative delta already.
            builder.append((delta > 0) ? " (+" : " (").append(delta)
                    .append(")");
        }
    }
}
|
package org.jspringbot.maven.plugin;
import org.apache.commons.io.IOUtils;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.plugins.annotations.ResolutionScope;
import org.codehaus.plexus.util.StringUtils;
import org.robotframework.RobotFramework;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.transform.Result;
import javax.xml.transform.Source;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import java.io.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.StringTokenizer;
@Mojo(name = "acceptance-test", defaultPhase = LifecyclePhase.INTEGRATION_TEST, threadSafe = true, requiresDependencyResolution = ResolutionScope.TEST)
public class AcceptanceTestMojo extends AbstractMojoWithLoadedClasspath {
/**
* The directory where the test cases are located.
*/
@Parameter(defaultValue = "${project.basedir}/src/test/robotframework/acceptance")
private File testCasesDirectory;
/**
* Sets the name of the top-level tests suites.
*/
@Parameter
private String name;
/**
* Sets the documentation of the top-level tests suites.
*/
@Parameter
private String document;
/**
* Sets free metadata for the top level tests suites.
*/
@Parameter
private List<String> metadata;
/**
* Sets the tags(s) to all executed tests cases.
*/
@Parameter
private List<String> tags;
/**
* Selects the tests cases by name.
*/
@Parameter
private List<String> tests;
/**
* Selects the tests suites by name.
*/
@Parameter
private List<String> suites;
/**
* Selects the tests cases by tags.
*/
@Parameter
private List<String> includes;
/**
* Selects the tests cases by tags.
*/
@Parameter
private List<String> excludes;
/**
* Tests that have the given tags are considered critical.
*/
@Parameter
private List<String> criticalTags;
/**
* Tests that have the given tags are not critical.
*/
@Parameter
private List<String> nonCriticalTags;
/**
* Sets the execution mode for this tests run. Valid modes are ContinueOnFailure, ExitOnFailure,
* SkipTeardownOnExit, DryRun, and Random:<what>.
*/
@Parameter
private String runMode;
/**
* Sets individual variables. Use the format "name:value"
*/
@Parameter
private List<String> variables;
/**
* Sets variables using variables files. Use the format "path:args"
*/
@Parameter
private List<String> variableFiles;
/**
* Configures where generated reports are to be placed.
*/
@Parameter(defaultValue = "${project.build.directory}/robotframework-reports")
private File outputDirectory;
/**
* Sets the path to the generated output file.
*/
@Parameter
private File output;
/**
* Sets the path to the generated log file.
*/
@Parameter
private File log;
/**
* Sets the path to the generated report file.
*/
@Parameter
private File report;
/**
* Sets the path to the generated summary file.
*/
@Parameter
private File summary;
/**
* Sets the path to the generated XUnit compatible result file, relative to outputDirectory. The
* file is in xml format. By default, the file name is derived from the testCasesDirectory
* parameter, replacing blanks in the directory name by underscores.
*/
@Parameter
private File xunitFile;
/**
* A debug file that is written during execution.
*/
@Parameter
private File debugFile;
/**
* Adds a timestamp to all output files.
*/
@Parameter
private boolean timestampOutputs;
/**
* Splits output and log files.
*/
@Parameter
private String splitOutputs;
/**
* Sets a title for the generated tests log.
*/
@Parameter
private String logTitle;
/**
* Sets a title for the generated tests report.
*/
@Parameter
private String reportTitle;
/**
* Sets a title for the generated summary report.
*/
@Parameter
private String summaryTitle;
/**
* Sets background colors for the generated report and summary.
*/
@Parameter
private String reportBackground;
/**
* Sets the threshold level for logging.
*/
@Parameter
private String logLevel;
/**
* Defines how many levels to show in the Statistics by Suite table in outputs.
*/
@Parameter
private String suiteStatLevel;
/**
* Includes only these tags in the Statistics by Tag and Test Details by Tag tables in outputs.
*/
@Parameter
private List<String> tagStatIncludes;
/**
* Excludes these tags from the Statistics by Tag and Test Details by Tag tables in outputs.
*/
@Parameter
private List<String> tagStatExcludes;
/**
* Creates combined statistics based on tags. Use the format "tags:title"
*/
@Parameter
private List<String> combinedTagStats;
/**
* Adds documentation to the specified tags.
*/
@Parameter
private List<String> tagDocs;
/**
* Adds external links to the Statistics by Tag table in outputs. Use the format
* "pattern:link:title"
*/
@Parameter
private List<String> tagStatLinks;
/**
* Sets multiple listeners for monitoring tests execution. Use the format "ListenerWithArgs:arg1:arg2" or
* simply "ListenerWithoutArgs"
*/
@Parameter
private List<String> listeners;
/**
* Sets a single listener for monitoring tests execution, can also be set via commandline using
* -Dlistener=MyListener.
*/
@Parameter(defaultValue = "${listener}")
private String listener;
/**
* Show a warning when an invalid file is skipped.
*/
@Parameter
private boolean warnOnSkippedFiles;
/**
* Width of the monitor output. Default is 78.
*/
@Parameter
private String monitorWidth;
/**
* Using ANSI colors in console. Normally colors work in unixes but not in Windows. Default is
* 'on'.
* <ul>
* <li>'on' - use colors in unixes but not in Windows</li>
* <li>'off' - never use colors</li>
* <li>'force' - always use colors (also in Windows)</li>
* </ul>
* @since 1.1
*/
@Parameter
private String monitorColors;
/**
* Additional locations (directories, ZIPs, JARs) where to search test libraries from when they
* are imported. Maps to Jybot's --pythonpath option. Otherwise if no locations are declared,
* the default location is ${project.basedir}/src/test/resources/robotframework/libraries.
*
* @since 1.1
*/
@Parameter
private File[] extraPathDirectories;
/**
* The default location where extra packages will be searched. Effective if extraPath attribute
* is not used. Cannot be overridden.
*/
@Parameter(defaultValue = "${project.basedir}/src/test/resources/robotframework/libraries", required = true, readonly = true)
private File defaultExtraPath;
/**
* A text file to read more arguments from.
*/
@Parameter
private File argumentFile;
/**
* Skip tests. Bound to -DskipTests. This allows to skip acceptance tests together with all
* other tests.
* @since 1.1
*/
@Parameter(defaultValue = "${skipTests}")
private boolean skipTests;
/**
* Skip acceptance tests executed by this plugin. Bound to -DskipATs. This allows to run tests
* and integration tests, but no acceptance tests.
*
* @since 1.1
*/
@Parameter(defaultValue = "${skipATs}")
private boolean skipATs;
/**
* Skip acceptance tests executed by this plugin together with other integration tests, e.g.
* tests run by the maven-failsafe-plugin. Bound to -DskipITs
*
* @since 1.1
*/
@Parameter(defaultValue = "${skipITs}")
private boolean skipITs;
/**
* Skip tests, bound to -Dmaven.test.skip, which suppresses test compilation as well.
*
* @since 1.1
*/
@Parameter(defaultValue = "${maven.test.skip}")
private boolean skip = false;
/**
* Executes tests also if the top level test suite is empty. Useful e.g. with
* --include/--exclude when it is not an error that no test matches the condition.
* @since 1.1
*/
@Parameter
private boolean runEmptySuite;
/**
* If true, sets the return code to zero regardless of failures in test cases. Error codes are
* returned normally.
*
* @since 1.1
*/
@Parameter
private boolean noStatusReturnCode;
/**
* The default highlight css.
*/
@Parameter(defaultValue = "${highlightCss}")
private String highlightCss = "default";
protected void subclassExecute() throws MojoExecutionException, MojoFailureException {
if (shouldSkipTests()) {
getLog().info("RobotFramework tests are skipped.");
return;
}
String[] runArguments = generateRunArguments();
getLog().debug("robotframework arguments: " + StringUtils.join(runArguments, " "));
int robotRunReturnValue = RobotFramework.run(runArguments);
evaluateReturnCode(robotRunReturnValue);
generateCss();
}
private void generateCss() throws MojoExecutionException {
if(highlightCss == null) {
highlightCss = "default";
}
File cssFile = new File(outputDirectory, "highlight.css");
FileWriter out = null;
InputStream in = null;
try {
in = AcceptanceTestMojo.class.getResourceAsStream(String.format("/%s.css", highlightCss));
out = new FileWriter(cssFile);
IOUtils.copy(in, out);
} catch (IOException e) {
throw new MojoExecutionException(e.getMessage(), e);
} finally {
IOUtils.closeQuietly(in);
IOUtils.closeQuietly(out);
}
}
protected void evaluateReturnCode(int robotRunReturnValue)
// RC Explanation
// 0 All critical tests passed.
// 1-249 Returned number of critical tests failed.
// 250 250 or more critical failures.
// 251 Help or version information printed.
// 252 Invalid test data or command line options.
// 253 Test execution stopped by user.
// 255 Unexpected internal error.
throws MojoFailureException, MojoExecutionException {
switch (robotRunReturnValue) {
// case 250:
// throw new MojoFailureException( robotRunReturnValue
// + " or more critical test cases failed. Check the logs for details." );
// case 251:
// getLog().info( "Help or version information printed. No tests were executed." );
// break;
case 252:
writeXunitFileWithError("Invalid test data or command line options (Returncode 252).");
break;
case 255:
writeXunitFileWithError("Unexpected internal error (Returncode 255).");
break;
// case 253:
// getLog().info( "Test execution stopped by user." );
// break;
default:
}
}
private void writeXunitFileWithError(String message) {
try {
DocumentBuilder builder = DocumentBuilderFactory.newInstance()
.newDocumentBuilder();
Document document = builder.newDocument();
// <testsuite errors="0" failures="5" tests="5" skip="0" name="Robot-Fail">
Element testsuite = document.createElement("testsuite");
testsuite.setAttribute("errors", "1");
testsuite.setAttribute("failures", "0");
testsuite.setAttribute("tests", "0");
testsuite.setAttribute("name", getTestSuiteName());
Element testcase = document.createElement("testcase");
testcase.setAttribute("classname", "ExecutionError");
testcase.setAttribute("name", message);
Element error = document.createElement("error");
error.setAttribute("message", message);
testcase.appendChild(error);
testsuite.appendChild(testcase);
document.appendChild(testsuite);
Transformer transformer = TransformerFactory.newInstance()
.newTransformer();
Source xmlSource = new DOMSource(document);
final File output;
output = makeAbsolute(outputDirectory, xunitFile);
Result outputTarget = new StreamResult(output);
transformer.transform(xmlSource, outputTarget);
} catch (RuntimeException ex) {
throw ex;
} catch (Exception ex) {
throw new RuntimeException(ex);
}
}
private String getTestSuiteName() {
final String testSuiteName;
if (name != null) {
testSuiteName = name;
} else {
String delim = " -_";
StringTokenizer tokenizer = new StringTokenizer(testCasesDirectory.getName(), delim, true);
StringBuilder sb = new StringBuilder();
while (tokenizer.hasMoreTokens()) {
String tokenOrDelim = tokenizer.nextToken();
if (delim.contains(tokenOrDelim)) {
sb.append(tokenOrDelim);
} else {
sb.append(StringUtils.capitalizeFirstLetter(tokenOrDelim));
}
}
testSuiteName = sb.toString();
}
return testSuiteName;
}
private boolean shouldSkipTests() {
return skipTests || skipITs || skipATs || skip;
}
private String[] generateRunArguments() {
ArrayList<String> generatedArguments = new ArrayList<String>();
// ignore 'all' value includes.
if(includes != null && includes.size() == 1 && includes.iterator().next().equals("all")) {
includes.clear();
}
// support for comma delimited
if(includes != null && includes.size() == 1) {
String include = includes.iterator().next();
if(include.contains(",")) {
includes.clear();
includes.addAll(Arrays.asList(StringUtils.split(include, ",")));
}
}
addFileToArguments(generatedArguments, outputDirectory, "-d");
addFileToArguments(generatedArguments, output, "-o");
addFileToArguments(generatedArguments, log, "-l");
addFileToArguments(generatedArguments, report, "-r");
addFileToArguments(generatedArguments, summary, "-S");
addFileToArguments(generatedArguments, debugFile, "-b");
addFileToArguments(generatedArguments, argumentFile, "-A");
addNonEmptyStringToArguments(generatedArguments, name, "-N");
addNonEmptyStringToArguments(generatedArguments, document, "-D");
addNonEmptyStringToArguments(generatedArguments, runMode, "--runmode");
addNonEmptyStringToArguments(generatedArguments, splitOutputs, "--splitoutputs");
addNonEmptyStringToArguments(generatedArguments, logTitle, "--logtitle");
addNonEmptyStringToArguments(generatedArguments, reportTitle, "--reporttitle");
addNonEmptyStringToArguments(generatedArguments, reportBackground, "--reportbackground");
addNonEmptyStringToArguments(generatedArguments, summaryTitle, "--summarytitle");
addNonEmptyStringToArguments(generatedArguments, logLevel, "-L");
addNonEmptyStringToArguments(generatedArguments, suiteStatLevel, "--suitestatlevel");
addNonEmptyStringToArguments(generatedArguments, monitorWidth, "--monitorwidth");
addNonEmptyStringToArguments(generatedArguments, monitorColors, "--monitorcolors");
addNonEmptyStringToArguments(generatedArguments, listener, "--listener");
addFlagToArguments(generatedArguments, runEmptySuite, "--runemptysuite");
addFlagToArguments(generatedArguments, noStatusReturnCode, "--nostatusrc");
addFlagToArguments(generatedArguments, timestampOutputs, "-T");
addFlagToArguments(generatedArguments, warnOnSkippedFiles, "--warnonskippedfiles");
addListToArguments(generatedArguments, metadata, "-M");
addListToArguments(generatedArguments, tags, "-G");
addListToArguments(generatedArguments, tests, "-t");
addListToArguments(generatedArguments, suites, "-s");
addListToArguments(generatedArguments, includes, "-i");
addListToArguments(generatedArguments, excludes, "-e");
addListToArguments(generatedArguments, criticalTags, "-c");
addListToArguments(generatedArguments, nonCriticalTags, "-n");
addListToArguments(generatedArguments, variables, "-v");
addListToArguments(generatedArguments, variableFiles, "-V");
addListToArguments(generatedArguments, tagStatIncludes, "--tagstatinclude");
addListToArguments(generatedArguments, tagStatExcludes, "--tagstatexclude");
addListToArguments(generatedArguments, combinedTagStats, "--tagstatcombine");
addListToArguments(generatedArguments, tagDocs, "--tagdoc");
addListToArguments(generatedArguments, tagStatLinks, "--tagstatlink");
addListToArguments(generatedArguments, listeners, "--listener");
if (extraPathDirectories == null) {
addFileToArguments(generatedArguments, defaultExtraPath, "-P");
} else {
addFileListToArguments(generatedArguments, Arrays.asList(extraPathDirectories), "-P");
}
if (xunitFile == null) {
String testCasesFolderName = testCasesDirectory.getName();
xunitFile = new File("TEST-" + testCasesFolderName.replace(' ', '_') + ".xml");
}
addFileToArguments(generatedArguments, xunitFile, "-x");
generatedArguments.add(testCasesDirectory.getPath());
return generatedArguments.toArray(new String[generatedArguments.size()]);
}
}
|
package org.littleshoot.proxy.mitm;
import javax.net.ssl.SSLEngine;
import javax.net.ssl.SSLSession;
import org.littleshoot.proxy.MitmManager;
/**
* {@link MitmManager} that uses the given host name to create a dynamic
* certificate for. If a port is given, it will be truncated.
*/
/**
 * {@link MitmManager} that uses the given host name to create a dynamic
 * certificate for. If a port is given, it will be truncated.
 */
public class HostNameMitmManager implements MitmManager {
    private BouncyCastleSslEngineSource sslEngineSource;

    public HostNameMitmManager() throws RootCertificateException {
        this(new Authority());
    }

    public HostNameMitmManager(Authority authority)
            throws RootCertificateException {
        try {
            sslEngineSource = new BouncyCastleSslEngineSource(authority,
                    /* trustAllServers */ false, /* sendCerts */ true);
        } catch (final Exception e) {
            throw new RootCertificateException(
                    "Errors during assembling root CA.", e);
        }
    }

    public SSLEngine serverSslEngine() {
        return sslEngineSource.newSslEngine();
    }

    public SSLEngine clientSslEngineFor(SSLSession serverSslSession,
            String serverHostAndPort) {
        try {
            // Strip an optional ":port" suffix; behaves like split(":")[0].
            final int colon = serverHostAndPort.indexOf(':');
            final String serverName = (colon < 0) ? serverHostAndPort
                    : serverHostAndPort.substring(0, colon);
            SubjectAlternativeNameHolder san = new SubjectAlternativeNameHolder();
            return sslEngineSource.createCertForHost(serverName, san);
        } catch (Exception e) {
            throw new FakeCertificateException(
                    "Creation dynamic certificate failed for "
                            + serverHostAndPort, e);
        }
    }
}
|
package org.opencds.cqf.providers;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.cqframework.cql.cql2elm.CqlTranslator;
import org.cqframework.cql.elm.execution.UsingDef;
import org.hl7.fhir.dstu3.model.ActivityDefinition;
import org.hl7.fhir.dstu3.model.Bundle;
import org.hl7.fhir.dstu3.model.DomainResource;
import org.hl7.fhir.dstu3.model.Extension;
import org.hl7.fhir.dstu3.model.IdType;
import org.hl7.fhir.dstu3.model.Library;
import org.hl7.fhir.dstu3.model.Measure;
import org.hl7.fhir.dstu3.model.Parameters;
import org.hl7.fhir.dstu3.model.PlanDefinition;
import org.hl7.fhir.dstu3.model.Reference;
import org.hl7.fhir.dstu3.model.Resource;
import org.hl7.fhir.dstu3.model.StringType;
import org.hl7.fhir.dstu3.model.Type;
import org.opencds.cqf.config.STU3LibraryLoader;
import org.opencds.cqf.cql.data.DataProvider;
import org.opencds.cqf.cql.execution.Context;
import org.opencds.cqf.cql.terminology.TerminologyProvider;
import org.opencds.cqf.cql.terminology.fhir.FhirTerminologyProvider;
import org.opencds.cqf.helpers.FhirMeasureBundler;
import org.opencds.cqf.helpers.LibraryHelper;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.rp.dstu3.LibraryResourceProvider;
import ca.uhn.fhir.rest.annotation.Operation;
import ca.uhn.fhir.rest.annotation.OperationParam;
import ca.uhn.fhir.rest.client.api.ServerValidationModeEnum;
public class CqlExecutionProvider {
private JpaDataProvider provider;
/**
 * @param provider the JPA data provider used to resolve resource providers
 *            and execute CQL against the FHIR store
 */
public CqlExecutionProvider(JpaDataProvider provider) {
this.provider = provider;
}
/**
 * Resolves the resource provider registered for the "Library" resource type.
 *
 * @return the Library resource provider (cast from the generic provider)
 */
private LibraryResourceProvider getLibraryResourceProvider() {
return (LibraryResourceProvider)provider.resolveResourceProvider("Library");
}
/**
 * De-duplicates the given references (deep equality) and rebuilds each
 * surviving reference from its resource type, id part and version.
 *
 * NOTE(review): the argument to replace(...) below is truncated in this copy
 * of the source — presumably it strips a prefix/marker from the id part
 * (mirroring the truncated replace in getLibraryReferences); confirm against
 * the original repository before editing.
 */
private List<Reference> cleanReferences(List<Reference> references) {
List<Reference> cleanRefs = new ArrayList<>();
List<Reference> noDupes = new ArrayList<>();
// First pass: keep only the first occurrence of each deep-equal reference.
for (Reference reference : references) {
boolean dup = false;
for (Reference ref : noDupes) {
if (ref.equalsDeep(reference))
{
dup = true;
}
}
if (!dup) {
noDupes.add(reference);
}
}
// Second pass: rebuild each reference from its component parts.
for (Reference reference : noDupes) {
cleanRefs.add(
new Reference(
new IdType(
reference.getReferenceElement().getResourceType(),
reference.getReferenceElement().getIdPart().replace("
reference.getReferenceElement().getVersionIdPart()
)
)
);
}
return cleanRefs;
}
/**
 * Collects all library references attached to the given resource: contained
 * Library resources are first persisted via the Library DAO, then the
 * type-specific library lists (ActivityDefinition / PlanDefinition / Measure)
 * and the cqif-library extension are gathered and de-duplicated.
 *
 * NOTE(review): the argument to replace(...) below is truncated in this copy
 * of the source — likely stripping a local-reference marker (e.g. "#") from
 * the contained resource id; confirm against the original repository.
 */
private Iterable<Reference> getLibraryReferences(DomainResource instance) {
List<Reference> references = new ArrayList<>();
if (instance.hasContained()) {
for (Resource resource : instance.getContained()) {
if (resource instanceof Library) {
resource.setId(resource.getIdElement().getIdPart().replace("
getLibraryResourceProvider().getDao().update((Library) resource);
// getLibraryLoader().putLibrary(resource.getIdElement().getIdPart(), getLibraryLoader().toElmLibrary((Library) resource));
}
}
}
if (instance instanceof ActivityDefinition) {
references.addAll(((ActivityDefinition)instance).getLibrary());
}
else if (instance instanceof PlanDefinition) {
references.addAll(((PlanDefinition)instance).getLibrary());
}
else if (instance instanceof Measure) {
references.addAll(((Measure)instance).getLibrary());
}
for (Extension extension : instance.getExtensionsByUrl("http://hl7.org/fhir/StructureDefinition/cqif-library"))
{
Type value = extension.getValue();
if (value instanceof Reference) {
references.add((Reference)value);
}
else {
throw new RuntimeException("Library extension does not have a value of type reference");
}
}
return cleanReferences(references);
}
/**
 * Renders a space-separated sequence of CQL include declarations, one per
 * reference, of the form: include <id> [version '<v>'] called <id>.
 */
private String buildIncludes(Iterable<Reference> references) {
    StringBuilder sb = new StringBuilder();
    String separator = "";
    for (Reference reference : references) {
        // TODO: This assumes the libraries resource id is the same as the library name, need to work this out better
        String id = reference.getReferenceElement().getIdPart();
        String version = reference.getReferenceElement().getVersionIdPart();
        sb.append(separator).append("include ").append(id);
        if (version != null) {
            sb.append(" version '").append(version).append("'");
        }
        sb.append(" called ").append(id);
        separator = " ";
    }
    return sb.toString();
}
/* Evaluates the given CQL expression in the context of the given resource */
/* If the resource has a library extension, or a library element, that library is loaded into the context for the expression */
/**
 * Wraps the expression in a synthetic "LocalLibrary", translates it, and
 * evaluates "Expression" with the instance bound both as %context and as a
 * parameter named after its FHIR type. The setup order below (parameters,
 * expression caching, library loader, context value, data provider) mirrors
 * the CQL engine's expectations — do not reorder casually.
 */
public Object evaluateInContext(DomainResource instance, String cql, String patientId) {
Iterable<Reference> libraries = getLibraryReferences(instance);
// Provide the instance as the value of the '%context' parameter, as well as the value of a parameter named the same as the resource
// This enables expressions to access the resource by root, as well as through the %context attribute
String source = String.format("library LocalLibrary using FHIR version '3.0.0' include FHIRHelpers version '3.0.0' called FHIRHelpers %s parameter %s %s parameter \"%%context\" %s define Expression: %s",
buildIncludes(libraries), instance.fhirType(), instance.fhirType(), instance.fhirType(), cql);
// String source = String.format("library LocalLibrary using FHIR version '1.8' include FHIRHelpers version '1.8' called FHIRHelpers %s parameter %s %s parameter \"%%context\" %s define Expression: %s",
// buildIncludes(libraries), instance.fhirType(), instance.fhirType(), instance.fhirType(), cql);
STU3LibraryLoader libraryLoader = LibraryHelper.createLibraryLoader(this.getLibraryResourceProvider());
org.cqframework.cql.elm.execution.Library library = LibraryHelper.translateLibrary(source, libraryLoader.getLibraryManager(), libraryLoader.getModelManager());
Context context = new Context(library);
context.setParameter(null, instance.fhirType(), instance);
context.setParameter(null, "%context", instance);
context.setExpressionCaching(true);
context.registerLibraryLoader(libraryLoader);
context.setContextValue("Patient", patientId);
context.registerDataProvider("http://hl7.org/fhir", provider);
return context.resolveExpressionRef("Expression").evaluate(context);
}
/**
 * Resolves the terminology provider: a remote FHIR terminology service when a
 * url is supplied, otherwise the provider already attached to this instance.
 */
private TerminologyProvider getTerminologyProvider(String url, String user, String pass)
{
    if (url == null) {
        return provider.getTerminologyProvider();
    }
    // TODO: Change to cache-value-sets
    return new FhirTerminologyProvider().withBasicAuth(user, pass).setEndpoint(url, false);
}
/**
 * Resolves a data provider for the (model, version) pair declared by a CQL
 * "using" statement. Only FHIR 3.0.0 and QDM 5.4 are supported; anything else
 * is rejected with IllegalArgumentException.
 */
private DataProvider getDataProvider(String model, String version)
{
    if (model.equals("FHIR") && version.equals("3.0.0")) {
        // Disable server validation on the shared FHIR context before handing
        // the JPA provider back.
        FhirContext fhirContext = provider.getFhirContext();
        fhirContext.getRestfulClientFactory().setServerValidationMode(ServerValidationModeEnum.NEVER);
        provider.setFhirContext(fhirContext);
        return provider;
    }
    if (model.equals("QDM") && version.equals("5.4")) {
        return new Qdm54DataProvider();
    }
    throw new IllegalArgumentException("Could not resolve data provider for data model: " + model + " using version: " + version);
}
/**
 * $cql operation: translates the supplied CQL source, evaluates every
 * expression definition it contains, and returns a Bundle of Parameters
 * resources — one per definition, carrying its source location, value and
 * result type (or the error message when evaluation fails).
 */
@Operation(name = "$cql")
public Bundle evaluate(
        @OperationParam(name= "code") String code,
        @OperationParam(name= "patientId") String patientId,
        @OperationParam(name= "terminologyServiceUri") String terminologyServiceUri,
        @OperationParam(name= "terminologyUser") String terminologyUser,
        @OperationParam(name= "terminologyPass") String terminologyPass,
        @OperationParam(name= "parameters") Parameters parameters
)
{
    CqlTranslator translator;
    FhirMeasureBundler bundler = new FhirMeasureBundler();
    STU3LibraryLoader libraryLoader = LibraryHelper.createLibraryLoader(this.getLibraryResourceProvider());
    try {
        translator = LibraryHelper.getTranslator(code, libraryLoader.getLibraryManager(), libraryLoader.getModelManager());
    }
    catch (IllegalArgumentException iae) {
        // Translation failures are reported back to the caller as a resource
        // instead of being thrown.
        Parameters result = new Parameters();
        result.setId("translation-error");
        result.addParameter().setName("value").setValue(new StringType(iae.getMessage()));
        return bundler.bundle(Arrays.asList(result));
    }
    Map<String, List<Integer>> locations = getLocations(translator.getTranslatedLibrary().getLibrary());
    org.cqframework.cql.elm.execution.Library library = LibraryHelper.translateLibrary(translator);
    Context context = new Context(library);
    TerminologyProvider terminologyProvider = getTerminologyProvider(terminologyServiceUri, terminologyUser, terminologyPass);
    DataProvider dataProvider;
    // Register a data provider for every non-System data model the library uses.
    for (UsingDef using : library.getUsings().getDef())
    {
        if (using.getLocalIdentifier().equals("System")) continue;
        dataProvider = getDataProvider(using.getLocalIdentifier(), using.getVersion());
        if (dataProvider instanceof JpaDataProvider)
        {
            ((JpaDataProvider) dataProvider).setTerminologyProvider(terminologyProvider);
            ((JpaDataProvider) dataProvider).setExpandValueSets(true);
            // Fix: register the configured dataProvider. The original registered
            // the raw 'provider' field here, which was only correct because
            // getDataProvider happens to return that same field, and was
            // inconsistent with the QDM branch below.
            context.registerDataProvider("http://hl7.org/fhir", dataProvider);
        }
        else
        {
            ((Qdm54DataProvider) dataProvider).setTerminologyProvider(terminologyProvider);
            context.registerDataProvider("urn:healthit-gov:qdm:v5_4", dataProvider);
        }
        // Registrations common to both data models (previously duplicated in
        // each branch).
        context.registerLibraryLoader(libraryLoader);
        context.registerTerminologyProvider(terminologyProvider);
    }
    // Apply caller-supplied parameter overrides.
    if (parameters != null)
    {
        for (Parameters.ParametersParameterComponent pc : parameters.getParameter())
        {
            context.setParameter(library.getLocalId(), pc.getName(), pc.getValue());
        }
    }
    List<Resource> results = new ArrayList<>();
    if (library.getStatements() != null) {
        for (org.cqframework.cql.elm.execution.ExpressionDef def : library.getStatements().getDef()) {
            context.enterContext(def.getContext());
            if (patientId != null && !patientId.isEmpty()) {
                context.setContextValue(context.getCurrentContext(), patientId);
            }
            else {
                context.setContextValue(context.getCurrentContext(), "null");
            }
            Parameters result = new Parameters();
            try {
                result.setId(def.getName());
                String location = String.format("[%d:%d]", locations.get(def.getName()).get(0), locations.get(def.getName()).get(1));
                result.addParameter().setName("location").setValue(new StringType(location));
                // Function definitions are validated by translation alone; plain
                // definitions are actually evaluated.
                Object res = def instanceof org.cqframework.cql.elm.execution.FunctionDef ? "Definition successfully validated" : def.getExpression().evaluate(context);
                if (res == null) {
                    result.addParameter().setName("value").setValue(new StringType("null"));
                }
                else if (res instanceof List) {
                    if (((List<?>) res).size() > 0 && ((List<?>) res).get(0) instanceof Resource) {
                        result.addParameter().setName("value").setResource(bundler.bundle((Iterable)res));
                    }
                    else {
                        result.addParameter().setName("value").setValue(new StringType(res.toString()));
                    }
                }
                else if (res instanceof Iterable) {
                    result.addParameter().setName("value").setResource(bundler.bundle((Iterable)res));
                }
                else if (res instanceof Resource) {
                    result.addParameter().setName("value").setResource((Resource)res);
                }
                else {
                    result.addParameter().setName("value").setValue(new StringType(res.toString()));
                }
                result.addParameter().setName("resultType").setValue(new StringType(resolveType(res)));
            }
            catch (RuntimeException re) {
                // Per-definition failures are reported in the result rather than
                // aborting the whole evaluation.
                result.addParameter().setName("error").setValue(new StringType(re.getMessage()));
                re.printStackTrace();
            }
            results.add(result);
        }
    }
    return bundler.bundle(results);
}
/**
 * Maps each expression definition name to its [startLine, startChar] source
 * location, falling back to [0, 0] when no trackback information exists.
 */
private Map<String, List<Integer>> getLocations(org.hl7.elm.r1.Library library) {
    Map<String, List<Integer>> locations = new HashMap<>();
    if (library.getStatements() == null) {
        return locations;
    }
    for (org.hl7.elm.r1.ExpressionDef def : library.getStatements().getDef()) {
        int line = 0;
        int ch = 0;
        if (!def.getTrackbacks().isEmpty()) {
            line = def.getTrackbacks().get(0).getStartLine();
            ch = def.getTrackbacks().get(0).getStartChar();
        }
        locations.put(def.getName(), Arrays.asList(line, ch));
    }
    return locations;
}
/**
 * Translates a Java runtime class name into the CQL type name reported to the
 * caller; unknown classes are reported under their simple name, null as "Null".
 */
private String resolveType(Object result) {
    if (result == null) {
        return "Null";
    }
    String simpleName = result.getClass().getSimpleName();
    switch (simpleName) {
        case "BigDecimal": return "Decimal";
        case "ArrayList": return "List";
        case "FhirBundleCursor": return "Retrieve";
        default: return simpleName;
    }
}
// NOTE(review): this field appears to be unused, dead test data — an embedded
// QDM CQL library ("BreastCancerScreening" measure) — presumably left over from
// manual testing. The literal also spans multiple physical lines, which is not
// valid for a Java string literal; this looks like an artifact of how the
// source was extracted. Consider deleting the field — verify first that
// nothing references it reflectively.
String s = "library BreastCancerScreening version '7.2.000' using QDM version '5.3' include MATGlobalCommonFunctions_QDM version '2.0.000' called Global include AdultOutpatientEncounters_QDM version '1.1.000' called AdultOutpatientEncounters include Hospice_QDM version '1.0.000' called Hospice valueset \"ONC Administrative Sex\": 'urn:oid:2.16.840.1.113762.1.4.1' valueset \"Race\": 'urn:oid:2.16.840.1.114222.4.11.836' valueset \"Ethnicity\": 'urn:oid:2.16.840.1.114222.4.11.837' valueset \"Payer\": 'urn:oid:2.16.840.1.114222.4.11.3591' valueset \"Bilateral Mastectomy\": 'urn:oid:2.16.840.1.113883.3.464.1003.198.12.1005' valueset \"Female\": 'urn:oid:2.16.840.1.113883.3.560.100.2' valueset \"Mammography\": 'urn:oid:2.16.840.1.113883.3.464.1003.108.12.1018' valueset \"Unilateral Mastectomy\": 'urn:oid:2.16.840.1.113883.3.464.1003.198.12.1020' valueset \"History of bilateral mastectomy\": 'urn:oid:2.16.840.1.113883.3.464.1003.198.12.1068' valueset \"Status Post Left Mastectomy\": 'urn:oid:2.16.840.1.113883.3.464.1003.198.12.1069' valueset \"Status Post Right Mastectomy\": 'urn:oid:2.16.840.1.113883.3.464.1003.198.12.1070' valueset \"Left\": 'urn:oid:2.16.840.1.113883.3.464.1003.122.12.1036' valueset \"Right\": 'urn:oid:2.16.840.1.113883.3.464.1003.122.12.1035' valueset \"Unilateral Mastectomy, Unspecified Laterality\": 'urn:oid:2.16.840.1.113883.3.464.1003.198.12.1071' parameter \"Measurement Period\" Interval<DateTime> context Patient /* define \"SDE Ethnicity\": \t[\"Patient Characteristic Ethnicity\": \"Ethnicity\"] define \"SDE Payer\": \t[\"Patient Characteristic Payer\": \"Payer\"] define \"SDE Race\": \t[\"Patient Characteristic Race\": \"Race\"] define \"SDE Sex\": \t[\"Patient Characteristic Sex\": \"ONC Administrative Sex\"] */ define \"Denominator\": \ttrue /* define \"Unilateral Mastectomy Procedure\": \t[\"Procedure, Performed\": \"Unilateral Mastectomy\"] UnilateralMastectomyProcedure \t\twhere UnilateralMastectomyProcedure.relevantPeriod ends before end 
of \"Measurement Period\" */ define \"Unilateral Mastectomy Procedure\": \t[\"Procedure, Performed\": \"Unilateral Mastectomy\"] UnilateralMastectomyProcedure \t\twhere UnilateralMastectomyProcedure.relevantPeriod ends before day of end of \"Measurement Period\" /* define \"Right Mastectomy\": \t( [\"Diagnosis\": \"Status Post Right Mastectomy\"] \t\tunion ( [\"Diagnosis\": \"Unilateral Mastectomy, Unspecified Laterality\"] UnilateralMastectomyDiagnosis \t\t\t\twhere UnilateralMastectomyDiagnosis.anatomicalLocationSite in \"Right\" \t\t) ) RightMastectomy \t\twhere RightMastectomy.prevalencePeriod starts before end of \"Measurement Period\" */ define \"Right Mastectomy\": \t( [\"Diagnosis\": \"Status Post Right Mastectomy\"] \t\tunion ( [\"Diagnosis\": \"Unilateral Mastectomy, Unspecified Laterality\"] UnilateralMastectomyDiagnosis \t\t\t\twhere UnilateralMastectomyDiagnosis.anatomicalLocationSite in \"Right\" \t\t) ) RightMastectomy \t\twhere RightMastectomy.prevalencePeriod starts before day of end of \"Measurement Period\" /* define \"Left Mastectomy\": \t( [\"Diagnosis\": \"Status Post Left Mastectomy\"] \t\tunion ( [\"Diagnosis\": \"Unilateral Mastectomy, Unspecified Laterality\"] UnilateralMastectomyDiagnosis \t\t\t\twhere UnilateralMastectomyDiagnosis.anatomicalLocationSite in \"Left\" \t\t) ) LeftMastectomy \t\twhere LeftMastectomy.prevalencePeriod starts before end of \"Measurement Period\" */ define \"Left Mastectomy\": \t( [\"Diagnosis\": \"Status Post Left Mastectomy\"] \t\tunion ( [\"Diagnosis\": \"Unilateral Mastectomy, Unspecified Laterality\"] UnilateralMastectomyDiagnosis \t\t\t\twhere UnilateralMastectomyDiagnosis.anatomicalLocationSite in \"Left\" \t\t) ) LeftMastectomy \t\twhere LeftMastectomy.prevalencePeriod starts before day of end of \"Measurement Period\" /* define \"History Bilateral Mastectomy\": \t[\"Diagnosis\": \"History of bilateral mastectomy\"] BilateralMastectomyHistory \t\twhere BilateralMastectomyHistory.prevalencePeriod starts 
before end of \"Measurement Period\" */ define \"History Bilateral Mastectomy\": \t[\"Diagnosis\": \"History of bilateral mastectomy\"] BilateralMastectomyHistory \t\twhere BilateralMastectomyHistory.prevalencePeriod starts before day of end of \"Measurement Period\" /* define \"Bilateral Mastectomy Procedure\": \t[\"Procedure, Performed\": \"Bilateral Mastectomy\"] BilateralMastectomyPerformed \t\twhere BilateralMastectomyPerformed.relevantPeriod ends before end of \"Measurement Period\" */ define \"Bilateral Mastectomy Procedure\": \t[\"Procedure, Performed\": \"Bilateral Mastectomy\"] BilateralMastectomyPerformed \t\twhere BilateralMastectomyPerformed.relevantPeriod ends before day of end of \"Measurement Period\" define \"Numerator\": \texists ( [\"Diagnostic Study, Performed\": \"Mammography\"] Mammogram \t\t\twhere ( Mammogram.relevantPeriod ends 27 months or less before day of end \"Measurement Period\" ) \t) define \"Denominator Exclusions\": \tHospice.\"Has Hospice\" \t\tor ( Count(\"Unilateral Mastectomy Procedure\") = 2 ) \t\tor ( exists \"Right Mastectomy\" \t\t\t\tand exists \"Left Mastectomy\" \t\t) \t\tor exists \"History Bilateral Mastectomy\" \t\tor exists \"Bilateral Mastectomy Procedure\" define \"Initial Population\": \texists ( [\"Patient Characteristic Sex\": \"Female\"] ) \t\tand exists [\"Patient Characteristic Birthdate\"] BirthDate \t\t\twhere Global.\"CalendarAgeInYearsAt\"(BirthDate.birthDatetime, start of \"Measurement Period\") in Interval[51, 74] \t\t\t\tand exists AdultOutpatientEncounters.\"Qualifying Encounters\"";
}
|
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
/**
 * Data object for a bulk formula loaded from the salesforce.Formula__c table.
 * Loads its own BOM headers and computes a max fill quantity from the linked
 * RD hierarchy. Fields are intentionally public (existing callers read them
 * directly), so the shape is left unchanged.
 */
public class Bulk {
    public String Id;
    public String Name;
    public Double QuantityPerEach;
    public String QuantityUnitPerEach;
    public String FormulaBenchMark;
    public Double ManualMaxFill;
    public Integer Each;
    public Double Scrap;
    public String AppearanceShadBench;
    public String ReferenceCode;
    public Double MaxFill;
    public String RDHierarchyId;
    public RDHierarchy RDHierarchy;
    public String NPDId;
    public NPD npd;
    public ArrayList<BOMHeader> listBOMH;

    /** Builds a Bulk from a result-set row already positioned on a record. */
    public Bulk(ResultSet rs) {
        LoadData(rs);
    }

    /** Looks the formula up by Salesforce id and loads the first match. */
    public Bulk(String id)
    {
        // SECURITY NOTE(review): the id is concatenated straight into the SQL —
        // a SQL injection risk if ids can come from untrusted input. DataManager
        // only exposes Query(String) here, so this cannot be parameterized
        // without changing that API; confirm ids are internally generated.
        ResultSet rs = DataManager.Query("SELECT * FROM salesforce.Formula__c where SFID='" + id + "'");
        try {
            if(rs.next())
            {
                LoadData(rs);
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }

    /** Copies one Formula__c row into this object and loads related records. */
    private void LoadData(ResultSet rs)
    {
        try {
            Id = rs.getString("SFID");
            Name = rs.getString("name");
            QuantityPerEach = rs.getDouble("Quantity_per_Each__c");
            QuantityUnitPerEach = rs.getString("quantity_unit_per_each__c");
            FormulaBenchMark = rs.getString("formula_benchmark__c");
            ManualMaxFill = rs.getDouble("max_fill_manual__c");
            Each = rs.getInt("each__c");
            Scrap = rs.getDouble("scrap__c");
            AppearanceShadBench = rs.getString("appearance_shade_benchmarks__c");
            ReferenceCode = rs.getString("reference_code__c");
            RDHierarchyId = rs.getString("rd_hierarchy__c");
            RDHierarchy rdh = new RDHierarchy(RDHierarchyId);
            linkRDHierarchy(rdh);
            NPDId = rs.getString("NPD__c");
            listBOMH = new ArrayList<BOMHeader>();
            LoadBOMHeader();
        } catch(SQLException e) {
            // Fix: the original called e.getMessage() and discarded the result,
            // silently swallowing the exception. Report it like the other
            // catch blocks in this class do.
            e.printStackTrace();
        }
    }

    /**
     * Attaches the RD hierarchy and derives MaxFill: the manual override when
     * set (non-zero), otherwise a quantity-based value adjusted for the
     * machine type (ML quantities are scaled by the hierarchy's SG max).
     */
    public void linkRDHierarchy(RDHierarchy rdHi)
    {
        RDHierarchy = rdHi;
        // ManualMaxFill is unboxed here; it is always set by LoadData before
        // this is called from the constructors, but external callers invoking
        // linkRDHierarchy on a partially built instance would NPE — TODO confirm.
        if (ManualMaxFill == 0)
        {
            if (RDHierarchy.MachineType.equals("Skincare"))
            {
                MaxFill = QuantityPerEach * (QuantityUnitPerEach.equals("ML") ? RDHierarchy.SGMax : 1);
                // Small jars get a 2-unit allowance, larger ones 4.
                MaxFill += (MaxFill < 31 ? 2 : 4);
            }
            else
            {
                MaxFill = QuantityPerEach * (QuantityUnitPerEach.equals("ML") ? RDHierarchy.SGMax : 1) + 0.6;
            }
        }
        else
        {
            MaxFill = ManualMaxFill;
        }
    }

    /** Loads every BOM header referencing this formula and links it back. */
    private void LoadBOMHeader()
    {
        try
        {
            // SECURITY NOTE(review): same string-built SQL concern as the
            // id-based constructor; Id originates from the database here.
            ResultSet rs = DataManager.Query("SELECT * FROM salesforce.BOM_Header__c where Formula__c = '" + Id + "'");
            while (rs.next())
            {
                BOMHeader newBOMHeader = new BOMHeader(rs);
                newBOMHeader.linkBulk(this);
                listBOMH.add(newBOMHeader);
            }
        }
        catch (SQLException e)
        {
            e.printStackTrace();
        }
    }

    /** Attaches the NPD record only when its id matches this formula's NPD id. */
    public void linkNPD(NPD newNPD)
    {
        if(newNPD.Id.equals(NPDId))
        {
            npd = newNPD;
        }
    }

    /** Renders a human-readable HTML summary of the formula and its BOM tree. */
    public String getData()
    {
        // StringBuilder instead of repeated String concatenation in the loops.
        StringBuilder output = new StringBuilder();
        output.append("Name: " + Name + ", Quantity per Each" + QuantityPerEach + ", MaxFill: " + MaxFill + "<br/>");
        output.append("RD Hierarchy: " + RDHierarchyId + ", SG: " + RDHierarchy.SGMax + "<br/>");
        for(BOMHeader bomH : listBOMH)
        {
            output.append("BOM Number: " + bomH.Name + ", Status: " + bomH.Status + ", Base Unit: " + bomH.BaseUnit + "<br/>");
            for(BOMDetail bomD : bomH.listBOMDetail)
            {
                output.append(" - Material: " + bomD.material.MaterialCode + ", Percent: " + bomD.Percent + ", Scrap: " + bomD.Scrap + ", Plant: " + bomD.Plant + "<br/>");
            }
        }
        return output.toString();
    }
}
|
package org.redisson;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import net.bytebuddy.ByteBuddy;
import net.bytebuddy.description.field.FieldDescription;
import net.bytebuddy.description.field.FieldList;
import net.bytebuddy.dynamic.DynamicType;
import net.bytebuddy.dynamic.loading.ClassLoadingStrategy;
import net.bytebuddy.implementation.MethodDelegation;
import net.bytebuddy.implementation.bind.annotation.FieldProxy;
import net.bytebuddy.matcher.ElementMatchers;
import org.redisson.core.RExpirable;
import org.redisson.core.RExpirableAsync;
import org.redisson.core.RMap;
import org.redisson.core.RObject;
import org.redisson.core.RObjectAsync;
import org.redisson.liveobject.CodecProvider;
import org.redisson.liveobject.LiveObjectTemplate;
import org.redisson.liveobject.RAttachedLiveObjectService;
import org.redisson.liveobject.RLiveObject;
import org.redisson.liveobject.annotation.REntity;
import org.redisson.liveobject.annotation.RId;
import org.redisson.liveobject.core.AccessorInterceptor;
import org.redisson.liveobject.core.ExpirableInterceptor;
import org.redisson.liveobject.core.LiveObjectInterceptor;
import org.redisson.liveobject.misc.Introspectior;
/**
 * Live-object service that materialises REntity-annotated classes as
 * ByteBuddy-generated proxies backed by a Redis map. Generated proxy classes
 * are cached per entity class in the shared classCache.
 */
public class RedissonAttachedLiveObjectService implements RAttachedLiveObjectService {

    private final Map<Class, Class> classCache;
    private final RedissonClient redisson;
    private final CodecProvider codecProvider;

    public RedissonAttachedLiveObjectService(RedissonClient redisson, Map<Class, Class> classCache, CodecProvider codecProvider) {
        this.redisson = redisson;
        this.classCache = classCache;
        this.codecProvider = codecProvider;
    }

    //TODO: Support ID Generator
    //TODO: Add ttl renewal functionality
    /**
     * Same as {@link #get(Class, Object)} but additionally stores the default
     * TTL settings in the backing map and applies the expiry to it.
     */
    @Override
    public <T, K> T get(Class<T> entityClass, K id, long timeToLive, TimeUnit timeUnit) {
        T instance = get(entityClass, id);
        RMap map = ((RLiveObject) instance).getLiveObjectLiveMap();
        map.put("RLiveObjectDefaultTimeToLiveValue", timeToLive);
        map.put("RLiveObjectDefaultTimeToLiveUnit", timeUnit.toString());
        map.expire(timeToLive, timeUnit);
        return instance;
    }

    /**
     * Returns a proxy instance of the entity class bound to the given id.
     * On any failure the (possibly broken) generated class is evicted from
     * the cache before the exception is rethrown.
     */
    @Override
    public <T, K> T get(Class<T> entityClass, K id) {
        try {
            T instance;
            try {
                // Prefer the no-arg constructor; fall back to a constructor
                // taking the id's type when the entity has no default one.
                instance = getProxyClass(entityClass).newInstance();
            } catch (Exception exception) {
                instance = getProxyClass(entityClass).getDeclaredConstructor(id.getClass()).newInstance(id);
            }
            ((RLiveObject) instance).setLiveObjectId(id);
            return instance;
        } catch (Exception ex) {
            unregisterClass(entityClass);
            throw new RuntimeException(ex);
        }
    }

    // NOTE(review): check-then-register is not atomic; concurrent callers may
    // both generate a proxy, but putIfAbsent in registerClass keeps the cache
    // consistent (one generated class wins).
    private <T, K> Class<? extends T> getProxyClass(Class<T> entityClass) throws Exception {
        if (!classCache.containsKey(entityClass)) {
            registerClass(entityClass);
        }
        return classCache.get(entityClass);
    }

    /**
     * Validates the entity class (REntity present, exactly one plain RId
     * field) and generates + caches its ByteBuddy proxy class.
     */
    private <T, K> void registerClass(Class<T> entityClass) throws Exception {
        if (entityClass.isAnonymousClass() || entityClass.isLocalClass()) {
            throw new IllegalArgumentException(entityClass.getName() + " is not publically accessable.");
        }
        if (!entityClass.isAnnotationPresent(REntity.class)) {
            throw new IllegalArgumentException("REntity annotation is missing from class type declaration.");
        }
        FieldList<FieldDescription.InDefinedShape> fieldsWithRIdAnnotation
                = Introspectior.getFieldsWithAnnotation(entityClass, RId.class);
        if (fieldsWithRIdAnnotation.size() == 0) {
            throw new IllegalArgumentException("RId annotation is missing from class field declaration.");
        }
        if (fieldsWithRIdAnnotation.size() > 1) {
            throw new IllegalArgumentException("Only one field with RId annotation is allowed in class field declaration.");
        }
        FieldDescription.InDefinedShape idField = fieldsWithRIdAnnotation.getOnly();
        String idFieldName = idField.getName();
        if (entityClass.getDeclaredField(idFieldName).getType().isAnnotationPresent(REntity.class)) {
            throw new IllegalArgumentException("Field with RId annotation cannot be a type of which class is annotated with REntity.");
        }
        if (entityClass.getDeclaredField(idFieldName).getType().isAssignableFrom(RObject.class)) {
            throw new IllegalArgumentException("Field with RId annotation cannot be a type of RObject");
        }
        DynamicType.Builder<T> builder = new ByteBuddy()
                .subclass(entityClass);
        // Copy the live-object template fields onto the proxy.
        for (FieldDescription.InDefinedShape field
                : Introspectior.getTypeDescription(LiveObjectTemplate.class)
                .getDeclaredFields()) {
            builder = builder.define(field);
        }
        // RLiveObject accessors -> LiveObjectInterceptor; expiry methods ->
        // ExpirableInterceptor; all other public getters/setters -> the
        // Redis-backed AccessorInterceptor.
        Class<? extends T> loaded = builder.method(ElementMatchers.isDeclaredBy(
                Introspectior.getTypeDescription(RLiveObject.class))
                .and(ElementMatchers.isGetter().or(ElementMatchers.isSetter())))
                .intercept(MethodDelegation.to(new LiveObjectInterceptor(redisson, codecProvider, entityClass, idFieldName))
                        .appendParameterBinder(FieldProxy.Binder
                                .install(LiveObjectInterceptor.Getter.class, LiveObjectInterceptor.Setter.class)))
                .implement(RLiveObject.class)
                .method(ElementMatchers.isDeclaredBy(RExpirable.class)
                        .or(ElementMatchers.isDeclaredBy(RExpirableAsync.class))
                        .or(ElementMatchers.isDeclaredBy(RObject.class))
                        .or(ElementMatchers.isDeclaredBy(RObjectAsync.class)))
                .intercept(MethodDelegation.to(ExpirableInterceptor.class))
                .implement(RExpirable.class)
                .method(ElementMatchers.not(ElementMatchers.isDeclaredBy(Object.class))
                        .and(ElementMatchers.not(ElementMatchers.isDeclaredBy(RLiveObject.class)))
                        .and(ElementMatchers.not(ElementMatchers.isDeclaredBy(RExpirable.class)))
                        .and(ElementMatchers.not(ElementMatchers.isDeclaredBy(RExpirableAsync.class)))
                        .and(ElementMatchers.not(ElementMatchers.isDeclaredBy(RObject.class)))
                        .and(ElementMatchers.not(ElementMatchers.isDeclaredBy(RObjectAsync.class)))
                        .and(ElementMatchers.isGetter()
                                .or(ElementMatchers.isSetter()))
                        .and(ElementMatchers.isPublic()))
                .intercept(MethodDelegation.to(
                        new AccessorInterceptor(redisson, codecProvider)))
                .make().load(getClass().getClassLoader(),
                        ClassLoadingStrategy.Default.WRAPPER)
                .getLoaded();
        classCache.putIfAbsent(entityClass, loaded);
    }

    /**
     * Evicts the cache entry for the given class. Accepts either the entity
     * class itself or a generated proxy class (in which case the entity is
     * its superclass).
     */
    public void unregisterClass(Class cls) {
        // Fix: the original tested cls.isAssignableFrom(RLiveObject.class),
        // which asks whether RLiveObject is a subtype of cls and is false for
        // a generated proxy — so a proxy argument would never be mapped back
        // to its entity-class cache key. The proxy implements RLiveObject, so
        // the correct direction is RLiveObject.class.isAssignableFrom(cls).
        classCache.remove(RLiveObject.class.isAssignableFrom(cls) ? cls.getSuperclass() : cls);
    }

    /**
     * @return the codecProvider
     */
    public CodecProvider getCodecProvider() {
        return codecProvider;
    }
}
|
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.http.*;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.servlet.*;
import java.net.URI;
import java.net.URISyntaxException;
import java.sql.*;
import com.twilio.sdk.verbs.TwiMLResponse;
import com.twilio.sdk.verbs.TwiMLException;
import com.twilio.sdk.verbs.Say;
public class Main extends HttpServlet {
@Override
protected void doGet(HttpServletRequest req, HttpServletResponse resp)
        throws ServletException, IOException {
    // Route by request-URI suffix; anything unrecognised falls through to
    // the home page.
    String uri = req.getRequestURI();
    if (uri.endsWith("/db")) {
        showDatabase(req, resp);
    } else if (uri.endsWith("/away")) {
        showAway(req, resp);
    } else if (uri.endsWith("/twilio")) {
        twilioService(req, resp);
    } else {
        showHome(req, resp);
    }
}
/** Writes the default landing-page body. */
private void showHome(HttpServletRequest req, HttpServletResponse resp)
        throws ServletException, IOException {
    final String body = "Hello from Java!";
    resp.getWriter().print(body);
}
/** Writes the placeholder body for the /away route. */
private void showAway(HttpServletRequest req, HttpServletResponse resp)
        throws ServletException, IOException {
    final String body = "this is away stuff";
    resp.getWriter().print(body);
}
/**
 * Answers Twilio webhooks with a minimal TwiML document containing a single
 * Say verb, returned as the XML payload Twilio expects.
 */
public void twilioService(HttpServletRequest request, HttpServletResponse response)
        throws IOException {
    TwiMLResponse twiml = new TwiMLResponse();
    try {
        twiml.append(new Say("Hello Twilio Monkey"));
    } catch (TwiMLException e) {
        e.printStackTrace();
    }
    response.setContentType("application/xml");
    response.getWriter().print(twiml.toXML());
}
/**
 * Records one tick in the database and echoes every stored tick back to the
 * caller; errors are reported in the response body instead of as a 500.
 */
private void showDatabase(HttpServletRequest req, HttpServletResponse resp)
        throws ServletException, IOException {
    // Fix: the original never closed the Connection, Statement or ResultSet,
    // leaking them on every request; try-with-resources closes all three even
    // when a query throws.
    try (Connection connection = getConnection();
         Statement stmt = connection.createStatement()) {
        stmt.executeUpdate("CREATE TABLE IF NOT EXISTS ticks (tick timestamp)");
        stmt.executeUpdate("INSERT INTO ticks VALUES (now())");
        StringBuilder out = new StringBuilder("Hello!\n");
        try (ResultSet rs = stmt.executeQuery("SELECT tick FROM ticks")) {
            while (rs.next()) {
                out.append("Read from DB: ").append(rs.getTimestamp("tick")).append("\n");
            }
        }
        resp.getWriter().print(out.toString());
    } catch (Exception e) {
        resp.getWriter().print("There was an error: " + e.getMessage());
    }
}
/**
 * Opens a JDBC connection from the DATABASE_URL environment variable, which
 * has the form postgres://user:password@host:port/dbname.
 */
private Connection getConnection() throws URISyntaxException, SQLException {
    URI dbUri = new URI(System.getenv("DATABASE_URL"));
    String[] userInfo = dbUri.getUserInfo().split(":");
    String username = userInfo[0];
    String password = userInfo[1];
    // Fix: preserve an explicit port when the URL carries one. The original
    // dropped it, silently falling back to the driver's default port even
    // when DATABASE_URL named a different one. URIs without a port
    // (getPort() == -1) behave exactly as before.
    String hostAndPort = dbUri.getHost();
    if (dbUri.getPort() != -1) {
        hostAndPort += ":" + dbUri.getPort();
    }
    String dbUrl = "jdbc:postgresql://" + hostAndPort + dbUri.getPath();
    return DriverManager.getConnection(dbUrl, username, password);
}
public static void main(String[] args) throws Exception{
Server server = new Server(Integer.valueOf(System.getenv("PORT")));
ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS);
context.setContextPath("/");
server.setHandler(context);
|
import java.security.MessageDigest;
import java.sql.*;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.HashMap;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Map;
import java.net.URI;
import java.net.URISyntaxException;
import static spark.Spark.*;
import spark.QueryParamsMap;
import spark.Session;
import spark.template.freemarker.FreeMarkerEngine;
import spark.ModelAndView;
import static spark.Spark.get;
import com.heroku.sdk.jdbc.DatabaseUrl;
import spark.utils.StringUtils;
/**
 * Spark web application: home page with session-based login, a database demo
 * route, and a free-time finder page. Served templates are FreeMarker views.
 */
public class Main
{
    public static void main(String[] args)
    {
        port(Integer.valueOf(System.getenv("PORT")));
        staticFileLocation("/public");

        // Simple liveness check.
        get("/hello", (req, res) -> "Hello World");

        // Home page; shows the logged-in user's name when a session exists.
        get("/", (request, response) -> {
            Map<String, Object> attributes = new HashMap<>();
            User user = request.session().attribute("User");
            if (user != null)
            {
                attributes.put("userName", user.getName());
            }
            return new ModelAndView(attributes, "index.ftl");
        }, new FreeMarkerEngine());

        // Demo route: records a tick and lists every stored tick.
        get("/db", (req, res) -> {
            Connection connection = null;
            Map<String, Object> attributes = new HashMap<>();
            try
            {
                connection = DatabaseUrl.extract().getConnection();
                Statement stmt = connection.createStatement();
                stmt.executeUpdate("CREATE TABLE IF NOT EXISTS ticks (tick timestamp)");
                stmt.executeUpdate("INSERT INTO ticks VALUES (now())");
                ResultSet rs = stmt.executeQuery("SELECT tick FROM ticks");
                ArrayList<String> output = new ArrayList<String>();
                while (rs.next())
                {
                    output.add("Read from DB: " + rs.getTimestamp("tick"));
                }
                attributes.put("results", output);
                return new ModelAndView(attributes, "db.ftl");
            } catch (Exception e)
            {
                attributes.put("message", "There was an error: " + e);
                return new ModelAndView(attributes, "error.ftl");
            } finally
            {
                if (connection != null) try
                {
                    connection.close();
                } catch (SQLException e)
                {
                    // Connection is being discarded; nothing useful to do here.
                }
            }
        }, new FreeMarkerEngine());

        // Login: matches email + SHA-256(password) against the societies table
        // and stores the User in the session on success. The lookup itself uses
        // a PreparedStatement, so the credentials are not injectable.
        post("/login", (request, response) -> {
            String email = request.queryMap().get("email").value();
            String password = hashSha256(request.queryMap().get("password").value());
            Map<String, Object> attributes = new HashMap<>();
            Connection connection = null;
            try
            {
                connection = DatabaseUrl.extract().getConnection();
                Statement stmt = connection.createStatement();
                stmt.executeUpdate("CREATE TABLE IF NOT EXISTS societies (\n" +
                        "ID BIGSERIAL PRIMARY KEY,\n" +
                        "name varchar(255),\n" +
                        "email varchar(255),\n" +
                        "password text);");
                PreparedStatement pS = connection.prepareStatement("SELECT ID, name, email " +
                        "FROM societies " +
                        "WHERE email = ? AND password = ?");
                pS.setString(1, email);
                pS.setString(2, password);
                ResultSet rs = pS.executeQuery();
                User user = null;
                while (rs.next())
                {
                    user = new User(rs.getInt("ID"), rs.getString("email"), rs.getString("name"));
                }
                if (user != null)
                {
                    request.session().attribute("User", user);
                    attributes.put("successMessage", user.getName() + " login correctly !");
                    attributes.put("userName", user.getName());
                }
                else
                {
                    attributes.put("errorMessage", "Wrong email or password");
                }
                return new ModelAndView(attributes, "index.ftl");
            } catch (Exception e)
            {
                attributes.put("errorMessage", "There was an error: " + e);
                return new ModelAndView(attributes, "error.ftl");
            } finally
            {
                if (connection != null) try
                {
                    connection.close();
                } catch (SQLException e)
                {
                    // Connection is being discarded; nothing useful to do here.
                }
            }
        }, new FreeMarkerEngine());

        // Free-time finder page; requires a logged-in session.
        get("/findFreeTime", (request, response) -> {
            Map<String, Object> attributes = new HashMap<>();
            User user = request.session().attribute("User");
            if (user == null)
            {
                response.redirect("/");
                return new ModelAndView(attributes, "error.ftl");
            }
            attributes.put("userName", user.getName());
            QueryParamsMap date = request.queryMap().get("date");
            QueryParamsMap dateSubmit = request.queryMap().get("date_submit");
            String dateS = "", dateSubmitS = "";
            try
            {
                dateS = date.value();
                dateSubmitS = dateSubmit.value();
            }
            catch (Exception e)
            {
                // No date supplied: default to today.
                // Fix: the original pattern was "yyyy/MM/DD" — uppercase 'DD'
                // is day-of-year in java.time patterns, not day-of-month.
                // NOTE(review): dateS/dateSubmitS are currently shadowed by the
                // hard-coded placeholder attributes below — confirm intent.
                LocalDateTime now = LocalDateTime.now();
                dateS = now.format(DateTimeFormatter.ofPattern("yyyy/MM/dd"));
                dateSubmitS = now.format(DateTimeFormatter.ofPattern("MMMM dd, yyyy"));
            }
            // Chart labels: "00:00" ... "23:00", comma-separated and quoted.
            StringBuilder labels = new StringBuilder();
            for (int i = 0; i < 24; i++)
            {
                labels.append("\"");
                if (i < 10) labels.append("0");
                labels.append(i);
                labels.append(":00");
                labels.append("\"");
                labels.append(",");
            }
            // Placeholder chart data: 0..23.
            StringBuilder data = new StringBuilder();
            for (int i = 0; i < 24; i++)
            {
                data.append(i);
                data.append(", ");
            }
            attributes.put("labels", labels.toString());
            attributes.put("data", data.toString());
            attributes.put("date", " asd as" );
            attributes.put("dateSubmit", "asdsad");
            attributes.put("activeMenu", "");
            return new ModelAndView(attributes, "findFreeTime.ftl");
        }, new FreeMarkerEngine());
    }

    /**
     * Hashes the given string with SHA-256 and returns the lowercase hex
     * encoding; returns "" if hashing fails.
     * NOTE(review): an unsalted, fast hash is a weak way to store passwords —
     * prefer bcrypt/scrypt/argon2. Kept as-is so stored credentials still match.
     */
    private static String hashSha256(String toHash)
    {
        try
        {
            MessageDigest md = MessageDigest.getInstance("SHA-256");
            md.update(toHash.getBytes());
            byte[] byteData = md.digest();
            StringBuilder sb = new StringBuilder();
            for (int i = 0; i < byteData.length; i++) {
                // +0x100 then substring(1) forces two hex digits per byte.
                sb.append(Integer.toString((byteData[i] & 0xff) + 0x100, 16).substring(1));
            }
            return sb.toString();
        }
        catch (Exception e)
        {
            // SHA-256 is mandatory on every JVM, so this branch is effectively
            // unreachable; fall through to the empty-string sentinel.
        }
        return "";
    }
}
|
import java.sql.*;
import java.util.HashMap;
import java.util.ArrayList;
import java.util.Map;
import java.net.URI;
import java.net.URISyntaxException;
import static spark.Spark.*;
import spark.template.freemarker.FreeMarkerEngine;
import spark.ModelAndView;
import static spark.Spark.get;
import com.heroku.sdk.jdbc.DatabaseUrl;
/**
 * Minimal Spark application: a hello-world home page and a database demo route
 * that records a tick per visit and lists every stored tick.
 */
public class Main {
    public static void main(String[] args) {
        port(Integer.valueOf(System.getenv("PORT")));
        staticFileLocation("/public");
        get("/", (request, response) -> {
            Map<String, Object> attributes = new HashMap<>();
            attributes.put("message", "Hello World!");
            return new ModelAndView(attributes, "index.ftl");
        }, new FreeMarkerEngine());
        get("/db", (req, res) -> {
            Connection connection = null;
            Map<String, Object> attributes = new HashMap<>();
            try {
                connection = DatabaseUrl.extract().getConnection();
                // Fix: the Statement (and its ResultSet) were never closed;
                // try-with-resources releases both even when a query throws.
                // The connection keeps its original finally-based cleanup.
                try (Statement stmt = connection.createStatement()) {
                    stmt.executeUpdate("CREATE TABLE IF NOT EXISTS ticks (tick timestamp)");
                    stmt.executeUpdate("INSERT INTO ticks VALUES (now())");
                    ArrayList<String> output = new ArrayList<String>();
                    try (ResultSet rs = stmt.executeQuery("SELECT tick FROM ticks")) {
                        while (rs.next()) {
                            output.add( "Read from DB: " + rs.getTimestamp("tick"));
                        }
                    }
                    attributes.put("results", output);
                    return new ModelAndView(attributes, "db.ftl");
                }
            } catch (Exception e) {
                attributes.put("message", "There was an error: " + e);
                return new ModelAndView(attributes, "error.ftl");
            } finally {
                // Connection is being discarded; a failed close is ignored.
                if (connection != null) try{connection.close();} catch(SQLException e){}
            }
        }, new FreeMarkerEngine());
    }
}
|
package policycompass.fcmmanager.controllers;
import java.io.*;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import javax.ws.rs.*;
import javax.ws.rs.core.*;
import com.sun.jersey.api.client.*;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.hibernate.Criteria;
import org.hibernate.Query;
import org.hibernate.Session;
import org.hibernate.criterion.Projections;
import policycompass.fcmmanager.hibernate.HibernateUtil;
import policycompass.fcmmanager.models.*;;
public class FCMModels {
private final static ADHOCRACY_URL = "http://adhocracy-frontend-stage.policycompass.eu/api";
private final static ADHOCRACY_GODS_URL = "http://adhocracy-frontend-stage.policycompass.eu/api/principals/groups/gods/";
/**
 * Builds the detail view of a single FCM model.
 *
 * @param userPath  Adhocracy user path forwarded to the detail object
 * @param userToken accepted for API symmetry with the other methods but not used here
 * @param id        primary key of the model to load
 * @return a freshly constructed {@link FCMModelDetail} for the given id
 */
public static FCMModelDetail retrieveFCMModel(String userPath, String userToken, int id) {
    return new FCMModelDetail(userPath, id);
}
/**
 * Loads every FCM model stored in the database.
 *
 * @return all rows of the {@code fcmmanager_models} entity
 */
public static List<FCMModel> retrieveFCMModelList() {
    Session session = HibernateUtil.getSessionFactory().openSession();
    try {
        session.beginTransaction();
        Query query = session.createQuery("from fcmmanager_models");
        @SuppressWarnings("unchecked")
        List<FCMModel> model = query.list();
        return model;
    } finally {
        // Release the session even if the query throws; the original leaked it.
        session.clear();
        session.close();
    }
}
/**
 * Creates a new FCM model from a JSON payload and persists it together with
 * its concepts and connections.
 *
 * <p>Client-side ids are remapped to database ids: concept ids prefixed with
 * "n" and connection ids prefixed with "e" are offsets that are added to the
 * next free database id; unprefixed ids are treated as plain numeric offsets.
 *
 * @param userPath  Adhocracy user path of the caller (stored on the model)
 * @param userToken Adhocracy token used only for the admin/validity check
 * @param jsonModel payload with a "data" object containing ModelTitle,
 *                  ModelDesc, ModelKeywords, userID, concepts[], connections[]
 * @return the persisted model's detail view, or null when the caller is not a
 *         valid Adhocracy user
 */
public static FCMModelDetail createFCMModel(String userPath, String userToken, JSONObject jsonModel) {
// Reject callers whose credentials do not resolve to a valid user (-1).
int adminUserFlag=isAdminUser(userPath,userToken);
if(adminUserFlag<0)
return null;
FCMModel model = new FCMModel();
List<FCMConcept> concept = new ArrayList<FCMConcept>();
List<FCMConnection> connection = new ArrayList<FCMConnection>();
Date date1 = new Date();
// Next free ids are max(id)+1 per table.
// NOTE(review): this read-then-insert id allocation is race-prone under
// concurrent requests — TODO confirm whether a DB sequence should be used.
int modelID = getFCMModelID();
int conceptID = getConceptID();
int connectionID = getConnectionID();
try {
model.setId(modelID);
model.setTitle(jsonModel.getJSONObject("data").get("ModelTitle").toString());
model.setDescription(jsonModel.getJSONObject("data").get("ModelDesc").toString());
model.setKeywords(jsonModel.getJSONObject("data").get("ModelKeywords").toString());
model.setUserID(Integer.parseInt(jsonModel.getJSONObject("data").get("userID").toString()));
model.setDateAddedtoPC(date1);
model.setDateModified(date1);
model.setUserPath(userPath);
model.setViewsCount(0);
// Map each incoming concept; "n"-prefixed ids are new client-side ids whose
// numeric suffix is offset by the next free database concept id.
JSONArray concepts = jsonModel.getJSONObject("data").getJSONArray("concepts");
for (int i = 0; i < concepts.length(); i++) {
FCMConcept con = new FCMConcept();
JSONObject ob = concepts.getJSONObject(i);
String ConID = ob.getString("Id");
if (ConID.substring(0, 1).compareTo("n") == 0)
con.setId(conceptID + Integer.parseInt(ConID.substring(1, ConID.length())));
else
con.setId(conceptID + Integer.parseInt(ConID));
con.setFCMModelID(modelID);
con.setTitle(ob.getString("title"));
con.setDescription(ob.getString("description"));
con.setScale(ob.getInt("scale"));
con.setPositionX(ob.getInt("x"));
con.setPositionY(ob.getInt("y"));
con.setDateAddedtoPC(date1);
con.setDateModified(date1);
con.setUserID(Integer.parseInt(jsonModel.getJSONObject("data").get("userID").toString()));
con.setViewsCount(0);
concept.add(con);
}
// Map each incoming connection; "e"-prefixed ids behave like "n" above.
JSONArray connections = jsonModel.getJSONObject("data").getJSONArray("connections");
String SourceID;
String DestinationID;
for (int i = 0; i < connections.length(); i++) {
FCMConnection con = new FCMConnection();
JSONObject ob = connections.getJSONObject(i);
con.setFCMModelID(modelID);
String AssID = ob.getString("Id");
if (AssID.substring(0, 1).compareTo("e") == 0)
con.setId(connectionID + Integer.parseInt(AssID.substring(1, AssID.length())));
else
con.setId(connectionID + Integer.parseInt(AssID));
SourceID = ob.getString("sourceID");
DestinationID = ob.getString("destinationID");
if (SourceID.substring(0, 1).compareTo("n") == 0) {
con.setConceptFrom(conceptID + Integer.parseInt(SourceID.substring(1, SourceID.length())));
con.setWeight(ob.getString("weight"));
} else {
con.setConceptFrom(conceptID + Integer.parseInt(SourceID));
// NOTE(review): this branch reads the weight from key "weighted" while the
// branch above reads "weight" — looks like a payload-key inconsistency;
// TODO confirm against the frontend's JSON schema.
con.setWeight(ob.getString("weighted"));
}
if (DestinationID.substring(0, 1).compareTo("n") == 0) {
con.setConceptTo(conceptID + Integer.parseInt(DestinationID.substring(1, DestinationID.length())));
} else {
con.setConceptTo(conceptID + Integer.parseInt(DestinationID));
}
// con.setConceptFrom(conceptID+Integer.parseInt(ob.getJSONObject("source").getString("Id").substring(1)));
// con.setConceptTo(conceptID+Integer.parseInt(ob.getJSONObject("destination").getString("Id").substring(1)));
con.setDateAddedtoPC(date1);
con.setDateModified(date1);
con.setUserID(Integer.parseInt(jsonModel.getJSONObject("data").get("userID").toString()));
con.setViewsCount(0);
connection.add(con);
}
} catch (JSONException e) {
// NOTE(review): a malformed payload is only logged; the (possibly
// partially populated) model is still persisted below — TODO confirm
// whether the save should be skipped on parse failure.
// TODO Auto-generated catch block
e.printStackTrace();
}
// Persist the model and all child entities in a single transaction.
Session session = HibernateUtil.getSessionFactory().openSession();
session.beginTransaction();
session.save(model);
for (int i = 0; i < concept.size(); i++) {
session.save(concept.get(i));
}
for (int i = 0; i < connection.size(); i++) {
session.save(connection.get(i));
}
session.getTransaction().commit();
session.clear();
session.close();
// Return the freshly stored model's detail representation.
return (retrieveFCMModel(userPath, userToken, modelID));
}
/**
 * Updates an existing FCM model from a JSON payload: new "n"/"e"-prefixed
 * concepts/connections are inserted, rows with matching numeric ids are
 * updated in place, and database rows absent from the payload are deleted.
 *
 * <p>Admins (adminUserFlag == 1) may update any model; other users only
 * models matching their own userPath.
 *
 * @param id        primary key of the model to update
 * @param jsonModel payload with a "data" object (concepts[], connections[], userID)
 * @param userPath  Adhocracy user path of the caller
 * @param userToken Adhocracy token used for the admin check
 * @return the detail view of the updated model, or retrieveFCMModel("", "", 0)
 *         when no matching (owned) model exists
 */
public static FCMModelDetail updateFCMModel(int id, JSONObject jsonModel, String userPath,String userToken) {
// NOTE(review): unlike createFCMModel, a negative (invalid-user) flag is not
// rejected here — only the userPath filter below restricts access; TODO
// confirm this is intended.
int adminUserFlag=isAdminUser( userPath, userToken);
List<FCMConcept> concept = new ArrayList<FCMConcept>();
List<FCMConnection> connection = new ArrayList<FCMConnection>();
Date date1 = new Date();
JSONArray concepts = null;
JSONArray connections = null;
Boolean Found = false;
int conceptID = getConceptID();
int connectionID = getConnectionID();
try {
// Pass 1: collect NEW entities ("n"/"e"-prefixed client ids) for insertion.
concepts = jsonModel.getJSONObject("data").getJSONArray("concepts");
for (int i = 0; i < concepts.length(); i++) {
FCMConcept con = new FCMConcept();
JSONObject ob = concepts.getJSONObject(i);
String ConID = ob.getString("Id");
if (ConID.substring(0, 1).compareTo("n") == 0) {
con.setFCMModelID(id);
con.setId(conceptID + Integer.parseInt(ConID.substring(1, ConID.length())));
con.setTitle(ob.getString("title"));
con.setDescription(ob.getString("description"));
con.setScale(ob.getInt("scale"));
con.setPositionX(ob.getInt("x"));
con.setPositionY(ob.getInt("y"));
con.setDateAddedtoPC(date1);
con.setDateModified(date1);
con.setUserID(Integer.parseInt(jsonModel.getJSONObject("data").get("userID").toString()));
con.setViewsCount(0);
concept.add(con);
}
}
connections = jsonModel.getJSONObject("data").getJSONArray("connections");
for (int i = 0; i < connections.length(); i++) {
FCMConnection con = new FCMConnection();
JSONObject ob = connections.getJSONObject(i);
String AssID = ob.getString("Id");
String SourceID;
String DestinationID;
if (AssID.substring(0, 1).compareTo("e") == 0) {
SourceID = ob.getString("sourceID");
DestinationID = ob.getString("destinationID");
con.setFCMModelID(id);
con.setId(connectionID + Integer.parseInt(AssID.substring(1, AssID.length())));
// Endpoint ids may reference either a new ("n"-prefixed) or existing concept.
if (SourceID.substring(0, 1).compareTo("n") == 0) {
con.setConceptFrom(conceptID + Integer.parseInt(SourceID.substring(1, SourceID.length())));
} else {
con.setConceptFrom(Integer.parseInt(SourceID));
}
if (DestinationID.substring(0, 1).compareTo("n") == 0) {
con.setConceptTo(
conceptID + Integer.parseInt(DestinationID.substring(1, DestinationID.length())));
} else {
con.setConceptTo(Integer.parseInt(DestinationID));
}
con.setWeight(ob.getString("weight"));
con.setDateAddedtoPC(date1);
con.setDateModified(date1);
con.setUserID(Integer.parseInt(jsonModel.getJSONObject("data").get("userID").toString()));
con.setViewsCount(0);
connection.add(con);
}
}
} catch (JSONException e) {
// NOTE(review): parse errors are only logged; processing continues with
// whatever was parsed so far.
// TODO Auto-generated catch block
e.printStackTrace();
}
Session session = HibernateUtil.getSessionFactory().openSession();
session.beginTransaction();
// Look up the target model; non-admins are additionally restricted to
// models carrying their own userPath.
Query qModel;
if(adminUserFlag==1)
qModel = session.createQuery("from fcmmanager_models where id= :id");
else
qModel = session.createQuery("from fcmmanager_models where id= :id and userPath=:userPath");
qModel.setInteger("id", id);
if(adminUserFlag!=1)
qModel.setString("userPath", userPath);
FCMModel model = (FCMModel) qModel.uniqueResult();
if(model == null){
// No matching (owned) model: roll back and return an empty detail object.
session.getTransaction().rollback();
session.clear();
session.close();
return (retrieveFCMModel("", "", 0));
}
model.setDateModified(date1);
session.update(model);
// Load the model's current concepts and connections from the database.
Query qConcept = session.createQuery("from fcmmanager_concepts where FCMModel_id= :id");
qConcept.setInteger("id", id);
@SuppressWarnings("unchecked")
List<FCMConcept> conceptdb = qConcept.list();
Query qConnection = session.createQuery("from fcmmanager_connections where FCMModel_id= :id");
qConnection.setInteger("id", id);
@SuppressWarnings("unchecked")
List<FCMConnection> connectiondb = qConnection.list();
try {
// Pass 2: reconcile existing concepts — update rows whose id appears in the
// payload (unprefixed numeric ids), delete rows absent from the payload.
for (int i = 0; i < conceptdb.size(); i++) {
for (int j = 0; j < concepts.length(); j++) {
JSONObject ob = concepts.getJSONObject(j);
String ConID = ob.getString("Id");
if (ConID.substring(0, 1).compareTo("n") != 0) {
if (conceptdb.get(i).getId() == Integer.parseInt(ConID)) {
conceptdb.get(i).setTitle(ob.getString("title"));
conceptdb.get(i).setDescription(ob.getString("description"));
conceptdb.get(i).setScale(ob.getInt("scale"));
conceptdb.get(i).setPositionX(ob.getInt("x"));
conceptdb.get(i).setPositionY(ob.getInt("y"));
conceptdb.get(i).setDateModified(date1);
conceptdb.get(i).setUserID(
Integer.parseInt(jsonModel.getJSONObject("data").get("userID").toString()));
Found = true;
}
}
}
if (Found == false) {
session.delete(conceptdb.get(i));
} else {
session.update(conceptdb.get(i));
Found = false;
}
}
} catch (JSONException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
// Pass 3: reconcile existing connections with the same update-or-delete rule.
for (int i = 0; i < connectiondb.size(); i++) {
for (int j = 0; j < connections.length(); j++) {
JSONObject ob = connections.getJSONObject(j);
String AssID = ob.getString("Id");
String SourceID;
String DestinationID;
if (AssID.substring(0, 1).compareTo("e") != 0) {
if (connectiondb.get(i).getId() == Integer.parseInt(AssID)) {
SourceID = ob.getString("sourceID");
DestinationID = ob.getString("destinationID");
if (SourceID.substring(0, 1).compareTo("n") == 0) {
connectiondb.get(i).setConceptFrom(
conceptID + Integer.parseInt(SourceID.substring(1, SourceID.length())));
} else {
connectiondb.get(i).setConceptFrom(Integer.parseInt(SourceID));
}
if (DestinationID.substring(0, 1).compareTo("n") == 0) {
connectiondb.get(i).setConceptTo(conceptID
+ Integer.parseInt(DestinationID.substring(1, DestinationID.length())));
} else {
connectiondb.get(i).setConceptTo(Integer.parseInt(DestinationID));
}
// NOTE(review): reads key "weighted" while the insert path above reads
// "weight" — TODO confirm which key the frontend actually sends here.
connectiondb.get(i).setWeight(ob.getString("weighted"));
connectiondb.get(i).setDateModified(date1);
connectiondb.get(i).setUserID(
Integer.parseInt(jsonModel.getJSONObject("data").get("userID").toString()));
Found = true;
}
}
}
if (Found == false) {
session.delete(connectiondb.get(i));
} else {
session.update(connectiondb.get(i));
Found = false;
}
}
} catch (JSONException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
// Insert the new entities collected in pass 1 and commit everything.
for (int i = 0; i < concept.size(); i++) {
session.save(concept.get(i));
}
for (int i = 0; i < connection.size(); i++) {
session.save(connection.get(i));
}
session.getTransaction().commit();
session.clear();
session.close();
return (retrieveFCMModel("", "", id));
// return(rtnStr);
}
/**
 * Deletes an FCM model together with all of its concepts and connections.
 * A non-existent id is a no-op for the model row (the original passed null
 * to session.delete, which throws).
 *
 * @param id primary key of the model to remove
 */
public static void deleteFCMModel(int id) {
    Session session = HibernateUtil.getSessionFactory().openSession();
    try {
        session.beginTransaction();
        Query qModel = session.createQuery("from fcmmanager_models where id= :id");
        qModel.setInteger("id", id);
        FCMModel model = (FCMModel) qModel.uniqueResult();
        Query qConcept = session.createQuery("from fcmmanager_concepts where FCMModel_id= :id");
        qConcept.setInteger("id", id);
        @SuppressWarnings("unchecked")
        List<FCMConcept> concept = qConcept.list();
        Query qConnection = session.createQuery("from fcmmanager_connections where FCMModel_id= :id");
        qConnection.setInteger("id", id);
        @SuppressWarnings("unchecked")
        List<FCMConnection> connection = qConnection.list();
        // Guard against an unknown id: session.delete(null) would throw.
        if (model != null) {
            session.delete(model);
        }
        for (int i = 0; i < concept.size(); i++) {
            session.delete(concept.get(i));
        }
        for (int i = 0; i < connection.size(); i++) {
            session.delete(connection.get(i));
        }
        session.getTransaction().commit();
    } finally {
        // Release the session even if a query or delete throws.
        session.clear();
        session.close();
    }
}
/**
 * Returns the next free FCMModel primary key, computed as max(id) + 1
 * (1 when the table is empty).
 *
 * <p>NOTE(review): this read-then-insert allocation is race-prone under
 * concurrent requests — TODO consider a database sequence.
 */
public static int getFCMModelID() {
    Session session = HibernateUtil.getSessionFactory().openSession();
    try {
        session.beginTransaction();
        Criteria criteria = session.createCriteria(FCMModel.class).setProjection(Projections.max("id"));
        // Run the projection query once; the original called uniqueResult() twice.
        Integer max = (Integer) criteria.uniqueResult();
        int modelID = (max == null) ? 0 : max;
        return modelID + 1;
    } finally {
        // Release the session even on failure; the original leaked it.
        session.clear();
        session.close();
    }
}
/**
 * Returns the next free FCMConcept primary key, computed as max(id) + 1
 * (1 when the table is empty).
 */
public static int getConceptID() {
    Session session = HibernateUtil.getSessionFactory().openSession();
    try {
        Criteria criteria = session.createCriteria(FCMConcept.class).setProjection(Projections.max("id"));
        // Run the projection query once; the original called uniqueResult() twice.
        Integer max = (Integer) criteria.uniqueResult();
        int conceptID = (max == null) ? 0 : max;
        return conceptID + 1;
    } finally {
        // Release the session even on failure; the original leaked it.
        session.clear();
        session.close();
    }
}
/**
 * Returns the next free FCMConnection primary key, computed as max(id) + 1
 * (1 when the table is empty).
 */
public static int getConnectionID() {
    Session session = HibernateUtil.getSessionFactory().openSession();
    try {
        Criteria criteria = session.createCriteria(FCMConnection.class).setProjection(Projections.max("id"));
        // Run the projection query once; the original called uniqueResult() twice.
        Integer max = (Integer) criteria.uniqueResult();
        int connectionID = (max == null) ? 0 : max;
        return connectionID + 1;
    } finally {
        // Release the session even on failure; the original leaked it.
        session.clear();
        session.close();
    }
}
/**
 * Seeds the database with demo data: 8 activation functions, 2 example FCM
 * models (Cambridgeshire County Council and Leningrad Region field trials),
 * 29 concepts and 40 connections, then returns the first model.
 *
 * <p>Ids are assigned starting from the current max(id)+1 of each table, so
 * this can be run against a non-empty database without key collisions.
 *
 * @return the first seeded model (the CCC field trial)
 */
public static FCMModel LoadData() {
FCMConceptActivator[] activator = new FCMConceptActivator[8];
FCMModel[] model = new FCMModel[2];
FCMConcept[] concept = new FCMConcept[29];
FCMConnection[] connection = new FCMConnection[40];
Date date1 = new Date();
// --- Activation functions: eight built-in activator rows. ---
for (int i = 0; i < 8; i++) {
activator[i] = new FCMConceptActivator();
}
activator[0].setTitle("Cauchy Activator");
activator[1].setTitle("Gaussian Activator");
activator[2].setTitle("Hyperbolic Tangent Activator");
activator[3].setTitle("Interval Activator");
activator[4].setTitle("Linear Activator");
activator[5].setTitle("Nary Activator");
activator[6].setTitle("Sigmoid Activator");
activator[7].setTitle("Signum Activator");
for (int i = 0; i < 8; i++) {
activator[i].setId(i + 1);
activator[i].setUserID(1);
activator[i].setDateAddedtoPC(date1);
activator[i].setDateModified(date1);
activator[i].setViewsCount(0);
}
for (int i = 0; i < 2; i++) {
model[i] = new FCMModel();
}
for (int i = 0; i < 29; i++) {
concept[i] = new FCMConcept();
}
for (int i = 0; i < 40; i++) {
connection[i] = new FCMConnection();
}
// Fresh ids: max(id)+1 per table, so seeding never collides with existing rows.
int modelID = getFCMModelID();
int conceptID = getConceptID();
int connectionID = getConnectionID();
// --- Two demo models. ---
model[0].setTitle("Cambridgeshire County Council Field Trial");
model[0].setDescription(
"This is an example fuzzy cognitive map (FCM) of the policy model for the proposed CCC field trial.");
model[0].setKeywords(
"Community Learning, Skill, Deprivation, Unemployment, Current Availability of Learning Provision, Historical Data, Local Knowledge of Stakeholders, Facilities");
model[1].setTitle("Leningrad Region Field Trial");
model[1].setDescription(
"This is an example fuzzy cognitive map (FCM) of the policy model for the proposed Leningrad Region field trial.");
model[1].setKeywords("");
for (int i = 0; i < 2; i++) {
model[i].setId(modelID + i);
model[i].setUserID(1);
model[i].setDateAddedtoPC(date1);
model[i].setDateModified(date1);
model[i].setViewsCount(0);
}
// --- Concepts: indices 0-16 belong to model[0] (CCC), 17-28 to model[1]. ---
concept[0].setTitle("Proposal 1");
concept[1].setTitle("Proposal 2");
concept[2].setTitle("Proposal n");
concept[3].setTitle("Number of programme specialized for IT beginer");
concept[4].setTitle("Ratio of disadvantage learners");
concept[5].setTitle("Number of programme to social security beneficiary");
concept[6].setTitle("Personalised learning");
concept[7].setTitle("Digital literacy");
concept[8].setTitle("Social renewal");
concept[9].setTitle("Financial literacy");
concept[10].setTitle("Strong community");
concept[11].setTitle("Social inclusion");
concept[12].setTitle("Health and well being");
concept[13].setTitle("Aspiration");
concept[14].setTitle("Employment skills");
concept[15].setTitle("Vulnerable group");
concept[16].setTitle("Quality of life");
concept[17].setTitle("Initiative 1 (e-workflow introduction)");
concept[18].setTitle("Initiative 2 (number of e-services)");
concept[19].setTitle("Initiative 3 (Penetration rate of broadband nets)");
concept[20].setTitle("Initiative 4 (citizen’s e-skills improvement)");
concept[21].setTitle("Government spending");
concept[22].setTitle("Speed of public services delivery");
concept[23].setTitle("Level of public services accessibility");
concept[24].setTitle("Spending on economic development");
concept[25].setTitle("Expenditure on social protection");
concept[26].setTitle("Level of citizens satisfaction with the activities of the authorities");
concept[27].setTitle("Regional GDP");
concept[28].setTitle("Quality of life");
for (int i = 0; i < 29; i++) {
// Concepts 0-16 -> first model, 17-28 -> second model.
if (i < 17) {
concept[i].setFCMModelID(modelID);
} else {
concept[i].setFCMModelID(modelID + 1);
}
concept[i].setId(conceptID + i);
concept[i].setDescription("");
concept[i].setScale(5);
// Concepts are laid out on a diagonal at 100px steps.
concept[i].setPositionX((i + 1) * 100);
concept[i].setPositionY((i + 1) * 100);
concept[i].setUserID(1);
concept[i].setDateAddedtoPC(date1);
concept[i].setDateModified(date1);
concept[i].setViewsCount(0);
}
// --- Connections: endpoints are expressed as 1-based concept numbers, so
// each endpoint id is (conceptID - 1 + <1-based index>). Connections 0-20
// wire model[0]'s concepts, 21-39 wire model[1]'s. ---
connection[0].setConceptFrom(conceptID - 1 + 1);
connection[0].setConceptTo(conceptID - 1 + 4);
connection[1].setConceptFrom(conceptID - 1 + 1);
connection[1].setConceptTo(conceptID - 1 + 5);
connection[2].setConceptFrom(conceptID - 1 + 2);
connection[2].setConceptTo(conceptID - 1 + 5);
connection[3].setConceptFrom(conceptID - 1 + 3);
connection[3].setConceptTo(conceptID - 1 + 11);
connection[4].setConceptFrom(conceptID - 1 + 4);
connection[4].setConceptTo(conceptID - 1 + 8);
connection[5].setConceptFrom(conceptID - 1 + 5);
connection[5].setConceptTo(conceptID - 1 + 10);
connection[6].setConceptFrom(conceptID - 1 + 6);
connection[6].setConceptTo(conceptID - 1 + 10);
connection[7].setConceptFrom(conceptID - 1 + 7);
connection[7].setConceptTo(conceptID - 1 + 14);
connection[8].setConceptFrom(conceptID - 1 + 7);
connection[8].setConceptTo(conceptID - 1 + 15);
connection[9].setConceptFrom(conceptID - 1 + 8);
connection[9].setConceptTo(conceptID - 1 + 9);
connection[10].setConceptFrom(conceptID - 1 + 8);
connection[10].setConceptTo(conceptID - 1 + 16);
connection[11].setConceptFrom(conceptID - 1 + 9);
connection[11].setConceptTo(conceptID - 1 + 17);
connection[12].setConceptFrom(conceptID - 1 + 10);
connection[12].setConceptTo(conceptID - 1 + 9);
connection[13].setConceptFrom(conceptID - 1 + 10);
connection[13].setConceptTo(conceptID - 1 + 16);
connection[14].setConceptFrom(conceptID - 1 + 11);
connection[14].setConceptTo(conceptID - 1 + 16);
connection[15].setConceptFrom(conceptID - 1 + 11);
connection[15].setConceptTo(conceptID - 1 + 17);
connection[16].setConceptFrom(conceptID - 1 + 12);
connection[16].setConceptTo(conceptID - 1 + 11);
connection[17].setConceptFrom(conceptID - 1 + 12);
connection[17].setConceptTo(conceptID - 1 + 17);
connection[18].setConceptFrom(conceptID - 1 + 13);
connection[18].setConceptTo(conceptID - 1 + 17);
connection[19].setConceptFrom(conceptID - 1 + 11);
connection[19].setConceptTo(conceptID - 1 + 9);
connection[20].setConceptFrom(conceptID - 1 + 15);
connection[20].setConceptTo(conceptID - 1 + 14);
connection[21].setConceptFrom(conceptID - 1 + 18);
connection[21].setConceptTo(conceptID - 1 + 22);
connection[22].setConceptFrom(conceptID - 1 + 18);
connection[22].setConceptTo(conceptID - 1 + 23);
connection[23].setConceptFrom(conceptID - 1 + 19);
connection[23].setConceptTo(conceptID - 1 + 22);
connection[24].setConceptFrom(conceptID - 1 + 19);
connection[24].setConceptTo(conceptID - 1 + 23);
connection[25].setConceptFrom(conceptID - 1 + 19);
connection[25].setConceptTo(conceptID - 1 + 24);
connection[26].setConceptFrom(conceptID - 1 + 20);
connection[26].setConceptTo(conceptID - 1 + 22);
connection[27].setConceptFrom(conceptID - 1 + 20);
connection[27].setConceptTo(conceptID - 1 + 23);
connection[28].setConceptFrom(conceptID - 1 + 20);
connection[28].setConceptTo(conceptID - 1 + 24);
connection[29].setConceptFrom(conceptID - 1 + 21);
connection[29].setConceptTo(conceptID - 1 + 24);
connection[30].setConceptFrom(conceptID - 1 + 22);
connection[30].setConceptTo(conceptID - 1 + 25);
connection[31].setConceptFrom(conceptID - 1 + 22);
connection[31].setConceptTo(conceptID - 1 + 26);
connection[32].setConceptFrom(conceptID - 1 + 23);
connection[32].setConceptTo(conceptID - 1 + 27);
connection[33].setConceptFrom(conceptID - 1 + 24);
connection[33].setConceptTo(conceptID - 1 + 27);
connection[34].setConceptFrom(conceptID - 1 + 25);
connection[34].setConceptTo(conceptID - 1 + 28);
connection[35].setConceptFrom(conceptID - 1 + 26);
connection[35].setConceptTo(conceptID - 1 + 29);
connection[36].setConceptFrom(conceptID - 1 + 27);
connection[36].setConceptTo(conceptID - 1 + 29);
connection[37].setConceptFrom(conceptID - 1 + 28);
connection[37].setConceptTo(conceptID - 1 + 26);
connection[38].setConceptFrom(conceptID - 1 + 28);
connection[38].setConceptTo(conceptID - 1 + 27);
connection[39].setConceptFrom(conceptID - 1 + 28);
connection[39].setConceptTo(conceptID - 1 + 29);
for (int i = 0; i < 40; i++) {
// Connections 0-20 -> first model, 21-39 -> second model.
if (i < 21) {
connection[i].setFCMModelID(modelID);
} else {
connection[i].setFCMModelID(modelID + 1);
}
connection[i].setId(connectionID + i);
// Seeded connections have no weight assigned yet.
connection[i].setWeight("?");
connection[i].setUserID(1);
connection[i].setDateAddedtoPC(date1);
connection[i].setDateModified(date1);
connection[i].setViewsCount(0);
}
// --- Persist everything in a single transaction. ---
Session session = HibernateUtil.getSessionFactory().openSession();
session.beginTransaction();
for (int i = 0; i < 8; i++) {
session.save(activator[i]);
}
for (int i = 0; i < 2; i++) {
session.save(model[i]);
}
for (int i = 0; i < 29; i++) {
session.save(concept[i]);
}
for (int i = 0; i < 40; i++) {
session.save(connection[i]);
}
session.getTransaction().commit();
session.clear();
session.close();
return model[0];
}
/**
 * Finds every FCM model that has at least one concept linked to the given
 * metric.
 *
 * @param id metric primary key
 * @return matching models, or an empty list when no concept references the metric
 */
public static List<FCMModel> retrieveFCMModelsListByMetrics(int id) {
    List<Integer> ModelId = new ArrayList<Integer>();
    List<FCMModel> model = new ArrayList<FCMModel>();
    Session session = HibernateUtil.getSessionFactory().openSession();
    try {
        Query qConcept = session.createQuery("from fcmmanager_concepts where metric_id= :id");
        qConcept.setInteger("id", id);
        @SuppressWarnings("unchecked")
        List<FCMConcept> concept = qConcept.list();
        for (int i = 0; i < concept.size(); i++) {
            ModelId.add(concept.get(i).getFCMModelID());
        }
        // Only run the IN-query when there is at least one model id to bind.
        if (concept.size() > 0) {
            Query query = session.createQuery("from fcmmanager_models where id in ( :mId)");
            query.setParameterList("mId", ModelId);
            @SuppressWarnings("unchecked")
            List<FCMModel> found = query.list();
            model = found;
        }
        return model;
    } finally {
        // Release the session even if a query throws; the original leaked it.
        session.clear();
        session.close();
    }
}
/**
 * Finds every FCM model that has at least one concept linked to the given
 * dataset.
 *
 * <p>NOTE(review): the concept query filters on {@code metric_id}, identical
 * to retrieveFCMModelsListByMetrics — this looks copy-pasted and probably
 * should filter on a dataset column; TODO confirm against the FCMConcept
 * mapping before changing the query.
 *
 * @param id dataset primary key
 * @return matching models, or an empty list when nothing matches
 */
public static List<FCMModel> retrieveFCMModelsListByDatasets(int id) {
    List<Integer> ModelId = new ArrayList<Integer>();
    List<FCMModel> model = new ArrayList<FCMModel>();
    Session session = HibernateUtil.getSessionFactory().openSession();
    try {
        Query qConcept = session.createQuery("from fcmmanager_concepts where metric_id= :id");
        qConcept.setInteger("id", id);
        @SuppressWarnings("unchecked")
        List<FCMConcept> concept = qConcept.list();
        for (int i = 0; i < concept.size(); i++) {
            ModelId.add(concept.get(i).getFCMModelID());
        }
        // Only run the IN-query when there is at least one model id to bind.
        if (concept.size() > 0) {
            Query query = session.createQuery("from fcmmanager_models where id in ( :mId)");
            query.setParameterList("mId", ModelId);
            @SuppressWarnings("unchecked")
            List<FCMModel> found = query.list();
            model = found;
        }
        return model;
    } finally {
        // Release the session even if a query throws; the original leaked it.
        session.clear();
        session.close();
    }
}
/**
 * Finds every FCM model that contains a concept linked (via the
 * concept-individual join table) to the given individual.
 *
 * @param id individual primary key
 * @return matching models, or an empty list when no concept references the individual
 */
public static List<FCMModel> retrieveFCMModelsListByIndividuals(int id) {
    List<Integer> ModelId = new ArrayList<Integer>();
    List<Integer> ConceptId = new ArrayList<Integer>();
    List<FCMModel> model = new ArrayList<FCMModel>();
    Session session = HibernateUtil.getSessionFactory().openSession();
    try {
        session.beginTransaction();
        Query qConceptIndividuals = session.createQuery("from fcmmanager_conceptindividuals where Individual_id= :id");
        qConceptIndividuals.setInteger("id", id);
        @SuppressWarnings("unchecked")
        List<FCMConceptIndividual> conceptIndividuals = qConceptIndividuals.list();
        for (int i = 0; i < conceptIndividuals.size(); i++) {
            ConceptId.add(conceptIndividuals.get(i).getConceptID());
        }
        // setParameterList on an empty collection is illegal, so only chain the
        // concept/model lookups when at least one join row was found.
        if (conceptIndividuals.size() > 0) {
            Query qConcept = session.createQuery("from fcmmanager_concepts where id in ( :id)");
            qConcept.setParameterList("id", ConceptId);
            @SuppressWarnings("unchecked")
            List<FCMConcept> concept = qConcept.list();
            for (int i = 0; i < concept.size(); i++) {
                ModelId.add(concept.get(i).getFCMModelID());
            }
            Query query = session.createQuery("from fcmmanager_models where id in ( :mId)");
            query.setParameterList("mId", ModelId);
            @SuppressWarnings("unchecked")
            List<FCMModel> found = query.list();
            model = found;
        }
        return model;
    } finally {
        // Release the session even if a query throws; the original leaked it.
        session.clear();
        session.close();
    }
}
/**
 * Checks the caller's Adhocracy credentials against the stage frontend API.
 *
 * <p>Return codes: -1 = invalid user (or any I/O failure), 0 = valid
 * non-admin user, 1 = member of the "gods" (admin) group.
 *
 * <p>NOTE(review): validity is decided by scanning the response body for the
 * substring "error", which is fragile — TODO confirm the API's error format.
 *
 * @param userPath  X-User-Path header value identifying the user
 * @param userToken X-User-Token header value authenticating the user
 * @return -1, 0 or 1 as described above
 */
public static int isAdminUser(String userPath,String userToken)
{
int returnValue=-1;
String charset = "UTF-8";
// NOTE(review): these duplicate the class constants ADHOCRACY_URL /
// ADHOCRACY_GODS_URL (but over https) — consider consolidating.
String requestURL = "https://adhocracy-frontend-stage.policycompass.eu/api";
String requestURLGod = "https://adhocracy-frontend-stage.policycompass.eu/api/principals/groups/gods/";
try {
// First request: validate the user against the API root.
URL url = new URL(requestURL);
HttpURLConnection httpConn = (HttpURLConnection) url.openConnection();
httpConn.setUseCaches(false);
// NOTE(review): setDoOutput(true) switches the request to POST, but no body
// is ever written — TODO confirm the intended HTTP method.
httpConn.setDoOutput(true); // indicates POST method
httpConn.setDoInput(true);
httpConn.setRequestProperty("X-User-Path", userPath);
httpConn.setRequestProperty("X-User-Token", userToken);
List<String> response = new ArrayList<String>();
int status = httpConn.getResponseCode();
if (status == HttpURLConnection.HTTP_OK) {
BufferedReader reader = new BufferedReader(new InputStreamReader(
httpConn.getInputStream()));
String line = null;
while ((line = reader.readLine()) != null) {
response.add(line);
}
reader.close();
httpConn.disconnect();
if(response.toString().toLowerCase().contains("error")){
// Invalid user: the throw jumps to the catch below (returnValue stays -1).
returnValue=-1;
throw new IOException("Invalid User." );
}
else{
// Valid user; now check membership in the admin ("gods") group.
returnValue=0;
url = new URL(requestURLGod);
httpConn = (HttpURLConnection) url.openConnection();
httpConn.setUseCaches(false);
httpConn.setDoOutput(true); // indicates POST method
httpConn.setDoInput(true);
httpConn.setRequestProperty("X-User-Path", userPath);
httpConn.setRequestProperty("X-User-Token", userToken);
response = new ArrayList<String>();
status = httpConn.getResponseCode();
if (status == HttpURLConnection.HTTP_OK) {
reader = new BufferedReader(new InputStreamReader(
httpConn.getInputStream()));
line = null;
while ((line = reader.readLine()) != null) {
response.add(line);
}
reader.close();
httpConn.disconnect();
if(response.toString().toLowerCase().contains("error")){
// Not in the gods group: plain authenticated user.
returnValue=0;
}
else{
// Member of the gods group: admin.
returnValue=1;
}
}
}
} else {
throw new IOException("Server returned non-OK status: " + status);
}
} catch (IOException ex) {
// Network failures and the deliberate "Invalid User" throw both land here;
// returnValue keeps whatever was set before the exception.
ex.printStackTrace();
}
return returnValue;
}
}
|
import java.sql.*;
import java.util.HashMap;
import java.util.ArrayList;
import java.util.Map;
import java.net.URI;
import java.net.URISyntaxException;
import static spark.Spark.*;
import spark.template.freemarker.FreeMarkerEngine;
import spark.ModelAndView;
import static spark.Spark.get;
import com.heroku.sdk.jdbc.DatabaseUrl;
public class Main {

    /**
     * Entry point: configures the embedded Spark server and registers routes.
     *
     * <p>Reads the HTTP port from the {@code PORT} environment variable and
     * falls back to 4567 (Spark's default) when it is unset, instead of
     * failing with a NullPointerException as the original did.
     */
    public static void main(String[] args) {
        String portEnv = System.getenv("PORT");
        port(portEnv != null ? Integer.parseInt(portEnv) : 4567);
        staticFileLocation("/public");

        get("/hello", (req, res) -> "Hello Deep Kandpal JI");

        // Landing page: renders index.ftl with a greeting message.
        get("/", (request, response) -> {
            Map<String, Object> attributes = new HashMap<>();
            attributes.put("message", "Hello World!");
            return new ModelAndView(attributes, "index.ftl");
        }, new FreeMarkerEngine());

        // Demo DB route: inserts the current timestamp and lists all recorded ticks.
        get("/db", (req, res) -> {
            Map<String, Object> attributes = new HashMap<>();
            try {
                // USERS_DB_URL is expected to look like
                // postgres://user:password@host:port/dbname — TODO confirm.
                URI dbUri = new URI(System.getenv("USERS_DB_URL"));
                // Split the credentials once instead of twice as before.
                String[] userInfo = dbUri.getUserInfo().split(":");
                String username = userInfo[0];
                String password = userInfo[1];
                String dbUrl = "jdbc:postgresql://" + dbUri.getHost() + ':' + dbUri.getPort() + dbUri.getPath();
                // try-with-resources closes Connection/Statement/ResultSet even
                // on error; the original leaked the Statement and ResultSet.
                try (Connection connection = DriverManager.getConnection(dbUrl, username, password);
                     Statement stmt = connection.createStatement()) {
                    stmt.executeUpdate("CREATE TABLE IF NOT EXISTS ticks (tick timestamp)");
                    stmt.executeUpdate("INSERT INTO ticks VALUES (now())");
                    ArrayList<String> output = new ArrayList<String>();
                    try (ResultSet rs = stmt.executeQuery("SELECT tick FROM ticks")) {
                        while (rs.next()) {
                            output.add("Read from DB: " + rs.getTimestamp("tick"));
                        }
                    }
                    attributes.put("results", output);
                    return new ModelAndView(attributes, "db.ftl");
                }
            } catch (Exception e) {
                // Render the error page instead of propagating to the framework.
                attributes.put("message", "There was an error: " + e);
                return new ModelAndView(attributes, "error.ftl");
            }
        }, new FreeMarkerEngine());
    }
}
|
import java.io.BufferedReader;
import java.io.Console;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import joptsimple.OptionSet;
import org.clafer.ast.AstClafer;
import org.clafer.ast.AstModel;
import org.clafer.collection.Triple;
import org.clafer.compiler.ClaferCompiler;
import org.clafer.compiler.ClaferOptimizer;
import org.clafer.compiler.ClaferSolver;
import org.clafer.compiler.ClaferUnsat;
import org.clafer.instance.InstanceClafer;
import org.clafer.instance.InstanceModel;
import org.clafer.javascript.Javascript;
import org.clafer.objective.Objective;
import org.clafer.scope.Scope;
public class REPL {
private static int instanceID = 0; // id of the last instance generated in normal (IG) mode
private static int optimalInstanceID = 0; // id of the last optimal instance generated — NOTE(review): the original comment was copy-pasted from instanceID; semantics inferred from the name, confirm against runREPL's usage
public static void runREPL(File inputFile, OptionSet options) throws Exception {
String commandExit = "q";
String commandNext = "n";
String commandReload = "r";
String commandScopeGlobal = "globalScope";
String commandScopeInt = "maxInt";
String commandScopeIndividual = "scope";
String commandMinUnsat = "minUnsat";
String commandUnsatCore = "unsatCore";
String commandListScopes = "saveScopes";
String commandSooMode = "sooMode";
String commandScopeIncGlobal = "incGlobalScope";
String commandScopeIncIndividual = "incScope";
String scopesFile = inputFile.getAbsolutePath().substring(0, inputFile.getAbsolutePath().length() - 3) + ".cfr-scope";
// Running the model itself(instantiating)
Triple<AstModel, Scope, Objective[]> modelTripple = null;
try{
modelTripple = Javascript.readModel(inputFile);
}
catch(Exception e)
{
if (e.getMessage().indexOf("ReferenceError: \"string\" is not defined.") >= 0)
{
System.out.println("The model contains string clafers, which are currently not supported in Choco-based IG.\nPlease press \"Quit\" or \"Stop\" to exit this instance generator and try another one.");
}
else
{
System.out.println("Unhandled compilation error occured. Please report this problem.");
System.out.println(e.getMessage());
}
String s = "";
BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
while(!(s = br.readLine()).equals(commandExit))
{
}
return;
}
AstModel model = modelTripple.getFst();
Scope scope = modelTripple.getSnd();
if (options.has("scope"))
{
scope = scope.toBuilder().defaultScope((int) options.valueOf("scope")).toScope();
}
if (options.has("maxint"))
{
int scopeHigh = (int)options.valueOf("maxint");
int scopeLow = -(scopeHigh + 1);
scope = scope.toBuilder().intLow(scopeLow).intHigh(scopeHigh).toScope();
}
else
{
/* setting the default int range */
int scopeHighDef = 127;
int scopeLowDef = -(scopeHighDef + 1);
scope = scope.toBuilder().intLow(scopeLowDef).intHigh(scopeHighDef).toScope();
}
Mode currentMode = Mode.IG; // start with IG mode
ClaferSolver solver = null;
ClaferOptimizer optimizer = null;
solver = compileModel(model, scope);
if (solver != null)
{
nextInstance(solver);
}
BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
String s = "";
while(!(s = br.readLine()).equals(commandExit))
{
s = s.trim();
String commandParts[] = s.split(" ");
if (commandParts.length == 0)
{
System.out.println("Empty Command");
continue;
}
String command = commandParts[0];
if (currentMode == Mode.IG) // normal mode
{
if (command.equals(commandNext)) // next instance
{
if (solver == null)
{
solver = compileModel(model, scope);
}
nextInstance(solver);
}
else if (command.equals(commandMinUnsat)) // unsat
{
System.out.println("Min UNSAT command:");
ClaferUnsat unsat = ClaferCompiler.compileUnsat(model, scope);
// Print the Min-Unsat and near-miss example.
System.out.println(unsat.minUnsat());
}
else if (command.equals(commandUnsatCore)) // reloading
{
System.out.println("UNSAT Core command:");
ClaferUnsat unsat = ClaferCompiler.compileUnsat(model, scope);
// Print the Min-Unsat and near-miss example.
System.out.println(unsat.unsatCore());
}
else if (command.equals(commandReload)) // reloading
{
solver = compileModel(model, scope);
if (solver == null)
{
System.out.println("Could not reload");
}
else
{
System.out.println("Reset");
}
}
else if (command.equals(commandScopeGlobal))
{
System.out.println("Global scope: " + s);
if (commandParts.length != 2)
{
System.out.println("The format of the command is: '" + commandScopeGlobal + " <integer>'");
System.out.println("Given: '" + s + "'");
continue;
}
int scopeValue;
try{
scopeValue = Integer.parseInt(commandParts[1]);
}
catch(Exception e)
{
System.out.println("The scope has to be an integer number. Given '" + commandParts[1] + "'");
continue;
}
scope = scope.toBuilder().defaultScope(scopeValue).toScope();
solver = compileModel(model, scope);
if (solver != null)
System.out.println("Model is ready after the scope change");
}
else if (command.equals(commandScopeIncGlobal))
{
System.out.println("Increase global scope: " + s);
if (commandParts.length != 2)
{
System.out.println("The format of the command is: '" + commandScopeIncGlobal + " <integer>'");
System.out.println("Given: '" + s + "'");
continue;
}
int scopeValue;
try{
scopeValue = Integer.parseInt(commandParts[1]);
}
catch(Exception e)
{
System.out.println("The scope has to be an integer number. Given '" + commandParts[1] + "'");
continue;
}
scope = scope.toBuilder().adjustDefaultScope(scopeValue).toScope();
solver = compileModel(model, scope);
if (solver != null)
System.out.println("Model is ready after the scope change");
}
else if (command.equals(commandScopeInt))
{
System.out.println("Max Integer: " + s);
if (commandParts.length != 2)
{
System.out.println("The format of the command is: '" + commandScopeInt + " <integer>'");
System.out.println("Given: '" + s + "'");
continue;
}
int scopeHigh;
try{
scopeHigh = Integer.parseInt(commandParts[1]);
}
catch(Exception e)
{
System.out.println("Expected integer numbers. Given '" + commandParts[1] + "' '" + commandParts[2] + "'");
continue;
}
int scopeLow = -(scopeHigh + 1);
scope = scope.toBuilder().intLow(scopeLow).intHigh(scopeHigh).toScope();
solver = compileModel(model, scope);
if (solver != null)
System.out.println("Model is ready after the scope change");
}
else if (command.equals(commandListScopes)) // getting list of scopes
{
List<ClaferNameScopePair> claferScopePairs = new ArrayList<ClaferNameScopePair>();
List<AstClafer> allClafers = Utils.getAllModelClafers(model);
for (AstClafer curClafer: allClafers)
{
int curScope;
try{
curScope = scope.getScope(curClafer);
}
catch(Exception e)
{
curScope = 0;
}
claferScopePairs.add(new ClaferNameScopePair(curClafer.getName(), curScope));
}
Collections.sort(claferScopePairs);
Utils.produceScopeFile(claferScopePairs, scopesFile);
}
else if (command.equals(commandScopeIndividual))
{
System.out.println("Individual scope: " + s);
if (commandParts.length != 3)
{
System.out.println("The format of the command is: '" + commandScopeIndividual + " <clafer> <integer>'");
System.out.println("Given: '" + s + "'");
continue;
}
String claferName = commandParts[1];
int claferScopeValue;
try{
claferScopeValue = Integer.parseInt(commandParts[2]);
}
catch(Exception e)
{
System.out.println("The scope has to be an integer number. Given '" + commandParts[2] + "'");
continue;
}
AstClafer clafer = Utils.getModelChildByName(model, claferName);
if (clafer == null)
{
System.out.println("The clafer is not found: '" + claferName + "'");
continue;
}
scope = scope.toBuilder().setScope(clafer, claferScopeValue).toScope();
solver = compileModel(model, scope);
if (solver != null)
System.out.println("Model is ready after the scope change");
}
else if (command.equals(commandScopeIncIndividual))
{
System.out.println("Increase individual scope: " + s);
if (commandParts.length != 3)
{
System.out.println("The format of the command is: '" + commandScopeIncIndividual + " <clafer> <integer>'");
System.out.println("Given: '" + s + "'");
continue;
}
String claferName = commandParts[1];
int claferScopeValue;
try{
claferScopeValue = Integer.parseInt(commandParts[2]);
}
catch(Exception e)
{
System.out.println("The scope has to be an integer number. Given '" + commandParts[2] + "'");
continue;
}
AstClafer clafer = Utils.getModelChildByName(model, claferName);
if (clafer == null)
{
System.out.println("The clafer is not found: '" + claferName + "'");
continue;
}
scope = scope.toBuilder().adjustScope(clafer, claferScopeValue).toScope();
solver = compileModel(model, scope);
if (solver != null)
System.out.println("Model is ready after the scope change");
}
else if (command.equals(commandSooMode))
{
Objective[] goals = modelTripple.getThd();
if (goals.length == 0) {
System.out.println("Cannot switch to the single-objective optimization mode, because there are no goals defined.");
} else if (goals.length > 1) {
System.out.println("Cannot switch to the single-objective optimization mode, because there is more than one goal defined.");
}
else
{
try
{
optimizer = ClaferCompiler.compile(model,
scope,
goals[0]);
}
catch (Exception e)
{
solver = null;
System.out.println(e.getMessage());
continue;
}
System.out.println("Switched to the single-objective optimization mode.");
System.out.println("Use the same command to switch back to normal mode.");
System.out.println("Use 'Next' command to get the next optimal instance.");
System.out.println();
currentMode = Mode.Soo;
optimalInstanceID = 0;
}
}
else
{
System.out.println("Unhandled command: " + s);
}
}
else // mode == SOO
{
if (command.equals(commandSooMode))
{
optimizer = null;
System.out.println("Switched back to the normal mode.");
currentMode = Mode.IG;
}
else if (command.equals(commandNext)) // next instance
{
if (optimizer.find())
{
InstanceModel instance = optimizer.instance();
optimalInstanceID++;
System.out.println("=== Optimal Instance " + optimalInstanceID + " ===\n");
for (InstanceClafer c : instance.getTopClafers())
{
Utils.printClafer(c, System.out);
}
}
else
{
System.out.println("No more optimal instances found.");
}
}
else
{
System.out.println("Invalid command in the SOO mode: " + s);
System.out.println("If you believe the command is valid, please switch to the normal mode and try again.");
}
}
}
System.out.println("Exit command");
}
private static void nextInstance(ClaferSolver solver) throws IOException
{
if (solver == null)
{
System.out.println("Could not start the instantiation");
return;
}
if (solver.find())
{
instanceID++;
System.out.println("=== Instance " + instanceID + " ===\n");
InstanceModel instance = solver.instance();
for (InstanceClafer c : instance.getTopClafers())
{
Utils.printClafer(c, System.out);
}
System.out.println("
}
else
{
System.out.println("No more instances found. Please consider increasing scopes");
}
}
private static ClaferSolver compileModel(AstModel model, Scope scope)
{
ClaferSolver solver;
instanceID = 0; // reset instance ID
try
{
solver = ClaferCompiler.compile(model, scope);
}
catch (Exception e)
{
solver = null;
System.out.println(e.getMessage());
}
return solver;
}
}
// Operating mode of the interactive command loop.
enum Mode{
IG,  // normal instance generation
Soo  // single-objective optimization
}
|
package studentcapture.datalayer;
import java.io.*;
import java.util.Hashtable;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.io.InputStreamResource;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.util.FileCopyUtils;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;
import studentcapture.assignment.AssignmentModel;
import studentcapture.datalayer.database.Assignment;
import studentcapture.datalayer.database.Course;
import studentcapture.datalayer.database.Submission;
import studentcapture.datalayer.database.Submission.SubmissionWrapper;
import studentcapture.datalayer.database.User;
import studentcapture.datalayer.filesystem.FilesystemConstants;
import studentcapture.datalayer.filesystem.FilesystemInterface;
import studentcapture.feedback.FeedbackModel;
import javax.validation.Valid;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
@RestController
@RequestMapping(value = "/DB")
/**
 * REST facade for the data layer: routes HTTP requests under {@code /DB} to
 * the database access beans and the file system interface.
 */
@RestController
@RequestMapping(value = "/DB")
public class DatalayerCommunicator {

    @Autowired
    private Submission submission;
    @Autowired
    private Assignment assignment;
    @Autowired
    private Course course;
    @Autowired
    private User user;
    // NOTE(review): injection is disabled here, so fsi stays null until
    // getAssignmentVideo() assigns it; setFeedback()/getFeedbackVideo() will
    // throw NullPointerException if they run first. TODO: restore @Autowired.
    //@Autowired
    FilesystemInterface fsi;

    /**
     * Fetches the grade for a student's submission on an assignment.
     *
     * @param model carries the student ID and assignment ID
     * @return grade information as a JSON-serializable map
     */
    @CrossOrigin()
    @RequestMapping(produces = MediaType.APPLICATION_JSON_VALUE, value = "getGrade", method = RequestMethod.GET)
    public Map<String, Object> getGrade(@Valid FeedbackModel model) {
        return submission.getGrade(model.getStudentID(), model.getAssignmentID());
    }

    /**
     * Creates a new assignment from the posted assignment model.
     *
     * @param assignmentModel course ID, title, start/end dates, min/max video
     *                        times and published flag
     * @return the new assignment's ID as a string, or the validation error message
     */
    @CrossOrigin
    @RequestMapping(value = "/createAssignment", method = RequestMethod.POST)
    public String createAssignment(@RequestBody AssignmentModel assignmentModel){
        Integer returnResult;
        try{
            returnResult = assignment.createAssignment(assignmentModel.getCourseID(), assignmentModel.getTitle(),
                    assignmentModel.getStartDate(), assignmentModel.getEndDate(), assignmentModel.getMinTimeSeconds(),
                    assignmentModel.getMaxTimeSeconds(), assignmentModel.getPublished());
        } catch (IllegalArgumentException e) {
            //TODO return smarter error msg
            return e.getMessage();
        }
        return returnResult.toString();
    }

    /**
     * Save grade for a submission.
     *
     * @param assID Assignment identification
     * @param teacherID Teacher identification
     * @param studentID Student identification
     * @param grade Grade
     * @return true if the grade was successfully saved to the database, else false
     */
    @CrossOrigin
    @RequestMapping(value = "/setGrade", method = RequestMethod.POST)
    public boolean setGrade(@RequestParam(value = "assID") String assID,
                            @RequestParam(value = "teacherID") String teacherID,
                            @RequestParam(value = "studentID") String studentID,
                            @RequestParam(value = "grade") String grade) {
        return submission.setGrade(assID, teacherID, studentID, grade);
    }

    /**
     * Set feedback for a submission; video and text cannot both be null.
     *
     * @param assID Assignment identification
     * @param studentID Student identification
     * @param feedbackVideo Video feedback
     * @param feedbackText Text feedback
     * @return true if at least one feedback artifact was stored, else false
     */
    @CrossOrigin
    @RequestMapping(value = "/setFeedback", method = RequestMethod.POST)
    public boolean setFeedback(@RequestParam(value = "assID") String assID,
                               @RequestParam(value = "studentID") String studentID,
                               @RequestParam(value = "feedbackVideo") MultipartFile feedbackVideo,
                               @RequestParam(value = "feedbackText") MultipartFile feedbackText) {
        String courseID = assignment.getCourseIDForAssignment(assID);
        String courseCode = course.getCourseCodeFromId(courseID);
        int stored = 0;
        if (feedbackVideo != null) {
            fsi.storeFeedbackVideo(courseCode, courseID, assID, studentID, feedbackVideo);
            stored++;
        }
        if (feedbackText != null) {
            fsi.storeFeedbackText(courseCode, courseID, assID, studentID, feedbackText);
            stored++;
        }
        // Collapsed the if/else returning boolean literals into one expression.
        return stored > 0;
    }

    /**
     * Sends the assignment video file.
     *
     * @param courseCode Courses 6 character identifier.
     * @param courseId Courses unique database id.
     * @param assignmentId Assignments unique database id.
     * @return The video file via http.
     */
    @CrossOrigin
    @RequestMapping(value = "/getAssignmentVideo/{courseCode}/{courseId}/{assignmentId}",
            method = RequestMethod.GET, produces = "video/webm")
    public ResponseEntity<InputStreamResource> getAssignmentVideo(
            @PathVariable("courseCode") String courseCode,
            @PathVariable("courseId") String courseId,
            @PathVariable("assignmentId") String assignmentId) {
        ResponseEntity<InputStreamResource> responseEntity;
        try {
            fsi = new FilesystemInterface(); // should not be here? @autowired??? (kept; see field note)
            FileInputStream videoInputStream = fsi.getAssignmentVideo(courseCode, courseId, assignmentId);
            // FIX: a single InputStream.read(byte[]) may return fewer bytes
            // than the file size; copyToByteArray reads the whole stream and
            // closes it.
            byte[] out = FileCopyUtils.copyToByteArray(videoInputStream);
            HttpHeaders responseHeaders = new HttpHeaders();
            responseHeaders.add("content-disposition", "inline; filename=AssignmentVideo");
            responseEntity = new ResponseEntity(out, responseHeaders, HttpStatus.OK);
        } catch (FileNotFoundException e) {
            responseEntity = new ResponseEntity("File not found.", HttpStatus.NOT_FOUND);
        } catch (IOException e) {
            responseEntity = new ResponseEntity("Error getting file.", HttpStatus.NOT_FOUND);
        }
        return responseEntity;
    }

    /**
     * Sends the feedback video file.
     *
     * @param model Model containing the information needed to get the correct video.
     * @return The video file via http.
     */
    @CrossOrigin
    @RequestMapping(value = "/getFeedbackVideo",
            method = RequestMethod.GET, produces = "video/webm")
    public ResponseEntity<InputStreamResource> getAssignmentVideo(@Valid FeedbackModel model) {
        ResponseEntity<InputStreamResource> responseEntity;
        String path = fsi.generatePath(Integer.toString(model.getCourseCode()),
                Integer.toString(model.getCourseID()),
                Integer.toString(model.getAssignmentID()),
                Integer.toString(model.getStudentID()));
        String filename = FilesystemConstants.FEEDBACK_VIDEO_FILENAME;
        File video = new File(path + filename);
        if (video.exists()) {
            try {
                byte[] out = FileCopyUtils.copyToByteArray(video);
                HttpHeaders responseHeaders = new HttpHeaders();
                responseHeaders.add("content-disposition", "inline; filename=" + filename);
                responseEntity = new ResponseEntity(out, responseHeaders, HttpStatus.OK);
            } catch (IOException e) {
                // FIX: was HttpStatus.OK, masking the failure from the client.
                responseEntity = new ResponseEntity("Error getting file", HttpStatus.INTERNAL_SERVER_ERROR);
            }
        } else {
            // FIX: was HttpStatus.OK, masking the failure from the client.
            responseEntity = new ResponseEntity("File not found", HttpStatus.NOT_FOUND);
        }
        return responseEntity;
    }

    /**
     * Fetches information about an assignment.
     * Description is mocked at the moment due to filesystem issues.
     *
     * @param assID Unique identifier for the assignment
     * @return Array containing [course ID, assignment title, opening datetime,
     *         closing datetime, minimum video time, maximum video time, description]
     */
    @CrossOrigin
    @RequestMapping(value = "/getAssignmentInfo", method = RequestMethod.POST)
    public ArrayList<String> getAssignmentInfo(@RequestParam(value = "assID") int assID){
        ArrayList<String> results = assignment.getAssignmentInfo(assID);
        //Need the courseCode for the path
        //code for the filesystem
        /*String courseCode = course.getCourseCodeFromId(results.get(0));
        FileInputStream descriptionStream = fsi.getAssignmentDescription(courseCode, results.get(0), assID);
        Scanner scanner = new Scanner(descriptionStream);
        String description = "";
        //Construct description string
        while (scanner.hasNext()){
            description += scanner.nextLine() + "\n";
        }*/
        String description = "beskrivning";
        results.add(description);
        return results;
    }

    /**
     * Check if given user name and password exist in database.
     *
     * @param username a unique user name.
     * @param pswd password for the unique username
     * @return true if correct user password and username is given otherwise false
     */
    @CrossOrigin
    @RequestMapping(value = "/login", method = RequestMethod.GET)
    public boolean login(@RequestParam(value = "username") String username,
                         @RequestParam(value = "pswd") String pswd) {
        return user.userExist(username, pswd);
    }

    /**
     * Register user by given information.
     *
     * @param userName user name for the user to be registered
     * @param fName First name
     * @param lName last name
     * @param pNr social security number
     * @param pwd password
     * @return true if registration was successful else false
     */
    @CrossOrigin
    @RequestMapping(value = "/register", method = RequestMethod.GET)
    public boolean registerUser(@RequestParam(value = "userName") String userName,
                                @RequestParam(value = "fName") String fName,
                                @RequestParam(value = "lName") String lName,
                                @RequestParam(value = "pNr") String pNr,
                                @RequestParam(value = "pwd") String pwd) {
        return user.addUser(userName, fName, lName, pNr, pwd);
    }

    /**
     * Returns list of all submissions made in response to a given assignment.
     *
     * @param assignmentID assignment identifier
     * @return list of submissions
     */
    @CrossOrigin
    @RequestMapping(
            produces = MediaType.APPLICATION_JSON_VALUE,
            method = RequestMethod.GET,
            value = "/getAllSubmissions")
    @ResponseBody
    public List<SubmissionWrapper> getAllSubmissions(
            @RequestParam(value="assignmentID") String assignmentID) {
        return submission.getAllSubmissions(assignmentID).get();
    }

    /**
     * Returns list of all ungraded submissions made in response to a given
     * assignment.
     *
     * @param assignmentID assignment identifier
     * @return list of submissions
     */
    @CrossOrigin
    @RequestMapping(
            produces = MediaType.APPLICATION_JSON_VALUE,
            method = RequestMethod.GET,
            value = "/getAllUngradedSubmissions")
    @ResponseBody
    public List<SubmissionWrapper> getAllUngradedSubmissions(
            @RequestParam(value="assignmentID") String assignmentID) {
        return submission.getAllUngraded(assignmentID).get();
    }

    /**
     * Returns list of all submissions made in response to a given assignment,
     * including students that are part of the course but has not yet made a
     * submission.
     *
     * @param assignmentID assignment identifier
     * @return list of submissions
     */
    @CrossOrigin
    @RequestMapping(
            produces = MediaType.APPLICATION_JSON_VALUE,
            method = RequestMethod.GET,
            value = "/getAllSubmissionsWithStudents")
    @ResponseBody
    public List<SubmissionWrapper> getAllSubmissionsWithStudents(
            @RequestParam(value="assignmentID") String assignmentID) {
        return submission.getAllSubmissionsWithStudents(assignmentID).get();
    }

    /**
     * Add a submission to the database and, when a video is supplied, to the
     * filesystem.
     *
     * @param courseCode course 6 character identifier
     * @param courseID course database id
     * @param assignmentID assignment database id
     * @param userID submitting user's id
     * @param video optional recorded answer
     * @return a human-readable status string describing the outcome
     */
    @CrossOrigin
    @RequestMapping(value = "/addSubmission/{courseCode}/{courseID}/{assignmentID}/{userID}", method = RequestMethod.POST)
    public String addSubmission(@PathVariable(value = "courseCode") String courseCode,
                                @PathVariable(value = "courseID") String courseID,
                                @PathVariable(value = "assignmentID") String assignmentID,
                                @PathVariable(value = "userID") String userID,
                                @RequestParam(value = "video", required = false) MultipartFile video) {
        if (video == null) {
            if (submission.addSubmission(assignmentID, userID)) {
                return "Student submitted an empty answer";
            }
            else {
                return "DB failure for student submission";
            }
        }
        // ADD to database here
        if (submission.addSubmission(assignmentID, userID)) {
            if (FilesystemInterface.storeStudentVideo(courseCode, courseID, assignmentID, userID, video)) {
                return "OK";
            } else
                return "Failed to add video to filesystem.";
        }
        return "failed to add submission to database";
    }
}
|
package tigase.server.xmppsession;
import tigase.util.DNSResolver;
import java.util.Map;
import static tigase.conf.Configurable.*;
/**
* Describe class SessionManagerConfig here.
*
*
* Created: Tue Oct 24 23:07:57 2006
*
* @author <a href="mailto:artur.hefczyc@tigase.org">Artur Hefczyc</a>
* @version $Rev$
*/
public class SessionManagerConfig {

    /** Property key for the user repository implementation class. */
    public static final String USER_REPO_CLASS_PROP_KEY = "user-repo-class";
    /** Property key for the user repository connection URL. */
    public static final String USER_REPO_URL_PROP_KEY = "user-repo-url";
    /** Property key for the authentication repository implementation class. */
    public static final String AUTH_REPO_CLASS_PROP_KEY = "auth-repo-class";
    /** Property key for the authentication repository connection URL. */
    public static final String AUTH_REPO_URL_PROP_KEY = "auth-repo-url";
    /** Property key for the plugin list. */
    public static final String PLUGINS_PROP_KEY = "plugins";
    /** Property key for per-plugin configuration. */
    public static final String PLUGINS_CONF_PROP_KEY = "plugins-conf";

    /**
     * Plugin set without in-band registration, used when account management
     * is handled by an external system (e.g. Drupal or LibreSource).
     */
    private static final String[] PLUGINS_NO_REG_PROP_VAL =
        {"jabber:iq:auth", "urn:ietf:params:xml:ns:xmpp-sasl",
         "urn:ietf:params:xml:ns:xmpp-bind", "urn:ietf:params:xml:ns:xmpp-session",
         "jabber:iq:roster", "jabber:iq:privacy", "presence", "msgoffline",
         "jabber:iq:version", "http://jabber.org/protocol/stats", "starttls",
         "vcard-temp", "http://jabber.org/protocol/commands", "jabber:iq:private",
         "urn:xmpp:ping"};

    /**
     * Full default plugin set, including jabber:iq:register for in-band
     * registration.
     */
    private static final String[] PLUGINS_FULL_PROP_VAL =
        {"jabber:iq:register", "jabber:iq:auth", "urn:ietf:params:xml:ns:xmpp-sasl",
         "urn:ietf:params:xml:ns:xmpp-bind", "urn:ietf:params:xml:ns:xmpp-session",
         "jabber:iq:roster", "jabber:iq:privacy", "presence", "jabber:iq:version",
         "http://jabber.org/protocol/stats", "starttls", "msgoffline",
         "vcard-temp", "http://jabber.org/protocol/commands", "jabber:iq:private",
         "urn:xmpp:ping"};

    // Mutable defaults, overwritten on every getDefaults() call.
    private static String[] HOSTNAMES_PROP_VAL = {"localhost", "hostname"};
    private static String[] ADMINS_PROP_VAL = {"admin@localhost", "admin@hostname"};

    /**
     * Fills {@code props} with session-manager defaults derived from the
     * generator parameters in {@code params}: repository classes/URIs,
     * plugin list, virtual host names and admin JIDs.
     *
     * @param props destination property map, populated by this call
     * @param params generator parameters (GEN_* keys)
     */
    public static void getDefaults(Map<String, Object> props,
        Map<String, Object> params) {

        boolean fullComps = true;
        String userRepoClass = XML_REPO_CLASS_PROP_VAL;
        String userRepoUrl = XML_REPO_URL_PROP_VAL;
        String authRepoClass = XML_REPO_CLASS_PROP_VAL;
        String authRepoUrl = XML_REPO_URL_PROP_VAL;

        Object userDb = params.get(GEN_USER_DB);
        if (userDb != null) {
            if (userDb.equals("mysql")) {
                userRepoClass = MYSQL_REPO_CLASS_PROP_VAL;
                userRepoUrl = MYSQL_REPO_URL_PROP_VAL;
                authRepoClass = MYSQL_REPO_CLASS_PROP_VAL;
                authRepoUrl = MYSQL_REPO_URL_PROP_VAL;
            } else if (userDb.equals("pgsql")) {
                userRepoClass = PGSQL_REPO_CLASS_PROP_VAL;
                userRepoUrl = PGSQL_REPO_URL_PROP_VAL;
                authRepoClass = PGSQL_REPO_CLASS_PROP_VAL;
                authRepoUrl = PGSQL_REPO_URL_PROP_VAL;
            } else {
                // Any other value is taken verbatim as a repository class name.
                userRepoClass = (String) userDb;
                authRepoClass = (String) userDb;
            }
        }
        if (params.get(GEN_USER_DB_URI) != null) {
            userRepoUrl = (String) params.get(GEN_USER_DB_URI);
            authRepoUrl = userRepoUrl;
        }

        Object authDb = params.get(GEN_AUTH_DB);
        if (authDb != null) {
            if (authDb.equals("mysql")) {
                authRepoClass = MYSQL_REPO_CLASS_PROP_VAL;
                authRepoUrl = MYSQL_REPO_URL_PROP_VAL;
            } else if (authDb.equals("pgsql")) {
                authRepoClass = PGSQL_REPO_CLASS_PROP_VAL;
                authRepoUrl = PGSQL_REPO_URL_PROP_VAL;
            } else if (authDb.equals("drupal")) {
                authRepoClass = DRUPAL_REPO_CLASS_PROP_VAL;
                authRepoUrl = DRUPAL_REPO_URL_PROP_VAL;
                // For Drupal or LibreSource authentication all account
                // management is done via Web interface so accounts containers
                // for Jabber data have to be created automatically
                userRepoUrl += "&autoCreateUser=true";
                fullComps = false;
            } else if (authDb.equals("libresource")) {
                authRepoClass = LIBRESOURCE_REPO_CLASS_PROP_VAL;
                authRepoUrl = LIBRESOURCE_REPO_URL_PROP_VAL;
                // Same rationale as the Drupal case above.
                userRepoUrl += "&autoCreateUser=true";
                fullComps = false;
            } else {
                authRepoClass = (String) authDb;
            }
        }
        if (params.get(GEN_AUTH_DB_URI) != null) {
            authRepoUrl = (String) params.get(GEN_AUTH_DB_URI);
        }

        props.put(USER_REPO_CLASS_PROP_KEY, userRepoClass);
        props.put(USER_REPO_URL_PROP_KEY, userRepoUrl);
        props.put(AUTH_REPO_CLASS_PROP_KEY, authRepoClass);
        props.put(AUTH_REPO_URL_PROP_KEY, authRepoUrl);

        // The reduced set leaves out plugins such as in-band registration
        // when accounts are managed externally.
        props.put(PLUGINS_PROP_KEY, fullComps ? PLUGINS_FULL_PROP_VAL : PLUGINS_NO_REG_PROP_VAL);

        if (params.get(GEN_VIRT_HOSTS) != null) {
            HOSTNAMES_PROP_VAL = ((String) params.get(GEN_VIRT_HOSTS)).split(",");
        } else {
            HOSTNAMES_PROP_VAL = DNSResolver.getDefHostNames();
        }
        props.put(HOSTNAMES_PROP_KEY, HOSTNAMES_PROP_VAL);

        if (params.get(GEN_ADMINS) != null) {
            ADMINS_PROP_VAL = ((String) params.get(GEN_ADMINS)).split(",");
        } else {
            // Default admin JID per configured host name.
            ADMINS_PROP_VAL = new String[HOSTNAMES_PROP_VAL.length];
            for (int i = 0; i < ADMINS_PROP_VAL.length; i++) {
                ADMINS_PROP_VAL[i] = "admin@" + HOSTNAMES_PROP_VAL[i];
            }
        }
        props.put(ADMINS_PROP_KEY, ADMINS_PROP_VAL);
    }
} // SessionManagerConfig
|
package uk.ac.ebi.pride.archive.ebeye;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import uk.ac.ebi.pride.archive.repo.project.*;
import uk.ac.ebi.pride.archive.repo.user.User;
import uk.ac.ebi.pride.data.model.CvParam;
import uk.ac.ebi.pride.data.model.DataFile;
import uk.ac.ebi.pride.data.model.Submission;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import java.io.File;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
/**
* GenerateEBeyeXML object.
*
* Generates EB-eye search XML to a given output directory based upon a PRIDE Archive project
* supplied as a Project and Submission.
*
* @author Tobias Ternent tobias@ebi.ac.uk
* @author Yasset Perez-Riverol ypriverol@gmail.com
* @version 1.0
* @since 2015-02-10
*/
public class GenerateEBeyeXML {
private static final Logger logger = LoggerFactory.getLogger(GenerateEBeyeXML.class);
private static final String NOT_AVAILABLE = "Not available";
private static final String OMICS_TYPE = "Proteomics";
private static final String PRIDE_URL = "http:
private static String DEFAULT_EXPERIMENT_TYPE = "Mass Spectrometry";
private Project project;
private File outputDirectory;
private Submission submission;
private HashMap<String, String> proteins;
private boolean fromPride;
    /**
     * Constructor, without parameters.
     * <p>All fields are left unset; callers must populate them before
     * {@code generate()} is invoked.</p>
     */
    public GenerateEBeyeXML() {
    }
/**
* Constructor.
*
* @param project (required) public project to be used for generating the EB-eye XML.
* @param submission (required) public project submission summary to be used for generating the EB-eye XML.
* @param outputDirectory (required) target output directory.
*/
public GenerateEBeyeXML(Project project, Submission submission, File outputDirectory, HashMap<String, String> proteins) {
this.project = project;
this.submission = submission;
this.outputDirectory = outputDirectory;
this.proteins = proteins;
this.fromPride = false;
}
public GenerateEBeyeXML(Project project, Submission submission, File outputDirectory, HashMap<String, String> proteins, boolean fromPride) {
this.project = project;
this.submission = submission;
this.outputDirectory = outputDirectory;
this.proteins = proteins;
this.fromPride = fromPride;
}
/**
* Performs the EB-eye generation of a defined public project, submission summary, and output directory.
* @throws Exception
*/
public void generate() throws Exception {
if (project==null || submission==null || outputDirectory==null) {
logger.error("The project, submission, and output directory all needs to be set before generating EB-eye XML.");
}
if (!project.isPublicProject()) {
logger.error("Project " + project.getAccession() + " is still private, not generating EB-eye XML.");
} else {
DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance();
DocumentBuilder documentBuilder = documentBuilderFactory.newDocumentBuilder();
Document document = documentBuilder.newDocument();
//Add database Name Node
Element database = document.createElement("database");
document.appendChild(database);
//Add the name of the database
Element name = document.createElement("name");
name.appendChild(document.createTextNode("pride"));
database.appendChild(name);
//Add the description of the database
Element description = document.createElement("description");
description.appendChild(document.createTextNode(""));
database.appendChild(description);
//Database release
Element release = document.createElement("release");
release.appendChild(document.createTextNode("3"));
database.appendChild(release);
//Release date (This release date is related whit the day where the data was generated)
Element releaseDate = document.createElement("release_date");
releaseDate.appendChild(document.createTextNode(new SimpleDateFormat("yyyy-MM-dd").format(new Date())));
database.appendChild(releaseDate);
Element entryCount = document.createElement("entry_count");
entryCount.appendChild(document.createTextNode("1"));
database.appendChild(entryCount);
//Start to index the entries of the project
Element entries = document.createElement("entries");
database.appendChild(entries);
//The project entry to be fill in the document
Element entry = document.createElement("entry");
entry.setAttribute("id", project.getAccession());
Element projectName = document.createElement("name");
projectName.appendChild(document.createTextNode(project.getTitle()));
entry.appendChild(projectName);
String projDescription = project.getTitle();
if (project.getProjectDescription()!=null && !project.getProjectDescription().isEmpty())
projDescription = project.getProjectDescription();
Element projectTitle = document.createElement("description");
projectTitle.appendChild(document.createTextNode(projDescription));
entry.appendChild(projectTitle);
/**
* Add all cross references to other databases such as TAXONOMY, UNIPROT OR ENSEMBL
*/
Element crossReferences = document.createElement("cross_references");
entry.appendChild(crossReferences);
if (submission.getProjectMetaData().getSpecies()!=null && submission.getProjectMetaData().getSpecies().size()>0) {
for (CvParam species : submission.getProjectMetaData().getSpecies()) {
Element refSpecies = document.createElement("ref");
refSpecies.setAttribute("dbkey", species.getAccession());
refSpecies.setAttribute("dbname", "TAXONOMY");
crossReferences.appendChild(refSpecies);
}
}
if (project.getReferences()!=null && project.getReferences().size()>0) {
for (Reference reference : project.getReferences()) {
Element refPubMedID = document.createElement("ref");
refPubMedID.setAttribute("dbkey", Integer.toString(reference.getPubmedId()));
refPubMedID.setAttribute("dbname", "pubmed");
crossReferences.appendChild(refPubMedID);
}
}
if (proteins!=null && !proteins.isEmpty()) {
for (String protein : proteins.keySet()) {
Element refProtein = document.createElement("ref");
refProtein.setAttribute("dbkey", protein);
refProtein.setAttribute("dbname", proteins.get(protein));
crossReferences.appendChild(refProtein);
}
}
Element dates = document.createElement("dates");
entry.appendChild(dates);
Element dateSubmitted = document.createElement("date");
dateSubmitted.setAttribute("value", new SimpleDateFormat("yy-MM-dd").format(project.getSubmissionDate()));
dateSubmitted.setAttribute("type", "submission");
dates.appendChild(dateSubmitted);
Element datePublished = document.createElement("date");
datePublished.setAttribute("value", new SimpleDateFormat("yy-MM-dd").format(project.getPublicationDate()));
datePublished.setAttribute("type", "publication");
dates.appendChild(datePublished);
/**
* Add additional Fields for DDI project to be able to find the projects. Specially additional metadata
* such as omics field, ptms, study type, data protocol sample protocol, etc.
*/
Element additionalFields = document.createElement("additional_fields");
entry.appendChild(additionalFields);
// Add the omics type
Element omicsType = document.createElement("field");
omicsType.setAttribute("name", "omics_type");
omicsType.appendChild(document.createTextNode(OMICS_TYPE));
additionalFields.appendChild(omicsType);
// //Add the Sample Processing Protocol
// if (project.getSubmissionDate()!=null) {
// Element submissionDate = document.createElement("field");
// submissionDate.setAttribute("name", "submission_date");
// submissionDate.appendChild(document.createTextNode(new SimpleDateFormat("yyyy-MM-dd").format(project.getSubmissionDate())));
// additionalFields.appendChild(submissionDate);
//Full dataset Repository
Element full_dataset_link = document.createElement("field");
full_dataset_link.setAttribute("name", "full_dataset_link");
full_dataset_link.appendChild(document.createTextNode(PRIDE_URL + project.getAccession()));
additionalFields.appendChild(full_dataset_link);
//Add the domain source
Element respository = document.createElement("field");
respository.setAttribute("name", "repository");
respository.appendChild(document.createTextNode("pride"));
additionalFields.appendChild(respository);
// //Add the Sample Processing Protocol
// if (project.getPublicationDate()!=null) {
// Element publicationDate = document.createElement("field");
// publicationDate.setAttribute("name", "publication_date");
// publicationDate.appendChild(document.createTextNode(new SimpleDateFormat("yyyy-MM-dd").format(project.getPublicationDate())));
// additionalFields.appendChild(publicationDate);
//Publication Date
if (project.getSampleProcessingProtocol()!=null && !project.getSampleProcessingProtocol().isEmpty()) {
Element sampleProcProt = document.createElement("field");
sampleProcProt.setAttribute("name", "sample_protocol");
sampleProcProt.appendChild(document.createTextNode(project.getSampleProcessingProtocol()));
additionalFields.appendChild(sampleProcProt);
}
//Add Data Processing Protocol
if (project.getDataProcessingProtocol()!=null && !project.getDataProcessingProtocol().isEmpty()) {
Element dataProcProt = document.createElement("field");
dataProcProt.setAttribute("name", "data_protocol");
dataProcProt.appendChild(document.createTextNode(project.getDataProcessingProtocol()));
additionalFields.appendChild(dataProcProt);
}
//Add Instrument information
if (submission.getProjectMetaData().getInstruments()!=null && submission.getProjectMetaData().getInstruments().size()>0) {
for (CvParam instrument : submission.getProjectMetaData().getInstruments()) {
Element fieldInstruemnt = document.createElement("field");
fieldInstruemnt.setAttribute("name", "instrument_platform");
fieldInstruemnt.appendChild(document.createTextNode(instrument.getName()));
additionalFields.appendChild(fieldInstruemnt);
}
} else {
Element fieldInstruemnt = document.createElement("field");
fieldInstruemnt.setAttribute("name", "instrument_platform");
fieldInstruemnt.appendChild(document.createTextNode(NOT_AVAILABLE));
additionalFields.appendChild(fieldInstruemnt);
}
//Add information about the species
if (submission.getProjectMetaData().getSpecies()!=null && submission.getProjectMetaData().getSpecies().size()>0) {
for (CvParam species : submission.getProjectMetaData().getSpecies()) {
Element refSpecies = document.createElement("field");
refSpecies.setAttribute("name", "species");
refSpecies.appendChild(document.createTextNode(species.getName()));
additionalFields.appendChild(refSpecies);
}
} else {
Element refSpecies = document.createElement("field");
refSpecies.setAttribute("name", "species");
refSpecies.appendChild(document.createTextNode(NOT_AVAILABLE));
additionalFields.appendChild(refSpecies);
}
//Add information about the Cell Type
if (submission.getProjectMetaData().getCellTypes()!=null && submission.getProjectMetaData().getCellTypes().size()>0) {
for (CvParam cellType : submission.getProjectMetaData().getCellTypes()) {
Element refCellType = document.createElement("field");
refCellType.setAttribute("name", "cell_type");
refCellType.appendChild(document.createTextNode(cellType.getName()));
additionalFields.appendChild(refCellType);
}
} else {
Element refCellType = document.createElement("field");
refCellType.setAttribute("name", "cell_type");
refCellType.appendChild(document.createTextNode(NOT_AVAILABLE));
additionalFields.appendChild(refCellType);
}
//Add disease information
if (submission.getProjectMetaData().getDiseases()!=null && submission.getProjectMetaData().getDiseases().size()>0) {
for (CvParam disease : submission.getProjectMetaData().getDiseases()) {
Element refDisease = document.createElement("field");
refDisease.setAttribute("name", "disease");
refDisease.appendChild(document.createTextNode(disease.getName()));
additionalFields.appendChild(refDisease);
}
} else {
Element refDisease = document.createElement("field");
refDisease.setAttribute("name", "disease");
refDisease.appendChild(document.createTextNode(NOT_AVAILABLE));
additionalFields.appendChild(refDisease);
}
//Tissue information
if (submission.getProjectMetaData().getTissues()!=null && submission.getProjectMetaData().getTissues().size()>0) {
for (CvParam tissue : submission.getProjectMetaData().getTissues()) {
Element fieldTissue = document.createElement("field");
fieldTissue.setAttribute("name", "tissue");
fieldTissue.appendChild(document.createTextNode(tissue.getName()));
additionalFields.appendChild(fieldTissue);
}
} else {
Element fieldTissue = document.createElement("field");
fieldTissue.setAttribute("name", "tissue");
fieldTissue.appendChild(document.createTextNode(NOT_AVAILABLE));
additionalFields.appendChild(fieldTissue);
}
//Add PTMs information
if (project.getPtms()!=null && project.getPtms().size()>0) {
for (ProjectPTM ptmName : project.getPtms()) {
Element modification = document.createElement("field");
modification.setAttribute("name", "modification");
modification.appendChild(document.createTextNode(ptmName.getName()));
additionalFields.appendChild(modification);
}
} else {
Element modification = document.createElement("field");
modification.setAttribute("name", "modification");
modification.appendChild(document.createTextNode(NOT_AVAILABLE));
additionalFields.appendChild(modification);
}
//Add information about experiment type
if (project.getExperimentTypes()!=null && project.getExperimentTypes().size()>0) {
for (ProjectExperimentType expType : project.getExperimentTypes()) {
Element refExpType = document.createElement("field");
refExpType.setAttribute("name", "technology_type");
refExpType.appendChild(document.createTextNode(expType.getName()));
additionalFields.appendChild(refExpType);
}
}
Element refExpType = document.createElement("field");
refExpType.setAttribute("name", "technology_type");
refExpType.appendChild(document.createTextNode(DEFAULT_EXPERIMENT_TYPE));
additionalFields.appendChild(refExpType);
//Add curator tags and keywords
if (project.getProjectTags()!=null && project.getProjectTags().size()>0) {
for (ProjectTag projectTag : project.getProjectTags()) {
Element fieldProjTag = document.createElement("field");
fieldProjTag.setAttribute("name", "curator_keywords");
fieldProjTag.appendChild(document.createTextNode(projectTag.getTag()));
additionalFields.appendChild(fieldProjTag);
}
}
if (project.getKeywords()!=null && !project.getKeywords().isEmpty()) {
//Todo: check if this always like this, Keywords should be a list of keywords splitted by comma
String[] arrayKey = project.getKeywords().split(",");
for(String key: arrayKey){
Element keywords = document.createElement("field");
keywords.setAttribute("name", "submitter_keywords");
keywords.appendChild(document.createTextNode(key));
additionalFields.appendChild(keywords);
}
}
//Specific to proteomics field the quantitation method
if (project.getQuantificationMethods()!=null && project.getQuantificationMethods().size()>0) {
for (ProjectQuantificationMethodCvParam quantMethod : project.getQuantificationMethods()) {
Element refQuantMethod = document.createElement("field");
refQuantMethod.setAttribute("name", "quantification_method");
refQuantMethod.appendChild(document.createTextNode(quantMethod.getName()));
additionalFields.appendChild(refQuantMethod);
}
} else {
Element quantMethod = document.createElement("field");
quantMethod.setAttribute("name", "quantification_method");
quantMethod.appendChild(document.createTextNode(NOT_AVAILABLE));
additionalFields.appendChild(quantMethod);
}
Element submissionType = document.createElement("field");
submissionType.setAttribute("name", "submission_type");
submissionType.appendChild(document.createTextNode(project.getSubmissionType().name()));
additionalFields.appendChild(submissionType);
if (project.getSoftware()!=null && project.getSoftware().size()>0) {
for (ProjectSoftwareCvParam software : project.getSoftware()) {
Element refSoftware = document.createElement("field");
refSoftware.setAttribute("name", "software");
refSoftware.appendChild(document.createTextNode(software.getValue()));
additionalFields.appendChild(refSoftware);
}
} else {
Element refSoftware = document.createElement("field");
refSoftware.setAttribute("name", "software");
refSoftware.appendChild(document.createTextNode(NOT_AVAILABLE));
additionalFields.appendChild(refSoftware);
}
//Add publication related information
if (project.getDoi()!=null && !project.getDoi().isEmpty()) {
Element doi = document.createElement("field");
doi.setAttribute("name", "doi");
doi.appendChild(document.createTextNode(project.getDoi()));
additionalFields.appendChild(doi);
}
//Add publication related information
if (project.getReferences()!=null && project.getReferences().size()>0) {
for (Reference reference : project.getReferences()) {
Element refPubMedLine = document.createElement("field");
refPubMedLine.setAttribute("name", "publication");
refPubMedLine.appendChild(document.createTextNode(reference.getReferenceLine()));
additionalFields.appendChild(refPubMedLine);
}
}
//Add submitter information
if(project.getSubmitter() != null){
Element submitter = document.createElement("field");
submitter.setAttribute("name", "submitter");
submitter.appendChild(document.createTextNode(getName(project.getSubmitter())));
additionalFields.appendChild(submitter);
Element submitterMail = document.createElement("field");
submitterMail.setAttribute("name", "submitter_mail");
submitterMail.appendChild(document.createTextNode(project.getSubmitter().getEmail()));
additionalFields.appendChild(submitterMail);
if(project.getSubmitter().getAffiliation() != null){
Element submitterAffiliation = document.createElement("field");
submitterAffiliation.setAttribute("name", "submitter_affiliation");
submitterAffiliation.appendChild(document.createTextNode(project.getSubmitter().getAffiliation()));
additionalFields.appendChild(submitterAffiliation);
}
}
//Add LabHead information
if(project.getLabHeads() != null && !project.getLabHeads().isEmpty()){
for(LabHead labhead: project.getLabHeads()){
Element submitter = document.createElement("field");
submitter.setAttribute("name", "labhead");
submitter.appendChild(document.createTextNode(getName(labhead)));
additionalFields.appendChild(submitter);
Element submitterMail = document.createElement("field");
submitterMail.setAttribute("name", "labhead_mail");
submitterMail.appendChild(document.createTextNode(labhead.getEmail()));
additionalFields.appendChild(submitterMail);
if(labhead.getAffiliation() != null){
Element submitterAffiliation = document.createElement("field");
submitterAffiliation.setAttribute("name", "labhead_affiliation");
submitterAffiliation.appendChild(document.createTextNode(labhead.getAffiliation()));
additionalFields.appendChild(submitterAffiliation);
}
}
}
//Add original link to the files
if(submission.getDataFiles() != null && !submission.getDataFiles().isEmpty()){
for(DataFile file: submission.getDataFiles()){
Element dataset_link = document.createElement("field");
dataset_link.setAttribute("name", "dataset_file");
String url = null;
boolean toBeAdded = true;
if (fromPride) {
Date pubDate = project.getPublicationDate();
Calendar calendar = Calendar.getInstance();
calendar.setTime(pubDate);
int month = calendar.get(Calendar.MONTH) + 1; // the month are zero based, hence the correction +1
int year = calendar.get(Calendar.YEAR);
url = "ftp://ftp.pride.ebi.ac.uk/pride/data/archive/" + year + "/" + (month < 10 ? "0" : "") + month + "/"
+ project.getAccession() + "/" + file.getFileName();
} else if (file.getUrl() != null && !file.getUrl().toString().isEmpty()) {
url = file.getUrl().toString();
}
if(url != null){
dataset_link.appendChild(document.createTextNode(url));
additionalFields.appendChild(dataset_link);
}
}
}
entries.appendChild(entry);
TransformerFactory transformerFactory = TransformerFactory.newInstance();
Transformer transformer = transformerFactory.newTransformer();
transformer.setOutputProperty(OutputKeys.INDENT, "yes");
transformer.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "2");
transformer.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, "yes");
DOMSource source = new DOMSource(document);
File outputXML = new File(outputDirectory, "PRIDE_EBEYE_" + project.getAccession() + ".xml");
StreamResult result = new StreamResult(outputXML.toURI().getPath());
transformer.transform(source, result);
logger.info("Finished generating EB-eye XML file for: " + outputDirectory + File.separator + "PRIDE_EBEYE_" + project.getAccession() + ".xml" );
}
}
/**
 * Builds a display name for a submitter: "first last" when a non-empty
 * last name is present, otherwise just the first name.
 *
 * @param submitter the submitting user (must not be null)
 * @return the formatted display name
 */
private String getName(User submitter) {
    final String lastName = submitter.getLastName();
    if (lastName == null || lastName.length() == 0) {
        return submitter.getFirstName();
    }
    return submitter.getFirstName() + " " + lastName;
}
/**
 * Builds a display name for a lab head: "first last" when a non-empty
 * last name is present, otherwise just the first name.
 *
 * @param submitter the lab head (must not be null)
 * @return the formatted display name
 */
private String getName(LabHead submitter) {
    final String lastName = submitter.getLastName();
    if (lastName == null || lastName.length() == 0) {
        return submitter.getFirstName();
    }
    return submitter.getFirstName() + " " + lastName;
}
/**
 * Sets the current project.
 *
 * <p>The XML generation code reads this field for the accession, dates,
 * protocols, PTMs, references, submitter and lab-head information, so it
 * must be assigned before generating the EB-eye output.</p>
 *
 * @param project New project to be assigned.
 */
public void setProject(Project project) {
    this.project = project;
}
/**
 * Sets the current submission.
 *
 * <p>The XML generation code reads instruments, species, cell types,
 * diseases, tissues and data files from this submission's metadata, so it
 * must be assigned before generating the EB-eye output.</p>
 *
 * @param submission New submission to be assigned.
 */
public void setSubmission(Submission submission) {
    this.submission = submission;
}
/**
 * Sets the current output directory.
 *
 * <p>The generated file is written as
 * {@code PRIDE_EBEYE_<accession>.xml} inside this directory.</p>
 *
 * @param outputDirectory New output directory to be assigned.
 */
public void setOutputDirectory(File outputDirectory) {
    this.outputDirectory = outputDirectory;
}
/**
 * Sets the current proteins set.
 *
 * <p>Keys are protein accessions (written as {@code dbkey}) and values are
 * the corresponding database names (written as {@code dbname}) in the
 * cross-reference {@code ref} elements of the generated XML.</p>
 *
 * @param proteins New set of proteins.
 */
public void setProteins(HashMap<String, String> proteins) {
    this.proteins = proteins;
}
}
|
package uk.co.skyem.projects.Z80emu;
import uk.co.skyem.projects.Z80emu.Register.*;
import uk.co.skyem.projects.Z80emu.util.buffer.IByteBuffer;
/**
 * Decodes Z80 instructions fetched from the emulated memory.
 *
 * <p>Decoding follows the usual Z80 opcode layout: an optional prefix byte
 * (0xDD, 0xFD, 0xCB or 0xED), an optional second 0xCB prefix (only after
 * 0xDD/0xFD) followed by a displacement byte, then the opcode itself, which
 * is split into the conventional x/y/z/p/q bit fields.</p>
 */
public class InstructionDecoder {
    /** Memory the decoder fetches opcodes and operands from. */
    private IByteBuffer memoryBuffer;
    /** CPU register file (program counter, register pairs, ...). */
    private Registers registers;
    /** Owning CPU core. */
    private Core cpuCore;

    InstructionDecoder(Core cpu) {
        memoryBuffer = cpu.memoryBuffer;
        cpuCore = cpu;
        registers = cpu.registers;
    }

    // 8 Bit registers
    // HL is (HL)
    private enum RegisterTable {
        B, C, D, E, H, L, HL, A
    }

    // Register Pairs featuring SP
    private enum RegisterPairTable {
        BC, DE, HL, SP
    }

    // Register Pairs featuring AF
    private enum RegisterPair2Table {
        BC, DE, HL, AF
    }

    private enum ConditionTable {
        NZ, Z, NC, C, PO, PE, P, M
    }

    // Arithmetic and logic operations
    private enum AluTable {
        ADD_A, ACD_A, SUB, SBC_A, AND, XOR, OR, CP
    }

    // Rotation and shift operations
    private enum RotationTable {
        RLC, RRC, RL, RR, SLA, SRA, SLL, SRL
    }

    private enum InterruptModeTable {
        ZERO, ZERO_ONE, ONE, TWO, ZERO_B, ZERO_ONE_B, ONE_B, TWO_B
    }

    private enum BlockInstructions {
        LDI, CPI, INI, OUTI,
        LDD, CPD, IND, OUTD,
        LDIR, CPIR, INIR, OTIR,
        LDDR, CPDR, INDR, OTDR
    }

    private BlockInstructions[][] BlockInstructionTable = {
        {},{},{},{},
        {BlockInstructions.LDI, BlockInstructions.CPI, BlockInstructions.INI, BlockInstructions.OUTI},
        {BlockInstructions.LDD, BlockInstructions.CPD, BlockInstructions.IND, BlockInstructions.OUTD},
        {BlockInstructions.LDIR, BlockInstructions.CPIR, BlockInstructions.INIR, BlockInstructions.OTIR},
        {BlockInstructions.LDDR, BlockInstructions.CPDR, BlockInstructions.INDR, BlockInstructions.OTDR},
    };

    // TODO: Make it fetch the least data necessary.
    public void decode(byte[] data) {
        byte prefix = 0;
        byte opcode;
        boolean secondPrefix = false;
        byte displacement;
        short immediateData;
        int position = 0;
        // The cleanest way I could do this...
        // Find out the prefix (if there is one).
        // BUGFIX: Java bytes are signed, so (byte) 0xDD reads back as -35 and
        // a plain (int) cast sign-extends it; the 0xDD/0xFD/0xCB/0xED cases
        // could never match. Mask to an unsigned value before comparing.
        switch (data[position] & 0xFF) {
            case 0xDD:case 0xFD:case 0xCB:case 0xED:
                prefix = data[position++];
        }
        // Is there a second prefix? (only 0xDD/0xFD can be followed by 0xCB)
        // BUGFIX: same sign-extension issue — compare unsigned values.
        final int prefixValue = prefix & 0xFF;
        if (prefixValue == 0xFD || prefixValue == 0xDD)
            if ((data[position] & 0xFF) == 0xCB) {
                secondPrefix = true;
                // Get the displacement byte (DD CB d op / FD CB d op layout)
                displacement = data[++position];
                ++position;
            }
        // Get the opcode of the instruction
        opcode = data[position++];
        // NOTE(review): the original comment said the immediate data is read
        // "if there is no second prefix", but the code reads it when
        // secondPrefix is true — confirm which is intended.
        immediateData = secondPrefix ? data[position] : 0;
        // split up the opcode for further processing (standard x/y/z/p/q split)
        byte x = (byte) ((0b11000000 & opcode) >>> 6);
        byte y = (byte) ((0b00111000 & opcode) >>> 3);
        byte z = (byte) (0b00000111 & opcode);
        byte p = (byte) (0b110 & y);
        boolean q = (0b001 & y) == 0b1;
    }

    public void decode(long data) {
        // Split the long into a byte array to give to decode
    }

    // TODO: Is there a more appropriate name?
    /**
     * Fetches the opcode at the program counter, executes it, and advances
     * the program counter past the instruction.
     */
    public void cycle() {
        short position = registers.getProgramCounter();
        byte currentOpcode = memoryBuffer.getByte(position);
        switch (currentOpcode) {
            case 0x00: // NOP
                System.out.println("NOP");
                break;
            case 0x01: // LD BC,nn
                System.out.println("LD BC,nn");
                // Put 16 bits (nn) into register BC
                LDRegisterMemory(registers.REG_BC, position + 1);
                registers.incrementProgramCounter((short) 2);
                break;
            case 0x02: // LD (BC),A
                System.out.println("LD (BC),A");
                // Put the data in register A into the memory address specified in BC
                LDMemoryRegister(registers.REG_BC.getData(), registers.REG_A);
                break;
            case 0x03: // INC BC
                System.out.println("INC BC");
                registers.REG_BC.increment();
                break;
            case 0x06: // LD B,n
                System.out.println("LD B,n");
                // Put 8 bits (n) into register B
                LDRegisterMemory(registers.REG_B, position + 1);
                registers.incrementProgramCounter((short) 1);
                break;
            default: // Be unpredictable! \o/
                break;
        }
        // Step past the opcode byte itself.
        registers.incrementProgramCounter();
    }

    /** LD r,n — load an immediate byte into an 8-bit register. */
    private void LDRegisterFixed(Register8 destination, byte data) {
        destination.setData(data);
    }

    /** LD rr,nn — load an immediate word into a 16-bit register. */
    private void LDRegisterFixed(Register16 destination, short data) {
        destination.setData(data);
    }

    /** LD r,r' — copy one 8-bit register into another. */
    private void LDRegisterRegister(Register8 destination, Register8 source) {
        destination.setData(source);
    }

    /** LD rr,rr' — copy one 16-bit register into another. */
    private void LDRegisterRegister(Register16 destination, Register16 source) {
        destination.setData(source);
    }

    /** LD (nn),r — store an 8-bit register at a memory address. */
    private void LDMemoryRegister(short destination, Register8 source) {
        memoryBuffer.putByte(destination, source.getData());
    }

    /** LD (nn),rr — store a 16-bit register at a memory address. */
    private void LDMemoryRegister(short destination, Register16 source) {
        // TODO: Check that this is what it does.
        memoryBuffer.putWord(destination + 1, source.getData());
    }

    /** LD r,(nn) — load an 8-bit register from a memory address. */
    private void LDRegisterMemory(Register8 destination, int source) {
        destination.setData(memoryBuffer.getByte(source));
    }

    /** LD rr,(nn) — load a 16-bit register from a memory address. */
    private void LDRegisterMemory(Register16 destination, int source) {
        // TODO: Check that this is what it does.
        destination.setData(memoryBuffer.getWord(source + 1));
    }
}
|
package res.algebra;
import res.*;
import java.util.*;
public class Sq implements GradedElement<Sq>
{
public static final Sq UNIT = new Sq(new int[] {});
public static final Sq[] HOPF = new Sq[] {
new Sq(new int[] {1}),
new Sq(new int[] {Config.Q}),
new Sq(new int[] {Config.P*Config.Q}),
new Sq(new int[] {Config.P*Config.P*Config.Q})
};
public int[] q; /* Indices of the power operations.
Mod 2, i indicates Sq^i.
Mod p>2, 2i(p-1) indicates P^i, 2i(p-1)+1 indicates B P^i. */
public Sq(int[] qq) { q = qq; }
private static final int[] EMPTY = new int[] {};
private static final int[] ZERO = new int[] {0};
private static final int[] ONE = new int[] {1};
private static final int[][] SINGLETONS = new int[10000][1];
static {
for(int i = 0; i < 10000; i++) { SINGLETONS[i][0] = i; }
}
/* novikov filtration is 1 if there are no betas. this returns 1 for the
* identity operation; is this okay? */
@Override public int[] extraGrading()
{
if(Config.MICHAEL_MODE) {
for(int i : q)
if(i % Config.P != 0)
return ZERO;
return ONE;
} else if(Config.MOTIVIC_GRADING) {
int tot = 0;
for(int a : q) tot += a/2;
return SINGLETONS[tot];
} else return EMPTY;
}
@Override public int deg()
{
int deg = 0;
for(int i : q)
deg += i;
return deg;
}
public int excess()
{
if(q.length == 0) return 0;
int exc = q[q.length-1];
for(int i = 1; i < q.length; i++)
exc += q[i-1] - Config.P * q[i];
return exc;
}
public ModSet<Sq> times(Sq o)
{
int[] ret = new int[q.length + o.q.length];
for(int i = 0; i < q.length; i++)
ret[i] = q[i];
for(int i = 0; i < o.q.length; i++)
ret[q.length + i] = o.q[i];
if(Config.P == 2 && !Config.MICHAEL_MODE)
return new Sq(ret).resolve_2();
else
return new Sq(ret).resolve_p();
}
private ModSet<Sq> resolve_2()
{
ModSet<Sq> ret;
ret = new ModSet<Sq>();
for(int i = q.length - 2; i >= 0; i
int a = q[i];
int b = q[i+1];
if(a >= 2 * b)
continue;
/* apply Adem relation */
for(int c = 0; c <= a/2; c++) {
if(! ResMath.binom_2(b - c - 1, a - 2*c))
continue;
int[] t;
if(c == 0) {
t = Arrays.copyOf(q, q.length - 1);
for(int k = i+2; k < q.length; k++)
t[k-1] = q[k];
t[i] = a+b-c;
} else {
t = Arrays.copyOf(q, q.length);
t[i] = a+b-c;
t[i+1] = c;
}
/* recurse */
for(Map.Entry<Sq,Integer> sub : new Sq(t).resolve_2().entrySet())
ret.add(sub.getKey(), sub.getValue());
}
return ret;
}
/* all clear */
ret.add(this, 1);
return ret;
}
private ModSet<Sq> resolve_p()
{
ModSet<Sq> ret;
ret = new ModSet<Sq>();
/* convenience */
final int P = Config.P;
final int Q = 2 * (Config.P - 1);
final int R = Config.P - 1;
for(int i = q.length - 2; i >= 0; i
int x = q[i];
int y = q[i+1];
if(x >= Config.P * y)
continue;
/* apply Adem relation */
int a = x / Q;
int b = y / Q;
int rx = x % Q;
int ry = y % Q;
for(int c = 0; c <= a/Config.P; c++) {
int sign = ((a ^ c) & 1) == 0 ? 1 : -1;
// System.out.printf("adem: x=%d y=%d a=%d b=%d sign=%d\n", x, y, a, b, sign);
if(ry == 0)
resolve_p_add_term( sign*ResMath.binom_p(R*(b-c)-1,a-c*P ), (a+b-c)*Q+rx, c*Q , i, ret);
else {
if(rx == 0) {
resolve_p_add_term( sign*ResMath.binom_p(R*(b-c) ,a-c*P ), (a+b-c)*Q+1, c*Q , i, ret);
resolve_p_add_term(-sign*ResMath.binom_p(R*(b-c)-1,a-c*P-1), (a+b-c)*Q , c*Q+1, i, ret);
} else
resolve_p_add_term(-sign*ResMath.binom_p(R*(b-c)-1,a-c*P-1), (a+b-c)*Q+1, c*Q+1, i, ret);
}
}
return ret;
}
/* all clear */
ret.add(this, 1);
return ret;
}
private void resolve_p_add_term(int coeff, int a, int b, int i, ModSet<Sq> ret)
{
// System.out.printf("adem_term: coeff=%d a=%d b=%d\n", coeff, a, b);
coeff = ResMath.dmod(coeff);
if(coeff == 0) return; /* save some work... */
int[] t;
if(b == 0) {
t = Arrays.copyOf(q, q.length - 1);
for(int k = i+2; k < q.length; k++)
t[k-1] = q[k];
t[i] = a;
} else {
t = Arrays.copyOf(q, q.length);
t[i] = a;
t[i+1] = b;
}
/* recurse */
for(Map.Entry<Sq,Integer> sub : new Sq(t).resolve_p().entrySet())
ret.add(sub.getKey(), sub.getValue() * coeff);
}
@Override public String toString()
{
if(q.length == 0) return "1";
String s = "";
if(Config.P == 2 && ! Config.MICHAEL_MODE) {
for(int i : q) s += "Sq"+i;
} else {
for(int i : q) {
if(i == 1)
s += "\u03b2"; /* beta */
else if(i % Config.Q == 0)
s += "P"+(i/Config.Q);
else if(i % Config.Q == 1)
s += "\u03b2P"+(i/Config.Q);
else
Main.die_if(true, "bad A_"+Config.P+" element: Sq"+i);
}
}
return s;
}
@Override public int hashCode()
{
int hash = 0;
for(int i : q)
hash = hash * 27863521 ^ i;
return hash;
}
@Override public boolean equals(Object o)
{
Sq s = (Sq)o;
if(q.length != s.q.length)
return false;
for(int i = 0; i < q.length; i++)
if(q[i] != s.q[i])
return false;
return true;
}
@Override public int compareTo(Sq o)
{
if(q.length != o.q.length)
return q.length - o.q.length;
for(int i = 0; i < q.length; i++)
if(q[i] != o.q[i])
return q[i] - o.q[i];
return 0;
}
}
|
package javaslang.test;
import javaslang.Tuple2;
import javaslang.collection.Iterator;
import javaslang.collection.Stream;
import java.util.Objects;
import java.util.Random;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Predicate;
/**
* Generators are the building blocks for providing arbitrary objects.
* <p>
* To ease the creation of Arbitraries, Gen is a FunctionalInterface which extends {@code Function<Random, T>}.
* <p>
* Gen objects are obtained via one of the methods {@code choose}, {@code fail}, {@code frequency}, {@code of} and
* {@code oneOf}.
* <p>
* Given Gen objects may be transformed using one of the methods {@code filter}, {@code map} and {@code flatMap}.
* <p>
* A simple way to obtain an Arbitrary of a Gen is to call {@linkplain javaslang.test.Gen#arbitrary()}.
* This will ignore the size hint of Arbitrary.
*
* @param <T> type of generated objects
* @author Daniel Dietrich
* @see javaslang.test.Arbitrary
* @since 1.2.0
*/
@FunctionalInterface
@FunctionalInterface
public interface Gen<T> {

    long serialVersionUID = 1L;

    /** Maximum number of rejected samples before {@link #filter(Predicate)} gives up. */
    int FILTER_THRESHOLD = Integer.MAX_VALUE;

    /**
     * Functional interface of this generator.
     *
     * @param random a random number generator
     * @return A generated value of type T.
     */
    T apply(Random random);

    /**
     * A generator which constantly returns t.
     *
     * @param t A value.
     * @param <T> Type of t.
     * @return A new T generator
     */
    static <T> Gen<T> of(T t) {
        return ignored -> t;
    }

    /**
     * A generator that yields the elements of the infinite stream
     * {@code seed, next(seed), next(next(seed)), ...}, one per call.
     * <p>
     * Note: the generator is stateful (it shares one iterator across calls)
     * and ignores the supplied {@code Random}.
     *
     * @param seed the first generated value
     * @param next computes each subsequent value from the previous one
     * @param <T>  Type of generated values
     * @return A new T generator
     */
    static <T> Gen<T> of(T seed, Function<? super T, ? extends T> next) {
        final Iterator<T> iterator = Stream.iterate(seed, next).iterator();
        return ignored -> iterator.next();
    }

    /**
     * Chooses an int between min and max, bounds inclusive and numbers distributed according to the distribution of
     * the underlying random number generator.
     * <p>
     * Note: min and max are internally swapped if min > max.
     *
     * @param min lower bound
     * @param max upper bound
     * @return A new int generator
     */
    static Gen<Integer> choose(int min, int max) {
        if (min == max) {
            return ignored -> min;
        } else {
            final int _min = Math.min(min, max);
            final int _max = Math.max(min, max);
            // BUGFIX: use long arithmetic — (_max - _min) overflows int when
            // the bounds span more than Integer.MAX_VALUE values, e.g.
            // choose(Integer.MIN_VALUE, Integer.MAX_VALUE), producing a wrong
            // (tiny) bound for nextInt.
            final long range = (long) _max - _min + 1;
            if (range <= Integer.MAX_VALUE) {
                return rng -> rng.nextInt((int) range) + _min;
            } else {
                return rng -> (int) (_min + (long) (rng.nextDouble() * range));
            }
        }
    }

    /**
     * Chooses a long between min and max, bounds inclusive and numbers distributed according to the distribution of
     * the underlying random number generator.
     * <p>
     * Note: min and max are internally swapped if min > max.
     *
     * @param min lower bound
     * @param max upper bound
     * @return A new long generator
     */
    static Gen<Long> choose(long min, long max) {
        if (min == max) {
            return ignored -> min;
        } else {
            return random -> {
                final double d = random.nextDouble();
                final long _min = Math.min(min, max);
                final long _max = Math.max(min, max);
                // NOTE(review): double interpolation loses precision for very
                // large ranges; the trailing "+ d" makes _max reachable but
                // the distribution is only approximately uniform. Kept as-is
                // to preserve existing behavior.
                return (long) ((d * _max) + ((1.0 - d) * _min) + d);
            };
        }
    }

    /**
     * Chooses a double between min and max, bounds inclusive and numbers distributed according to the distribution
     * of the underlying random number generator.
     * <p>
     * Note: min and max are internally swapped if min > max.
     *
     * @param min lower bound (must be finite)
     * @param max upper bound (must be finite)
     * @return A new double generator
     * @throws IllegalArgumentException if min or max is infinite or NaN
     */
    static Gen<Double> choose(double min, double max) {
        if (Double.isInfinite(min)) {
            throw new IllegalArgumentException("min is infinite");
        }
        if (Double.isInfinite(max)) {
            throw new IllegalArgumentException("max is infinite");
        }
        if (Double.isNaN(min)) {
            throw new IllegalArgumentException("min is not a number (NaN)");
        }
        if (Double.isNaN(max)) {
            throw new IllegalArgumentException("max is not a number (NaN)");
        }
        if (min == max) {
            return ignored -> min;
        } else {
            return random -> {
                final double d = random.nextDouble();
                final double _min = Math.min(min, max);
                final double _max = Math.max(min, max);
                return d * _max + (1.0 - d) * _min;
            };
        }
    }

    /**
     * Chooses a char between min and max, bounds inclusive and chars distributed according to the underlying random
     * number generator.
     * <p>
     * Note: min and max are internally swapped if min > max.
     *
     * @param min lower bound
     * @param max upper bound
     * @return A new char generator
     */
    static Gen<Character> choose(char min, char max) {
        if (min == max) {
            return ignored -> min;
        } else {
            return random -> (char) (int) Gen.choose((int) min, (int) max).apply(random);
        }
    }

    /**
     * Chooses an enum value from all the enum constants defined in the enumerated type.
     *
     * @param clazz Enum class
     * @param <E>   type of enum constants
     * @return A new enum generator
     * @throws NullPointerException if clazz is null
     */
    static <E extends Enum<E>> Gen<E> choose(Class<E> clazz) {
        Objects.requireNonNull(clazz, "clazz is null");
        return random -> Gen.choose(clazz.getEnumConstants()).apply(random);
    }

    /**
     * Chooses a value from all values in the array.
     *
     * @param values array with the values to choose from
     * @param <E>    element type
     * @return A new generator; a failing generator if the array is empty
     * @throws NullPointerException if values is null
     */
    static <E> Gen<E> choose(E[] values) {
        Objects.requireNonNull(values, "values is null");
        if(values.length == 0)
            return Gen.fail("Empty array");
        else
            return random -> Gen.choose(0, values.length - 1).map(i -> values[i]).apply(random);
    }

    /**
     * A failing generator which throws a RuntimeException("failed").
     *
     * @param <T> Type of values theoretically generated.
     * @return A new generator which always fails with the message "failed"
     */
    static <T> Gen<T> fail() {
        return fail("failed");
    }

    /**
     * A failing generator which throws a RuntimeException.
     *
     * @param message Message thrown.
     * @param <T>     Type of values theoretically generated.
     * @return A new generator which always fails with the given message
     */
    static <T> Gen<T> fail(String message) {
        return ignored -> {
            throw new RuntimeException(message);
        };
    }

    /**
     * Chooses one of the given generators, where each generator is picked with
     * a probability proportional to its (non-negative) frequency.
     *
     * @param generators (frequency, generator) pairs
     * @param <T>        Type of generated values
     * @return A new T generator
     * @throws NullPointerException     if generators is null
     * @throws IllegalArgumentException if generators is empty
     */
    @SuppressWarnings("varargs")
    @SafeVarargs
    static <T> Gen<T> frequency(Tuple2<Integer, Gen<T>>... generators) {
        Objects.requireNonNull(generators, "generators is null");
        if (generators.length == 0) {
            throw new IllegalArgumentException("generators is empty");
        }
        final Iterable<Tuple2<Integer, Gen<T>>> iterable = Stream.of(generators);
        return frequency(iterable);
    }

    /**
     * Chooses one of the given generators, where each generator is picked with
     * a probability proportional to its (non-negative) frequency.
     *
     * @param generators (frequency, generator) pairs
     * @param <T>        Type of generated values
     * @return A new T generator
     * @throws NullPointerException     if generators is null
     * @throws IllegalArgumentException if generators is empty or a frequency is negative
     */
    static <T> Gen<T> frequency(Iterable<Tuple2<Integer, Gen<T>>> generators) {
        Objects.requireNonNull(generators, "generators is null");
        final Stream<Tuple2<Integer, Gen<T>>> stream = Stream.ofAll(generators);
        if (stream.isEmpty()) {
            throw new IllegalArgumentException("generators is empty");
        }
        // Walks the stream, subtracting each frequency from n until the
        // drawn number falls inside the current generator's slot.
        final class Frequency {
            Gen<T> gen(int n, Stream<Tuple2<Integer, Gen<T>>> stream) {
                final int k = stream.head()._1;
                if (k < 0) {
                    throw new IllegalArgumentException("negative frequency: " + k);
                }
                return (n <= k) ? stream.head()._2 : gen(n - k, stream.tail());
            }
        }
        final int size = stream.map(t -> t._1).sum().intValue();
        return choose(1, size).flatMap(n -> new Frequency().gen(n, stream));
    }

    /**
     * Chooses one of the given generators with equal probability.
     *
     * @param generators the generators to choose from
     * @param <T>        Type of generated values
     * @return A new T generator
     * @throws NullPointerException     if generators is null
     * @throws IllegalArgumentException if generators is empty
     */
    @SafeVarargs
    static <T> Gen<T> oneOf(Gen<T>... generators) {
        Objects.requireNonNull(generators, "generators is null");
        if (generators.length == 0) {
            throw new IllegalArgumentException("generators is empty");
        }
        return choose(0, generators.length - 1).flatMap(i -> generators[i]);
    }

    /**
     * Chooses one of the given generators with equal probability.
     *
     * @param generators the generators to choose from
     * @param <T>        Type of generated values
     * @return A new T generator
     * @throws NullPointerException     if generators is null
     * @throws IllegalArgumentException if generators is empty
     */
    static <T> Gen<T> oneOf(Iterable<Gen<T>> generators) {
        Objects.requireNonNull(generators, "generators is null");
        final Stream<Gen<T>> stream = Stream.ofAll(generators);
        if (stream.isEmpty()) {
            throw new IllegalArgumentException("generators is empty");
        }
        @SuppressWarnings("unchecked")
        final Gen<T>[] array = stream.toJavaArray((Class<Gen<T>>) (Object) Gen.class);
        return oneOf(array);
    }

    /**
     * Converts this Gen to an Arbitrary
     *
     * @return An arbitrary which returns this generator regardless of the provided size hint n
     */
    default Arbitrary<T> arbitrary() {
        return n -> this;
    }

    /**
     * Returns a generator based on this generator which produces values that fulfill the given predicate.
     *
     * @param predicate A predicate
     * @return A new generator
     * @throws NullPointerException  if predicate is null
     * @throws IllegalStateException if no accepted value is found after FILTER_THRESHOLD tries
     */
    default Gen<T> filter(Predicate<? super T> predicate) {
        Objects.requireNonNull(predicate, "predicate is null");
        return random -> {
            int count = 0;
            T t;
            while (!predicate.test(t = apply(random))) {
                // it may take a looooooong time to hit this condition!
                if (++count == FILTER_THRESHOLD) {
                    throw new IllegalStateException("empty filter");
                }
            }
            return t;
        };
    }

    /**
     * Maps generated Ts to Us.
     *
     * @param mapper A function that maps a generated T to a new generator which generates objects of type U.
     * @param <U>    Type of generated objects of the new generator
     * @return A new generator
     */
    default <U> Gen<U> flatMap(Function<? super T, ? extends Gen<? extends U>> mapper) {
        Objects.requireNonNull(mapper, "mapper is null");
        return random -> mapper.apply(apply(random)).apply(random);
    }

    /**
     * Maps generated Ts to Us.
     *
     * @param mapper A function that maps a generated T to an object of type U.
     * @param <U>    Type of the mapped object
     * @return A new generator
     */
    default <U> Gen<U> map(Function<? super T, ? extends U> mapper) {
        Objects.requireNonNull(mapper, "mapper is null");
        return random -> mapper.apply(apply(random));
    }

    /**
     * Returns a generator that applies the given side-effecting action to each
     * generated value before returning it (useful for debugging).
     *
     * @param action the action to perform on each generated value
     * @return A new generator with identical output
     */
    default Gen<T> peek(Consumer<? super T> action) {
        return random -> {
            final T t = apply(random);
            action.accept(t);
            return t;
        };
    }

    /**
     * Transforms this {@code Gen}.
     *
     * @param f   A transformation
     * @param <U> Type of transformation result
     * @return An instance of type {@code U}
     * @throws NullPointerException if {@code f} is null
     */
    default <U> U transform(Function<? super Gen<T>, ? extends U> f) {
        Objects.requireNonNull(f, "f is null");
        return f.apply(this);
    }
}
|
package cs437.som.demo;
import cs437.som.SelfOrganizingMap;
import javax.imageio.ImageIO;
import javax.swing.*;
import java.awt.*;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.security.SecureRandom;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* An image edge detector using a self-organizing map.
*/
/**
 * An image edge detector using a self-organizing map.
 *
 * Each pixel is classified by showing the SOM a 3x3 "difference matrix" of
 * {-1, 0, 1} values describing how the pixel's neighborhood differs from it.
 */
public class EdgeDetector {
    /** The minimum distance between 2 colors to classify as a change in detail */
    private static final int MINIMUM_COLOR_DISTANCE = 60;

    /** Better than stdout... */
    private static final Logger log = Logger.getLogger("EdgeDetector");

    /** The ED's SOM */
    private SelfOrganizingMap map = null;

    /** The number of possible 3x3 matrices with each element having 3 possible
     * values. This is the expected number of iterations for a SOM that will
     * be exhaustively trained.
     */
    public static final int threeRaiseNine = 19683;

    /** Color-space size used to spread neuron indices across output colors.
     * NOTE(review): this is 2^25 - 1, but 24-bit RGB only holds 2^24 colors
     * (max 16777215), so colorStep * neuron can overflow the RGB range.
     * Value kept as-is to preserve existing output colors -- confirm intent.
     */
    private static final int possibleColors = 33554431;

    /**
     * Create an empty EdgeDetector.
     */
    private EdgeDetector() {
    }

    /**
     * Create an edge detector.
     *
     * @param iterations The number of iterations used in training the SOM.
     */
    private EdgeDetector(int iterations) {
        // SOMBuilber is a modal dialog: setVisible blocks until the user
        // configures and confirms the map.
        SOMBuilber somb = new SOMBuilber(9, iterations);
        somb.pack();
        somb.setModal(true);
        somb.setVisible(true);
        map = somb.getMap();
    }

    /**
     * Create an EdgeDetector with a given map and train it exhaustively.
     *
     * @param map The SOM the EdgeDetector will use. The EdgeDetector assumes
     * ownership of the SOM.
     * @return An exhaustively trained EdgeDetector.
     */
    public static EdgeDetector trainExhaustivelyFromMap(SelfOrganizingMap map) {
        EdgeDetector ed = new EdgeDetector();
        ed.map = map;
        ed.trainExhaustively();
        return ed;
    }

    /**
     * Create an EdgeDetector with a given map and train it with random samples
     * of possible inputs.
     *
     * @param map The SOM the EdgeDetector will use. The EdgeDetector assumes
     * ownership of the SOM.
     * @param samples The number of input samples to train with.
     * @return A randomly trained EdgeDetector.
     */
    public static EdgeDetector trainRandomlyFromMap(SelfOrganizingMap map,
                                                    int samples) {
        EdgeDetector ed = new EdgeDetector();
        ed.map = map;
        ed.trainWithRandomPermutations(samples);
        return ed;
    }

    /**
     * Train the edge detector's self-organizing map with a random sample of
     * the possible inputs it may see.
     *
     * @param n The number of sample inputs to use for training.
     */
    private void trainWithRandomPermutations(int n) {
        int[][] matrices = generateRandomPermutations(n);
        log.info("Training.");
        for (int[] matrix : matrices) {
            map.trainWith(matrix);
        }
    }

    /**
     * Generate a random sampling of input matrices.
     *
     * @param n The number of samples to produce.
     * @return An array (size n) of int[9], each of which represents a 3x3
     * matrix in row major form with elements in {-1, 0, 1}.
     */
    private int[][] generateRandomPermutations(int n) {
        int[][] permutations = new int[n][9];
        Random r = new SecureRandom();
        log.info("Generating " + n + " random matrices.");
        for (int i = 0; i < permutations.length; i++) {
            // Each element is drawn uniformly from {-1, 0, 1}.
            permutations[i] = new int[] {
                r.nextInt(3) - 1, r.nextInt(3) - 1, r.nextInt(3) - 1,
                r.nextInt(3) - 1, r.nextInt(3) - 1, r.nextInt(3) - 1,
                r.nextInt(3) - 1, r.nextInt(3) - 1, r.nextInt(3) - 1
            };
        }
        return permutations;
    }

    /**
     * Train the edge detector's self-organizing map with every possible input
     * matrix being shown to it once.
     */
    public void trainExhaustively() {
        int[][] matrices = generateAllPermutations();
        log.info("Training.");
        for (int[] matrix : matrices) {
            map.trainWith(matrix);
        }
    }

    /**
     * Generate all possible input matrices.
     *
     * Enumerates the 3^9 matrices by treating each row index as a base-3
     * number whose digits select elements of {-1, 0, 1}.
     *
     * @return An array (size 19683, or 3^9) of int[9], each of which
     * represents a 3x3 matrix in row major form.
     */
    private int[][] generateAllPermutations() {
        int[] possibleValues = { -1, 0, 1 };
        int rows = threeRaiseNine;
        int cols = 9;
        int[][] permutations = new int[rows][cols];
        int factor;
        for (int i = 0; i < rows; i++) {
            factor = 1;
            for (int j = 0; j < cols; j++) {
                // Digit j of i in base 3 selects the value for column j.
                permutations[i][j] = possibleValues[i / factor % possibleValues.length];
                factor *= possibleValues.length;
            }
        }
        return permutations;
    }

    /**
     * Detect the edges in an image.
     *
     * @param image The image to process.
     * @return A new image where the pixels correspond to colors assigned to
     * the individual neurons of the self-organizing map.
     */
    public BufferedImage runOnImage(BufferedImage image) {
        int height = image.getHeight();
        int width = image.getWidth();
        log.info("Processing " + width + 'x' + height + " image.");
        BufferedImage out = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);
        // Evenly spread neuron indices across the color space.
        int colorStep = possibleColors / map.getNeuronCount();
        // NOTE(review): loops start at 1, leaving row 0 and column 0 black.
        // getDifferenceMatrix already handles out-of-bounds neighbors, so the
        // loops could start at 0 -- confirm whether skipping is intentional.
        for (int y = 1; y < height; y++) {
            for (int x = 1; x < width; x++) {
                int[] differenceMatrix = getDifferenceMatrix(image, x, y);
                int best = map.getBestMatchingNeuron(differenceMatrix);
                out.setRGB(x, y, colorStep * best);
            }
        }
        return out;
    }

    /**
     * Normalize an image produced in a previous processing.
     *
     * The most common color corresponds to the SOM's neuron that responds to a
     * lack of an edge. Whatever color that neuron was assigned is converted
     * to black to serve as the background. All other colors are converted to
     * white.
     *
     * This operation is predicated on the idea that several neurons will
     * respond to different features in the image that correspond to parts of
     * an edge, whereas another single neuron will be most closely associated
     * with a lack of an edge.
     *
     * @param image The image to normalize.
     * @return A black and white image where the white pixels indicate an edge
     * in the original image.
     */
    public BufferedImage normalizeImage(BufferedImage image) {
        log.info("Normalizing.");
        int mostCommonColor = findMostCommonColor(image);
        log.fine("Rewriting colors.");
        BufferedImage out = new BufferedImage(image.getWidth(), image.getHeight(), image.getType());
        int black = Color.black.getRGB();
        int white = Color.white.getRGB();
        for (int y = 0; y < image.getHeight(); y++) {
            for (int x = 0; x < image.getWidth(); x++) {
                int color = image.getRGB(x, y);
                if (color == mostCommonColor)
                    out.setRGB(x, y, black);
                else
                    out.setRGB(x, y, white);
            }
        }
        return out;
    }

    /**
     * Compute the most common color in an image.
     *
     * @param image The input image.
     * @return The most common color as a 32-bit ARGB value (as returned by
     * {@link BufferedImage#getRGB(int, int)}), or -1 for an empty image.
     */
    private int findMostCommonColor(BufferedImage image) {
        Map<Integer, Integer> colorFrequency =
                new HashMap<Integer, Integer>(map.getNeuronCount());
        log.fine("Computing color frequencies.");
        for (int y = 0; y < image.getHeight(); y++) {
            for (int x = 0; x < image.getWidth(); x++) {
                int color = image.getRGB(x, y);
                Integer count = colorFrequency.get(color);
                colorFrequency.put(color, count == null ? 1 : count + 1);
            }
        }
        log.fine("Finding most common color.");
        int mostCommonColor = -1;
        int max = 0;
        // Iterate entries instead of keySet+get to avoid redundant lookups.
        for (Map.Entry<Integer, Integer> entry : colorFrequency.entrySet()) {
            int count = entry.getValue();
            if (count > max) {
                max = count;
                mostCommonColor = entry.getKey();
            }
        }
        return mostCommonColor;
    }

    /**
     * Build the 3x3 difference matrix for a pixel: -1 for neighbors outside
     * the image, 0 where the neighbor is similar to the center, 1 where the
     * color distance exceeds {@link #MINIMUM_COLOR_DISTANCE}.
     *
     * @param img The image being processed.
     * @param x The x-axis location of the center pixel.
     * @param y The y-axis location of the center pixel.
     * @return An int[9] matrix in row major form with elements in {-1, 0, 1}.
     */
    private int[] getDifferenceMatrix(BufferedImage img, int x, int y) {
        int[] matrix = new int[9];
        int center = getPixelColor(img, x, y);
        for (int i = 0; i < 3; i++) {
            for (int j = 0; j < 3; j++) {
                int pixel = getPixelColor(img, x + j - 1, y + i - 1);
                if (pixel == -1) {
                    // Neighbor lies outside the image boundary.
                    matrix[i * 3 + j] = -1;
                } else {
                    double difference = colorDistance(center, pixel);
                    if (difference < MINIMUM_COLOR_DISTANCE) {
                        matrix[i * 3 + j] = 0;
                    } else {
                        matrix[i * 3 + j] = 1;
                    }
                }
            }
        }
        return matrix;
    }

    /**
     * Retrieve the color of a single pixel in an image.
     *
     * @param image The image in question.
     * @param x The x-axis location of the pixel.
     * @param y The y-axis location of the pixel.
     * @return The color, as a 32-bit ARGB value, at (x, y) or -1 if that
     * location would be outside of the boundaries of the image.
     */
    private int getPixelColor(BufferedImage image, int x, int y) {
        if (x < 0 || x > image.getWidth() - 1 || y < 0 || y > image.getHeight() - 1)
            return -1;
        return image.getRGB(x, y);
    }

    /**
     * Calculate the Euclidean difference between two colors using their RGB
     * values as elements of a 3-dimensional vector.
     *
     * @param one The first color as a 32-bit ARGB value (the alpha will be
     * ignored).
     * @param two The second color as a 32-bit ARGB value (the alpha will be
     * ignored).
     * @return The distance between one and two.
     */
    private double colorDistance(int one, int two) {
        int r1 = (one >> 16) & 0xFF, g1 = (one >> 8) & 0xFF, b1 = one & 0xFF;
        int r2 = (two >> 16) & 0xFF, g2 = (two >> 8) & 0xFF, b2 = two & 0xFF;
        int dr = r1 - r2, dg = g1 - g2, db = b1 - b2;
        return Math.sqrt(dr * dr + dg * dg + db * db);
    }

    @Override
    public String toString() {
        return "EdgeDetector{map=" + map + '}';
    }

    /**
     * Demo entry point: builds a detector interactively, processes the
     * hard-coded "image.jpg", writes the raw and normalized results to PNG
     * files, and displays them next to "known_edges.jpg" for comparison.
     */
    public static void main(String[] args) throws IOException {
        EdgeDetector ed = new EdgeDetector(100);
        ed.trainWithRandomPermutations(100);
        BufferedImage original = ImageIO.read(new File("image.jpg"));
        BufferedImage detected = ed.runOnImage(original);
        writeImage(detected, "out");
        BufferedImage normalized = ed.normalizeImage(detected);
        writeImage(normalized, "out_normalized");
        JFrame f1 = ImageFrame.createInJFrame("Reference Image", ImageIO.read(new File("known_edges.jpg")));
        f1.setLocation(0,0);
        f1.setVisible(true);
        JFrame f2 = ImageFrame.createInJFrame("Processed Image", detected);
        f2.setLocation(450, 0);
        f2.setVisible(true);
        JFrame f3 = ImageFrame.createInJFrame("Normalized Image", normalized);
        f3.setLocation(900, 0);
        f3.setVisible(true);
    }

    /**
     * Write an image to a PNG file.
     *
     * @param image The image to store.
     * @param filename The name of the file to write to (without the ".png"
     * extension).
     */
    public static void writeImage(BufferedImage image, String filename) {
        try {
            ImageIO.write(image, "png", new File(filename + ".png"));
        } catch (IOException e) {
            log.severe("Exception while writing file: " + filename + ".png");
            log.log(Level.SEVERE, "Exception: ", e);
        }
    }
}
|
package com.dotbots.util;
import com.badlogic.gdx.graphics.Color;
import com.dotbots.model.Board;
import com.dotbots.model.Goal;
import com.dotbots.model.Piece;
import com.dotbots.model.Wall;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Random;
/**
 * Builds playable boards: places the fixed pieces and walls for a given board
 * size and picks a random, reachable goal square.
 */
public class BoardFactory {
    // static board creation

    /**
     * Creates a board of the given size with its predefined pieces and walls
     * and a randomly chosen goal.
     *
     * Only size 10 currently has piece/wall data; other sizes yield empty
     * piece and wall lists, and goal creation will then fail (see createGoal).
     */
    public static Board createBoard(int size) {
        List<Piece> pieces = createPieces(size);
        List<Wall> walls = createWalls(size);
        Goal goal = createGoal(size, pieces, walls);
        return new Board(size, pieces, walls, goal);
    }

    // methods to create pieces, walls, and goal

    /**
     * Builds the piece list for the given board size from the flat key array
     * (pairs of x, y). Piece i gets color pieceColors[i].
     */
    private static List<Piece> createPieces(int size) {
        int[] keys = {};
        switch (size) {
            case 10:
                keys = pieceKeys10;
                break;
        }
        List<Piece> pieces = new ArrayList<Piece>();
        for (int i = 0; i < keys.length; i += 2) {
            // keys come in (x, y) pairs; i / 2 indexes the matching color.
            Piece piece = new Piece(keys[i], keys[i + 1], pieceColors[i / 2]);
            pieces.add(piece);
        }
        return pieces;
    }

    /**
     * Builds the wall list for the given board size from the flat key array
     * (triples of x, y, direction).
     */
    private static List<Wall> createWalls(int size) {
        int[] keys = {};
        switch (size) {
            case 10:
                keys = wallKeys10;
                break;
        }
        List<Wall> walls = new ArrayList<Wall>();
        for (int i = 0; i < keys.length; i += 3) {
            // keys come in (x, y, dir) triples.
            Wall wall = new Wall(keys[i], keys[i + 1], keys[i + 2]);
            walls.add(wall);
        }
        return walls;
    }

    // TODO - this whole method is sloppy, should be optimized

    /**
     * Picks a random goal for a randomly chosen piece. For every wall, the
     * four cells touching the wall's corner are candidate goals, minus the one
     * cell excluded by the wall's direction. Candidates occupied by a piece or
     * lying outside the board are filtered out, then one survivor is chosen at
     * random.
     *
     * NOTE(review): removing a candidate via goals.remove(goalN) relies on
     * either reference identity or Goal.equals matching only that instance --
     * if Goal overrides equals by position, a candidate from an earlier wall
     * could be removed instead. Verify Goal's equals semantics.
     *
     * NOTE(review): if every candidate is filtered out, potentialGoals is
     * empty and rand.nextInt(0) throws IllegalArgumentException.
     */
    public static Goal createGoal(int size, List<Piece> pieces, List<Wall> walls) {
        Random rand = new Random();
        int randNum = rand.nextInt(pieces.size());
        Piece piece = pieces.get(randNum);
        List<Goal> goals = new ArrayList<Goal>();
        for (Wall wall : walls) {
            float x = wall.getX();
            float y = wall.getY();
            // The four cells sharing the wall's corner point.
            Goal goal0 = new Goal(x, y - 1, piece);
            Goal goal1 = new Goal(x, y, piece);
            Goal goal2 = new Goal(x - 1, y, piece);
            Goal goal3 = new Goal(x - 1, y - 1, piece);
            goals.add(goal0);
            goals.add(goal1);
            goals.add(goal2);
            goals.add(goal3);
            // Drop the cell the wall's direction points away from.
            switch (wall.getDir()) {
                case 0: goals.remove(goal3); break;
                case 1: goals.remove(goal2); break;
                case 2: goals.remove(goal1); break;
                case 3: goals.remove(goal0); break;
            }
        }
        List<Goal> potentialGoals = new ArrayList<Goal>();
        for (Goal goal : goals) {
            boolean spawn = true;
            // A goal may not sit on a cell already occupied by a piece.
            for (Piece each : pieces) {
                if (each.getX() == goal.getX() && each.getY() == goal.getY()) {
                    spawn = false;
                }
            }
            // ...and it must lie within the board's bounds.
            if (spawn && !(goal.getX() < 0 || goal.getX() > size - 1 || goal.getY() < 0
                    || goal.getY() > size - 1)) {
                potentialGoals.add(goal);
            }
        }
        randNum = rand.nextInt(potentialGoals.size());
        return potentialGoals.get(randNum);
    }

    // colors for pieces
    final static Color[] pieceColors = {
        Color.valueOf("FFAA00"),
        Color.valueOf("FF00AA"),
        Color.valueOf("AA00FF"),
        Color.valueOf("00AAFF")
    };

    // keys for pieces
    final static int[] pieceKeys10 = {
        2, 7, // x, y
        3, 4,
        6, 2,
        7, 7 };

    // keys for walls
    final static int[] wallKeys10 = {
        0, 2, 0, // x, y, dir
        0, 7, 0,
        2, 7, 0,
        3, 4, 0,
        3, 10, 1,
        4, 0, 0,
        6, 6, 2,
        7, 0, 0,
        7, 3, 2,
        7, 10, 1,
        8, 8, 2,
        10, 2, 2,
        10, 6, 2 };
}
|
package hudson.model;
import com.thoughtworks.xstream.converters.Converter;
import com.thoughtworks.xstream.converters.basic.AbstractBasicConverter;
import java.io.Serializable;
/**
* The build outcome.
*
* @author Kohsuke Kawaguchi
*/
/**
 * The build outcome.
 *
 * Instances form a fixed, ordered set (type-safe enum pattern predating Java
 * enums); bigger ordinals are worse. Serialization is canonicalized via
 * {@link #readResolve()} so identity comparison remains valid.
 *
 * @author Kohsuke Kawaguchi
 */
public final class Result implements Serializable {
    /**
     * The build didn't have any fatal errors nor errors.
     */
    public static final Result SUCCESS = new Result("SUCCESS",BallColor.BLUE,0);
    /**
     * The build didn't have any fatal errors but some errors.
     */
    public static final Result UNSTABLE = new Result("UNSTABLE",BallColor.YELLOW,1);
    /**
     * The build had a fatal error.
     */
    public static final Result FAILURE = new Result("FAILURE",BallColor.RED,2);
    /**
     * The build was manually aborted.
     */
    public static final Result ABORTED = new Result("ABORTED",BallColor.GREY,3);

    private final String name;
    /**
     * Bigger numbers are worse.
     */
    private final int ordinal;
    /**
     * Default ball color for this status.
     */
    public final BallColor color;

    private Result(String name, BallColor color, int ordinal) {
        this.name = name;
        this.color = color;
        this.ordinal = ordinal;
    }

    /**
     * Combines two {@link Result}s and returns the worse one.
     */
    public Result combine(Result that) {
        if(this.ordinal < that.ordinal)
            return that;
        else
            return this;
    }

    public boolean isWorseThan(Result that) {
        return this.ordinal > that.ordinal;
    }

    public boolean isWorseOrEqualTo(Result that) {
        return this.ordinal >= that.ordinal;
    }

    @Override
    public String toString() {
        return name;
    }

    /**
     * Maps a deserialized instance back to its canonical constant by ordinal,
     * preserving identity semantics; unknown ordinals fall back to FAILURE.
     */
    private Object readResolve() {
        for (Result r : all)
            if (ordinal==r.ordinal)
                return r;
        return FAILURE;
    }

    private static final long serialVersionUID = 1L;

    private static final Result[] all = new Result[] {SUCCESS,UNSTABLE,FAILURE,ABORTED};

    /**
     * XStream converter that (de)serializes a Result by its name; unknown
     * names fall back to FAILURE.
     */
    public static final Converter conv = new AbstractBasicConverter () {
        public boolean canConvert(Class clazz) {
            return clazz==Result.class;
        }
        protected Object fromString(String s) {
            for (Result r : all)
                if (s.equals(r.name))
                    return r;
            return FAILURE;
        }
    };
}
|
package org.egordorichev.lasttry;
import org.egordorichev.lasttry.util.Util;
import java.io.File;
/**
 * Parses LastTry's command line flags into static configuration fields.
 *
 * Supported flags: -d (debug build), -dw (delete worlds), -dp (delete
 * players), -s N (world seed), -w NAME (world name), -p NAME (player name),
 * -nl (disable lighting), -f (fullscreen).
 *
 * NOTE(review): parser state (i, arg, arguments) is static and mutable, so
 * parse() is not reentrant and not thread-safe -- fine for a single call at
 * startup, but confirm no other caller exists.
 */
public class Args {
    /** World name to load (default "test"). */
    public static String world = "test";
    /** Player name to load (default "test"). */
    public static String player = "test";
    /** World generation seed (default 512). */
    public static int seed = 512;

    // Shared parser state used by checkForArgument().
    private static int i;
    private static String arg;
    private static String[] arguments;

    /**
     * Parses the given argument array, mutating the static fields and global
     * game state (LastTry.*) accordingly.
     *
     * @param args   raw command line arguments
     * @param config the LWJGL application configuration object; its
     *               "fullscreen" field is set via reflection for -f
     * @throws Exception on an unknown flag, a missing flag argument, or a
     *                   non-numeric seed
     */
    public static void parse(String[] args, Object config) throws Exception {
        arguments = args;
        for (i = 0; i < args.length; i++) {
            arg = args[i];
            switch (arg) {
                case "-d":
                    // Debug build; on Windows also dump the stack on any
                    // System.exit() call to trace unexpected shutdowns.
                    LastTry.release = false;
                    if (Util.isWindows()) {
                        System.setSecurityManager(new ExitDumper());
                    }
                    break;
                case "-dw":
                    Util.delete(new File("data" + File.separator + "worlds"));
                    break;
                case "-dp":
                    Util.delete(new File("data" + File.separator + "players"));
                    break;
                case "-s":
                    checkForArgument("Expected seed after -s");
                    try {
                        seed = Integer.parseInt(args[++i]);
                    } catch (NumberFormatException exception) {
                        // Keep the original failure as the cause.
                        throw new Exception("Seed is not a valid number", exception);
                    }
                    LastTry.random.setSeed(seed);
                    break;
                case "-w":
                    checkForArgument("Expected world name after -w");
                    world = args[++i];
                    break;
                case "-p":
                    checkForArgument("Expected player name after -p");
                    player = args[++i];
                    break;
                case "-nl":
                    LastTry.noLight = true;
                    break;
                case "-f":
                    set(config, "fullscreen", true);
                    break;
                default:
                    throw new Exception("Unknown arg " + arg);
            }
        }
    }

    /**
     * Sets a boolean field on the given object by reflection.
     */
    private static void set(Object instance, String field, boolean value) {
        // THIS IS TEMPORARY
        // The issue is AFAIK the core gradle doesn't load the following:
        // com.badlogic.gdx.backends.lwjgl.LwjglApplicationConfiguration
        // So do we update the gradle file or change the way its passed?
        try {
            instance.getClass().getDeclaredField(field).set(instance, value);
        } catch (IllegalArgumentException | IllegalAccessException | NoSuchFieldException | SecurityException e) {
            e.printStackTrace();
        }
    }

    /**
     * Fails with the given message when the current flag is the last token,
     * i.e. the value it requires is missing.
     */
    private static void checkForArgument(String error) throws Exception {
        if (arguments.length - 1 == i) {
            throw new Exception(error);
        }
    }

    /**
     * SecurityManager that dumps the stack whenever something calls
     * System.exit(), to locate unexpected shutdowns in debug builds.
     */
    private static class ExitDumper extends SecurityManager {
        @Override
        public void checkExit(int status) {
            Thread.dumpStack();
        }
    }
}
|
package io.leao.codecolors.view;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.util.AttributeSet;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.AutoCompleteTextView;
import android.widget.Button;
import android.widget.CheckBox;
import android.widget.CheckedTextView;
import android.widget.EditText;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.MultiAutoCompleteTextView;
import android.widget.RadioButton;
import android.widget.RatingBar;
import android.widget.SeekBar;
import android.widget.Spinner;
import android.widget.TextView;
import java.util.Set;
import io.leao.codecolors.R;
import io.leao.codecolors.manager.CcColorsManager;
import io.leao.codecolors.manager.CcDependenciesManager;
import io.leao.codecolors.res.CcColorStateList;
/**
 * LayoutInflater.Factory2 wrapper that delegates view creation to an optional
 * inner factory (falling back to the CcLayoutInflater), then registers
 * code-color callbacks on each created view's background/src drawables so
 * they refresh when their colors change.
 */
public class CcLayoutInflaterFactoryWrapper implements LayoutInflater.Factory2 {
    private final CcLayoutInflater mInflater;
    private LayoutInflater.Factory2 mFactory;

    protected CcColorsManager mColorsManager;
    protected CcDependenciesManager mDependenciesManager;

    public CcLayoutInflaterFactoryWrapper(CcLayoutInflater inflater, LayoutInflater.Factory2 factory) {
        mInflater = inflater;
        mFactory = factory;
        Context context = inflater.getContext();
        mColorsManager = CcColorsManager.obtain(context);
        mColorsManager.onNewContext(context);
        mDependenciesManager = CcDependenciesManager.obtain(context);
    }

    public LayoutInflater.Factory2 getFactory() {
        return mFactory;
    }

    /**
     * Sets the wrapped factory. May only be called once; a second call throws.
     */
    public void setFactory(LayoutInflater.Factory2 factory) {
        if (mFactory == null) {
            mFactory = factory;
        } else {
            throw new IllegalStateException("Factory already defined.");
        }
    }

    @Override
    public View onCreateView(View parent, String name, Context context, AttributeSet attrs) {
        // The parent view plays no role in creation; delegate to the 2-arg form.
        return onCreateView(name, context, attrs);
    }

    @Override
    public View onCreateView(String name, Context context, AttributeSet attrs) {
        View view;
        // Give the wrapped factory first crack at creating the view.
        if (mFactory != null) {
            view = mFactory.onCreateView(name, context, attrs);
        } else {
            view = null;
        }
        // Fall back to the inflater's own tag-based creation.
        if (view == null) {
            view = mInflater.createViewFromTag(context, name, attrs);
        }
        if (view != null && attrs != null) {
            // Add callbacks to refresh drawable states.
            addCodeColorCallbacks(context, attrs, view);
        }
        return view;
    }

    /**
     * Scans the view's styled attributes for background/backgroundTint/src
     * resources and hooks invalidation callbacks on the matching drawables.
     */
    protected void addCodeColorCallbacks(Context context, AttributeSet attrs, View view) {
        TypedArray ta = context.obtainStyledAttributes(attrs, R.styleable.CodeColors, getViewDefStyleAttr(view), 0);
        try {
            final int N = ta.getIndexCount();
            for (int i = 0; i < N; i++) {
                int attr = ta.getIndex(i);
                if (attr == R.styleable.CodeColors_android_background ||
                        attr == R.styleable.CodeColors_backgroundTint) {
                    Drawable backgroundDrawable = view.getBackground();
                    if (backgroundDrawable != null) {
                        int resourceId = ta.getResourceId(attr, 0);
                        addCodeColorCallbacks(resourceId, backgroundDrawable, mDrawableInvalidateCallback);
                    }
                } else if (attr == R.styleable.CodeColors_android_src) {
                    if (view instanceof ImageView) {
                        Drawable srcDrawable = ((ImageView) view).getDrawable();
                        if (srcDrawable != null) {
                            int resourceId = ta.getResourceId(attr, 0);
                            addCodeColorCallbacks(resourceId, srcDrawable, mDrawableInvalidateCallback);
                        }
                    }
                }
            }
        } finally {
            ta.recycle();
        }
    }

    /**
     * Registers the callback on every code color the given resource depends on.
     */
    private void addCodeColorCallbacks(int resourceId, Drawable drawable,
                                       CcColorStateList.AnchorCallback<Drawable> callback) {
        Set<Integer> dependencies = mDependenciesManager.resolveDependencies(resourceId);
        if (dependencies != null) {
            for (Integer dependency : dependencies) {
                CcColorStateList codeColor = mColorsManager.getColor(dependency);
                if (codeColor != null) {
                    codeColor.addCallback(drawable, callback);
                }
            }
        }
    }

    /**
     * Returns the default style attribute depending on the view class.
     * <p/>
     * Order matters: a {@link CheckBox} is also {@link Button}, so we have to be careful when returning the default
     * style attribute.
     */
    @SuppressLint("InlinedApi")
    private static int getViewDefStyleAttr(View view) {
        if (view instanceof RadioButton) {
            return android.R.attr.radioButtonStyle;
        } else if (view instanceof CheckBox) {
            return android.R.attr.checkboxStyle;
        } else if (view instanceof Button) {
            return android.R.attr.buttonStyle;
        } else if (view instanceof MultiAutoCompleteTextView) {
            return android.R.attr.autoCompleteTextViewStyle;
        } else if (view instanceof AutoCompleteTextView) {
            return android.R.attr.autoCompleteTextViewStyle;
        } else if (view instanceof EditText) {
            return android.R.attr.editTextStyle;
        } else if (view instanceof CheckedTextView) {
            // checkedTextViewStyle only exists from JB-MR1 onwards.
            return Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1 ?
                    android.R.attr.checkedTextViewStyle : 0;
        } else if (view instanceof TextView) {
            return android.R.attr.textViewStyle;
        } else if (view instanceof Spinner) {
            return android.R.attr.spinnerStyle;
        } else if (view instanceof ImageButton) {
            return android.R.attr.imageButtonStyle;
        } else if (view instanceof RatingBar) {
            return android.R.attr.ratingBarStyle;
        } else if (view instanceof SeekBar) {
            return android.R.attr.seekBarStyle;
        } else {
            return 0;
        }
    }

    /**
     * Shared callback that forces a state change (and self-invalidation) on a
     * drawable so a color update becomes visible.
     */
    private static final CcColorStateList.AnchorCallback<Drawable> mDrawableInvalidateCallback =
            new CcColorStateList.AnchorCallback<Drawable>() {
                @Override
                public void invalidateColor(Drawable drawable, CcColorStateList color) {
                    if (drawable != null) {
                        final int[] state = drawable.getState();
                        // Force a state change to update the color.
                        drawable.setState(new int[]{0});
                        drawable.setState(state);
                        // Invalidate the drawable (invalidates the view).
                        drawable.invalidateSelf();
                    }
                }
            };
}
|
package com.archimatetool.editor.diagram.util;
import org.eclipse.draw2d.FreeformFigure;
import org.eclipse.draw2d.IFigure;
import org.eclipse.draw2d.SWTGraphics;
import org.eclipse.draw2d.geometry.Rectangle;
import org.eclipse.gef.EditPartFactory;
import org.eclipse.gef.GraphicalViewer;
import org.eclipse.gef.LayerConstants;
import org.eclipse.gef.RootEditPart;
import org.eclipse.gef.editparts.FreeformGraphicalRootEditPart;
import org.eclipse.gef.editparts.LayerManager;
import org.eclipse.gef.ui.parts.GraphicalViewerImpl;
import org.eclipse.swt.graphics.GC;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.layout.FillLayout;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Shell;
import com.archimatetool.editor.diagram.DiagramEditorFactoryExtensionHandler;
import com.archimatetool.editor.diagram.IDiagramEditorFactory;
import com.archimatetool.editor.diagram.editparts.ArchimateDiagramEditPartFactory;
import com.archimatetool.editor.diagram.sketch.editparts.SketchEditPartFactory;
import com.archimatetool.model.IArchimateDiagramModel;
import com.archimatetool.model.IDiagramModel;
import com.archimatetool.model.ISketchModel;
/**
* Diagram Utils
*
* @author Phillip Beauvoir
*/
/**
 * Diagram Utils
 *
 * @author Phillip Beauvoir
 */
public final class DiagramUtils {

    /**
     * Create a GraphicalViewerImpl to show the model. The Viewer has no Scroll Bars
     * @param model
     * @return A Graphical Viewer
     */
    public static GraphicalViewerImpl createViewer(IDiagramModel model, Composite parent) {
        // Pick the edit part factory matching the model type; extensions are
        // consulted last for model types contributed by other plug-ins.
        EditPartFactory editPartFactory = null;
        if(model instanceof IArchimateDiagramModel) {
            editPartFactory = new ArchimateDiagramEditPartFactory();
        }
        else if(model instanceof ISketchModel) {
            editPartFactory = new SketchEditPartFactory();
        }
        else {
            // Extensions
            IDiagramEditorFactory factory = DiagramEditorFactoryExtensionHandler.INSTANCE.getFactory(model);
            if(factory != null) {
                editPartFactory = factory.createEditPartFactory();
            }
        }
        if(editPartFactory == null) {
            throw new RuntimeException("Unsupported model type"); //$NON-NLS-1$
        }
        GraphicalViewerImpl viewer = new GraphicalViewerImpl();
        viewer.createControl(parent);
        viewer.setEditPartFactory(editPartFactory);
        RootEditPart rootPart = new FreeformGraphicalRootEditPart();
        viewer.setRootEditPart(rootPart);
        viewer.setContents(model);
        // Force a layout pass so figure bounds are valid for image capture.
        viewer.flush();
        return viewer;
    }

    /**
     * @param model The model to create the image from
     * @param scale The scale to use. 1 is full size.
     * @param margin amount of white space margin to apply around the image
     * @return A Scaled Image from the given Diagram Model
     * Clients must dispose of the Image when done.
     * If model has no children a blank image of 100x100 is returned
     */
    public static Image createImage(IDiagramModel model, double scale, int margin) {
        return createModelReferencedImage(model, scale, margin).getImage();
    }

    /**
     * @param model The model to create the image from
     * @param scale The scale to use. 1 is full size.
     * @param margin amount of white space margin to apply around the image
     * @return ModelReferencedImage wrapper class containing a Scaled Image from the given Diagram Model and offset bounds
     * Clients must dispose of the Image when done.
     * If model has no children a blank image of 100x100 is returned
     */
    public static ModelReferencedImage createModelReferencedImage(IDiagramModel model, double scale, int margin) {
        // A throwaway Shell hosts the off-screen viewer; disposed before return.
        Shell shell = new Shell();
        shell.setLayout(new FillLayout());
        GraphicalViewer viewer = createViewer(model, shell);
        ModelReferencedImage image = createModelReferencedImage(viewer, scale, margin);
        shell.dispose();
        return image;
    }

    /**
     * @param graphicalViewer The GraphicalViewer to create the image from
     * @param scale The scale to use. 1 is full size. Max of 5 is allowed.
     * @param margin amount of white space margin to apply around the image
     * @return A Scaled Image from the given GraphicalViewer trimming off whitespace
     * Clients must dispose of the Image when done.
     * If graphicalViewer has no children a blank image of 100x100 is returned
     */
    public static Image createImage(GraphicalViewer graphicalViewer, double scale, int margin) {
        return createModelReferencedImage(graphicalViewer, scale, margin).getImage();
    }

    /**
     * Renders the viewer's printable layers into an image, trimming whitespace.
     */
    private static ModelReferencedImage createModelReferencedImage(GraphicalViewer graphicalViewer, double scale, int margin) {
        LayerManager layerManager = (LayerManager)graphicalViewer.getEditPartRegistry().get(LayerManager.ID);
        IFigure rootFigure = layerManager.getLayer(LayerConstants.PRINTABLE_LAYERS);
        return createModelReferencedImage(rootFigure, scale, margin);
    }

    /**
     * @param figure The Figure to create the image from
     * @param scale The scale to use. 1 is full size. Max of 5 is allowed.
     * @param margin amount of white space margin to apply around the image
     * @return A Scaled Image from the given GraphicalViewer trimming off whitespace
     * Clients must dispose of the Image when done.
     * If figure has no children a blank image of 100x100 is returned
     */
    public static Image createImage(IFigure figure, double scale, int margin) {
        return createModelReferencedImage(figure, scale, margin).getImage();
    }

    /**
     * Paints the figure into a new Image at the given scale, with a margin,
     * compensating for negative figure coordinates. Scale is clamped to (0, 5].
     */
    private static ModelReferencedImage createModelReferencedImage(IFigure figure, double scale, int margin) {
        // Clamp scale to a sane range.
        if(scale <= 0) {
            scale = 1;
        }
        if(scale > 5) {
            scale = 5;
        }

        Rectangle bounds = getMinimumBounds(figure);
        if(bounds == null) {
            bounds = new Rectangle(0, 0, 100, 100); // At least a minimum
        }
        else {
            // Margin is specified in output pixels, so divide by scale here.
            bounds.expand(margin / scale, margin / scale);
        }

        Image image = new Image(Display.getDefault(), (int)(bounds.width * scale), (int)(bounds.height * scale) );
        GC gc = new GC(image);
        SWTGraphics graphics = new SWTGraphics(gc);

        // If scaled, then scale now
        // Issue #621: SWTGraphics supports scale() so no need to use ScaledGraphics
        if(scale != 1) {
            graphics.scale(scale);
        }

        // Compensate for negative co-ordinates
        graphics.translate(bounds.x * -1, bounds.y * -1);

        // Paint onto graphics
        figure.paint(graphics);

        // Dispose
        gc.dispose();
        graphics.dispose();

        return new ModelReferencedImage(image, bounds);
    }

    /**
     * Return the extents of the diagram by extending from the left-topmost child to the right-bottom-most child.
     * If there are no children in the diagram a minimal size of 100x100 is returned.
     */
    public static Rectangle getDiagramExtents(GraphicalViewer graphicalViewer) {
        LayerManager layerManager = (LayerManager)graphicalViewer.getEditPartRegistry().get(LayerManager.ID);
        IFigure rootFigure = layerManager.getLayer(LayerConstants.PRINTABLE_LAYERS);
        Rectangle r = getMinimumBounds(rootFigure);
        return r == null ? new Rectangle(0, 0, 100, 100) : r;
    }

    /**
     * @param figure
     * @return The minimum bounds for a figure or null if there are no children
     */
    public static Rectangle getMinimumBounds(IFigure figure) {
        // Simple Figure
        if(!(figure instanceof FreeformFigure)) {
            return figure.getBounds();
        }

        // Freeform figures report unbounded extents, so union the children's
        // bounds instead, recursing into nested freeform figures.
        Rectangle minimumBounds = null;
        for(Object child : figure.getChildren()) {
            Rectangle bounds;
            if(child instanceof FreeformFigure) {
                bounds = getMinimumBounds((IFigure)child);
            }
            else {
                bounds = ((IFigure)child).getBounds();
            }
            if(bounds != null) {
                if(minimumBounds == null) {
                    minimumBounds = new Rectangle(bounds);
                }
                else {
                    minimumBounds.union(bounds);
                }
            }
        }
        return minimumBounds;
    }
}
|
package org.neo4j.index.impl.lucene;
import static org.neo4j.index.impl.lucene.LuceneDataSource.LUCENE_VERSION;
import java.io.Reader;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.LowerCaseFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.WhitespaceTokenizer;
/**
 * Analyzer splitting on whitespace and lower-casing each token; records that
 * it has been used via the static {@code called} flag.
 */
final class CustomAnalyzer extends Analyzer
{
    // Flipped to true the first time tokenStream is invoked, so tests can
    // verify this analyzer was actually consulted.
    static boolean called;

    @Override
    public final TokenStream tokenStream( String fieldName, Reader reader )
    {
        called = true;
        // Tokenize on whitespace first, then lower-case every token.
        final WhitespaceTokenizer tokenizer = new WhitespaceTokenizer( LUCENE_VERSION, reader );
        return new LowerCaseFilter( LUCENE_VERSION, tokenizer );
    }
}
|
package de.holisticon.bpm.sbr.dmn;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import javax.annotation.PostConstruct;
import javax.ejb.Remote;
import javax.ejb.Stateless;
import de.holisticon.bpm.sbr.api.CustomerStatus;
import de.holisticon.bpm.sbr.dmn.api.CandidateResult;
import de.holisticon.bpm.sbr.dmn.api.SkillBasedRoutingService;
import org.camunda.bpm.dmn.engine.DmnDecision;
import org.camunda.bpm.dmn.engine.DmnDecisionOutput;
import org.camunda.bpm.dmn.engine.DmnDecisionResult;
import org.camunda.bpm.dmn.engine.DmnEngine;
import org.camunda.bpm.dmn.engine.impl.DmnEngineConfigurationImpl;
import org.camunda.bpm.engine.delegate.DelegateTask;
import org.slf4j.Logger;
import static org.slf4j.LoggerFactory.getLogger;
@Stateless
@Remote(SkillBasedRoutingService.class)
public class SkillBasedRoutingServiceBean implements SkillBasedRoutingService {

  private final Logger logger = getLogger(this.getClass());

  /** Classpath location of the DMN decision table used for routing. */
  private final static String DMN_RESOURCE = "findApprover.dmn";

  private DmnDecision decision;
  private DmnEngine dmnEngine;

  /**
   * Builds the DMN engine and parses the routing decision table from the
   * classpath. Invoked once by the container after construction.
   *
   * @throws IllegalStateException if the DMN resource is missing from the classpath
   */
  @PostConstruct
  public void loadDecision() {
    dmnEngine = new DmnEngineConfigurationImpl().buildEngine();
    final InputStream resourceAsStream = Thread.currentThread().getContextClassLoader().getResourceAsStream(DMN_RESOURCE);
    if (resourceAsStream == null) {
      // Fail fast with a clear message instead of letting parseDecision choke on null.
      throw new IllegalStateException("DMN resource not found on classpath: " + DMN_RESOURCE);
    }
    try {
      decision = dmnEngine.parseDecision(resourceAsStream);
    } finally {
      // The stream was previously never closed (resource leak).
      try {
        resourceAsStream.close();
      } catch (java.io.IOException e) {
        logger.warn("Could not close stream for {}", DMN_RESOURCE, e);
      }
    }
    logger.info("created decision for {}", DMN_RESOURCE);
  }

  /**
   * Evaluates the loaded decision against {@code context} and returns the value
   * of {@code resultName} from the first output row, or {@code null} when the
   * decision produced no result.
   *
   * @param context    decision input variables
   * @param resultName name of the output column to extract
   * @return the first output value for {@code resultName}, or null
   */
  @SuppressWarnings("unchecked")
  public <T> T evaluateSingleResult(final Map<String, Object> context, String resultName) {
    final DmnDecisionResult result = dmnEngine.evaluate(decision, context);
    if (result != null && !result.isEmpty()) {
      final DmnDecisionOutput output = result.get(0);
      return (T) output.get(resultName);
    }
    return null;
  }

  /**
   * Determines candidate groups for a task by evaluating the routing decision
   * over the task's {@code customerStatus}, {@code aprovalSum} and
   * {@code customerCode} variables.
   *
   * @param task the task carrying the routing input variables
   * @return candidate result with zero or one candidate group
   * @throws IllegalArgumentException if the required sum variable is not set
   */
  @Override
  public CandidateResult evaluate(final DelegateTask task) {
    final CandidateResult candidateResult = new CandidateResult();
    final Map<String, Object> context = new HashMap<String, Object>();

    CustomerStatus c = null;
    String customerStatus = (String) task.getVariable("customerStatus");
    if (customerStatus != null) {
      c = CustomerStatus.valueOf(customerStatus);
    }

    // NOTE(review): "aprovalSum" looks like a typo of "approvalSum", but it must
    // match the process variable name in the BPMN model — confirm before renaming.
    Object approvalSumVariable = task.getVariable("aprovalSum");
    if (approvalSumVariable == null) {
      // Previously this threw an opaque NullPointerException on unboxing.
      throw new IllegalArgumentException("Required process variable 'aprovalSum' is not set");
    }
    // Accept any numeric variable type (Double, Long, Integer, ...), not only Double.
    double approvalSum = ((Number) approvalSumVariable).doubleValue();

    String customerCode = (String) task.getVariable("customerCode");
    context.put("sheet", new ApprovalSheet(customerCode, approvalSum, c));

    String candidateGroup = evaluateSingleResult(context, "group");
    if (candidateGroup != null) {
      candidateResult.getCandidateGroups().add(candidateGroup);
    }
    logger.info("Candidate group: {}", candidateGroup);
    return candidateResult;
  }
}
|
package cz.cuni.mff.odcleanstore.fusiontool;
import cz.cuni.mff.odcleanstore.fusiontool.config.ConfigParameters;
import cz.cuni.mff.odcleanstore.fusiontool.config.DataSourceConfigImpl;
import cz.cuni.mff.odcleanstore.fusiontool.config.EnumDataSourceType;
import cz.cuni.mff.odcleanstore.fusiontool.config.LDFTConfigConstants;
import cz.cuni.mff.odcleanstore.fusiontool.config.SparqlRestrictionImpl;
import cz.cuni.mff.odcleanstore.fusiontool.exceptions.LDFusionToolException;
import cz.cuni.mff.odcleanstore.fusiontool.io.RepositoryFactory;
import cz.cuni.mff.odcleanstore.fusiontool.loaders.data.AllTriplesRepositoryLoader;
import cz.cuni.mff.odcleanstore.fusiontool.source.DataSource;
import cz.cuni.mff.odcleanstore.fusiontool.source.DataSourceImpl;
import cz.cuni.mff.odcleanstore.fusiontool.util.LDFusionToolUtils;
import org.openrdf.model.Statement;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.RDFHandler;
import org.openrdf.rio.RDFHandlerException;
import org.openrdf.rio.RDFWriter;
import org.openrdf.rio.Rio;
import org.openrdf.rio.helpers.RDFHandlerBase;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.Charset;
import java.util.Collections;
import java.util.zip.Deflater;
import java.util.zip.GZIPOutputStream;
/**
 * Standalone tool that downloads all triples of a fixed named graph from a
 * SPARQL endpoint and writes them to {@link #OUTPUT_FILE} in N-Triples format
 * (gzip-compressed when the file name ends with ".gz"). On endpoint errors the
 * download resumes from the number of quads already written, retrying up to
 * {@link #MAX_RETRY_ATTEMPTS} times.
 */
public class SparqlDumpDownloader {
    private static final Logger LOG = LoggerFactory.getLogger(SparqlDumpDownloader.class);

    /** Target dump file; a ".gz" suffix switches on gzip compression. */
    public static final File OUTPUT_FILE = new File("h:\\skola\\PhD\\proj\\vse-vestnik\\dump\\vestnik-dump3.n3");
    public static final RDFFormat FILE_SERIALIZATION = RDFFormat.NTRIPLES;
    public static final String SPARQL_ENDPOINT = "http://lod2-dev.vse.cz:8890/sparql";
    /** SPARQL restriction selecting the single graph to dump. */
    public static final String NAMED_GRAPH_RESTRICTION = "FILTER(?g = <http://linked.opendata.cz/resource/dataset/vestnikverejnychzakazek.cz>)";
    public static final String NAMED_GRAPH_RESTRICTION_VAR = "g";
    /** Page size for each SPARQL query. */
    public static final int SPARQL_RESULT_MAX_ROWS = 9_000;
    /** Minimum delay between successive queries, in milliseconds. */
    public static final int SPARQL_MIN_QUERY_INTERVAL = 2_000;
    /** Pause before retrying after an error, in milliseconds. */
    public static final int ERROR_RETRY_INTERVAL = 20_000;
    public static final int MAX_RETRY_ATTEMPTS = 1_000;
    /** Offset of the first quad to download (for manual resumption). */
    public static final int INITIAL_OFFSET = 0;

    public static void main(String[] args) throws Exception {
        boolean useGZip = OUTPUT_FILE.getName().endsWith(".gz");
        Writer outputWriter = createFileWriter(OUTPUT_FILE, useGZip);
        try {
            DataSourceConfigImpl dataSourceConfig = createDataSourceConfig();
            DataSource dataSource = DataSourceImpl.fromConfig(
                    dataSourceConfig,
                    Collections.<String, String>emptyMap(),
                    new RepositoryFactory(LDFTConfigConstants.DEFAULT_FILE_PARSER_CONFIG));

            RDFWriter rdfWriter = Rio.createWriter(FILE_SERIALIZATION, outputWriter);
            rdfWriter.startRDF();
            RDFWriterWrapper rdfHandler = new RDFWriterWrapper(rdfWriter);
            long retryAttempts = 0;
            boolean finished = false;
            while (!finished) {
                // Resume from the quads already written so a retry does not duplicate data.
                // NOTE(review): the cast assumes the quad count fits in an int — confirm for very large dumps.
                int offset = (int) rdfHandler.getCounter() + INITIAL_OFFSET;
                try {
                    loadQuads(dataSource, offset, rdfHandler);
                    finished = true;
                } catch (LDFusionToolException e) {
                    if (retryAttempts < MAX_RETRY_ATTEMPTS) {
                        retryAttempts++;
                        LOG.error("Error loading triples: " + e.getMessage(), e);
                        LOG.info(String.format("An error occurred, retry # %d in %d s", retryAttempts, ERROR_RETRY_INTERVAL / 1_000));
                        Thread.sleep(ERROR_RETRY_INTERVAL);
                    } else {
                        throw e;
                    }
                }
            }
            rdfWriter.endRDF();
        } catch (Exception e) {
            // Log through the configured logger instead of printStackTrace() so
            // failures land in the same sink as the progress messages.
            LOG.error("Dump download failed: " + e.getMessage(), e);
        } finally {
            // createFileWriter either returns a writer or throws, so no null check is needed.
            outputWriter.close();
        }
    }

    /**
     * Streams all triples from the data source into {@code rdfHandler},
     * starting at {@code initialOffset}. Always closes the loader.
     *
     * @throws LDFusionToolException on endpoint/transport errors (caller retries)
     */
    private static void loadQuads(DataSource dataSource, int initialOffset, RDFWriterWrapper rdfHandler) throws LDFusionToolException {
        AllTriplesRepositoryLoader loader = new AllTriplesRepositoryLoader(dataSource);
        loader.setInitialOffset(initialOffset);
        try {
            loader.loadAllTriples(rdfHandler);
        } finally {
            loader.close();
        }
    }

    /** Builds the SPARQL data-source configuration (endpoint, graph restriction, throttling). */
    private static DataSourceConfigImpl createDataSourceConfig() {
        DataSourceConfigImpl dataSourceConfig = new DataSourceConfigImpl(EnumDataSourceType.SPARQL, "");
        dataSourceConfig.setNamedGraphRestriction(new SparqlRestrictionImpl(NAMED_GRAPH_RESTRICTION, NAMED_GRAPH_RESTRICTION_VAR));
        dataSourceConfig.getParams().put(ConfigParameters.DATA_SOURCE_SPARQL_ENDPOINT, SPARQL_ENDPOINT);
        dataSourceConfig.getParams().put(ConfigParameters.DATA_SOURCE_SPARQL_MIN_QUERY_INTERVAL, Integer.toString(SPARQL_MIN_QUERY_INTERVAL));
        dataSourceConfig.getParams().put(ConfigParameters.DATA_SOURCE_SPARQL_RESULT_MAX_ROWS, Integer.toString(SPARQL_RESULT_MAX_ROWS));
        return dataSourceConfig;
    }

    /**
     * Opens a buffered character writer over {@code file}, optionally wrapping
     * the stream in gzip compression tuned for speed.
     */
    private static Writer createFileWriter(File file, boolean useGzip) throws IOException {
        OutputStream outputStream = new FileOutputStream(file);
        if (useGzip) {
            // Favor throughput over compression ratio for these large dumps.
            outputStream = new GZIPOutputStream(outputStream, 2048) {
                {
                    this.def.setLevel(Deflater.BEST_SPEED);
                }
            };
        }
        return new BufferedWriter(new OutputStreamWriter(outputStream, Charset.defaultCharset()));
    }

    /**
     * RDF handler that delegates to an {@link RDFWriter} while counting the
     * statements written (used for resumption) and logging progress every
     * 100,000 statements.
     */
    private static class RDFWriterWrapper extends RDFHandlerBase implements RDFHandler {
        private final RDFWriter writer;
        private long counter = 0;
        private final long startTime;

        public RDFWriterWrapper(RDFWriter writer) {
            this.writer = writer;
            this.startTime = System.currentTimeMillis();
        }

        /** Number of statements written so far. */
        public long getCounter() {
            return counter;
        }

        @Override
        public void handleNamespace(String prefix, String uri) throws RDFHandlerException {
            this.writer.handleNamespace(prefix, uri);
        }

        @Override
        public void handleStatement(Statement st) throws RDFHandlerException {
            this.writer.handleStatement(st);
            counter++;
            if (counter % 100_000 == 0) {
                String time = LDFusionToolUtils.formatTime(System.currentTimeMillis() - startTime);
                LOG.info(String.format("Stored %,d quads in %s (last %s)\n", counter, time, st));
            }
        }
    }
}
|
package org.cinchapi.concourse.server.cli;
import org.cinchapi.concourse.server.jmx.ConcourseServerMXBean;
import com.beust.jcommander.Parameter;
import com.google.common.base.Strings;
import com.google.common.base.Throwables;
/**
* A management CLI to add/modify/remove user access to the server.
*
* @author jnelson
*/
public class ManageUsersCli extends ManagedOperationCli {

    /**
     * Run the program...
     *
     * @param args
     */
    public static void main(String... args) {
        ManageUsersCli cli = new ManageUsersCli(args);
        cli.run();
    }

    /**
     * Construct a new instance.
     *
     * @param args
     */
    public ManageUsersCli(String[] args) {
        super(new MyOptions(), args);
    }

    @Override
    protected void doTask(ConcourseServerMXBean bean) {
        MyOptions opts = (MyOptions) options;
        try {
            if(opts.grant) {
                System.out
                        .println("WARNING: Option --grant is being deprecated,"
                                + " and replaced by options --add-user and --edit-user.");
                System.out.println("What is the username you want "
                        + "to add or modify?");
                byte[] username = console.readLine("").getBytes();
                System.out.println("What is the new password for this user?");
                byte[] password = console.readLine('*').getBytes();
                bean.grant(username, password);
                System.out.println("Consider it done.");
            }
            else if(opts.revoke) {
                System.out
                        .println("WARNING: Option --revoke is being deprecated,"
                                + " and replaced by option --delete-user.");
                System.out.println("What is the username you want to delete?");
                byte[] username = console.readLine("").getBytes();
                bean.revoke(username);
                System.out.println("Consider it done.");
            }
            else if(!Strings.isNullOrEmpty(opts.addingUsername)) {
                // Adding: an already-existing user effectively becomes an edit.
                upsertUser(bean, opts, opts.addingUsername, false);
            }
            else if(!Strings.isNullOrEmpty(opts.editingUsername)) {
                // Editing: a missing user effectively becomes an add.
                upsertUser(bean, opts, opts.editingUsername, true);
            }
            else if(!Strings.isNullOrEmpty(opts.deletingUsername)) {
                if(!bean.hasUser(opts.deletingUsername.getBytes())) {
                    System.out.println(opts.deletingUsername
                            + " does not exist.");
                }
                else {
                    bean.revoke(opts.deletingUsername.getBytes());
                    System.out.println("Consider it done.");
                }
            }
            else {
                parser.usage();
            }
        }
        catch (Exception e) {
            throw Throwables.propagate(e);
        }
    }

    /**
     * Shared flow for --add-user and --edit-user (previously duplicated): warn
     * when the user's existence does not match the expected state, prompt twice
     * for a password when one was not supplied on the command line, then grant
     * the credentials.
     *
     * @param bean the management bean used to query and grant access
     * @param opts parsed CLI options (the password may be filled in here)
     * @param username the user to add or edit
     * @param expectExisting true for --edit-user, false for --add-user
     * @throws SecurityException if the two password prompts do not match
     */
    private void upsertUser(ConcourseServerMXBean bean, MyOptions opts,
            String username, boolean expectExisting) throws Exception {
        boolean exists = bean.hasUser(username.getBytes());
        if(exists != expectExisting) {
            // Existence mismatch: pause so the operator can abort or continue
            // with the operation that actually applies.
            String state = exists ? " already exists. " : " does not exist. ";
            String verb = exists ? "editing" : "adding";
            console.readLine(username + state
                    + "Use CTRL-C to terminate or press RETURN to "
                    + "continue " + verb + " this user.");
        }
        if(Strings.isNullOrEmpty(opts.newPassword)) {
            opts.newPassword = console.readLine("Password for "
                    + username + " : ", '*');
            String reEnteredPassword = console.readLine(
                    "Re-enter password : ", '*');
            if(!opts.newPassword.equals(reEnteredPassword)) {
                String pastTense = expectExisting ? "edited" : "added";
                throw new SecurityException(
                        "Not the same password. This"
                                + " user has not been " + pastTense + ".");
            }
        }
        bean.grant(username.getBytes(), opts.newPassword.getBytes());
        System.out.println("Consider it done.");
    }

    /**
     * The options that can be passed to the main method of this script.
     *
     * @author jnelson
     */
    private static class MyOptions extends Options {
        @Parameter(names = { "-g", "--grant" }, description = "[DEPRECATED] Add a new user or change the password for an existing user. ")
        public boolean grant = false;
        @Parameter(names = { "-r", "--revoke" }, description = "[DEPRECATED] Remove an existing user")
        public boolean revoke = false;
        @Parameter(names = { "-a", "--add-user" }, description = "Username of new user to add.")
        public String addingUsername;
        @Parameter(names = { "-e", "--edit-user" }, description = "Username of existing user to edit.")
        public String editingUsername;
        @Parameter(names = { "-d", "--delete-user" }, description = "Username of existing user to delete.")
        public String deletingUsername;
        @Parameter(names = { "-np", "--new-password" }, description = "Password of new user to add/edit.")
        public String newPassword;
    }
}
|
package com.photoselector.controller;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import android.content.ContentResolver;
import android.content.Context;
import android.database.Cursor;
import android.os.Handler;
import android.os.Message;
import android.provider.MediaStore.Images.ImageColumns;
import android.provider.MediaStore.Images.Media;
import com.synconset.FakeR;
import com.photoselector.model.AlbumModel;
import com.photoselector.model.PhotoModel;
/**
 * Queries the Android MediaStore for device photos and albums.
 * <p>
 * All query methods walk the cursor from its last row backwards, so results
 * are returned newest-first, and zero-byte entries are skipped. Cursors are
 * now always closed (the original implementation leaked every cursor).
 */
public class AlbumController {

    /** Row-count threshold after which the first chunk is posted to the handler. */
    private static final int CHUNK_SIZE = 300;

    private FakeR fakeR;
    private ContentResolver resolver;
    // Localized label for the synthetic "recent photos" album; resolved per
    // instance because a Context is needed to look up the string resource.
    public static String RECCENT_PHOTO = null;

    public AlbumController(Context context) {
        resolver = context.getContentResolver();
        fakeR = new FakeR(context);
        RECCENT_PHOTO = context.getResources().getString(fakeR.getId("string", "recent_photos"));
    }

    /** Returns all photos on external storage, newest first. */
    public List<PhotoModel> getCurrent() {
        Cursor cursor = resolver.query(Media.EXTERNAL_CONTENT_URI, new String[] { ImageColumns.DATA,
                ImageColumns.DATE_ADDED, ImageColumns.SIZE }, null, null, ImageColumns.DATE_ADDED);
        try {
            return readPhotos(cursor);
        } finally {
            closeQuietly(cursor);
        }
    }

    /**
     * Returns all photos on external storage, delivering them to {@code handler}
     * in chunks (see {@link #readPhotosChunked}). The returned list is always
     * empty; actual results arrive via handler messages.
     */
    public List<PhotoModel> getCurrentNew(Handler handler) {
        Cursor cursor = resolver.query(Media.EXTERNAL_CONTENT_URI, new String[] { ImageColumns.DATA,
                ImageColumns.DATE_ADDED, ImageColumns.SIZE }, null, null, ImageColumns.DATE_ADDED);
        try {
            return readPhotosChunked(cursor, handler);
        } finally {
            closeQuietly(cursor);
        }
    }

    /**
     * Returns the albums on the device, newest photo first, prefixed with a
     * synthetic "recent photos" album that counts every valid photo.
     */
    public List<AlbumModel> getAlbums() {
        List<AlbumModel> albums = new ArrayList<AlbumModel>();
        Map<String, AlbumModel> map = new HashMap<String, AlbumModel>();
        Cursor cursor = resolver.query(Media.EXTERNAL_CONTENT_URI, new String[] { ImageColumns.DATA,
                ImageColumns.BUCKET_DISPLAY_NAME, ImageColumns.SIZE }, null, null, null);
        try {
            if (cursor == null || !cursor.moveToNext())
                return new ArrayList<AlbumModel>();
            cursor.moveToLast();
            AlbumModel current = new AlbumModel(RECCENT_PHOTO, 0, cursor.getString(cursor.getColumnIndex(ImageColumns.DATA)), true);
            albums.add(current);
            do {
                if (cursor.getInt(cursor.getColumnIndex(ImageColumns.SIZE)) < 1) // originally 1024 * 10
                    continue; // continue still evaluates moveToPrevious() in the loop condition
                current.increaseCount();
                String name = cursor.getString(cursor.getColumnIndex(ImageColumns.BUCKET_DISPLAY_NAME));
                // Single map lookup instead of keySet().contains(name) + get(name).
                AlbumModel album = map.get(name);
                if (album != null) {
                    album.increaseCount();
                }
                else {
                    album = new AlbumModel(name, 1, cursor.getString(cursor.getColumnIndex(ImageColumns.DATA)));
                    map.put(name, album);
                    albums.add(album);
                }
            } while (cursor.moveToPrevious());
            return albums;
        } finally {
            closeQuietly(cursor);
        }
    }

    /** Returns all photos in the named album (bucket), newest first. */
    public List<PhotoModel> getAlbum(String name) {
        Cursor cursor = resolver.query(Media.EXTERNAL_CONTENT_URI, new String[] { ImageColumns.BUCKET_DISPLAY_NAME,
                ImageColumns.DATA, ImageColumns.DATE_ADDED, ImageColumns.SIZE }, "bucket_display_name = ?",
                new String[] { name }, ImageColumns.DATE_ADDED);
        try {
            return readPhotos(cursor);
        } finally {
            closeQuietly(cursor);
        }
    }

    /**
     * Returns all photos in the named album, delivering them to {@code handler}
     * in chunks. The returned list is always empty; results arrive via handler
     * messages.
     */
    public List<PhotoModel> getAlbumNew(String name, Handler handler) {
        Cursor cursor = resolver.query(Media.EXTERNAL_CONTENT_URI, new String[] { ImageColumns.BUCKET_DISPLAY_NAME,
                ImageColumns.DATA, ImageColumns.DATE_ADDED, ImageColumns.SIZE }, "bucket_display_name = ?",
                new String[] { name }, ImageColumns.DATE_ADDED);
        try {
            return readPhotosChunked(cursor, handler);
        } finally {
            closeQuietly(cursor);
        }
    }

    /** Closes a cursor, tolerating null. Fixes the cursor leak in the original code. */
    private static void closeQuietly(Cursor cursor) {
        if (cursor != null)
            cursor.close();
    }

    /** Reads every valid photo row, newest first; empty list for a null/empty cursor. */
    private List<PhotoModel> readPhotos(Cursor cursor) {
        List<PhotoModel> photos = new ArrayList<PhotoModel>();
        if (cursor == null || !cursor.moveToNext())
            return photos;
        cursor.moveToLast();
        do {
            addPhotoIfValid(cursor, photos);
        } while (cursor.moveToPrevious());
        return photos;
    }

    /**
     * Reads photo rows newest-first, posting the first {@link #CHUNK_SIZE} rows
     * to the handler as soon as they are read and the remainder in a single
     * final message. Only one intermediate chunk is ever sent — the original
     * {@code sent}-flag behavior is preserved. {@code msg.arg1} carries the
     * chunk index.
     */
    private List<PhotoModel> readPhotosChunked(Cursor cursor, Handler handler) {
        if (cursor == null || !cursor.moveToNext())
            return new ArrayList<PhotoModel>();
        List<PhotoModel> photos = new ArrayList<PhotoModel>();
        cursor.moveToLast();
        int rowsSinceSend = 0;
        int chunkIndex = 0;
        boolean sent = false;
        do {
            addPhotoIfValid(cursor, photos);
            if (++rowsSinceSend > CHUNK_SIZE && !sent) {
                rowsSinceSend = 0;
                sendChunk(handler, photos, chunkIndex);
                photos = new ArrayList<PhotoModel>();
                chunkIndex++;
                sent = true;
            }
        } while (cursor.moveToPrevious());
        sendChunk(handler, photos, chunkIndex);
        return new ArrayList<PhotoModel>();
    }

    /** Appends the current row to {@code photos} unless its size column is zero/negative. */
    private void addPhotoIfValid(Cursor cursor, List<PhotoModel> photos) {
        if (cursor.getLong(cursor.getColumnIndex(ImageColumns.SIZE)) > 0) { // originally 1024 * 10
            PhotoModel photoModel = new PhotoModel();
            photoModel.setOriginalPath(cursor.getString(cursor.getColumnIndex(ImageColumns.DATA)));
            photos.add(photoModel);
        }
    }

    /** Posts the accumulated photos to the handler; msg.arg1 is the chunk index. */
    private void sendChunk(Handler handler, List<PhotoModel> photos, int chunkIndex) {
        Message msg = new Message();
        msg.obj = photos;
        msg.arg1 = chunkIndex;
        handler.sendMessage(msg);
    }
}
|
package com.diamondq.common.config.core.std;
import com.diamondq.common.config.Config;
import com.diamondq.common.config.core.StandardSetup;
import com.diamondq.common.config.core.impl.BootstrapConfigImpl;
import com.diamondq.common.config.model.BootstrapSetupConfig;
import com.diamondq.common.config.model.BootstrapSetupConfig.Builder;
import com.diamondq.common.config.model.BootstrapSetupConfigHolder;
import com.diamondq.common.config.spi.BootstrapConfigSourceFactory;
import com.diamondq.common.config.spi.ConfigClassBuilder;
import com.diamondq.common.config.spi.ConfigNodeResolver;
import com.diamondq.common.config.spi.ConfigParser;
import com.diamondq.common.config.spi.ConfigSourceFactoryFactory;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.ServiceLoader;
import java.util.Set;
import javax.inject.Singleton;
import io.micronaut.context.annotation.Factory;
/**
 * The standard bootstrap algorithm. Used in most cases.
 * <p>
 * Subclasses can override the protected hooks to supply additional sources,
 * resolvers, builders or parsers, or to change where the environment/profile
 * identifiers come from.
 */
@Factory
public class StandardBootstrap {

    /**
     * Default constructor
     */
    public StandardBootstrap() {
    }

    /**
     * Generate the bootstrap config
     *
     * @return the config
     */
    @Singleton
    public Config bootstrap() {
        BootstrapSetupConfigHolder holder = new BootstrapSetupConfigHolder();
        String[] profiles = getProfiles().split(",");

        Collection<ConfigParser> parsers = getParsers();
        Collection<ConfigClassBuilder> classBuilders = getClassBuilders();
        Collection<ConfigNodeResolver> nodeResolvers = getNodeResolvers();
        Collection<BootstrapConfigSourceFactory> factories = getBootstrapSources();

        // Collect every file extension the parsers understand; these determine
        // which bootstrap config files are discovered.
        List<String> extensions = new ArrayList<>();
        for (ConfigParser p : parsers)
            extensions.addAll(p.getFileExtensions());

        // Standard sources first, then any subclass-provided sources.
        List<BootstrapConfigSourceFactory> bootstrapSources = new ArrayList<>(
            StandardSetup.getStandardBootstrapSources(getFactoryFactory(), extensions, holder, getAppId()));
        bootstrapSources.addAll(factories);

        Builder builder = BootstrapSetupConfig.builder().environment(getEnvironment()).addProfiles(profiles);
        builder = builder.addAllNodeResolvers(nodeResolvers).addAllClassBuilders(classBuilders)
            .addAllBootstrapSources(bootstrapSources).addAllParsers(parsers);
        BootstrapSetupConfig build = builder.build();
        holder.value = build;

        BootstrapConfigImpl impl = new BootstrapConfigImpl(build);
        impl.setLocale(getDefaultLocale());
        Set<String> filterSet = new HashSet<>();
        filterSet.add(".application");
        filterSet.add(".web");
        return impl.bootstrapConfig(filterSet);
    }

    /** Additional bootstrap sources; none by default. */
    protected Collection<BootstrapConfigSourceFactory> getBootstrapSources() {
        return Collections.emptyList();
    }

    protected Collection<ConfigNodeResolver> getNodeResolvers() {
        return StandardSetup.getStandardNodeResolvers();
    }

    protected Collection<ConfigClassBuilder> getClassBuilders() {
        return StandardSetup.getStandardClassBuilders();
    }

    /** Standard parsers plus any discovered via {@link ServiceLoader}. */
    protected Collection<ConfigParser> getParsers() {
        Collection<ConfigParser> results = new ArrayList<>(StandardSetup.getStandardParsers());
        ServiceLoader<ConfigParser> loader = ServiceLoader.load(ConfigParser.class);
        for (ConfigParser cp : loader)
            results.add(cp);
        return results;
    }

    /** Environment name from the "application.environment" system property; "" when unset. */
    protected String getEnvironment() {
        return System.getProperty("application.environment", "");
    }

    /** Comma-separated profile list from the "application.profiles" system property; "" when unset. */
    protected String getProfiles() {
        return System.getProperty("application.profiles", "");
    }

    /** Application id from the "application.name" system property; "" when unset. */
    protected String getAppId() {
        return System.getProperty("application.name", "");
    }

    protected Locale getDefaultLocale() {
        return Locale.getDefault();
    }

    protected ConfigSourceFactoryFactory getFactoryFactory() {
        return new CoreFactoryFactory();
    }
}
|
package org.rstudio.studio.client.workbench.views.packages.ui;
import java.util.ArrayList;
import java.util.List;
import com.google.gwt.aria.client.DialogRole;
import org.rstudio.core.client.ElementIds;
import org.rstudio.core.client.cellview.AriaLabeledCheckboxCell;
import org.rstudio.core.client.cellview.LabeledBoolean;
import org.rstudio.core.client.widget.ModalDialog;
import org.rstudio.core.client.widget.Operation;
import org.rstudio.core.client.widget.OperationWithInput;
import org.rstudio.core.client.widget.ThemedButton;
import org.rstudio.studio.client.common.SimpleRequestCallback;
import org.rstudio.studio.client.server.ServerDataSource;
import org.rstudio.studio.client.server.ServerError;
import com.google.gwt.cell.client.FieldUpdater;
import com.google.gwt.core.client.GWT;
import com.google.gwt.core.client.JavaScriptObject;
import com.google.gwt.core.client.JsArray;
import com.google.gwt.dom.client.Style.Unit;
import com.google.gwt.resources.client.ClientBundle;
import com.google.gwt.resources.client.CssResource;
import com.google.gwt.user.cellview.client.CellTable;
import com.google.gwt.user.cellview.client.Column;
import com.google.gwt.user.cellview.client.HasKeyboardSelectionPolicy.KeyboardSelectionPolicy;
import com.google.gwt.user.client.ui.FlowPanel;
import com.google.gwt.user.client.ui.Label;
import com.google.gwt.user.client.ui.ScrollPanel;
import com.google.gwt.user.client.ui.Widget;
import com.google.gwt.view.client.ListDataProvider;
import com.google.gwt.view.client.NoSelectionModel;
/**
 * Modal dialog that asks the user to confirm a set of pending package actions
 * (e.g. updates) fetched asynchronously from the server. Each action row has a
 * checkbox; OK is enabled only while at least one action is checked.
 *
 * @param <T> the server-side action type backing each row
 */
public abstract class PackageActionConfirmationDialog<T extends JavaScriptObject> extends ModalDialog<ArrayList<T>>
{
// Buttons start disabled; they are enabled once the server responds with actions.
public PackageActionConfirmationDialog(
String caption,
String okCaption,
DialogRole role,
ServerDataSource<JsArray<T>> actionsDS,
OperationWithInput<ArrayList<T>> checkOperation,
Operation cancelOperation)
{
super(caption, role, checkOperation, cancelOperation);
actionsDS_ = actionsDS;
setOkButtonCaption(okCaption);
addLeftButton(selectAllButton_ = new ThemedButton("Select All",
event -> setGlobalPerformAction("Select All", true)), ElementIds.SELECT_ALL_BUTTON);
selectAllButton_.getElement().getStyle().setMarginRight(10, Unit.PX);
addLeftButton(selectNoneButton_ = new ThemedButton("Select None",
event -> setGlobalPerformAction("Select None", false)), ElementIds.SELECT_NONE_BUTTON);
enableOkButton(false);
selectAllButton_.setEnabled(false);
selectNoneButton_.setEnabled(false);
}
// Collects only the actions whose checkbox is currently checked.
@Override
protected ArrayList<T> collectInput()
{
ArrayList<T> actions = new ArrayList<T>();
for (PendingAction action : actionsDataProvider_.getList())
{
if (action.getPerformAction().getBool())
actions.add(action.getActionInfo());
}
return actions;
}
// Valid only when at least one action is selected.
@Override
protected boolean validate(ArrayList<T> input)
{
return input.size() > 0;
}
@Override
protected Widget createMainWidget()
{
FlowPanel flowPanel = new FlowPanel();
String explanatoryText = getExplanatoryText();
if (explanatoryText.length() > 0)
{
Label text = new Label(explanatoryText);
text.setStylePrimaryName(RESOURCES.styles().explanatoryText());
flowPanel.add(text);
}
actionsTable_ = new CellTable<>(
15,
GWT.<PackagesCellTableResources> create(PackagesCellTableResources.class));
actionsTable_.setKeyboardSelectionPolicy(KeyboardSelectionPolicy.DISABLED);
actionsTable_.setSelectionModel(new NoSelectionModel<>());
actionsTable_.setWidth("100%", true);
// Checkbox column comes first, then subclass-specific columns.
ActionColumn actionColumn = new ActionColumn();
actionsTable_.addColumn(actionColumn);
actionsTable_.setColumnWidth(actionColumn, 30, Unit.PX);
addTableColumns(actionsTable_);
ScrollPanel scrollPanel = new ScrollPanel();
scrollPanel.setWidget(actionsTable_);
scrollPanel.setStylePrimaryName(RESOURCES.styles().mainWidget());
flowPanel.add(scrollPanel);
// query for updates
// NOTE: actionsDataProvider_ is only initialized inside this callback, so the
// dialog must not allow OK/Select-All interaction before the response arrives
// (the buttons start disabled and are enabled here).
actionsDS_.requestData(new SimpleRequestCallback<JsArray<T>>() {
@Override
public void onResponseReceived(JsArray<T> actions)
{
if (actions != null && actions.length() > 0)
{
// Every action starts unchecked; the label comes from the subclass.
ArrayList<PendingAction> pendingActions = new ArrayList<>();
for (int i=0; i<actions.length(); i++)
pendingActions.add(new PendingAction(actions.get(i),
new LabeledBoolean(getActionName(actions.get(i)), false)));
actionsTable_.setPageSize(pendingActions.size());
actionsDataProvider_ = new ListDataProvider<>();
actionsDataProvider_.setList(pendingActions);
actionsDataProvider_.addDataDisplay(actionsTable_);
selectAllButton_.setEnabled(true);
selectNoneButton_.setEnabled(true);
refreshFocusableElements();
focusInitialControl();
}
else
{
// Nothing to confirm: dismiss and let the subclass explain.
closeDialog();
showNoActionsRequired();
}
}
@Override
public void onError(ServerError error)
{
closeDialog();
super.onError(error);
}
});
return flowPanel;
}
// Optional text shown above the table; empty by default.
protected String getExplanatoryText()
{
return "";
}
// Invoked (after closing) when the server reports no pending actions.
protected abstract void showNoActionsRequired();
// Subclasses append their data columns after the checkbox column.
protected abstract void addTableColumns(CellTable<PendingAction> table);
// Accessible label for an action's checkbox.
protected abstract String getActionName(T action);
/**
 * Checkbox column toggling whether a pending action will be performed.
 * PendingAction is immutable, so toggling replaces the row's entry.
 */
class ActionColumn extends Column<PendingAction, LabeledBoolean>
{
public ActionColumn()
{
super(new AriaLabeledCheckboxCell(false, false));
setFieldUpdater(new FieldUpdater<PendingAction, LabeledBoolean>() {
public void update(int index, PendingAction action, LabeledBoolean value)
{
List<PendingAction> actions = actionsDataProvider_.getList();
actions.set(actions.indexOf(action), new PendingAction(action.getActionInfo(), value));
manageUIState();
}
});
}
@Override
public LabeledBoolean getValue(PendingAction update)
{
return update.getPerformAction();
}
}
/**
 * Immutable pairing of a server action with its current checked state.
 */
protected class PendingAction
{
public PendingAction(T actionInfo, LabeledBoolean performAction)
{
actionInfo_ = actionInfo;
performAction_ = performAction;
}
public T getActionInfo()
{
return actionInfo_;
}
public LabeledBoolean getPerformAction()
{
return performAction_;
}
private final T actionInfo_;
private final LabeledBoolean performAction_;
}
// Checks or unchecks every row (Select All / Select None) and refreshes OK state.
private void setGlobalPerformAction(String label, Boolean performAction)
{
List<PendingAction> actions = actionsDataProvider_.getList();
ArrayList<PendingAction> newActions = new ArrayList<PendingAction>();
for(PendingAction action : actions)
newActions.add(new PendingAction(action.getActionInfo(), new LabeledBoolean(label, performAction)));
actionsDataProvider_.setList(newActions);
manageUIState();
}
// OK is enabled only while at least one action is checked.
private void manageUIState()
{
enableOkButton(collectInput().size() > 0);
}
interface Styles extends CssResource
{
String mainWidget();
String explanatoryText();
}
interface Resources extends ClientBundle
{
@Source("PackageActionConfirmationDialog.css")
Styles styles();
}
static Resources RESOURCES = GWT.create(Resources.class);
public static void ensureStylesInjected()
{
RESOURCES.styles().ensureInjected();
}
// Populated asynchronously in createMainWidget()'s server callback.
private CellTable<PendingAction> actionsTable_;
private ServerDataSource<JsArray<T>> actionsDS_;
private ListDataProvider<PendingAction> actionsDataProvider_;
private ThemedButton selectAllButton_;
private ThemedButton selectNoneButton_;
}
|
package com.yahoo.vespa.model.container.http;
import com.yahoo.component.ComponentId;
import com.yahoo.component.ComponentSpecification;
import com.yahoo.container.bundle.BundleInstantiationSpecification;
import com.yahoo.jdisc.http.ServerConfig;
import com.yahoo.osgi.provider.model.ComponentModel;
import com.yahoo.vespa.model.container.ApplicationContainerCluster;
import com.yahoo.vespa.model.container.ContainerCluster;
import com.yahoo.vespa.model.container.component.SimpleComponent;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
 * Model component for the Jetty HTTP server, producing its {@link ServerConfig}.
 *
 * @author Einar M R Rosenvinge
 * @author bjorncs
 */
public class JettyHttpServer extends SimpleComponent implements ServerConfig.Producer {

    private final ContainerCluster<?> cluster;
    private volatile boolean isHostedVespa;
    private final List<ConnectorFactory> connectorFactories = new ArrayList<>();

    public JettyHttpServer(String componentId, ContainerCluster<?> cluster, boolean isHostedVespa) {
        super(new ComponentModel(componentId, com.yahoo.jdisc.http.server.jetty.JettyHttpServer.class.getName(), null));
        this.isHostedVespa = isHostedVespa;
        this.cluster = cluster;
        FilterBindingsProviderComponent bindingsProvider = new FilterBindingsProviderComponent(componentId);
        addChild(bindingsProvider);
        inject(bindingsProvider);
    }

    public void setHostedVespa(boolean isHostedVespa) { this.isHostedVespa = isHostedVespa; }

    public void addConnector(ConnectorFactory connectorFactory) {
        connectorFactories.add(connectorFactory);
        addChild(connectorFactory);
    }

    public List<ConnectorFactory> getConnectorFactories() {
        return Collections.unmodifiableList(connectorFactories);
    }

    @Override
    public void getConfig(ServerConfig.Builder builder) {
        builder.metric(new ServerConfig.Metric.Builder()
                .monitoringHandlerPaths(List.of("/state/v1", "/status.html"))
                .searchHandlerPaths(List.of("/search")));
        configureAccessAndConnectionLogging(builder);
        configureJettyThreadpool(builder);
    }

    private void configureAccessAndConnectionLogging(ServerConfig.Builder builder) {
        if (isHostedVespa) {
            // Proxy-protocol v1/v2 is used in hosted Vespa for remote address/port,
            // so no forwarding headers are consulted.
            builder.accessLog(new ServerConfig.AccessLog.Builder()
                    .remoteAddressHeaders(List.of())
                    .remotePortHeaders(List.of()));
            // Enable connection log for hosted Vespa
            builder.connectionLog(new ServerConfig.ConnectionLog.Builder().enabled(true));
        } else {
            // TODO Vespa 8: Remove legacy Yahoo headers
            builder.accessLog(new ServerConfig.AccessLog.Builder()
                    .remoteAddressHeaders(List.of("x-forwarded-for", "y-ra", "yahooremoteip", "client-ip"))
                    .remotePortHeaders(List.of("X-Forwarded-Port", "y-rp")));
        }
    }

    // -1 lets application clusters auto-size the worker pool; other clusters get a fixed small pool.
    private void configureJettyThreadpool(ServerConfig.Builder builder) {
        if (cluster == null) return;
        int workerThreads = (cluster instanceof ApplicationContainerCluster) ? -1 : 4;
        builder.minWorkerThreads(workerThreads).maxWorkerThreads(workerThreads);
    }

    // Builds a component model whose id is the class name nested under the parent's namespace.
    static ComponentModel providerComponentModel(String parentId, String className) {
        ComponentSpecification classSpec = new ComponentSpecification(className);
        ComponentSpecification idSpec = classSpec.nestInNamespace(new ComponentId(parentId));
        return new ComponentModel(new BundleInstantiationSpecification(idSpec, classSpec, null));
    }

    public static final class FilterBindingsProviderComponent extends SimpleComponent {
        public FilterBindingsProviderComponent(String parentId) {
            super(providerComponentModel(parentId, "com.yahoo.container.jdisc.FilterBindingsProvider"));
        }
    }
}
|
package org.hisp.dhis.android.core.program;
import android.content.ContentValues;
import android.database.Cursor;
import android.database.sqlite.SQLiteConstraintException;
import android.support.test.runner.AndroidJUnit4;
import org.hisp.dhis.android.core.category.CategoryComboModel;
import org.hisp.dhis.android.core.category.CreateCategoryComboUtils;
import org.hisp.dhis.android.core.common.BaseIdentifiableObject;
import org.hisp.dhis.android.core.data.database.AbsStoreTestCase;
import org.hisp.dhis.android.core.period.PeriodType;
import org.hisp.dhis.android.core.program.ProgramModel.Columns;
import org.hisp.dhis.android.core.relationship.CreateRelationshipTypeUtils;
import org.hisp.dhis.android.core.relationship.RelationshipTypeModel;
import org.hisp.dhis.android.core.trackedentity.CreateTrackedEntityUtils;
import org.hisp.dhis.android.core.trackedentity.TrackedEntityTypeModel;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.io.IOException;
import java.util.Date;
import static com.google.common.truth.Truth.assertThat;
import static org.hisp.dhis.android.core.AndroidTestUtils.toInteger;
import static org.hisp.dhis.android.core.data.database.CursorAssert.assertThatCursor;
/**
 * Instrumented CRUD tests for {@code ProgramStoreImpl} run against the SQLite
 * database provided by {@link AbsStoreTestCase}: insert (including deferred
 * foreign keys and nullable columns), update, delete, foreign-key cascade
 * deletes, and null-argument validation.
 */
@RunWith(AndroidJUnit4.class)
public class ProgramStoreShould extends AbsStoreTestCase {
// Full column projection used to read back every persisted Program field.
// Order must match the argument order of store.insert(...)/hasRow(...) below.
private static final String[] PROGRAM_PROJECTION = {
Columns.UID,
Columns.CODE,
Columns.NAME,
Columns.DISPLAY_NAME,
Columns.CREATED,
Columns.LAST_UPDATED,
Columns.SHORT_NAME,
Columns.DISPLAY_SHORT_NAME,
Columns.DESCRIPTION,
Columns.DISPLAY_DESCRIPTION,
Columns.VERSION,
Columns.ONLY_ENROLL_ONCE,
Columns.ENROLLMENT_DATE_LABEL,
Columns.DISPLAY_INCIDENT_DATE,
Columns.INCIDENT_DATE_LABEL,
Columns.REGISTRATION,
Columns.SELECT_ENROLLMENT_DATES_IN_FUTURE,
Columns.DATA_ENTRY_METHOD,
Columns.IGNORE_OVERDUE_EVENTS,
Columns.RELATIONSHIP_FROM_A,
Columns.SELECT_INCIDENT_DATES_IN_FUTURE,
Columns.CAPTURE_COORDINATES,
Columns.USE_FIRST_STAGE_DURING_REGISTRATION,
Columns.DISPLAY_FRONT_PAGE_LIST,
Columns.PROGRAM_TYPE,
Columns.RELATIONSHIP_TYPE,
Columns.RELATIONSHIP_TEXT,
Columns.RELATED_PROGRAM,
Columns.TRACKED_ENTITY_TYPE,
Columns.CATEGORY_COMBO,
Columns.ACCESS_DATA_WRITE,
Columns.EXPIRY_DAYS,
Columns.COMPLETE_EVENTS_EXPIRY_DAYS,
Columns.EXPIRY_PERIOD_TYPE
};
//BaseIdentifiableModel attributes:
private static final String UID = "test_uid";
private final static String UID2 = "second_test_program";
private static final String CODE = "test_code";
private static final String NAME = "test_name";
private static final String DISPLAY_NAME = "test_display_name";
//BaseNameableModel attributes:
private static final String SHORT_NAME = "test_program";
private static final String DISPLAY_SHORT_NAME = "test_program";
private static final String DESCRIPTION = "A test program for the integration tests.";
private static final String DISPLAY_DESCRIPTION = "A test program for the integration tests.";
//ProgramModel attributes:
private static final Integer VERSION = 1;
private static final Boolean ONLY_ENROLL_ONCE = true;
private static final String ENROLLMENT_DATE_LABEL = "enrollment date";
private static final Boolean DISPLAY_INCIDENT_DATE = true;
private static final String INCIDENT_DATE_LABEL = "incident date label";
private static final Boolean REGISTRATION = true;
private static final Boolean SELECT_ENROLLMENT_DATES_IN_FUTURE = true;
private static final Boolean DATA_ENTRY_METHOD = true;
private static final Boolean IGNORE_OVERDUE_EVENTS = false;
private static final Boolean RELATIONSHIP_FROM_A = true;
private static final Boolean SELECT_INCIDENT_DATES_IN_FUTURE = true;
private static final Boolean CAPTURE_COORDINATES = true;
private static final Boolean USE_FIRST_STAGE_DURING_REGISTRATION = true;
private static final Boolean DISPLAY_FRONT_PAGE_LIST = true;
private static final ProgramType PROGRAM_TYPE = ProgramType.WITH_REGISTRATION;
// Foreign-key fixtures; the *_ID values are the row ids of the seeded rows.
private static final Long RELATIONSHIP_TYPE_ID = 3L;
private static final String RELATIONSHIP_TYPE = "relationshipUid";
private static final String RELATIONSHIP_TEXT = "test relationship";
private static final String RELATED_PROGRAM = "RelatedProgramUid";
private static final Long TRACKED_ENTITY_ID = 4L;
private static final String TRACKED_ENTITY = "TrackedEntityUid";
private static final Long CATEGORY_COMBO_ID = 4L;
private static final String CATEGORY_COMBO = "CategoryComboUid";
private static final Boolean ACCESS_DATA_WRITE = true;
private final static Integer EXPIRY_DAYS = 7;
private final static Integer COMPLETE_EVENTS_EXPIRY_DAYS = 30;
private final static PeriodType EXPIRY_PERIOD_TYPE = PeriodType.Daily;
// Fixed timestamp shared by all tests; dateString is its persisted form.
private final Date date;
private final String dateString;
private ProgramStore store;
public ProgramStoreShould() {
this.date = new Date();
this.dateString = BaseIdentifiableObject.DATE_FORMAT.format(date);
}
// Seeds the rows that Program's foreign keys point at (relationship type,
// tracked entity type, category combo) so inserts with FKs can succeed.
@Before
@Override
public void setUp() throws IOException {
super.setUp();
this.store = new ProgramStoreImpl(databaseAdapter());
//RelationshipType foreign key corresponds to table entry
ContentValues relationshipType = CreateRelationshipTypeUtils.create(RELATIONSHIP_TYPE_ID, RELATIONSHIP_TYPE);
database().insert(RelationshipTypeModel.TABLE, null, relationshipType);
//TrackedEntityType foreign key corresponds to table entry
ContentValues trackedEntityType = CreateTrackedEntityUtils.create(TRACKED_ENTITY_ID, TRACKED_ENTITY);
database().insert(TrackedEntityTypeModel.TABLE, null, trackedEntityType);
ContentValues categoryCombo = CreateCategoryComboUtils.create(CATEGORY_COMBO_ID, CATEGORY_COMBO);
database().insert(CategoryComboModel.TABLE, null, categoryCombo);
}
// Inserting a fully populated program persists every column verbatim
// (booleans stored as integers, dates in DATE_FORMAT).
@Test
public void insert_program_in_data_base_when_insert() {
long rowId = store.insert(
UID,
CODE,
NAME,
DISPLAY_NAME,
date,
date,
SHORT_NAME,
DISPLAY_SHORT_NAME,
DESCRIPTION,
DISPLAY_DESCRIPTION,
VERSION,
ONLY_ENROLL_ONCE,
ENROLLMENT_DATE_LABEL,
DISPLAY_INCIDENT_DATE,
INCIDENT_DATE_LABEL,
REGISTRATION,
SELECT_ENROLLMENT_DATES_IN_FUTURE,
DATA_ENTRY_METHOD,
IGNORE_OVERDUE_EVENTS,
RELATIONSHIP_FROM_A,
SELECT_INCIDENT_DATES_IN_FUTURE,
CAPTURE_COORDINATES,
USE_FIRST_STAGE_DURING_REGISTRATION,
DISPLAY_FRONT_PAGE_LIST,
PROGRAM_TYPE,
RELATIONSHIP_TYPE,
RELATIONSHIP_TEXT,
null,
TRACKED_ENTITY,
CATEGORY_COMBO,
ACCESS_DATA_WRITE,
EXPIRY_DAYS,
COMPLETE_EVENTS_EXPIRY_DAYS,
EXPIRY_PERIOD_TYPE
);
Cursor cursor = database().query(ProgramModel.TABLE, PROGRAM_PROJECTION, null, null, null, null, null, null);
assertThat(rowId).isEqualTo(1L);
assertThatCursor(cursor).hasRow(
UID,
CODE,
NAME,
DISPLAY_NAME,
dateString,
dateString,
SHORT_NAME,
DISPLAY_SHORT_NAME,
DESCRIPTION,
DISPLAY_DESCRIPTION,
VERSION,
toInteger(ONLY_ENROLL_ONCE),
ENROLLMENT_DATE_LABEL,
toInteger(DISPLAY_INCIDENT_DATE),
INCIDENT_DATE_LABEL,
toInteger(REGISTRATION),
toInteger(SELECT_ENROLLMENT_DATES_IN_FUTURE),
toInteger(DATA_ENTRY_METHOD),
toInteger(IGNORE_OVERDUE_EVENTS),
toInteger(RELATIONSHIP_FROM_A),
toInteger(SELECT_INCIDENT_DATES_IN_FUTURE),
toInteger(CAPTURE_COORDINATES),
toInteger(USE_FIRST_STAGE_DURING_REGISTRATION),
toInteger(DISPLAY_FRONT_PAGE_LIST),
PROGRAM_TYPE,
RELATIONSHIP_TYPE,
RELATIONSHIP_TEXT,
null,
TRACKED_ENTITY,
CATEGORY_COMBO,
toInteger(ACCESS_DATA_WRITE),
EXPIRY_DAYS,
COMPLETE_EVENTS_EXPIRY_DAYS,
EXPIRY_PERIOD_TYPE
).isExhausted();
}
// Two programs that reference each other (RELATED_PROGRAM) can be inserted
// inside one transaction: FK checks are deferred until the commit.
@Test
public void insert_program_with_deferred_foreign_key_in_data_base_when_insert() {
final String deferredRelationshipTypeUid = "deferredRelationshipTypeUid";
final String deferredTrackedEntityUid = "deferredTrackedEntityUid";
final String deferredCategoryComboUid = "deferredCategoryComboUid";
//RelationshipType foreign key corresponds to table entry
ContentValues relationshipType = CreateRelationshipTypeUtils.create(2L, deferredRelationshipTypeUid);
database().insert(RelationshipTypeModel.TABLE, null, relationshipType);
//TrackedEntityType foreign key corresponds to table entry
ContentValues trackedEntityType = CreateTrackedEntityUtils.create(2L, deferredTrackedEntityUid);
database().insert(TrackedEntityTypeModel.TABLE, null, trackedEntityType);
ContentValues categoryCombo = CreateCategoryComboUtils.create(2L, deferredCategoryComboUid);
database().insert(CategoryComboModel.TABLE, null, categoryCombo);
database().beginTransaction();
long rowId = store.insert(
UID, CODE, NAME, DISPLAY_NAME, date,
date, SHORT_NAME, DISPLAY_SHORT_NAME, DESCRIPTION, DISPLAY_DESCRIPTION,
VERSION, ONLY_ENROLL_ONCE, ENROLLMENT_DATE_LABEL, DISPLAY_INCIDENT_DATE, INCIDENT_DATE_LABEL,
REGISTRATION, SELECT_ENROLLMENT_DATES_IN_FUTURE, DATA_ENTRY_METHOD, IGNORE_OVERDUE_EVENTS, RELATIONSHIP_FROM_A,
SELECT_INCIDENT_DATES_IN_FUTURE, CAPTURE_COORDINATES, USE_FIRST_STAGE_DURING_REGISTRATION, DISPLAY_FRONT_PAGE_LIST, PROGRAM_TYPE,
deferredRelationshipTypeUid, RELATIONSHIP_TEXT, UID2, deferredTrackedEntityUid, deferredCategoryComboUid,
ACCESS_DATA_WRITE, EXPIRY_DAYS, COMPLETE_EVENTS_EXPIRY_DAYS, EXPIRY_PERIOD_TYPE
);
long rowId2 = store.insert(
UID2, CODE, NAME, DISPLAY_NAME, date,
date, SHORT_NAME, DISPLAY_SHORT_NAME, DESCRIPTION, DISPLAY_DESCRIPTION,
VERSION, ONLY_ENROLL_ONCE, ENROLLMENT_DATE_LABEL, DISPLAY_INCIDENT_DATE, INCIDENT_DATE_LABEL,
REGISTRATION, SELECT_ENROLLMENT_DATES_IN_FUTURE, DATA_ENTRY_METHOD, IGNORE_OVERDUE_EVENTS, RELATIONSHIP_FROM_A,
SELECT_INCIDENT_DATES_IN_FUTURE, CAPTURE_COORDINATES, USE_FIRST_STAGE_DURING_REGISTRATION, DISPLAY_FRONT_PAGE_LIST, PROGRAM_TYPE,
RELATIONSHIP_TYPE, RELATIONSHIP_TEXT, UID, TRACKED_ENTITY, CATEGORY_COMBO, ACCESS_DATA_WRITE,
EXPIRY_DAYS, COMPLETE_EVENTS_EXPIRY_DAYS, EXPIRY_PERIOD_TYPE
);
database().setTransactionSuccessful();
database().endTransaction();
Cursor cursor = database().query(ProgramModel.TABLE, PROGRAM_PROJECTION, null, null, null, null, null, null);
assertThat(rowId).isEqualTo(1L);
assertThat(rowId2).isEqualTo(2L);
assertThatCursor(cursor).hasRow(
UID, CODE, NAME, DISPLAY_NAME, dateString,
dateString, SHORT_NAME, DISPLAY_SHORT_NAME, DESCRIPTION, DISPLAY_DESCRIPTION,
VERSION, toInteger(ONLY_ENROLL_ONCE), ENROLLMENT_DATE_LABEL, toInteger(DISPLAY_INCIDENT_DATE), INCIDENT_DATE_LABEL,
toInteger(REGISTRATION), toInteger(SELECT_ENROLLMENT_DATES_IN_FUTURE), toInteger(DATA_ENTRY_METHOD), toInteger(IGNORE_OVERDUE_EVENTS), toInteger(RELATIONSHIP_FROM_A),
toInteger(SELECT_INCIDENT_DATES_IN_FUTURE), toInteger(CAPTURE_COORDINATES), toInteger(USE_FIRST_STAGE_DURING_REGISTRATION), toInteger(DISPLAY_FRONT_PAGE_LIST), PROGRAM_TYPE, deferredRelationshipTypeUid, RELATIONSHIP_TEXT,
UID2, deferredTrackedEntityUid, deferredCategoryComboUid, toInteger(ACCESS_DATA_WRITE), EXPIRY_DAYS,
COMPLETE_EVENTS_EXPIRY_DAYS, EXPIRY_PERIOD_TYPE
);
assertThatCursor(cursor).hasRow(
UID2, CODE, NAME, DISPLAY_NAME, dateString,
dateString, SHORT_NAME, DISPLAY_SHORT_NAME, DESCRIPTION,
DISPLAY_DESCRIPTION, VERSION, toInteger(ONLY_ENROLL_ONCE), ENROLLMENT_DATE_LABEL, toInteger(DISPLAY_INCIDENT_DATE),
INCIDENT_DATE_LABEL, toInteger(REGISTRATION), toInteger(SELECT_ENROLLMENT_DATES_IN_FUTURE), toInteger(DATA_ENTRY_METHOD), toInteger(IGNORE_OVERDUE_EVENTS),
toInteger(RELATIONSHIP_FROM_A), toInteger(SELECT_INCIDENT_DATES_IN_FUTURE), toInteger(CAPTURE_COORDINATES), toInteger(USE_FIRST_STAGE_DURING_REGISTRATION), toInteger(DISPLAY_FRONT_PAGE_LIST), PROGRAM_TYPE,
RELATIONSHIP_TYPE, RELATIONSHIP_TEXT, UID, TRACKED_ENTITY, CATEGORY_COMBO, toInteger(ACCESS_DATA_WRITE), EXPIRY_DAYS,
COMPLETE_EVENTS_EXPIRY_DAYS, EXPIRY_PERIOD_TYPE
);
assertThatCursor(cursor).isExhausted();
}
// An unknown relationship-type uid must violate the FK constraint.
@Test(expected = SQLiteConstraintException.class)
public void throw_sqlite_constraint_exception_when__persistProgramWithInvalidRelationshipTypeForeignKey() {
store.insert(UID, null, NAME, null, null, null, null, null, null, null, null, null, null, null,
null, null, null, null, null, RELATIONSHIP_FROM_A, null, null, null, null, PROGRAM_TYPE,
"wrong", null, null, TRACKED_ENTITY, CATEGORY_COMBO, ACCESS_DATA_WRITE,
EXPIRY_DAYS, COMPLETE_EVENTS_EXPIRY_DAYS, EXPIRY_PERIOD_TYPE);
}
// An unknown tracked-entity uid must violate the FK constraint.
@Test(expected = SQLiteConstraintException.class)
public void throw_sqlite_constraint_exception_when__persistProgramWithInvalidTrackedEntityForeignKey() {
store.insert(UID, null, NAME, null, null, null, null, null, null, null, null, null, null, null,
null, null, null, null, null, RELATIONSHIP_FROM_A, null, null, null, null, PROGRAM_TYPE,
RELATIONSHIP_TYPE, null, null, "wrong", CATEGORY_COMBO, ACCESS_DATA_WRITE,
EXPIRY_DAYS, COMPLETE_EVENTS_EXPIRY_DAYS, EXPIRY_PERIOD_TYPE);
}
// Only the mandatory columns are required; everything else may be null.
@Test
public void insert_program_in_data_base_when_insert_nullable_program() {
long rowId = store.insert(
UID, null, NAME, null, null, null, null, null, null, null, null, null,
null, null, null, null, null, null, null, RELATIONSHIP_FROM_A, null,
null, null, null, PROGRAM_TYPE, null, null, null, null, null, false,
null, null, EXPIRY_PERIOD_TYPE);
Cursor cursor = database().query(ProgramModel.TABLE, PROGRAM_PROJECTION, null, null, null, null, null, null);
assertThat(rowId).isEqualTo(1L);
assertThatCursor(cursor).hasRow(UID, null, NAME, null, null, null, null, null, null, null, null, null,
null, null, null, null, null, null, null, toInteger(RELATIONSHIP_FROM_A), null,
null, null, null, PROGRAM_TYPE, null, null, null, null, null, toInteger(false), null, null,
EXPIRY_PERIOD_TYPE).isExhausted();
}
// Deleting the referenced relationship type cascades to the program row.
@Test
public void delete_program_when_delete_relationship_type_foreign_key() {
store.insert(
UID,
CODE,
NAME,
DISPLAY_NAME,
date,
date,
SHORT_NAME,
DISPLAY_SHORT_NAME,
DESCRIPTION,
DISPLAY_DESCRIPTION,
VERSION,
ONLY_ENROLL_ONCE,
ENROLLMENT_DATE_LABEL,
DISPLAY_INCIDENT_DATE,
INCIDENT_DATE_LABEL,
REGISTRATION,
SELECT_ENROLLMENT_DATES_IN_FUTURE,
DATA_ENTRY_METHOD,
IGNORE_OVERDUE_EVENTS,
RELATIONSHIP_FROM_A,
SELECT_INCIDENT_DATES_IN_FUTURE,
CAPTURE_COORDINATES,
USE_FIRST_STAGE_DURING_REGISTRATION,
DISPLAY_FRONT_PAGE_LIST,
PROGRAM_TYPE,
RELATIONSHIP_TYPE,
RELATIONSHIP_TEXT,
null,
TRACKED_ENTITY,
CATEGORY_COMBO,
ACCESS_DATA_WRITE,
EXPIRY_DAYS,
COMPLETE_EVENTS_EXPIRY_DAYS,
EXPIRY_PERIOD_TYPE
);
database().delete(RelationshipTypeModel.TABLE,
RelationshipTypeModel.Columns.UID + "=?", new String[]{RELATIONSHIP_TYPE});
Cursor cursor = database().query(ProgramModel.TABLE, PROGRAM_PROJECTION, null, null, null, null, null);
assertThatCursor(cursor).isExhausted();
}
// Deleting the referenced tracked entity type cascades to the program row.
@Test
public void delete_program_when_delete_tracked_entity_foreign_key() {
store.insert(
UID,
CODE,
NAME,
DISPLAY_NAME,
date,
date,
SHORT_NAME,
DISPLAY_SHORT_NAME,
DESCRIPTION,
DISPLAY_DESCRIPTION,
VERSION,
ONLY_ENROLL_ONCE,
ENROLLMENT_DATE_LABEL,
DISPLAY_INCIDENT_DATE,
INCIDENT_DATE_LABEL,
REGISTRATION,
SELECT_ENROLLMENT_DATES_IN_FUTURE,
DATA_ENTRY_METHOD,
IGNORE_OVERDUE_EVENTS,
RELATIONSHIP_FROM_A,
SELECT_INCIDENT_DATES_IN_FUTURE,
CAPTURE_COORDINATES,
USE_FIRST_STAGE_DURING_REGISTRATION,
DISPLAY_FRONT_PAGE_LIST,
PROGRAM_TYPE,
RELATIONSHIP_TYPE,
RELATIONSHIP_TEXT,
null,
TRACKED_ENTITY,
CATEGORY_COMBO,
ACCESS_DATA_WRITE,
EXPIRY_DAYS,
COMPLETE_EVENTS_EXPIRY_DAYS,
EXPIRY_PERIOD_TYPE
);
database().delete(TrackedEntityTypeModel.TABLE,
TrackedEntityTypeModel.Columns.UID + "=?", new String[]{TRACKED_ENTITY});
Cursor cursor = database().query(ProgramModel.TABLE, PROGRAM_PROJECTION, null, null, null, null, null);
assertThatCursor(cursor).isExhausted();
}
// update() on an existing row changes the row and returns 1.
@Test
public void update_program_in_data_base_when_update() throws Exception {
// insert program into database
ContentValues program = new ContentValues();
program.put(Columns.UID, UID);
program.put(Columns.CODE, CODE);
program.put(Columns.DISPLAY_SHORT_NAME, DISPLAY_SHORT_NAME);
database().insert(ProgramModel.TABLE, null, program);
String[] projection = {Columns.UID, Columns.CODE, Columns.DISPLAY_SHORT_NAME};
Cursor cursor = database().query(ProgramModel.TABLE, projection, null, null, null, null, null);
// check that program was successfully inserted
assertThatCursor(cursor).hasRow(UID, CODE, DISPLAY_SHORT_NAME);
String updatedCode = "updated_program_code";
String updatedDisplayShortName = "updated_program_display_short_name";
// update the program with updatedCode and updatedDisplayShortName
int update = store.update(
UID, updatedCode, NAME, DISPLAY_NAME, date, date,
SHORT_NAME, updatedDisplayShortName, DESCRIPTION,
DISPLAY_DESCRIPTION, VERSION, ONLY_ENROLL_ONCE, ENROLLMENT_DATE_LABEL,
DISPLAY_INCIDENT_DATE, INCIDENT_DATE_LABEL, REGISTRATION,
SELECT_ENROLLMENT_DATES_IN_FUTURE, DATA_ENTRY_METHOD, IGNORE_OVERDUE_EVENTS,
RELATIONSHIP_FROM_A, SELECT_INCIDENT_DATES_IN_FUTURE, CAPTURE_COORDINATES,
USE_FIRST_STAGE_DURING_REGISTRATION, DISPLAY_FRONT_PAGE_LIST, PROGRAM_TYPE,
null, null, null, null, null,
ACCESS_DATA_WRITE, EXPIRY_DAYS, COMPLETE_EVENTS_EXPIRY_DAYS, EXPIRY_PERIOD_TYPE, UID
);
// check that store returns 1 when successfully update
assertThat(update).isEqualTo(1);
cursor = database().query(ProgramModel.TABLE, projection, null, null, null, null, null);
// check that program is updated in database
assertThatCursor(cursor).hasRow(UID, updatedCode, updatedDisplayShortName).isExhausted();
}
// delete() removes the row and returns 1.
@Test
public void delete_program_in_data_base_when_delete() throws Exception {
// insert program into database
ContentValues program = new ContentValues();
program.put(Columns.UID, UID);
database().insert(ProgramModel.TABLE, null, program);
String[] projection = {Columns.UID};
Cursor cursor = database().query(ProgramModel.TABLE, projection, null, null, null, null, null);
// check that program was successfully inserted into database
assertThatCursor(cursor).hasRow(UID);
// delete the program
int delete = store.delete(UID);
// check that store returns 1 on successful delete
assertThat(delete).isEqualTo(1);
cursor = database().query(ProgramModel.TABLE, projection, null, null, null, null, null);
// check that program doesn't exist in database
assertThatCursor(cursor).isExhausted();
}
// Null uid arguments are rejected before touching the database.
@Test(expected = IllegalArgumentException.class)
public void throw_illegal_argument_exception_when_insert_null_uid() {
store.insert(null, CODE, NAME, DISPLAY_NAME, date, date, SHORT_NAME, DISPLAY_SHORT_NAME, DESCRIPTION,
DISPLAY_DESCRIPTION, VERSION, ONLY_ENROLL_ONCE, ENROLLMENT_DATE_LABEL, DISPLAY_INCIDENT_DATE,
INCIDENT_DATE_LABEL, REGISTRATION, SELECT_ENROLLMENT_DATES_IN_FUTURE, DATA_ENTRY_METHOD,
IGNORE_OVERDUE_EVENTS, RELATIONSHIP_FROM_A, SELECT_INCIDENT_DATES_IN_FUTURE, CAPTURE_COORDINATES,
USE_FIRST_STAGE_DURING_REGISTRATION, DISPLAY_FRONT_PAGE_LIST, PROGRAM_TYPE, RELATIONSHIP_TYPE,
RELATIONSHIP_TEXT, null, TRACKED_ENTITY, CATEGORY_COMBO, ACCESS_DATA_WRITE,
EXPIRY_DAYS, COMPLETE_EVENTS_EXPIRY_DAYS, EXPIRY_PERIOD_TYPE);
}
@Test(expected = IllegalArgumentException.class)
public void throw_illegal_argument_exception_when_update_null_uid() {
store.update(null, CODE, NAME, DISPLAY_NAME, date, date, SHORT_NAME, DISPLAY_SHORT_NAME, DESCRIPTION,
DISPLAY_DESCRIPTION, VERSION, ONLY_ENROLL_ONCE, ENROLLMENT_DATE_LABEL, DISPLAY_INCIDENT_DATE,
INCIDENT_DATE_LABEL, REGISTRATION, SELECT_ENROLLMENT_DATES_IN_FUTURE, DATA_ENTRY_METHOD,
IGNORE_OVERDUE_EVENTS, RELATIONSHIP_FROM_A, SELECT_INCIDENT_DATES_IN_FUTURE, CAPTURE_COORDINATES,
USE_FIRST_STAGE_DURING_REGISTRATION, DISPLAY_FRONT_PAGE_LIST, PROGRAM_TYPE, RELATIONSHIP_TYPE,
RELATIONSHIP_TEXT, null, TRACKED_ENTITY, CATEGORY_COMBO, ACCESS_DATA_WRITE,
EXPIRY_DAYS, COMPLETE_EVENTS_EXPIRY_DAYS, EXPIRY_PERIOD_TYPE, UID);
}
// A null whereUid (the trailing argument) is also rejected.
@Test(expected = IllegalArgumentException.class)
public void throw_illegal_argument_exception_when_update_null_where_uid() {
store.update(UID, CODE, NAME, DISPLAY_NAME, date, date, SHORT_NAME, DISPLAY_SHORT_NAME, DESCRIPTION,
DISPLAY_DESCRIPTION, VERSION, ONLY_ENROLL_ONCE, ENROLLMENT_DATE_LABEL, DISPLAY_INCIDENT_DATE,
INCIDENT_DATE_LABEL, REGISTRATION, SELECT_ENROLLMENT_DATES_IN_FUTURE, DATA_ENTRY_METHOD,
IGNORE_OVERDUE_EVENTS, RELATIONSHIP_FROM_A, SELECT_INCIDENT_DATES_IN_FUTURE, CAPTURE_COORDINATES,
USE_FIRST_STAGE_DURING_REGISTRATION, DISPLAY_FRONT_PAGE_LIST, PROGRAM_TYPE, RELATIONSHIP_TYPE,
RELATIONSHIP_TEXT, null, TRACKED_ENTITY, CATEGORY_COMBO, ACCESS_DATA_WRITE,
EXPIRY_DAYS, COMPLETE_EVENTS_EXPIRY_DAYS, EXPIRY_PERIOD_TYPE,null);
}
@Test(expected = IllegalArgumentException.class)
public void throw_illegal_argument_exception_when_delete_null_uid() {
store.delete(null);
}
}
|
package matlab;
import java.io.*;
import java.util.ArrayList;
import java.util.List;
import matlab.FunctionEndScanner.NoChangeResult;
import matlab.FunctionEndScanner.ProblemResult;
import matlab.FunctionEndScanner.TranslationResult;
import org.antlr.runtime.ANTLRReaderStream;
/**
 * A utility for producing the expected-output file ({basename}.out)
 * corresponding to a given input file ({basename}.in).
 * Note that the output should be checked manually before using it as a test.
 * <p>
 * Pipeline: a {@link FunctionEndScanner} prescan balances 'end' keywords,
 * then {@code MatlabParser} translates the text; the position mapping
 * (destination to source) and the translated text, or the list of
 * translation problems, are written to the output file.
 */
public class TranslatorTestTool {
// Exit codes: 0 = output written (possibly just problems), 1 = bad usage,
// 2 = I/O failure.
public static void main(String[] args) {
if (args.length != 1) {
System.err.println("Usage: java matlab.TranslatorTestTool {basename}");
System.exit(1);
}
String basename = args[0];
try {
PrintWriter out = new PrintWriter(new FileWriter(basename + ".out"));
BufferedReader in = new BufferedReader(new FileReader(basename + ".in"));
// Maps positions in the prescanner's output back to the original input;
// stays null when the prescan made no changes.
PositionMap prePosMap = null;
FunctionEndScanner prescanner = new FunctionEndScanner(in);
FunctionEndScanner.Result result = prescanner.translate();
in.close();
if(result instanceof NoChangeResult) {
in = new BufferedReader(new FileReader(basename + ".in")); //just re-open original file
} else if(result instanceof ProblemResult) {
for(TranslationProblem prob : ((ProblemResult) result).getProblems()) {
out.println("~" + prob);
}
out.close();
System.exit(0); //terminate early since extraction parser can't work without balanced 'end's
} else if(result instanceof TranslationResult) {
// Prescan rewrote the text: parse the rewritten text and keep the
// prescan position map so the two mappings can be composed below.
TranslationResult transResult = (TranslationResult) result;
in = new BufferedReader(new StringReader(transResult.getText()));
prePosMap = transResult.getPositionMap();
System.err.println(transResult.getText());
}
OffsetTracker offsetTracker = new OffsetTracker(new TextPosition(1, 1));
List<TranslationProblem> problems = new ArrayList<TranslationProblem>();
String destText = MatlabParser.translate(new ANTLRReaderStream(in), 1, 1, offsetTracker, problems);
if(problems.isEmpty()) {
PositionMap posMap = offsetTracker.buildPositionMap();
// out.println(">>>> Ends Added -> Matlab");
// for(TextPosition destPos : getAllTextPositions(destText)) {
// TextPosition sourcePos = prePosMap.getPreTranslationPosition(destPos);
// out.println("[" + destPos.getLine() + ", " + destPos.getColumn() + "] -> " +
// "(" + sourcePos.getLine() + ", " + sourcePos.getColumn() + ")");
// out.println(">>>> Natlab -> Ends Added");
// for(TextPosition destPos : getAllTextPositions(destText)) {
// TextPosition sourcePos = posMap.getPreTranslationPosition(destPos);
// out.println("[" + destPos.getLine() + ", " + destPos.getColumn() + "] -> " +
// "(" + sourcePos.getLine() + ", " + sourcePos.getColumn() + ")");
// Compose with the prescan map (when present) so reported source
// positions refer to the original input, not the rewritten text.
if(prePosMap != null) {
posMap = new CompositePositionMap(posMap, prePosMap);
}
out.println(">>>> Destination Language -> Source Language");
for(TextPosition destPos : getAllTextPositions(destText)) {
TextPosition sourcePos = posMap.getPreTranslationPosition(destPos);
out.println("[" + destPos.getLine() + ", " + destPos.getColumn() + "] -> " +
"(" + sourcePos.getLine() + ", " + sourcePos.getColumn() + ")");
}
out.println(">>>> Translated Text");
out.print(destText);
} else {
for(TranslationProblem prob : problems) {
out.println("~" + prob);
}
}
out.close();
in.close();
System.exit(0);
} catch(IOException e) {
e.printStackTrace();
System.exit(2);
}
}
/**
 * Scans the given text and returns the position of every token in it,
 * in scan order.
 */
private static List<TextPosition> getAllTextPositions(String text) {
List<TextPosition> allPositions = new ArrayList<TextPosition>();
StringReader reader = new StringReader(text);
TrivialScanner scanner = new TrivialScanner(reader);
try {
while(true) {
TextPosition sym = scanner.nextPos();
if(sym == null) {
break;
}
allPositions.add(sym);
}
reader.close();
} catch(IOException e) {
//cannot happen: StringReader performs no real I/O
e.printStackTrace();
throw new RuntimeException(e);
}
return allPositions;
}
}
|
package dakara.eclipse.plugin.platform;
public class ResourceItem {
public final String name;
public final String path;
public final String project;
public ResourceItem(String name, String path, String project) {
if (name == null) name = "";
if (path == null) path = "";
if (project == null) project = "";
this.name = name;
this.path = path;
this.project = project;
}
@Override
public boolean equals(Object obj) {
if (obj == null) return false;
if (!(obj instanceof ResourceItem)) return false;
ResourceItem other = (ResourceItem) obj;
if (name.equals(other.name) && path.equals(other.path) && project.equals(other.project)) return true;
return false;
}
@Override
public int hashCode() {
return name.hashCode() ^ path.hashCode() ^ project.hashCode();
}
}
|
package com.github.dandelion.core.asset;
import java.io.IOException;
import java.util.*;
import java.util.regex.Pattern;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.github.dandelion.core.DandelionException;
import com.github.dandelion.core.asset.loader.AssetsJsonLoader;
import com.github.dandelion.core.utils.DandelionScanner;

import static com.github.dandelion.core.asset.AssetsStorage.*;
/**
* Load Assets configuration
* <ul>
* <li>assetsLoader :
* <ul>
* <li>the {@link AssetsLoader}
* found in 'dandelion/dandelion.properties' for key 'assetsLoader'</li>
* <li>or {@link com.github.dandelion.core.asset.loader.AssetsJsonLoader} by default</li>
* </ul>
* </li>
* <li>assetsLocations : type of access to assets content(remote [by default], local)</li>
* </ul>
* The default assets loader is {@link com.github.dandelion.core.asset.loader.AssetsJsonLoader}.
*
*/
public class AssetsConfigurator {
// Logger
private static final Logger LOG = LoggerFactory.getLogger(AssetsConfigurator.class);
// Storage that receives every configured asset.
AssetsStorage assetsStorage;
// Loader used to discover assets (defaults to AssetsJsonLoader, see setDefaultsIfNeeded()).
AssetsLoader assetsLoader;
// Ordered access locations for asset content (defaults to "remote,local").
List<String> assetsLocations;
// Scopes excluded from the loading process.
List<String> excludedScopes;
// Assets excluded from the loading process.
List<String> excludedAssets;
// Location wrappers keyed by location, from 'assetsLocationWrapper.{location}' properties.
Map<String, AssetsLocationWrapper> assetsLocationWrappers;
// Transient indexes, populated during processAssetsLoading() and cleared afterwards:
// assets declared in each scope,
private Map<String, List<Asset>> componentsByScope = new HashMap<String, List<Asset>>();
// child scopes of each parent scope,
private Map<String, List<String>> scopesByParentScope = new HashMap<String, List<String>>();
// and the parent scope of each scope.
private Map<String, String> parentScopesByScope = new HashMap<String, String>();
/**
 * Creates a configurator that stores the loaded assets into the given storage.
 *
 * @param assetsStorage target assets storage
 */
AssetsConfigurator(AssetsStorage assetsStorage) {
this.assetsStorage = assetsStorage;
}
/**
 * Initialization of the Assets Configurator on application load.
 * <p>
 * Reads the 'dandelion/dandelion.properties' configuration, then runs the
 * assets-loading workflow. An I/O failure while reading the configuration is
 * logged, not rethrown; loading then proceeds with the default settings
 * (see {@link #setDefaultsIfNeeded()}).
 */
void initialize() {
try {
// Use the context class loader so application-local resources are visible.
ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
Properties properties = getConfigurationProperties(classLoader);
assetsLocations = setPropertyAsList(properties.getProperty("assetsLocations"), ",");
excludedScopes = setPropertyAsList(properties.getProperty("excludedScopes"), ",");
excludedAssets = setPropertyAsList(properties.getProperty("excludedAssets"), ",");
assetsLoader = setPropertyAsAssetsLoader(classLoader, properties);
assetsLocationWrappers = extractAssetsLocationWrappers(classLoader, properties);
} catch (IOException e) {
LOG.error("Assets configurator can't access/read to the file 'dandelion/dandelion.properties'");
}
// 'true': fill any configuration element left null with its default.
processAssetsLoading(true);
}
/**
 * Loads the Dandelion configuration properties.
 * <p>
 * The main 'dandelion.properties' resource is loaded first; every other
 * '*.properties' resource under 'dandelion' is then loaded on top, with the
 * main file acting as defaults.
 *
 * @param classLoader class loader used to read the resources
 * @return the merged configuration properties
 * @throws IOException if a resource cannot be read
 */
private Properties getConfigurationProperties(ClassLoader classLoader) throws IOException {
String mainResource = DandelionScanner.getResource("dandelion.properties");
Set<String> otherResources = DandelionScanner.getResources("dandelion", "properties");
otherResources.remove(mainResource);
// NOTE(review): if no 'dandelion.properties' exists on the classpath,
// getResourceAsStream(mainResource) returns null and load(...) throws
// NullPointerException rather than IOException - TODO confirm intended.
Properties mainProperties = new Properties();
mainProperties.load(classLoader.getResourceAsStream(mainResource));
// The main file becomes the defaults of the merged properties.
Properties properties = new Properties(mainProperties);
for(String resource:otherResources) {
properties.load(classLoader.getResourceAsStream(resource));
}
return properties;
}
/**
 * Collects all assets location wrappers found in the configuration.<br/>
 * A wrapper is declared under a key of the form
 * {@code assetsLocationWrapper.{location}}, and is only retained when
 * {location} matches the wrapper's own
 * {@link com.github.dandelion.core.asset.AssetsLocationWrapper#locationKey()}.
 *
 * @param classLoader class loader used to instantiate the wrapper classes
 * @param properties configuration properties
 * @return all retained wrappers, keyed by location (possibly empty)
 */
private Map<String, AssetsLocationWrapper> extractAssetsLocationWrappers(ClassLoader classLoader, Properties properties) {
    final String prefix = "assetsLocationWrapper.";
    Map<String, AssetsLocationWrapper> wrappers = new HashMap<String, AssetsLocationWrapper>();
    for (String key : properties.stringPropertyNames()) {
        if (!key.startsWith(prefix)) {
            continue;
        }
        AssetsLocationWrapper candidate = getPropertyAsAssetsLocationWrapper(classLoader, properties.getProperty(key));
        if (candidate == null) {
            continue;
        }
        String location = key.replace(prefix, "");
        if (location.equalsIgnoreCase(candidate.locationKey())) {
            wrappers.put(location, candidate);
        }
    }
    return wrappers;
}
/**
 * Instantiates an {@link AssetsLocationWrapper} from its class name.
 *
 * @param classLoader class loader
 * @param wrapper wrapper class name (may be null)
 * @return a new wrapper instance, or null when the name is null or the class
 *         cannot be loaded/instantiated (a warning is logged in that case)
 */
private AssetsLocationWrapper getPropertyAsAssetsLocationWrapper(ClassLoader classLoader, String wrapper) {
    if (wrapper == null) {
        return null;
    }
    try {
        Class<AssetsLocationWrapper> wrapperClass = (Class<AssetsLocationWrapper>) classLoader.loadClass(wrapper);
        return wrapperClass.newInstance();
    } catch (ClassCastException e) {
        LOG.warn("the 'wrapper[{}]' must implements '{}'",
                wrapper, AssetsLocationWrapper.class.getCanonicalName());
    } catch (InstantiationException e) {
        LOG.warn("the 'wrapper[{}]' should authorize instantiation", wrapper);
    } catch (IllegalAccessException e) {
        LOG.warn("the 'wrapper[{}]' should authorize access from '{}'",
                wrapper, AssetsConfigurator.class.getCanonicalName());
    } catch (ClassNotFoundException e) {
        LOG.warn("the 'wrapper[{}]' must exists in the classpath", wrapper);
    }
    return null;
}
/**
 * Instantiates the {@link AssetsLoader} configured under the 'assetsLoader'
 * property key.
 *
 * @param classLoader class loader
 * @param properties configuration properties
 * @return a new loader instance, or null when the property is absent or the
 *         class cannot be loaded/instantiated (a warning is logged)
 */
private AssetsLoader setPropertyAsAssetsLoader(ClassLoader classLoader, Properties properties) {
    String assetsLoaderClassname = properties.getProperty("assetsLoader");
    if (assetsLoaderClassname == null) {
        return null;
    }
    try {
        Class<AssetsLoader> loaderClass = (Class<AssetsLoader>) classLoader.loadClass(assetsLoaderClassname);
        return loaderClass.newInstance();
    } catch (ClassCastException e) {
        LOG.warn("the 'assetsLoader[{}]' must implements '{}'",
                assetsLoaderClassname, AssetsLoader.class.getCanonicalName());
    } catch (InstantiationException e) {
        LOG.warn("the 'assetsLoader[{}]' should authorize instantiation", assetsLoaderClassname);
    } catch (IllegalAccessException e) {
        LOG.warn("the 'assetsLoader[{}]' should authorize access from '{}'",
                assetsLoaderClassname, AssetsConfigurator.class.getCanonicalName());
    } catch (ClassNotFoundException e) {
        LOG.warn("the 'assetsLoader[{}]' must exists in the classpath", assetsLoaderClassname);
    }
    return null;
}
/**
 * Sets the default configuration for every element that was not supplied
 * (or not successfully loaded) from 'dandelion/dandelion.properties'.
 */
void setDefaultsIfNeeded() {
// Default loader: the bundled JSON loader.
if(assetsLoader == null) {
assetsLoader = new AssetsJsonLoader();
}
// Default content access: remote first, then local.
if(assetsLocations == null) {
assetsLocations = setPropertyAsList("remote,local", ",");
}
// No exclusions and no wrappers by default.
if(excludedScopes == null) {
excludedScopes = new ArrayList<String>();
}
if(excludedAssets == null) {
excludedAssets = new ArrayList<String>();
}
if(assetsLocationWrappers == null) {
assetsLocationWrappers = new HashMap<String, AssetsLocationWrapper>();
}
}
/**
 * Runs the asset-loading workflow against the configured asset loader:
 * index the loaded components, store them scope by scope starting from both
 * storage roots, then drop the transient indexes.
 *
 * @param defaultsNeeded true to apply the default configuration first
 */
void processAssetsLoading(boolean defaultsNeeded) {
    if (defaultsNeeded) {
        setDefaultsIfNeeded();
    }
    prepareAssetsLoading(assetsLoader.loadAssets());
    // Recursively persist everything reachable from the two root scopes.
    storeAssetsFromScope(ROOT_SCOPE, true);
    storeAssetsFromScope(DETACHED_PARENT_SCOPE, true);
    // The working indexes are only needed during the load.
    clearAllAssetsProcessElements();
}
/**
 * Prepares assets loading by indexing each non-excluded component:
 *
 * <ul>
 * <li>link a scope to all his assets</li>
 * <li>link a scope to his parent scope</li>
 * <li>link a parent scope to all his scopes</li>
 * </ul>
 *
 * @param components components to analyze
 */
private void prepareAssetsLoading(List<AssetsComponent> components) {
    LOG.debug("Excludes scopes are {}", excludedScopes);
    LOG.debug("Excludes assets are {}", excludedAssets);
    for (AssetsComponent component : components) {
        LOG.debug("Prepare {}", component);
        // Skip the component when its scope or its parent scope is excluded.
        boolean excluded = excludedScopes.contains(component.getScope())
                || excludedScopes.contains(component.getParent());
        if (excluded) {
            continue;
        }
        LOG.debug("Scope {} and his parent {} are not in excludes scopes",
                component.getScope(), component.getParent());
        prepareParentScope(component);
        prepareScope(component);
        prepareAssets(component);
    }
}
/**
 * Stores every asset attached to the given scope, then (optionally) recurses
 * into its child scopes.
 *
 * @param scope scope to store
 * @param recursiveMode <code>true</code> to also process child scopes
 */
private void storeAssetsFromScope(String scope, boolean recursiveMode) {
    if (componentsByScope.containsKey(scope)) {
        // The parent scope is the same for every asset in this scope.
        String parentScope = parentScopesByScope.get(scope);
        for (Asset asset : componentsByScope.get(scope)) {
            storeAsset(asset, scope, parentScope);
        }
    }
    if (recursiveMode && scopesByParentScope.containsKey(scope)) {
        for (String childScope : scopesByParentScope.get(scope)) {
            storeAssetsFromScope(childScope, true);
        }
    }
}
/**
 * Stores one asset into the assets storage.
 * <p>
 * If the storage rejects the asset because its parent scope does not exist
 * yet (UNDEFINED_PARENT_SCOPE), an empty parent scope is created and the
 * store is retried via recursion. Other storage errors are only logged.
 *
 * @param asset asset to store
 * @param scope scope of this asset
 * @param parentScope parent of this scope
 */
private void storeAsset(Asset asset, String scope, String parentScope) {
LOG.debug("Store '{}' in scope '{}/{}'", asset, scope, parentScope);
try {
assetsStorage.store(asset, scope, parentScope);
} catch (DandelionException e) {
LOG.debug(e.getLocalizedMessage());
if(e.getErrorCode() == AssetsStorageError.UNDEFINED_PARENT_SCOPE) {
LOG.debug("To avoid any configuration problem, a scope '{}' with no assets is created", parentScope);
// Create the missing parent scope with no asset, then retry once.
assetsStorage.store(null, parentScope);
storeAsset(asset, scope, parentScope);
}
}
}
/**
 * Clears all working attributes built during an assets-loading pass
 * (components by scope, child scopes by parent, parent scope by scope).
 */
void clearAllAssetsProcessElements() {
    LOG.debug("Clear all assets process elements");
    componentsByScope.clear();
    scopesByParentScope.clear();
    parentScopesByScope.clear();
}
/**
 * Splits a delimited property value into a list of strings.
 * <p>
 * NOTE(review): the delimiter is handed to {@link String#split(String)} and
 * is therefore interpreted as a regular expression; returning {@code null}
 * (rather than an empty list) for blank input looks intentional — confirm
 * callers rely on it before changing.
 *
 * @param values the raw property value, possibly {@code null} or empty
 * @param delimiter the delimiter (regular expression) to split on
 * @return the split values, or {@code null} when there is nothing to split
 */
private List<String> setPropertyAsList(String values, String delimiter) {
    if (values != null && !values.isEmpty()) {
        return Arrays.asList(values.split(delimiter));
    }
    return null;
}
/**
 * Registers a component's scope as a child of its parent scope. The root
 * scope has no parent and is skipped.
 *
 * @param component component whose scope/parent link is recorded
 */
private void prepareScope(AssetsComponent component) {
    if (ROOT_SCOPE.equalsIgnoreCase(component.getScope())) {
        LOG.debug("{} is the root scope", component.getScope());
        return;
    }
    if (!scopesByParentScope.containsKey(component.getParent())) {
        // FIX: was a raw `new ArrayList()`; use the parameterized type so the
        // assignment into Map<String, List<String>> is checked by the compiler.
        scopesByParentScope.put(component.getParent(), new ArrayList<String>());
    }
    List<String> scopes = scopesByParentScope.get(component.getParent());
    if (!scopes.contains(component.getScope())) {
        LOG.debug("Store {} as child of {}", component.getScope(), component.getParent());
        scopes.add(component.getScope());
    } else {
        LOG.debug("Store {} is already a child of {}", component.getScope(), component.getParent());
    }
}
/**
 * Records the parent scope of a component's scope. A self-parented root
 * scope is re-attached to the internal master scope instead.
 *
 * @param component component whose parent link is recorded
 */
private void prepareParentScope(AssetsComponent component) {
    String scope = component.getScope();
    LOG.debug("Store {} as parent of {}", component.getParent(), scope);
    boolean rootSelfParented = ROOT_SCOPE.equalsIgnoreCase(component.getParent())
            && ROOT_SCOPE.equalsIgnoreCase(scope);
    if (rootSelfParented) {
        component.setParent(MASTER_SCOPE);
    }
    parentScopesByScope.put(scope, component.getParent());
}
/**
 * Registers a component's assets under its scope, skipping any asset whose
 * name is in the exclusion list.
 *
 * @param component component whose assets are recorded
 */
private void prepareAssets(AssetsComponent component) {
    if (!componentsByScope.containsKey(component.getScope())) {
        componentsByScope.put(component.getScope(), new ArrayList<Asset>());
    }
    List<Asset> assets = componentsByScope.get(component.getScope());
    for (Asset asset : component.getAssets()) {
        if (excludedAssets.contains(asset.getName())) {
            // FIX: log message grammar ("is exclude" -> "is excluded").
            LOG.debug("{} is excluded", asset.getName());
        } else {
            LOG.debug("Store {} as child of {}", asset.getName(), component.getScope());
            assets.add(asset);
        }
    }
}
}
|
package corpus;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.TreeMap;
/**
 * An object that maps every French word to a vector.
 * A <tt>Corpus</tt> cannot contain duplicate words;
 * each word maps to at most one vector.
 *
 * <p>This class reads a vecs50 file in order to load all the words and their
 * vectors.</p>
 *
 * <p>The <tt>Corpus</tt> class provides one factory method, which
 * allows users to obtain a corpus from a file. There is no public constructor.
 *
 * @since 1.0
 */
public class Corpus {

    /** Word -> embedding vector mapping backing this corpus. */
    Map<String, Vector> vecDir;
    /** Number of words declared in the corpus file header. */
    private int nbWord = 0;
    /** Dimension of each vector, declared in the corpus file header. */
    private int dimension = 0;

    private Corpus() {
        vecDir = new HashMap<>();
    }

    /***
     * Gets the number of words in the corpus.
     * @return the number of words in the corpus
     * **/
    public int getNbWord() {
        return nbWord;
    }

    /**
     * Returns the dimension of the vectors representing the words.
     * @return the dimension of the vectors
     * **/
    public int getDimension() {
        return dimension;
    }

    /**
     * Associates the specified vector with the specified word in this corpus.
     * If the map previously contained a mapping for the word, the old vector
     * is replaced by the specified one.
     *
     * @param word word with which the specified vector is to be associated
     * @param vect vector to be associated with the specified word
     */
    public void putWord(String word, Vector vect) {
        vecDir.put(word, vect);
    }

    /***
     * Loads the corpus from the given vector file. The first line must hold
     * the word count and the vector dimension; every following line is a word
     * followed by its components, all separated by single spaces.
     * I/O errors are reported to stderr and leave the corpus partially loaded.
     *
     * @param filePath the file path of the vector file (e.g. vecs50)
     * **/
    private void readFromFile(String filePath) {
        File file = new File(filePath);
        try (FileReader fileReader = new FileReader(file);
             BufferedReader input = new BufferedReader(fileReader)) {
            String inputLine = input.readLine();
            String[] tmp = inputLine.split(" ");
            nbWord = Integer.parseInt(tmp[0]);
            dimension = Integer.parseInt(tmp[1]);
            for (int i = 0; i < nbWord; i++) {
                // BUG FIX: the vector size was hard-coded to 50. Use the
                // dimension parsed from the file header so corpora of any
                // dimension load correctly (a dimension > 50 previously
                // overflowed the vector; < 50 wasted/garbled components).
                Vector tmpVector = new Vector(dimension);
                tmp = input.readLine().split(" ");
                String word = tmp[0];
                for (int j = 0; j < dimension; j++) {
                    tmpVector.setComponent(j, Double.parseDouble(tmp[j + 1]));
                }
                putWord(word, tmpVector);
            }
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Returns the vector to which the specified word is mapped.
     *
     * @param word the word whose associated vector is to be returned
     * @return the vector to which the specified word is mapped
     * @throws WordNotFoundException if the word is not in the corpus
     */
    public Vector getVector(String word) throws WordNotFoundException {
        Vector vector = this.vecDir.get(word);
        if (vector == null)
            throw new WordNotFoundException("Can not find the word '" + word + "' in corpus!");
        return vector;
    }

    /**
     * Returns up to {@code size} (similarity, word) pairs, most similar first.
     * <p>
     * NOTE(review): words with identical similarity scores overwrite each
     * other in the TreeMap, so fewer than {@code size} results may be
     * returned — confirm whether this is acceptable before relying on it.
     *
     * @param vect the query vector
     * @param size maximum number of results
     * @return similarity/word pairs sorted by descending similarity
     */
    public List<Map.Entry<Double, String>> getSimilarWord(Vector vect, int size) {
        // Descending order: highest similarity first.
        TreeMap<Double, String> sortedMap = new TreeMap<>((a, b) -> b.compareTo(a));
        for (Entry<String, Vector> entry : this.vecDir.entrySet()) {
            sortedMap.put(entry.getValue().similarity(vect), entry.getKey());
        }
        List<Map.Entry<Double, String>> resultat = new ArrayList<>(size);
        int comptor = 0;
        for (Map.Entry<Double, String> couple : sortedMap.entrySet()) {
            resultat.add(couple);
            comptor++;
            if (comptor == size) break;
        }
        return resultat;
    }

    /**
     * Returns up to {@code size} (similarity, word) pairs for the vector of
     * the given word, most similar first.
     *
     * @param word the query word
     * @param size maximum number of results
     * @return similarity/word pairs sorted by descending similarity
     * @throws WordNotFoundException if the word is not in the corpus
     */
    public List<Map.Entry<Double, String>> getSimilarWord(String word, int size) throws WordNotFoundException {
        Vector vect = this.vecDir.get(word);
        if (vect == null) throw new WordNotFoundException("Can not find the word '" + word + "' in corpus!");
        return getSimilarWord(vect, size);
    }

    /***
     * A factory method that builds a corpus from the file denoted by the
     * argument.
     *
     * @param filePath the file path of the corpus
     * @return the loaded corpus
     * **/
    public static Corpus corpusFactory(String filePath) {
        Corpus corpus = new Corpus();
        corpus.readFromFile(filePath);
        return corpus;
    }

    @Override
    public String toString() {
        return this.vecDir.toString();
    }
}
|
package netflix.nebula.dependency.recommendations.provider;
import org.apache.commons.io.IOUtils;
import org.gradle.api.Project;
import org.gradle.mvn3.org.apache.maven.model.Dependency;
import org.gradle.mvn3.org.apache.maven.model.Model;
import org.gradle.mvn3.org.apache.maven.model.building.*;
import org.gradle.mvn3.org.apache.maven.model.interpolation.StringSearchModelInterpolator;
import org.gradle.mvn3.org.apache.maven.model.path.DefaultPathTranslator;
import org.gradle.mvn3.org.apache.maven.model.path.DefaultUrlNormalizer;
import org.gradle.mvn3.org.codehaus.plexus.interpolation.MapBasedValueSource;
import org.gradle.mvn3.org.codehaus.plexus.interpolation.PropertiesBasedValueSource;
import org.gradle.mvn3.org.codehaus.plexus.interpolation.ValueSource;
import java.io.File;
import java.util.*;
public class MavenBomRecommendationProvider extends FileBasedRecommendationProvider {
    // Lazily-built map of "group:artifact" -> managed version; null until first use.
    // NOTE(review): lazy initialization is not thread-safe — confirm the
    // provider is only queried from a single thread.
    private Map<String, String> recommendations;

    public MavenBomRecommendationProvider(Project project) {
        super(project);
    }

    /**
     * Looks up the managed version for a module, parsing and caching the
     * BOM's dependencyManagement section on first use.
     *
     * @param org module group id
     * @param name module artifact id
     * @return the recommended version, or null when the BOM does not manage the module
     */
    @Override
    public String getVersion(String org, String name) throws Exception {
        if (recommendations == null) {
            recommendations = loadRecommendations();
        }
        return recommendations.get(org + ":" + name);
    }

    /**
     * Parses the BOM (with project and system property interpolation) into a
     * "group:artifact" -> version map.
     */
    private Map<String, String> loadRecommendations() throws Exception {
        Map<String, String> versions = new HashMap<>();
        DefaultModelBuildingRequest request = new DefaultModelBuildingRequest();
        request.setModelSource(new StringModelSource(IOUtils.toString(getInput(), "UTF-8")));
        DefaultModelBuilder modelBuilder = new DefaultModelBuilderFactory().newInstance();
        modelBuilder.setModelInterpolator(new ProjectPropertiesModelInterpolator(project));
        ModelBuildingResult result = modelBuilder.build(request);
        for (Dependency d : result.getEffectiveModel().getDependencyManagement().getDependencies()) {
            versions.put(d.getGroupId() + ":" + d.getArtifactId(), d.getVersion());
        }
        return versions;
    }

    /**
     * Forces the module notation to resolve to the POM artifact ("@pom"),
     * whether it is given as a string or as a map notation.
     */
    @SuppressWarnings("unchecked")
    @Override
    public InputStreamProvider setModule(Object dependencyNotation) {
        if (dependencyNotation instanceof String && !((String) dependencyNotation).endsWith("@pom"))
            dependencyNotation = dependencyNotation + "@pom";
        if (dependencyNotation != null && Map.class.isAssignableFrom(dependencyNotation.getClass()))
            ((Map) dependencyNotation).put("ext", "pom");
        return super.setModule(dependencyNotation);
    }

    /** BOM property names are not exposed; recursive references are unsupported. */
    @Override
    protected Collection<String> propertyNames() {
        throw new UnsupportedOperationException("Maven BOMs do not support recursive references");
    }

    /** BOM property values are not exposed; recursive references are unsupported. */
    @Override
    protected String propertyValue(String name) {
        throw new UnsupportedOperationException("Maven BOMs do not support recursive references");
    }

    /**
     * Model interpolator that resolves placeholders against Gradle project
     * properties and system properties in addition to Maven's defaults.
     */
    private static class ProjectPropertiesModelInterpolator extends StringSearchModelInterpolator {
        private final Project project;

        ProjectPropertiesModelInterpolator(Project project) {
            this.project = project;
            setUrlNormalizer(new DefaultUrlNormalizer());
            setPathTranslator(new DefaultPathTranslator());
        }

        @Override
        public List<ValueSource> createValueSources(Model model, File projectDir, ModelBuildingRequest request, ModelProblemCollector collector) {
            // Project properties take precedence, then system properties,
            // then Maven's standard sources.
            List<ValueSource> sources = new ArrayList<>();
            sources.add(new MapBasedValueSource(project.getProperties()));
            sources.add(new PropertiesBasedValueSource(System.getProperties()));
            sources.addAll(super.createValueSources(model, projectDir, request, collector));
            return sources;
        }
    }
}
|
package com.creativemd.littletiles.common.util.outdated.identifier;
import java.security.InvalidParameterException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.stream.IntStream;
import com.creativemd.littletiles.common.tile.math.vec.LittleVec;
import com.creativemd.littletiles.common.util.grid.LittleGridContext;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.math.BlockPos;
@Deprecated
public class LittleIdentifierRelative {

    /** Block position relative to an origin (see {@link #getAbsolutePosition(int, int, int)}). */
    public BlockPos coord;
    /** Grid context the identifier coordinates are expressed in. */
    public LittleGridContext context;
    /** Tile identifier (position components within the block). */
    public int[] identifier;

    public LittleIdentifierRelative(int relativeX, int relativeY, int relativeZ, LittleGridContext context, int[] identifier) {
        this.coord = new BlockPos(relativeX, relativeY, relativeZ);
        this.context = context;
        this.identifier = identifier;
    }

    /** Loads a legacy identifier whose NBT keys are prefixed with {@code id}. */
    public static LittleIdentifierRelative loadIdentifierOld(String id, NBTTagCompound nbt) {
        return new LittleIdentifierRelative(id, nbt);
    }

    /**
     * Reads an identifier from NBT with all keys prefixed by {@code id}.
     * Supports the int-array "coord" layout as well as the older separate
     * "coordX"/"coordY"/"coordZ" keys, defaulting to the origin when neither
     * is present.
     * <p>
     * NOTE(review): the fallback reads the unprefixed key "id" rather than
     * {@code id + "id"} — looks like legacy behavior, confirm before changing.
     *
     * @throws InvalidParameterException if the "coord" array is not length 3
     */
    private LittleIdentifierRelative(String id, NBTTagCompound nbt) {
        if (nbt.hasKey(id + "coord")) {
            int[] array = nbt.getIntArray(id + "coord");
            if (array.length == 3)
                coord = new BlockPos(array[0], array[1], array[2]);
            else
                throw new InvalidParameterException("No valid coord given " + nbt);
        } else if (nbt.hasKey(id + "coordX"))
            coord = new BlockPos(nbt.getInteger(id + "coordX"), nbt.getInteger(id + "coordY"), nbt.getInteger(id + "coordZ"));
        else
            coord = new BlockPos(0, 0, 0);
        if (nbt.hasKey(id + "pos")) {
            LittleVec position = new LittleVec(id + "pos", nbt);
            identifier = new int[] { position.x, position.y, position.z };
        } else
            identifier = nbt.getIntArray("id");
        context = LittleGridContext.get(nbt);
    }

    /**
     * Reads an identifier from NBT with unprefixed keys.
     * FIX: delegates to the prefixed constructor with an empty prefix — the
     * previous implementation duplicated the whole parsing logic verbatim.
     */
    public LittleIdentifierRelative(NBTTagCompound nbt) {
        this("", nbt);
    }

    public BlockPos getAbsolutePosition(TileEntity te) {
        return getAbsolutePosition(te.getPos().getX(), te.getPos().getY(), te.getPos().getZ());
    }

    public BlockPos getAbsolutePosition(BlockPos origin) {
        return getAbsolutePosition(origin.getX(), origin.getY(), origin.getZ());
    }

    /** Returns this relative coord translated by the given origin. */
    public BlockPos getAbsolutePosition(int x, int y, int z) {
        return new BlockPos(coord.getX() + x, coord.getY() + y, coord.getZ() + z);
    }

    /** Serializes coord, identifier and grid context into the given tag. */
    public NBTTagCompound writeToNBT(NBTTagCompound nbt) {
        nbt.setIntArray("coord", new int[] { coord.getX(), coord.getY(), coord.getZ() });
        nbt.setIntArray("id", identifier);
        context.set(nbt);
        return nbt;
    }

    @Override
    public int hashCode() {
        // Only coord is hashed; consistent with equals (equal objects have
        // equal coords), though identifier is ignored here.
        return coord.hashCode();
    }

    @Override
    public boolean equals(Object obj) {
        if (obj instanceof LittleIdentifierRelative) {
            if (!coord.equals(((LittleIdentifierRelative) obj).coord))
                return false;
            // Identifiers are compared after converting into this context.
            return Arrays.equals(identifier, LittleIdentifierAbsolute.convertTo(((LittleIdentifierRelative) obj).identifier, ((LittleIdentifierRelative) obj).context, context));
        }
        return false;
    }

    @Override
    public String toString() {
        return "coord:[" + coord.getX() + "," + coord.getY() + "," + coord.getZ() + "]|position:" + Arrays.toString(identifier);
    }

    /** Returns a deep copy (identifier array is cloned). */
    public LittleIdentifierRelative copy() {
        return new LittleIdentifierRelative(coord.getX(), coord.getY(), coord.getZ(), context, identifier.clone());
    }

    private static HashMap<Integer, LittleIdentifierRelative> converted = new HashMap<>();

    /**
     * Derives a packed index from this identifier and the given position.
     * <p>
     * NOTE(review): {@code converted} is never populated here, so the
     * duplicate check can never fire — this looks like leftover debug code;
     * confirm before relying on the duplicate detection.
     */
    @Deprecated
    public int generateIndex(BlockPos pos) {
        int[] array = LittleIdentifierAbsolute.convertTo(identifier, context, LittleGridContext.getMax());
        int index = (pos.getX() + coord.getX()) + ((pos.getY() + coord.getY()) << 4) + ((pos.getZ() + coord.getZ()) << 8) + ((IntStream.of(array).parallel().sum()) << 16);
        if (converted.containsKey(index)) {
            if (!this.equals(converted.get(index)))
                System.out.println("Found duplicate index=" + index + ", " + this + "!=" + converted.get(index));
        } else
            System.out.println(this + "+" + pos + "->" + index);
        return index;
    }
}
|
package edu.uci.vcsa.portal.portlets.announcements.controller;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import javax.portlet.ActionRequest;
import javax.portlet.ActionResponse;
import javax.portlet.PortletRequest;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.propertyeditors.CustomDateEditor;
import org.springframework.validation.BindException;
import org.springframework.validation.Errors;
import org.springframework.web.portlet.bind.PortletRequestDataBinder;
import org.springframework.web.portlet.mvc.SimpleFormController;
import edu.uci.vcsa.portal.portlets.announcements.model.Announcement;
import edu.uci.vcsa.portal.portlets.announcements.model.Topic;
import edu.uci.vcsa.portal.portlets.announcements.service.IAnnouncementService;
/**
* @author Erik A. Olsson (eolsson@uci.edu)
*
* $LastChangedBy$
* $LastChangedDate$
*/
public class AddAnnouncementFormController extends SimpleFormController {

    private IAnnouncementService announcementService;
    private static Log log = LogFactory.getLog(AddAnnouncementFormController.class);
    private String customDateFormat = "yyyy-MM-dd";
    private String datePickerFormat = "format-y-m-d divider-dash";

    /**
     * Saves a new or edited announcement. When a valid topicId is present, a
     * new announcement gets its author/created/parent filled in and is saved,
     * while an existing one is merged; without a topicId the user is sent
     * back to the base admin view.
     *
     * @see org.springframework.web.portlet.mvc.SimpleFormController#onSubmitAction(javax.portlet.ActionRequest, javax.portlet.ActionResponse, java.lang.Object, org.springframework.validation.BindException)
     */
    @Override
    protected void onSubmitAction(ActionRequest request,
            ActionResponse response, Object command, BindException errors)
            throws Exception {

        Long topicId = null;
        Announcement newAnn = (Announcement) command;

        try {
            topicId = Long.valueOf( request.getParameter("topicId") );
        } catch (NumberFormatException ex) {
            // Deliberately ignored: a missing or malformed topicId leaves
            // topicId null, which is handled by the branch below.
        }

        if (topicId != null) {
            if (!newAnn.hasId()) {
                Topic topic = announcementService.getTopic(topicId);
                // add the automatic data
                newAnn.setAuthor( request.getRemoteUser() );
                newAnn.setCreated( new Date() );
                newAnn.setParent( topic );
                announcementService.addOrSaveAnnouncement(newAnn);
            } else {
                announcementService.mergeAnnouncement(newAnn);
            }
            response.setRenderParameter("topicId", topicId.toString());
            response.setRenderParameter("action", "showTopic");
        } else {
            log.error("No topicId: "+Arrays.toString(errors.getAllErrors().toArray()));
            response.setRenderParameter("action", "baseAdmin");
        }
    }

    /**
     * Exposes the topic id/title (from the request or the command's parent
     * topic) and the date picker format to the view.
     *
     * @see org.springframework.web.portlet.mvc.SimpleFormController#referenceData(javax.portlet.PortletRequest, java.lang.Object, org.springframework.validation.Errors)
     */
    @SuppressWarnings("unchecked")
    @Override
    protected Map referenceData(PortletRequest request, Object command,
            Errors errors) throws Exception {

        Map<String,Object> model = new HashMap<String,Object>();
        Announcement newAnn = (Announcement) command;
        Long topicId = null;

        try {
            topicId = Long.valueOf( request.getParameter("topicId") );
            Topic topic = announcementService.getTopic(topicId);
            model.put("topicId", topic.getId().toString());
            model.put("topicTitle", topic.getTitle());
        } catch (NumberFormatException ex) {
            log.debug("No topicId inside portlet request");
        }

        // The command's parent topic (when present) overrides the request value.
        try {
            topicId = newAnn.getParent().getId();
            model.put("topicId", newAnn.getParent().getId().toString());
            model.put("topicTitle", newAnn.getParent().getTitle());
        } catch (Exception exp) {
            log.error("No topicId found by looking at parent topic id of command object: "+exp.getMessage());
        }

        model.put("datePickerFormat", datePickerFormat);
        return model;
    }

    /**
     * Registers a strict date editor for the configured format and restricts
     * data binding to the announcement's editable fields.
     *
     * @see org.springframework.web.portlet.mvc.BaseCommandController#initBinder(javax.portlet.PortletRequest, org.springframework.web.portlet.bind.PortletRequestDataBinder)
     */
    @Override
    protected void initBinder(PortletRequest request,
            PortletRequestDataBinder binder) throws Exception {
        SimpleDateFormat dateFormat = new SimpleDateFormat(customDateFormat);
        dateFormat.setLenient(false);
        binder.registerCustomEditor(Date.class, new CustomDateEditor(dateFormat, true));
        binder.setAllowedFields(new String[] {"id","created","author","title","abstractText","message","link","startDisplay","endDisplay"});
    }

    /**
     * Builds the form backing object: the existing announcement for an edit
     * request (editId), otherwise a fresh announcement whose parent topic is
     * taken from the request when available.
     *
     * @see org.springframework.web.portlet.mvc.AbstractFormController#formBackingObject(javax.portlet.PortletRequest)
     */
    @Override
    protected Object formBackingObject(PortletRequest request) throws Exception {
        Announcement ann = new Announcement();
        Long topicId = null;
        Long editId = null;

        try {
            editId = Long.valueOf( request.getParameter("editId") );
            log.debug("formBackingObject: editId found. This is an edit request for announcement Id "+editId.toString());
            ann = announcementService.getAnnouncement(editId);
            // return immediately when we have our announcement
            return ann;
        } catch (NumberFormatException e) {
            log.debug("formBackingObject: No editId found. This is not an edit request");
        }

        try {
            topicId = Long.valueOf( request.getParameter("topicId") );
            Topic topic = announcementService.getTopic(topicId);
            ann.setParent(topic);
        } catch (NumberFormatException e) {
            log.error("formBackingObject: Unable to get topicId from request");
        }

        return ann;
    }

    /** Sends invalid submissions back to the base admin view. */
    @Override
    protected void handleInvalidSubmit(ActionRequest request, ActionResponse response) {
        log.error("Invalid submission. Going to baseAdmin");
        response.setRenderParameter("action","baseAdmin");
    }

    /**
     * @param announcementService the announcementService to set
     */
    public void setAnnouncementService(IAnnouncementService announcementService) {
        this.announcementService = announcementService;
    }

    /**
     * When a custom date format is set by Spring, this method converts it
     * immediately to the string of two CSS classes required by the date
     * picker in the view (e.g. "yyyy-MM-dd" becomes
     * "format-y-m-d divider-dash"). Formats that do not use exactly one of
     * '/', '-' or '.' as separator, or that do not have exactly three parts,
     * are ignored and leave the previous picker format in place.
     *
     * @param customDateFormat the date format pattern to parse
     */
    public void setCustomDateFormat(String customDateFormat) {
        this.customDateFormat = customDateFormat;
        if (log.isDebugEnabled()) {
            log.debug("Trying to parse custom date input format: ["+customDateFormat+"]");
        }
        String[] finalPieces = {"", "", ""};
        String[] pieces;
        String divider;

        // Ignore any custom date format requests if the requirements are not met
        if (customDateFormat.contains("/") && !customDateFormat.contains("-") && !customDateFormat.contains(".")) {
            pieces = customDateFormat.split("/");
            divider = "slash";
        }
        else if (customDateFormat.contains("-") && !customDateFormat.contains("/") && !customDateFormat.contains(".")) {
            pieces = customDateFormat.split("-");
            divider = "dash";
        }
        else if (customDateFormat.contains(".") && !customDateFormat.contains("/") && !customDateFormat.contains("-")) {
            pieces = customDateFormat.split("\\.");
            divider = "dot";
        }
        else {
            return;
        }

        // BUG FIX: the piece count used to be validated (with "> 3") only
        // AFTER pieces[1] and pieces[2] had been read, so a two-part format
        // such as "yyyy-MM" threw ArrayIndexOutOfBoundsException in the
        // debug log, and one- or two-part formats slipped past the check.
        // Require exactly three parts before touching them.
        if (pieces.length != 3) {
            return;
        }

        if (log.isDebugEnabled()) {
            log.debug("Custom date input format: ["+pieces[0]+" "+divider+" "+pieces[1]+" "+divider+" "+pieces[2]+"]");
        }

        for (int i=0; i<pieces.length; i++) {
            if (pieces[i].equalsIgnoreCase("mm")) {
                finalPieces[i] = "m";
            }
            else if (pieces[i].equalsIgnoreCase("dd")) {
                finalPieces[i] = "d";
            }
            else if (pieces[i].equalsIgnoreCase("yyyy")) {
                finalPieces[i] = "y";
            }
        }

        datePickerFormat = "format-" + finalPieces[0] + "-" + finalPieces[1] + "-" + finalPieces[2] + " divider-" + divider;
        if (log.isDebugEnabled()) {
            log.debug("Custom date input format parsed as: ["+datePickerFormat+"]");
        }
    }
}
|
package eu.dzhw.fdz.metadatamanagement.studymanagement.rest;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.CacheControl;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import eu.dzhw.fdz.metadatamanagement.common.domain.I18nString;
import eu.dzhw.fdz.metadatamanagement.studymanagement.repository.StudyRepository;
/**
 * REST resource exposing the list of all available study series.
 */
@RestController
public class StudySeriesesResourceController {

    // FIX: constructor-injected dependency made final (immutable after construction).
    private final StudyRepository studyRepository;

    @Autowired
    public StudySeriesesResourceController(StudyRepository studyRepository) {
        this.studyRepository = studyRepository;
    }

    /**
     * Get all available study serieses.
     *
     * @return 200 OK with the list of study series titles; the response is
     *         marked as not cacheable.
     */
    @RequestMapping(value = "/api/study-serieses", method = RequestMethod.GET)
    public ResponseEntity<List<I18nString>> findAllStudySerieses() {
        List<I18nString> studySerieses = studyRepository.findAllStudySerieses();
        return ResponseEntity.ok().cacheControl(CacheControl.noStore())
            .body(studySerieses);
    }
}
|
package fi.otavanopisto.kuntaapi.server.integrations.management;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.logging.Logger;
import javax.annotation.PostConstruct;
import javax.annotation.Resource;
import javax.ejb.AccessTimeout;
import javax.ejb.Asynchronous;
import javax.ejb.Singleton;
import javax.ejb.Timeout;
import javax.ejb.Timer;
import javax.ejb.TimerConfig;
import javax.ejb.TimerService;
import javax.enterprise.context.ApplicationScoped;
import javax.enterprise.event.Observes;
import javax.inject.Inject;
import org.apache.commons.lang3.StringUtils;
import fi.metatavu.kuntaapi.server.rest.model.Menu;
import fi.metatavu.kuntaapi.server.rest.model.MenuItem;
import fi.metatavu.management.client.ApiResponse;
import fi.metatavu.management.client.DefaultApi;
import fi.metatavu.management.client.model.Menuitem;
import fi.otavanopisto.kuntaapi.server.cache.MenuCache;
import fi.otavanopisto.kuntaapi.server.cache.MenuItemCache;
import fi.otavanopisto.kuntaapi.server.cache.ModificationHashCache;
import fi.otavanopisto.kuntaapi.server.controllers.IdentifierController;
import fi.otavanopisto.kuntaapi.server.discover.EntityUpdater;
import fi.otavanopisto.kuntaapi.server.discover.IdUpdateRequestQueue;
import fi.otavanopisto.kuntaapi.server.discover.MenuIdRemoveRequest;
import fi.otavanopisto.kuntaapi.server.discover.MenuIdUpdateRequest;
import fi.otavanopisto.kuntaapi.server.id.IdController;
import fi.otavanopisto.kuntaapi.server.id.IdPair;
import fi.otavanopisto.kuntaapi.server.id.MenuId;
import fi.otavanopisto.kuntaapi.server.id.MenuItemId;
import fi.otavanopisto.kuntaapi.server.id.OrganizationId;
import fi.otavanopisto.kuntaapi.server.id.PageId;
import fi.otavanopisto.kuntaapi.server.integrations.KuntaApiConsts;
import fi.otavanopisto.kuntaapi.server.integrations.MenuProvider.MenuItemType;
import fi.otavanopisto.kuntaapi.server.persistence.model.Identifier;
import fi.otavanopisto.kuntaapi.server.settings.OrganizationSettingController;
import fi.otavanopisto.kuntaapi.server.system.SystemUtils;
@ApplicationScoped
@Singleton
@AccessTimeout (unit = TimeUnit.HOURS, value = 1l)
@SuppressWarnings ("squid:S3306")
public class ManagementMenuEntityUpdater extends EntityUpdater {
// Delay in milliseconds between two timer ticks.
private static final int TIMER_INTERVAL = 5000;
@Inject
private Logger logger;
@Inject
private ManagementApi managementApi;
@Inject
private OrganizationSettingController organizationSettingController;
@Inject
private IdentifierController identifierController;
@Inject
private ModificationHashCache modificationHashCache;
@Inject
private IdController idController;
@Inject
private MenuCache menuCache;
@Inject
private MenuItemCache menuItemCache;
@Resource
private TimerService timerService;
// Set by stopTimer(); checked before queueing or processing requests.
private boolean stopped;
// Pending menu update requests; drained one request per timer tick.
private IdUpdateRequestQueue<MenuIdUpdateRequest> queue;
@PostConstruct
public void init() {
    // Queue for incoming menu id update requests of this integration.
    queue = new IdUpdateRequestQueue<>(ManagementConsts.IDENTIFIER_NAME);
}
/** Returns the name identifying this entity updater. */
@Override
public String getName() {
    return "management-menus";
}
/** Starts the update timer with the default interval. */
@Override
public void startTimer() {
    startTimer(TIMER_INTERVAL);
}
/**
 * Schedules a single, non-persistent timer tick and re-enables processing.
 *
 * @param duration delay in milliseconds before the tick fires
 */
private void startTimer(int duration) {
    stopped = false;
    // Non-persistent single-action timer; rescheduled manually after each tick.
    TimerConfig config = new TimerConfig();
    config.setPersistent(false);
    timerService.createSingleActionTimer(duration, config);
}
/**
 * Stops processing: no further timers are scheduled and incoming events
 * are ignored while {@code stopped} is set.
 */
@Override
public void stopTimer() {
    stopped = true;
}
/**
 * Queues a menu update request, unless the updater is stopped or the
 * organization has no management base URL configured.
 */
@Asynchronous
public void onMenuIdUpdateRequest(@Observes MenuIdUpdateRequest event) {
    if (stopped) {
        return;
    }
    OrganizationId organizationId = event.getOrganizationId();
    // Only organizations with a configured management base URL are handled.
    if (organizationSettingController.getSettingValue(organizationId, ManagementConsts.ORGANIZATION_SETTING_BASEURL) != null) {
        queue.add(event);
    }
}
/**
 * Deletes a menu in response to a remove request, unless the updater is
 * stopped or the id comes from a different integration source.
 */
@Asynchronous
public void onMenuIdRemoveRequest(@Observes MenuIdRemoveRequest event) {
    if (stopped) {
        return;
    }
    MenuId menuId = event.getId();
    // Only ids originating from the management integration are handled.
    if (StringUtils.equals(menuId.getSource(), ManagementConsts.IDENTIFIER_NAME)) {
        deleteMenu(event.getOrganizationId(), menuId);
    }
}
/**
 * Timer tick: processes at most one pending update request, then
 * reschedules the timer (with a shorter interval in test mode).
 */
@Timeout
public void timeout(Timer timer) {
    if (stopped) {
        return;
    }
    MenuIdUpdateRequest request = queue.next();
    if (request != null) {
        updateManagementMenu(request);
    }
    startTimer(SystemUtils.inTestMode() ? 1000 : TIMER_INTERVAL);
}
/**
 * Fetches the menu referenced by an update request from the management API
 * and applies it; failures are logged as warnings.
 */
private void updateManagementMenu(MenuIdUpdateRequest updateRequest) {
    OrganizationId organizationId = updateRequest.getOrganizationId();
    MenuId menuId = updateRequest.getId();
    DefaultApi api = managementApi.getApi(organizationId);
    ApiResponse<fi.metatavu.management.client.model.Menu> response = api.kuntaApiMenusIdGet(menuId.getId());
    if (response.isOk()) {
        updateManagementMenu(api, organizationId, response.getResponse(), updateRequest.getOrderIndex());
    } else {
        logger.warning(String.format("Finding organization %s menu failed on [%d] %s", menuId.getId(), response.getStatus(), response.getMessage()));
    }
}
/**
 * Updates a menu and all of its items: items still present upstream are
 * (re)stored, and cached items no longer returned by the API are deleted.
 */
private void updateManagementMenu(DefaultApi api, OrganizationId organizationId, fi.metatavu.management.client.model.Menu managementMenu, Long orderIndex) {
    Menu menu = updateManagementMenu(organizationId, managementMenu, orderIndex);
    if (menu == null) {
        logger.warning(String.format("Failed to update menu %d on organization %s", managementMenu.getId(), organizationId.getId()));
        return;
    }
    MenuId menuId = new MenuId(organizationId, KuntaApiConsts.IDENTIFIER_NAME, menu.getId());
    // Snapshot the currently cached item ids; whatever remains after the
    // update loop no longer exists upstream and is removed.
    List<MenuItemId> obsoleteItemIds = menuItemCache.getBareChildIds(menuId);
    List<Menuitem> managementMenuItems = listManagementMenuItems(api, managementMenu);
    for (int index = 0; index < managementMenuItems.size(); index++) {
        MenuItemId updatedItemId = updateManagementMenuItem(organizationId, menuId, managementMenuItems.get(index), (long) index);
        obsoleteItemIds.remove(updatedItemId);
    }
    for (MenuItemId obsoleteItemId : obsoleteItemIds) {
        deleteMenuItem(menuId, obsoleteItemId);
    }
}
/**
 * Lists the items of a management menu. Returns an empty list (and logs a
 * warning) when the API call fails.
 */
private List<Menuitem> listManagementMenuItems(DefaultApi api, fi.metatavu.management.client.model.Menu menu) {
    String menuId = String.valueOf(menu.getId());
    fi.metatavu.management.client.ApiResponse<List<Menuitem>> response = api.kuntaApiMenusMenuIdItemsGet(menuId);
    if (!response.isOk()) {
        logger.warning(String.format("Listing menu %d items failed on [%d] %s", menu.getId(), response.getStatus(), response.getMessage()));
        return new ArrayList<>();
    }
    return new ArrayList<>(response.getResponse());
}
/**
 * Creates or refreshes the identifier for a management menu, translates the
 * menu into the Kunta API model and caches it.
 *
 * @return the translated menu, or null when the id could not be translated
 */
private Menu updateManagementMenu(OrganizationId organizationId, fi.metatavu.management.client.model.Menu managementMenu, Long orderIndex) {
    MenuId managementMenuId = new MenuId(organizationId, ManagementConsts.IDENTIFIER_NAME, String.valueOf(managementMenu.getId()));
    Identifier identifier = identifierController.findIdentifierById(managementMenuId);
    if (identifier == null) {
        identifier = identifierController.createIdentifier(orderIndex, managementMenuId);
    } else {
        identifierController.updateIdentifierOrderIndex(identifier, orderIndex);
    }
    Menu menu = translateMenu(organizationId, managementMenu);
    // BUG FIX: translateMenu returns null when the id cannot be translated;
    // previously that null was hashed and stored in the caches anyway.
    if (menu != null) {
        MenuId kuntaApiMenuId = new MenuId(organizationId, KuntaApiConsts.IDENTIFIER_NAME, identifier.getKuntaApiId());
        modificationHashCache.put(identifier.getKuntaApiId(), createPojoHash(menu));
        menuCache.put(kuntaApiMenuId, menu);
    }
    return menu;
}
/**
 * Creates or refreshes the identifier for a management menu item, translates
 * the item into the Kunta API model and caches it under its menu.
 *
 * @return the Kunta API id of the item (returned even when translation
 *         fails, so callers do not delete it from the cache snapshot)
 */
private MenuItemId updateManagementMenuItem(OrganizationId organizationId, MenuId menuId, Menuitem managementMenuItem, Long orderIndex) {
    MenuItemId managementMenuItemId = new MenuItemId(organizationId, ManagementConsts.IDENTIFIER_NAME, String.valueOf(managementMenuItem.getId()));
    Identifier identifier = identifierController.findIdentifierById(managementMenuItemId);
    if (identifier == null) {
        identifier = identifierController.createIdentifier(orderIndex, managementMenuItemId);
    } else {
        identifierController.updateIdentifierOrderIndex(identifier, orderIndex);
    }
    MenuItemId kuntaApiMenuItemId = new MenuItemId(organizationId, KuntaApiConsts.IDENTIFIER_NAME, identifier.getKuntaApiId());
    MenuItem menuItem = translateMenuItem(organizationId, managementMenuItem);
    // BUG FIX: translateMenuItem returns null on translation failure;
    // previously that null was hashed and stored in the caches anyway.
    if (menuItem != null) {
        modificationHashCache.put(identifier.getKuntaApiId(), createPojoHash(menuItem));
        menuItemCache.put(new IdPair<MenuId, MenuItemId>(menuId, kuntaApiMenuItemId), menuItem);
    }
    return kuntaApiMenuItemId;
}
/**
 * Translates a management menu into the Kunta API model.
 *
 * @return the translated menu, or null when the id cannot be translated
 */
private Menu translateMenu(OrganizationId organizationId, fi.metatavu.management.client.model.Menu managementMenu) {
    MenuId managementMenuId = new MenuId(organizationId, ManagementConsts.IDENTIFIER_NAME, String.valueOf(managementMenu.getId()));
    MenuId kuntaApiMenuId = idController.translateMenuId(managementMenuId, KuntaApiConsts.IDENTIFIER_NAME);
    if (kuntaApiMenuId == null) {
        logger.info(String.format("Could not translate management menu %d into kunta api id", managementMenu.getId()));
        return null;
    }
    Menu menu = new Menu();
    menu.setId(kuntaApiMenuId.getId());
    menu.setSlug(managementMenu.getSlug());
    return menu;
}
/**
 * Translates a management service menu item into a Kunta API menu item.
 * Returns null when either the item id cannot be translated or the item type
 * cannot be resolved.
 */
private MenuItem translateMenuItem(OrganizationId organizationId, fi.metatavu.management.client.model.Menuitem managementMenuItem) {
  MenuItemId managementMenuItemId = new MenuItemId(organizationId, ManagementConsts.IDENTIFIER_NAME, String.valueOf(managementMenuItem.getId()));
  MenuItemId kuntaApiMenuItemId = idController.translateMenuItemId(managementMenuItemId, KuntaApiConsts.IDENTIFIER_NAME);
  if (kuntaApiMenuItemId == null) {
    logger.info(String.format("Could not translate management menu item %d into kunta api id", managementMenuItem.getId()));
    return null;
  }
  MenuItemType itemType = getItemType(managementMenuItem);
  if (itemType == null) {
    logger.severe(String.format("Could not determine item type for %d", managementMenuItem.getId()));
    return null;
  }
  PageId pageId = translatePageId(organizationId, managementMenuItem.getPageId());
  MenuItemId parentMenuItemId = translateMenuItemId(organizationId, managementMenuItem.getParentItemId());
  MenuItem result = new MenuItem();
  result.setId(kuntaApiMenuItemId.getId());
  result.setLabel(managementMenuItem.getTitle());
  result.setFileId(null);
  // Only link-type items carry an external URL.
  result.setExternalUrl(itemType == MenuItemType.LINK ? managementMenuItem.getUrl() : null);
  result.setPageId(pageId == null ? null : pageId.getId());
  result.setParentItemId(parentMenuItemId == null ? null : parentMenuItemId.getId());
  result.setType(itemType.toString());
  return result;
}
/**
 * Translates a management service parent menu item id into a Kunta API id.
 * Returns null when the parent id is null or cannot be translated.
 */
private MenuItemId translateMenuItemId(OrganizationId organizationId, Long parentItemId) {
  if (parentItemId == null) {
    return null;
  }
  MenuItemId managementId = new MenuItemId(organizationId, ManagementConsts.IDENTIFIER_NAME, String.valueOf(parentItemId));
  return idController.translateMenuItemId(managementId, KuntaApiConsts.IDENTIFIER_NAME);
}
/**
 * Maps a management service menu item type string onto a MenuItemType.
 *
 * @param managementMenuItem menu item whose type is resolved
 * @return the corresponding MenuItemType, or null when the type is missing or
 *         unknown (callers treat null as "could not determine item type")
 */
private MenuItemType getItemType(fi.metatavu.management.client.model.Menuitem managementMenuItem) {
  String type = managementMenuItem.getType();
  if (type == null) {
    // FIX: switching on a null String throws NullPointerException; a missing
    // type is handled the same way as an unrecognized one.
    return null;
  }
  switch (type) {
    case "page":
      return MenuItemType.PAGE;
    case "post":
      return MenuItemType.NEWS_ARTICLE;
    case "custom":
      return MenuItemType.LINK;
    default:
      return null;
  }
}
/**
 * Convenience overload: translates a management page id given as a Long.
 * Returns null for a null input.
 */
private PageId translatePageId(OrganizationId organizationId, Long pageId) {
  return pageId == null ? null : translatePageId(organizationId, pageId.intValue());
}
/**
 * Translates a management page id into a Kunta API page id.
 * Returns null for a null input or when the id cannot be translated.
 */
private PageId translatePageId(OrganizationId organizationId, Integer pageId) {
  if (pageId == null) {
    return null;
  }
  PageId managementPageId = new PageId(organizationId, ManagementConsts.IDENTIFIER_NAME, String.valueOf(pageId));
  return idController.translatePageId(managementPageId, KuntaApiConsts.IDENTIFIER_NAME);
}
/**
 * Deletes a menu: removes it from the task queue, clears its cache entries
 * and drops its identifier. A menu without a known identifier is ignored.
 */
private void deleteMenu(OrganizationId organizationId, MenuId managementMenuId) {
  Identifier menuIdentifier = identifierController.findIdentifierById(managementMenuId);
  if (menuIdentifier == null) {
    return;
  }
  MenuId kuntaApiMenuId = new MenuId(organizationId, KuntaApiConsts.IDENTIFIER_NAME, menuIdentifier.getKuntaApiId());
  queue.remove(managementMenuId);
  modificationHashCache.clear(menuIdentifier.getKuntaApiId());
  menuCache.clear(kuntaApiMenuId);
  identifierController.deleteIdentifier(menuIdentifier);
}
/**
 * Deletes a menu item: removes it from the task queue, clears its cache
 * entries and drops its identifier. An item without a known identifier is
 * ignored.
 *
 * NOTE(review): unlike deleteMenu, the identifier lookup here uses the Kunta
 * API id rather than the management id — confirm this asymmetry is intended.
 */
private void deleteMenuItem(MenuId kuntaApiMenuId, MenuItemId kuntaApiMenuItemId) {
  Identifier menuItemIdentifier = identifierController.findIdentifierById(kuntaApiMenuItemId);
  if (menuItemIdentifier == null) {
    return;
  }
  MenuItemId managementMenuItemId = idController.translateMenuItemId(kuntaApiMenuItemId, ManagementConsts.IDENTIFIER_NAME);
  queue.remove(managementMenuItemId);
  modificationHashCache.clear(menuItemIdentifier.getKuntaApiId());
  menuItemCache.clear(new IdPair<MenuId, MenuItemId>(kuntaApiMenuId, kuntaApiMenuItemId));
  identifierController.deleteIdentifier(menuItemIdentifier);
}
}
|
package org.cyclops.integrateddynamics.capability.variablefacade;
import net.minecraftforge.common.capabilities.Capability;
import net.minecraftforge.common.capabilities.CapabilityInject;
import org.cyclops.commoncapabilities.CommonCapabilities;
import org.cyclops.cyclopscore.config.extendedconfig.CapabilityConfig;
import org.cyclops.cyclopscore.modcompat.capabilities.DefaultCapabilityStorage;
import org.cyclops.integrateddynamics.api.item.IVariableFacadeHolder;
import org.cyclops.integrateddynamics.capability.variablecontainer.VariableContainerDefault;
/**
 * Capability configuration for the variable facade holder capability
 * ({@link IVariableFacadeHolder}).
 *
 * @author rubensworks
 */
public class VariableFacadeHolderConfig extends CapabilityConfig {

    /** The unique instance. */
    public static VariableFacadeHolderConfig _instance;

    /** The capability handle, injected by Forge once capabilities are registered. */
    @CapabilityInject(IVariableFacadeHolder.class)
    public static Capability<IVariableFacadeHolder> CAPABILITY = null;

    /**
     * Make a new instance, registering the capability under the id
     * "variable_facade_holder" with its default storage and implementation.
     */
    public VariableFacadeHolderConfig() {
        super(CommonCapabilities._instance,
                true,
                "variable_facade_holder",
                "Allows holding of variable facades.",
                IVariableFacadeHolder.class,
                new DefaultCapabilityStorage<IVariableFacadeHolder>(),
                VariableFacadeHolderDefault.class);
    }

    @Override
    public boolean isDisableable() {
        // This capability is always enabled; it cannot be turned off in config.
        return false;
    }
}
|
// LociFunctions.java
package loci.plugins;
import ij.IJ;
import ij.ImagePlus;
import ij.process.ImageProcessor;
import java.awt.Rectangle;
import java.io.IOException;
import loci.formats.*;
import loci.formats.meta.MetadataRetrieve;
public class LociFunctions extends MacroFunctions {
// -- Fields --
private IFormatReader r;
// -- Constructor --
public LociFunctions() {
r = new FileStitcher(new ChannelSeparator(), true);
r.setMetadataStore(MetadataTools.createOMEXMLMetadata());
}
// -- LociFunctions API methods - loci.formats.IFormatReader --
public void getImageCount(Double[] imageCount) {
imageCount[0] = new Double(r.getImageCount());
}
public void getSizeX(Double[] sizeX) { sizeX[0] = new Double(r.getSizeX()); }
public void getSizeY(Double[] sizeY) { sizeY[0] = new Double(r.getSizeY()); }
public void getSizeZ(Double[] sizeZ) { sizeZ[0] = new Double(r.getSizeZ()); }
public void getSizeC(Double[] sizeC) { sizeC[0] = new Double(r.getSizeC()); }
public void getSizeT(Double[] sizeT) { sizeT[0] = new Double(r.getSizeT()); }
public void getPixelType(String[] pixelType) {
pixelType[0] = FormatTools.getPixelTypeString(r.getPixelType());
}
public void getEffectiveSizeC(Double[] effectiveSizeC) {
effectiveSizeC[0] = new Double(r.getEffectiveSizeC());
}
public void getRGBChannelCount(Double[] rgbChannelCount) {
rgbChannelCount[0] = new Double(r.getRGBChannelCount());
}
public void isIndexed(String[] indexed) {
indexed[0] = r.isIndexed() ? "true" : "false";
}
public void getChannelDimCount(Double[] channelDimCount) {
channelDimCount[0] = new Double(r.getChannelDimLengths().length);
}
public void getChannelDimLength(Double i, Double[] channelDimLength) {
channelDimLength[0] = new Double(r.getChannelDimLengths()[i.intValue()]);
}
public void getChannelDimType(Double i, Double[] channelDimType) {
channelDimType[0] = new Double(r.getChannelDimTypes()[i.intValue()]);
}
// public void getThumbSizeX(Double[] thumbSizeX) {
// thumbSizeX[0] = new Double(r.getThumbSizeX());
// public void getThumbSizeY(Double[] thumbSizeY) {
// thumbSizeY[0] = new Double(r.getThumbSizeY());
public void isLittleEndian(String[] littleEndian) {
littleEndian[0] = r.isLittleEndian() ? "true" : "false";
}
public void getDimensionOrder(String[] dimOrder) {
dimOrder[0] = r.getDimensionOrder();
}
public void isOrderCertain(String[] orderCertain) {
orderCertain[0] = r.isOrderCertain() ? "true" : "false";
}
public void isInterleaved(String[] interleaved) {
interleaved[0] = r.isInterleaved() ? "true" : "false";
}
public void isInterleavedSubC(Double subC, String[] interleaved) {
interleaved[0] = r.isInterleaved(subC.intValue()) ? "true" : "false";
}
public void openImage(String title, Double no)
throws FormatException, IOException
{
ImageProcessor ip = Util.openProcessor(r, no.intValue());
new ImagePlus(title, ip).show();
}
public void openSubImage(String title, Double no, Double x, Double y,
Double width, Double height) throws FormatException, IOException
{
Rectangle crop = new Rectangle(x.intValue(), y.intValue(),
width.intValue(), height.intValue());
ImageProcessor ip = Util.openProcessor(r, no.intValue(), crop);
new ImagePlus(title, ip).show();
}
public void close() throws IOException { r.close(); }
public void closeFileOnly() throws IOException { r.close(true); }
public void getSeriesCount(Double[] seriesCount) {
seriesCount[0] = new Double(r.getSeriesCount());
}
public void setSeries(Double seriesNum) {
r.setSeries(seriesNum.intValue());
}
public void getSeries(Double[] seriesNum) {
seriesNum[0] = new Double(r.getSeries());
}
public void setNormalized(Boolean normalize) {
r.setNormalized(normalize.booleanValue());
}
public void isNormalized(Boolean[] normalize) {
normalize[0] = new Boolean(r.isNormalized());
}
public void setMetadataCollected(Boolean collect) {
r.setMetadataCollected(collect.booleanValue());
}
public void isMetadataCollected(Boolean[] collect) {
collect[0] = new Boolean(r.isMetadataCollected());
}
public void setOriginalMetadataPopulated(Boolean populate) {
r.setOriginalMetadataPopulated(populate.booleanValue());
}
public void isOriginalMetadataPopulated(Boolean[] populate) {
populate[0] = new Boolean(r.isOriginalMetadataPopulated());
}
public void setGroupFiles(String groupFiles) {
r.setGroupFiles("true".equalsIgnoreCase(groupFiles));
}
public void isGroupFiles(String[] groupFiles) {
groupFiles[0] = r.isGroupFiles() ? "true" : "false";
}
public void isMetadataComplete(String[] complete) {
complete[0] = r.isMetadataComplete() ? "true" : "false";
}
public void fileGroupOption(String id, String[] fileGroupOption)
throws FormatException, IOException
{
switch (r.fileGroupOption(id)) {
case IFormatReader.MUST_GROUP:
fileGroupOption[0] = "must";
break;
case IFormatReader.CAN_GROUP:
fileGroupOption[0] = "can";
break;
case IFormatReader.CANNOT_GROUP:
fileGroupOption[0] = "cannot";
break;
default:
fileGroupOption[0] = "unknown";
}
}
public void getUsedFileCount(Double[] count) {
count[0] = new Double(r.getUsedFiles().length);
}
public void getUsedFile(Double i, String[] used) {
used[0] = r.getUsedFiles()[i.intValue()];
}
public void getCurrentFile(String[] file) {
file[0] = r.getCurrentFile();
}
public void getIndex(Double z, Double c, Double t, Double[] index) {
index[0] = new Double(r.getIndex(z.intValue(), c.intValue(), t.intValue()));
}
public void getZCTCoords(Double index, Double[] z, Double[] c, Double[] t) {
int[] zct = r.getZCTCoords(index.intValue());
z[0] = new Double(zct[0]);
c[0] = new Double(zct[1]);
t[0] = new Double(zct[2]);
}
public void getMetadataValue(String field, String[] value) {
Object o = r.getMetadataValue(field);
value[0] = o == null ? null : o.toString();
}
public void setMetadataFiltered(String metadataFiltered) {
r.setMetadataFiltered("true".equalsIgnoreCase(metadataFiltered));
}
public void isMetadataFiltered(String[] metadataFiltered) {
metadataFiltered[0] = r.isMetadataFiltered() ? "true" : "false";
}
// -- LociFunction API methods - additional methods --
public void setId(String id) throws FormatException, IOException {
r.setId(id);
}
public void getSeriesName(String[] seriesName) {
MetadataRetrieve retrieve = (MetadataRetrieve) r.getMetadataStore();
seriesName[0] = retrieve.getImageName(r.getSeries());
}
// -- PlugIn API methods --
public void run(String arg) {
if (IJ.macroRunning()) super.run(arg);
else {
IJ.showMessage("LOCI Plugins for ImageJ",
"The macro extensions are designed to be used within a macro.\n" +
"Instructions on doing so will be printed to the Results window.");
IJ.write("To gain access to more advanced features of Bio-Formats");
IJ.write("from within a macro, put the following line at the");
IJ.write("beginning of your macro:");
IJ.write("");
IJ.write("run(\"Bio-Formats Macro Extensions\");");
IJ.write("");
IJ.write("This will enable the following macro functions:");
IJ.write("");
IJ.write("-= Usable any time =-");
IJ.write("");
IJ.write("Ext.setId(id)");
IJ.write("-- Initializes the given id (filename).");
IJ.write("Ext.isMetadataComplete(complete)");
IJ.write("-- True if Bio-Formats completely parses the current");
IJ.write("-- dataset's file format. If this function returns false,");
IJ.write("-- there are known limitations or missing features in how");
IJ.write("-- Bio-Formats handles this file format.");
IJ.write("Ext.fileGroupOption(id, fileGroupOption)");
IJ.write("-- Returns a code indicating the file grouping policy for");
IJ.write("-- for the current dataset. Possible values are:");
IJ.write("-- must, can, cannot, unknown");
IJ.write("");
IJ.write("-= Usable before initializing a file =-");
IJ.write("");
IJ.write("Ext.setNormalized(normalize)");
IJ.write("-- Sets whether to normalize floating point data to [0-1].");
IJ.write("Ext.isNormalized(normalize)");
IJ.write("-- Gets whether float data is being normalized to [0-1].");
IJ.write("Ext.setMetadataCollected(collect)");
IJ.write("-- Sets whether Bio-Formats should extract metadata at all.");
IJ.write("Ext.isMetadataCollected(collect)");
IJ.write("-- Gets whether Bio-Formats is supposed to extract metadata.");
IJ.write("Ext.setOriginalMetadataPopulated(populate)");
IJ.write("
IJ.write("-- to the OME metadata store as custom attributes.");
IJ.write("Ext.isOriginalMetadataPopulated(populate)");
IJ.write("
IJ.write("-- to the OME metadata store as custom attributes.");
IJ.write("Ext.setGroupFiles(group)");
IJ.write("-- For multi-file formats, sets whether to force grouping.");
IJ.write("Ext.isGroupFiles(group)");
IJ.write("-- Gets whether grouping is forced for multi-file formats..");
IJ.write("Ext.setMetadataFiltered(filter)");
IJ.write("-- Sets whether to filter out ugly metadata from the table");
IJ.write("-- (i.e., entries with unprintable characters, and extremely");
IJ.write("-- long values).");
IJ.write("Ext.isMetadataFiltered(filter)");
IJ.write("-- Gets whether ugly metadata is being filtered out.");
IJ.write("");
IJ.write("-== Usable after initializing a file ==-");
IJ.write("");
IJ.write("Ext.getSeriesCount(seriesCount)");
IJ.write("-- Gets the number of image series in the active dataset.");
IJ.write("Ext.setSeries(seriesNum)");
IJ.write("-- Sets the current series within the active dataset.");
IJ.write("Ext.getSeries(seriesNum)");
IJ.write("-- Gets the current series within the active dataset.");
IJ.write("Ext.getUsedFileCount(count)");
IJ.write("-- Gets the number of files that are part of this dataset.");
IJ.write("Ext.getUsedFile(i, used)");
IJ.write("-- Gets the i'th filename part of this dataset.");
IJ.write("Ext.getCurrentFile(file)");
IJ.write("-- Gets the base filename used to initialize this dataset.");
IJ.write("Ext.openImage(title, no)");
IJ.write("-- Opens the no'th plane in a new window named 'title'.");
IJ.write("Ext.openImage(title, no, x, y, width, height)");
IJ.write("-- Opens a subset of the no'th plane in a new window");
IJ.write("-- named 'title'.");
IJ.write("Ext.close()");
IJ.write("-- Closes the active dataset.");
IJ.write("Ext.closeFileOnly()");
IJ.write("-- Closes open files, leaving the current dataset active.");
IJ.write("");
IJ.write("-== Applying to the current series ==-");
IJ.write("");
IJ.write("Ext.getImageCount(imageCount)");
IJ.write("-- Gets the total number of planes in the current dataset.");
IJ.write("Ext.getSizeX(sizeX)");
IJ.write("-- Gets the width of each image plane in pixels.");
IJ.write("Ext.getSizeY(sizeY)");
IJ.write("-- Gets the height of each image plane in pixels.");
IJ.write("Ext.getSizeZ(sizeZ)");
IJ.write("-- Gets the number of focal planes in the dataset.");
IJ.write("Ext.getSizeC(sizeC)");
IJ.write("-- Gets the number of channels in the dataset.");
IJ.write("Ext.getSizeT(sizeT)");
IJ.write("-- Gets the number of time points in the dataset.");
IJ.write("Ext.getPixelType(pixelType)");
IJ.write("-- Gets a code representing the pixel type of the image.");
IJ.write("-- Possible values include:");
IJ.write("-- int8, uint8, int16, uint16, int32, uint32, float, double");
IJ.write("Ext.getEffectiveSizeC(effectiveSizeC)");
IJ.write("-- Gets the 'effective' number of channels, such that:");
IJ.write("-- effectiveSizeC * sizeZ * sizeT == imageCount");
IJ.write("Ext.getRGBChannelCount(rgbChannelCount)");
IJ.write("-- Gets the number of channels per composite image plane:");
IJ.write("-- sizeC / rgbChannelCount == effectiveSizeC");
IJ.write("Ext.isIndexed(indexed)");
IJ.write("-- Gets whether the image planes are stored as indexed color");
IJ.write("-- (i.e., whether they have embedded LUTs).");
IJ.write("Ext.getChannelDimCount(channelDimCount)");
IJ.write("-- For highly multidimensional image data, the C dimension");
IJ.write("-- may consist of multiple embedded 'sub' dimensions.");
IJ.write("-- This function returns the number of such dimensions.");
IJ.write("Ext.getChannelDimLength(i, channelDimLength)");
IJ.write("-- Gets the length of the i'th embedded 'sub' dimension.");
IJ.write("Ext.getChannelDimType(i, channelDimType)");
IJ.write("-- Gets a string label for the i'th embedded 'sub' channel.");
IJ.write("Ext.isLittleEndian(littleEndian)");
IJ.write("-- For multi-byte pixel types, get the data's endianness.");
IJ.write("Ext.getDimensionOrder(dimOrder)");
IJ.write("-- Gets a five-character string representing the dimensional");
IJ.write("-- rasterization order within the dataset. Valid orders are:");
IJ.write("-- XYCTZ, XYCZT, XYTCZ, XYTZC, XYZCT, XYZTC");
IJ.write("-- In cases where the channels are interleaved (e.g., CXYTZ),");
IJ.write("-- C will be the first dimension after X and Y (e.g., XYCTZ)");
IJ.write("-- and the isInterleaved function will return true.");
IJ.write("Ext.isOrderCertain(orderCertain)");
IJ.write("-- Gets whether the dimension order and sizes are known,");
IJ.write("-- or merely guesses.");
IJ.write("Ext.isInterleaved(interleaved)");
IJ.write("-- Gets whether or not the channels are interleaved.");
IJ.write("-- This function exists because X and Y must appear first");
IJ.write("-- in the dimension order. For interleaved data, XYCTZ or");
IJ.write("-- XYCZT is used, and this method returns true.");
IJ.write("Ext.isInterleavedSubC(subC, interleaved)");
IJ.write("-- Gets whether the given 'sub' channel is interleaved.");
IJ.write("-- This method exists because some data with multiple");
IJ.write("-- rasterized sub-dimensions within C have one sub-dimension");
IJ.write("-- interleaved, and the other not -- e.g., the SDT reader");
IJ.write("-- handles spectral-lifetime data with interleaved lifetime");
IJ.write("-- bins and non-interleaved spectral channels.");
IJ.write("Ext.getIndex(z, c, t, index)");
IJ.write("-- Gets the rasterized index corresponding to the given");
IJ.write("-- Z, C and T coordinates, according to the dataset's");
IJ.write("-- dimension order.");
IJ.write("Ext.getZCTCoords(index, z, c, t)");
IJ.write("-- Gets the Z, C and T coordinates corresponding to the given");
IJ.write("-- rasterized index value, according to the dataset's");
IJ.write("-- dimension order.");
IJ.write("Ext.getMetadataValue(field, value)");
IJ.write("-- Obtains the specified metadata field's value.");
IJ.write("Ext.getSeriesName(seriesName)");
IJ.write("-- Obtains the name of the current series.");
IJ.write("");
IJ.write("For more information, see the online Javadocs for the");
IJ.write("loci.formats.IFormatReader interface, available at:");
IJ.write("http:
}
}
}
|
/**
* @Title: RegexUtil.java
* @Package me.pc.mobile.helper.util
* @Description: TODO
* @author SilentKnight || happychinapc[at]gmail[dot]com
* @date 2014 2014124 4:45:45
* @version V1.0.0
*/
package me.pc.mobile.helper.v14.util;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * RegexUtil: static validation helpers built on commonly used regular
 * expressions (e-mail address, mobile number, QQ number, password).
 *
 * @author SilentKnight || happychinapc@gmail.com
 */
public final class RegexUtil {

	/** Utility class; not instantiable. */
	private RegexUtil() {
	}

	/** Matches an e-mail address; surrounding whitespace is tolerated. */
	public static final String EMAIL_REGEX = "^\\s*\\w+(?:\\.{0,1}[\\w-]+)*@[a-zA-Z0-9]+(?:[-.][a-zA-Z0-9]+)*\\.[a-zA-Z]+\\s*$";

	/** Matches a mainland-China mobile number, optionally prefixed with +86. */
	public static final String MOBILE_NUM_REGEX = "^(\\+86)?((13[0-9])|(15[^4,\\D])|(18[^4,\\D])|(170))\\d{8}$";

	/** Matches a QQ number: 5 to 10 digits with no leading zero. */
	public static final String QQ_REGEX = "^[1-9][0-9]{4,9}$";

	/**
	 * Matches a password of 6-20 alphanumeric characters.
	 * FIX: was "^[1-9a-zA-X]{6,20}$", which (apparently by typo) rejected the
	 * digit '0' and the letters 'Y' and 'Z'.
	 */
	public static final String PASSWORD_REGEX = "^[0-9a-zA-Z]{6,20}$";

	/**
	 * Verifies that the given password is valid.
	 *
	 * @param password candidate password; must not be null
	 * @return true when the password matches {@link #PASSWORD_REGEX}
	 */
	public static boolean isPasswordValid(String password) {
		return password.matches(PASSWORD_REGEX);
	}

	/**
	 * Verifies that the given string is a valid e-mail address.
	 *
	 * @param email candidate address; must not be null
	 * @return true when the address matches {@link #EMAIL_REGEX}
	 */
	public static boolean isEmailValid(String email) {
		return email.matches(EMAIL_REGEX);
	}

	/**
	 * Verifies that the given string is a valid mobile number.
	 *
	 * @param mobileNum candidate number; must not be null
	 * @return true when the number matches {@link #MOBILE_NUM_REGEX}
	 */
	public static boolean isMobileNumValid(String mobileNum) {
		return mobileNum.matches(MOBILE_NUM_REGEX);
	}

	/**
	 * Verifies that the given string is a valid QQ number.
	 *
	 * @param qq candidate number; must not be null
	 * @return true when the number matches {@link #QQ_REGEX}
	 */
	public static boolean isQQNumValid(String qq) {
		return qq.matches(QQ_REGEX);
	}

	/**
	 * Replaces, within the first match of {@code regex} in {@code source}, the
	 * text captured by each group with {@code newValue}.
	 *
	 * FIX: the original discarded the result of String.replace (Strings are
	 * immutable), so the input was always returned unchanged. Also guards
	 * against groups that did not participate in the match (null), which would
	 * previously have thrown a NullPointerException.
	 *
	 * @param source text to rewrite; returned unchanged when regex does not match
	 * @param regex pattern to search for
	 * @param newValue replacement text
	 * @return the rewritten string
	 */
	public static String compile(String source, String regex, String newValue) {
		Pattern pattern = Pattern.compile(regex);
		Matcher matcher = pattern.matcher(source);
		if (matcher.find()) {
			// Iteration bounds preserved from the original: i runs from 0
			// (the whole match) up to groupCount() - 1.
			for (int i = 0; i < matcher.groupCount(); i++) {
				String group = matcher.group(i);
				if (group != null) {
					source = source.replace(group, newValue);
				}
			}
		}
		return source;
	}
}
|
package org.dbflute.erflute.editor.controller.command.diagram_contents.element.node;
import java.util.ArrayList;
import java.util.List;
import org.dbflute.erflute.editor.VirtualDiagramEditor;
import org.dbflute.erflute.editor.controller.command.AbstractCommand;
import org.dbflute.erflute.editor.model.ERModelUtil;
import org.dbflute.erflute.editor.model.diagram_contents.element.node.ermodel.ERVirtualDiagram;
import org.dbflute.erflute.editor.model.diagram_contents.element.node.table.ERTable;
import org.dbflute.erflute.editor.model.diagram_contents.element.node.table.ERVirtualTable;
import org.dbflute.erflute.editor.view.dialog.dbexport.ErrorDialog;
import org.eclipse.draw2d.FigureCanvas;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.widgets.Display;
import org.eclipse.ui.PlatformUI;
/**
 * Command that places one or more main-diagram tables onto the current
 * virtual diagram at the mouse cursor position. Supports undo by removing
 * the virtual tables it created.
 */
public class PlaceTableCommand extends AbstractCommand {

    /** Single table to place (single-table constructor), or null. */
    private ERTable orgTable;
    /** Tables to place (multi-table constructor), or null. */
    private List<ERTable> orgTables;
    /** Virtual table created in single-table mode (for undo). */
    private ERVirtualTable virtualTable;
    /** Virtual tables created in multi-table mode (for undo). */
    private List<ERVirtualTable> virtualTables;

    /** Creates a command that places a single table. */
    public PlaceTableCommand(ERTable orgTable) {
        this.orgTable = orgTable;
    }

    /** Creates a command that places several tables at once. */
    public PlaceTableCommand(List<ERTable> orgTables) { // FIX: was raw List
        this.orgTables = orgTables;
    }

    /**
     * Computes the cursor location in canvas coordinates, compensating for
     * the current scroll position of the editor canvas.
     */
    private Point computeDropPoint(VirtualDiagramEditor modelEditor) {
        final Point cursorLocation = Display.getCurrent().getCursorLocation();
        final Point point = modelEditor.getGraphicalViewer().getControl().toControl(cursorLocation);
        final FigureCanvas canvas = (FigureCanvas) modelEditor.getGraphicalViewer().getControl();
        point.x += canvas.getHorizontalBar().getSelection();
        point.y += canvas.getVerticalBar().getSelection();
        return point;
    }

    /** Returns true when the table is already present on the virtual diagram. */
    private boolean isAlreadyPlaced(VirtualDiagramEditor modelEditor, ERTable table) {
        for (final ERVirtualTable vtable : modelEditor.getVirtualDiagram().getVirtualTables()) {
            if (vtable.getRawTable().equals(table)) {
                return true;
            }
        }
        return false;
    }

    @Override
    protected void doExecute() {
        if (orgTables != null) {
            final VirtualDiagramEditor modelEditor = (VirtualDiagramEditor) orgTables.get(0).getDiagram().getEditor().getActiveEditor();
            final Point point = computeDropPoint(modelEditor);
            virtualTables = new ArrayList<>();
            for (final ERTable curTable : orgTables) {
                if (isAlreadyPlaced(modelEditor, curTable)) {
                    // Already on the diagram: silently skip in batch mode.
                    continue;
                }
                final ERVirtualDiagram model = curTable.getDiagram().getCurrentVirtualDiagram();
                virtualTable = new ERVirtualTable(model, curTable);
                virtualTable.setPoint(point.x, point.y);
                model.addTable(virtualTable);
                virtualTables.add(virtualTable);
                // Cascade subsequent tables so they do not fully overlap.
                point.x += 32;
                point.y += 32;
            }
            ERModelUtil.refreshDiagram(modelEditor.getDiagram());
        } else {
            final ERTable curTable = orgTable;
            final VirtualDiagramEditor modelEditor = (VirtualDiagramEditor) curTable.getDiagram().getEditor().getActiveEditor();
            if (isAlreadyPlaced(modelEditor, curTable)) {
                // In single-table mode a duplicate placement is reported to the user.
                final ErrorDialog dialog = new ErrorDialog(PlatformUI.getWorkbench().getActiveWorkbenchWindow().getShell(),
                        "");
                dialog.open();
                return;
            }
            final Point point = computeDropPoint(modelEditor);
            final ERVirtualDiagram model = curTable.getDiagram().getCurrentVirtualDiagram();
            virtualTable = new ERVirtualTable(model, curTable);
            virtualTable.setPoint(point.x, point.y);
            model.addTable(virtualTable);
            ERModelUtil.refreshDiagram(modelEditor.getDiagram());
        }
    }

    @Override
    protected void doUndo() {
        if (orgTables != null) {
            // Remove every virtual table created by the batch placement.
            final ERVirtualDiagram model = orgTables.get(0).getDiagram().getCurrentVirtualDiagram();
            for (final ERVirtualTable vtable : virtualTables) {
                model.remove(vtable);
            }
            final VirtualDiagramEditor modelEditor = (VirtualDiagramEditor) orgTables.get(0).getDiagram().getEditor().getActiveEditor();
            modelEditor.setContents(model);
        } else {
            final ERVirtualDiagram model = orgTable.getDiagram().getCurrentVirtualDiagram();
            model.remove(virtualTable);
            final VirtualDiagramEditor modelEditor = (VirtualDiagramEditor) orgTable.getDiagram().getEditor().getActiveEditor();
            modelEditor.setContents(model);
        }
    }
}
|
package org.helioviewer.jhv.plugins.eveplugin.lines.model;
import java.awt.Rectangle;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import javax.swing.Timer;
import org.helioviewer.jhv.base.Range;
import org.helioviewer.jhv.base.interval.Interval;
import org.helioviewer.jhv.base.logging.Log;
import org.helioviewer.jhv.plugins.eveplugin.draw.DrawController;
import org.helioviewer.jhv.plugins.eveplugin.draw.PlotAreaSpace;
import org.helioviewer.jhv.plugins.eveplugin.draw.PlotAreaSpaceListener;
import org.helioviewer.jhv.plugins.eveplugin.draw.TimingListener;
import org.helioviewer.jhv.plugins.eveplugin.draw.ValueSpaceListener;
import org.helioviewer.jhv.plugins.eveplugin.draw.YAxisElement;
import org.helioviewer.jhv.plugins.eveplugin.draw.YAxisElement.YAxisLocation;
import org.helioviewer.jhv.plugins.eveplugin.lines.data.Band;
import org.helioviewer.jhv.plugins.eveplugin.lines.data.BandColors;
import org.helioviewer.jhv.plugins.eveplugin.lines.data.DownloadController;
import org.helioviewer.jhv.plugins.eveplugin.lines.data.EVECacheController;
import org.helioviewer.jhv.plugins.eveplugin.lines.data.EVECacheControllerListener;
import org.helioviewer.jhv.plugins.eveplugin.lines.data.EVEValues;
import org.helioviewer.jhv.plugins.eveplugin.lines.gui.EVEDrawableElement;
import org.helioviewer.jhv.plugins.eveplugin.settings.BandType;
import org.helioviewer.jhv.plugins.eveplugin.view.linedataselector.LineDataSelectorModel;
public class EVEDrawController implements TimingListener, EVECacheControllerListener, PlotAreaSpaceListener, ValueSpaceListener {
private final Map<YAxisElement, Map<Band, EVEValues>> dataMapPerUnitLabel = new HashMap<YAxisElement, Map<Band, EVEValues>>();
private final DrawController drawController;
private final Set<BandType> bandTypes;
private final Map<YAxisElement, EVEDrawableElement> eveDrawableElementMap;
private final Map<Band, YAxisElement> yAxisElementMap;
private final Map<YAxisElement, List<Band>> bandsPerYAxis;
private final PlotAreaSpace plotAreaSpace;
private static EVEDrawController instance;
private final Timer selectedIntervalChangedTimer;
private boolean selectedIntervalChanged;
private boolean keepFullValueRange;
private final LineDataSelectorModel selectorModel;
/**
 * Private singleton constructor: registers this controller as a listener on
 * the draw controller, the EVE cache controller and the plot area space, and
 * starts a 300 ms timer that coalesces selected-interval change events.
 */
private EVEDrawController() {
    DrawController.getSingletonInstance().addTimingListener(this);
    EVECacheController.getSingletonInstance().addControllerListener(this);
    selectorModel = LineDataSelectorModel.getSingletonInstance();
    drawController = DrawController.getSingletonInstance();
    eveDrawableElementMap = new HashMap<YAxisElement, EVEDrawableElement>();
    bandTypes = new HashSet<BandType>();
    yAxisElementMap = new HashMap<Band, YAxisElement>();
    bandsPerYAxis = new HashMap<YAxisElement, List<Band>>();
    plotAreaSpace = PlotAreaSpace.getSingletonInstance();
    plotAreaSpace.addPlotAreaSpaceListener(this);
    selectedIntervalChanged = false;
    // The timer throttles redraw work: interval changes only set a flag,
    // which the timer task picks up every 300 ms.
    selectedIntervalChangedTimer = new Timer(300, new SelectedIntervalTimerTask());
    selectedIntervalChangedTimer.start();
}
/**
 * Returns the lazily created singleton instance.
 * NOTE(review): the lazy initialization is not synchronized — this assumes
 * all access happens on a single (GUI) thread; confirm before calling from
 * other threads.
 */
public static EVEDrawController getSingletonInstance() {
    if (instance == null) {
        instance = new EVEDrawController();
    }
    return instance;
}
/**
 * Starts tracking a band: assigns it to a y-axis element (reusing an axis
 * with the same unit label when one exists, otherwise creating one if an
 * axis slot is still available), fetches its data for the currently selected
 * interval and requests a redraw. When no axis can be assigned, the band is
 * not added and a debug message is logged.
 */
private void addToMap(final Band band) {
    Interval<Date> interval = drawController.getSelectedInterval();
    Rectangle plotArea = drawController.getPlotArea();
    // Reuse an existing axis with the same unit label, if any.
    YAxisElement yAxisElement = drawController.getYAxisElementForUnit(band.getUnitLabel());
    if (yAxisElement == null && drawController.hasAxisAvailable()) {
        // No axis for this unit yet, but there is room for a new one.
        yAxisElement = new YAxisElement();
        yAxisElement.addValueSpaceListener(this);
    }
    if (yAxisElement != null) {
        plotAreaSpace.addValueSpace(yAxisElement);
        yAxisElementMap.put(band, yAxisElement);
        addToBandsPerYAxis(yAxisElement, band);
        EVEValues data = retrieveData(band, interval, plotArea);
        if (!dataMapPerUnitLabel.containsKey(yAxisElement)) {
            dataMapPerUnitLabel.put(yAxisElement, new HashMap<Band, EVEValues>());
        }
        if (data != null) {
            dataMapPerUnitLabel.get(yAxisElement).put(band, data);
        }
    } else {
        Log.debug("band could not be added. No Yaxis Available ");
    }
    // Redraw (with full-range recalculation) even when the band was not added.
    fireRedrawRequest(true);
}
/**
 * Registers the band under the given y-axis element, creating the band list
 * on first use for that axis.
 */
private void addToBandsPerYAxis(YAxisElement yAxisElement, Band band) {
    List<Band> bands = bandsPerYAxis.get(yAxisElement);
    if (bands == null) {
        bands = new ArrayList<Band>();
    }
    bands.add(band);
    bandsPerYAxis.put(yAxisElement, bands);
}
/**
 * Stops tracking a band. When the band was the last one on its y-axis, the
 * axis bookkeeping, its drawable element and its value space are dropped as
 * well. Bands that are not currently tracked are ignored.
 */
private void removeFromMap(final Band band) {
    YAxisElement yAxisElement = yAxisElementMap.get(band);
    if (!dataMapPerUnitLabel.containsKey(yAxisElement)) {
        return;
    }
    Map<Band, EVEValues> bandData = dataMapPerUnitLabel.get(yAxisElement);
    if (!bandData.containsKey(band)) {
        return;
    }
    bandData.remove(band);
    List<Band> bands = bandsPerYAxis.get(yAxisElement);
    bands.remove(band);
    if (bands.isEmpty()) {
        // Last band on this axis: tear down the axis and its drawable.
        EVEDrawableElement removed = eveDrawableElementMap.remove(yAxisElement);
        yAxisElementMap.remove(band);
        bandsPerYAxis.remove(yAxisElement);
        plotAreaSpace.removeValueSpace(yAxisElement);
        drawController.removeDrawableElement(removed);
    }
    resetAvailableRange();
    fireRedrawRequest(true);
}
/**
 * Re-downloads the values for one band and recomputes the available range of
 * its y-axis from all bands sharing that axis plus the fresh data.
 */
private void updateBand(final Band band) {
    Interval<Date> interval = drawController.getSelectedInterval();
    Rectangle plotArea = drawController.getPlotArea();
    EVEValues data = retrieveData(band, interval, plotArea);
    YAxisElement yAxisElement = yAxisElementMap.get(band);
    if (!dataMapPerUnitLabel.containsKey(yAxisElement)) {
        dataMapPerUnitLabel.put(yAxisElement, new HashMap<Band, EVEValues>());
    }
    // Widen the range over all bands already cached for this axis...
    Range newAvailableRange = new Range(yAxisElement.getAvailableRange());
    for (EVEValues v : dataMapPerUnitLabel.get(yAxisElement).values()) {
        if (v != null) {
            newAvailableRange.setMin(v.getMinimumValue());
            newAvailableRange.setMax(v.getMaximumValue());
        }
    }
    // ... and over the freshly downloaded data.
    newAvailableRange.setMin(data.getMinimumValue());
    newAvailableRange.setMax(data.getMaximumValue());
    double avMin = newAvailableRange.min;
    double avMax = newAvailableRange.max;
    if (avMin == avMax) {
        // Degenerate (flat) range: widen it so the plot has vertical extent.
        // NOTE(review): these adjustments mutate the CURRENT available range
        // object, which is then replaced by setAvailableRange(newAvailableRange)
        // below — they appear to have no lasting effect unless setAvailableRange
        // merges with the old range; confirm intent.
        if (avMin == 0) {
            yAxisElement.getAvailableRange().setMin(-1.0);
            yAxisElement.getAvailableRange().setMax(1.0);
        } else {
            yAxisElement.getAvailableRange().setMin(avMin - avMin / 10);
            yAxisElement.getAvailableRange().setMax(avMax + avMax / 10);
        }
    }
    yAxisElement.setAvailableRange(newAvailableRange);
    dataMapPerUnitLabel.get(yAxisElement).put(band, data);
}
/** Recomputes the cached EVE values for every band currently on any axis. */
private void updateBands() {
    for (Entry<YAxisElement, Map<Band, EVEValues>> entry : dataMapPerUnitLabel.entrySet()) {
        for (final Band band : entry.getValue().keySet()) {
            updateBand(band);
        }
    }
}
// Expands the selected value range of every axis to its full available range
// by forcing a redraw with maxRange == true.
public void setSelectedRangeMaximal() {
fireRedrawRequest(true);
}
/**
 * Rebuilds each axis' available (and optionally selected) range from the
 * cached band data, pushes the corresponding drawable elements to the draw
 * controller, and finally requests a repaint.
 *
 * @param maxRange when true, the selected range of every axis is reset to
 *                 the full available range
 */
private void fireRedrawRequest(final boolean maxRange) {
    Interval<Date> interval = drawController.getSelectedInterval();
    for (Map.Entry<YAxisElement, Map<Band, EVEValues>> entry : dataMapPerUnitLabel.entrySet()) {
        YAxisElement yAxisElement = entry.getKey();
        Map<Band, EVEValues> bandMap = entry.getValue();
        final Band[] bands = bandMap.keySet().toArray(new Band[0]);
        // Unit label and scale come from the first band on the axis.
        String unitLabel = "";
        boolean isLog = false;
        if (bands.length > 0) {
            unitLabel = bands[0].getUnitLabel();
            isLog = bands[0].getBandType().isLogScale();
        }
        if (!eveDrawableElementMap.containsKey(yAxisElement)) {
            eveDrawableElementMap.put(yAxisElement, new EVEDrawableElement());
        }
        Range newAvailableRange = new Range();
        for (EVEValues v : bandMap.values()) {
            // ROBUSTNESS FIX: updateBand() tolerates null cache entries, so
            // guard here as well instead of risking an NPE.
            if (v != null) {
                newAvailableRange.setMin(v.getMinimumValue());
                newAvailableRange.setMax(v.getMaximumValue());
            }
        }
        if (newAvailableRange.max == newAvailableRange.min) {
            // Degenerate range: widen it so the axis has a drawable extent.
            // CONSISTENCY FIX: an all-zero range used to stay [0, 0]; fall
            // back to [-1, 1] exactly like updateBand() does.
            if (newAvailableRange.min == 0) {
                newAvailableRange.setMin(-1.0);
                newAvailableRange.setMax(1.0);
            } else {
                newAvailableRange.setMin(newAvailableRange.min - newAvailableRange.min / 10);
                newAvailableRange.setMax(newAvailableRange.max + newAvailableRange.max / 10);
            }
        }
        yAxisElement.setAvailableRange(new Range(newAvailableRange));
        if (maxRange) {
            yAxisElement.setSelectedRange(new Range(newAvailableRange));
        }
        yAxisElement.set(unitLabel, isLog);
        EVEDrawableElement eveDrawableElement = eveDrawableElementMap.get(yAxisElement);
        eveDrawableElement.set(interval, bands, yAxisElement);
        // An axis without bands must not keep a stale drawable registered.
        if (bands.length > 0) {
            drawController.updateDrawableElement(eveDrawableElement, false);
        } else {
            drawController.removeDrawableElement(eveDrawableElement);
        }
    }
    drawController.fireRedrawRequest();
}
// Fetches (and, if needed, triggers a download of) the band's values for the
// given interval from the global EVE cache controller.
private final EVEValues retrieveData(final Band band, final Interval<Date> interval, Rectangle plotArea) {
return EVECacheController.getSingletonInstance().downloadData(band, interval, plotArea);
}
// Zoom Controller Listener
@Override
public void availableIntervalChanged() {
// Intentionally empty: changes to the available interval require no action
// here; updates are driven by selected-interval changes instead.
}
@Override
public void selectedIntervalChanged(boolean keepFullValueRange) {
// Only record the change; SelectedIntervalTimerTask picks these flags up
// asynchronously and performs the actual update and redraw.
this.keepFullValueRange = keepFullValueRange;
selectedIntervalChanged = true;
}
// Band Controller Listener
/**
 * Registers a band for the given band type: assigns it the next plot color,
 * kicks off its data download and adds it to the line selector model.
 * Band types that are already registered are ignored.
 */
public void bandAdded(final BandType bandType) {
    if (bandTypes.contains(bandType)) {
        return;
    }
    bandTypes.add(bandType);
    final Band newBand = new Band(bandType);
    newBand.setDataColor(BandColors.getNextColor());
    DownloadController.getSingletonInstance().updateBand(newBand, DrawController.getSingletonInstance().getAvailableInterval(), DrawController.getSingletonInstance().getSelectedInterval());
    addToMap(newBand);
    selectorModel.addLineData(newBand);
}
/**
 * Shows or hides a band on the plot depending on its visibility flag.
 */
public void bandUpdated(final Band band) {
    if (!band.isVisible()) {
        removeFromMap(band);
        return;
    }
    addToMap(band);
}
// Fully removes a band: forgets its type, cancels running downloads,
// unregisters it from the plot maps, rebalances the axes, and finally
// removes it from the line selector. The order matters: downloads must stop
// before the maps are cleaned up.
public void bandRemoved(final Band band) {
bandTypes.remove(band.getBandType());
DownloadController.getSingletonInstance().stopDownloads(band);
removeFromMap(band);
fixAxis();
selectorModel.removeLineData(band);
}
// EVE Cache Controller Listener
/**
 * After a band removal, makes sure the left axis does not stay empty: when
 * only a right-hand axis remains, every movable band on it is switched over
 * via changeAxis().
 */
private void fixAxis() {
    boolean leftAxisPresent = false;
    List<Band> bandsOnRightAxis = null;
    for (final Entry<YAxisElement, List<Band>> axisEntry : bandsPerYAxis.entrySet()) {
        if (drawController.getYAxisLocation(axisEntry.getKey()) == YAxisLocation.LEFT) {
            leftAxisPresent = true;
        } else {
            bandsOnRightAxis = axisEntry.getValue();
        }
    }
    if (leftAxisPresent || bandsOnRightAxis == null) {
        return;
    }
    for (final Band candidate : bandsOnRightAxis) {
        if (canChangeAxis(candidate)) {
            changeAxis(candidate);
        }
    }
}
@Override
public void dataAdded(final Band band) {
// Defer the actual work to SelectedIntervalTimerTask by flagging that the
// data of the current interval changed.
selectedIntervalChanged = true;
}
@Override
public void plotAreaSpaceChanged(double scaledMinTime, double scaledMaxTime, double scaledSelectedMinTime, double scaledSelectedMaxTime, boolean forced) {
// Repaint with the current selected ranges; do not reset them to maximum.
fireRedrawRequest(false);
}
@Override
public void availablePlotAreaSpaceChanged(double oldMinTime, double oldMaxTime, double newMinTime, double newMaxTime) {
// Intentionally empty: this listener does not react to changes of the
// available plot area space.
}
// Returns the cached values for the given band.
// NOTE(review): the interval and graphArea parameters are unused, and this
// throws an NPE when the band is not registered in the maps — confirm that
// all callers only pass registered bands.
public EVEValues getValues(Band band, Interval<Date> interval, Rectangle graphArea) {
return dataMapPerUnitLabel.get(yAxisElementMap.get(band)).get(band);
}
// Repaints the plot after a band color change; value ranges stay untouched.
public void bandColorChanged(Band band) {
fireRedrawRequest(false);
}
/**
 * Tells whether the EVE cache already holds data for the band inside the
 * currently selected interval.
 */
public boolean hasDataInSelectedInterval(Band band) {
    final Interval<Date> selectedInterval = DrawController.getSingletonInstance().getSelectedInterval();
    return EVECacheController.getSingletonInstance().hasDataInSelectedInterval(band, selectedInterval);
}
// Moves a band from its current y-axis to the "other" axis (an existing
// second axis, or a freshly created one). All bookkeeping maps are migrated
// in lock-step: value space, axis map, per-axis band lists, cached data and
// drawable elements.
public void changeAxis(Band band) {
YAxisElement currentYAxisElement = yAxisElementMap.get(band);
// Only movable when either a single axis carries several bands or two axes
// exist, and the draw controller allows this unit to switch.
if (((bandsPerYAxis.size() == 1 && bandsPerYAxis.get(currentYAxisElement).size() > 1) || bandsPerYAxis.size() == 2) && drawController.canChangeAxis(band.getUnitLabel())) {
YAxisElement otherYAxisElement = getOtherAxisElement(currentYAxisElement);
if (otherYAxisElement != null) {
plotAreaSpace.addValueSpace(otherYAxisElement);
yAxisElementMap.put(band, otherYAxisElement);
// Append the band to the target axis' band list (reusing it if present).
List<Band> bandsPerList = new ArrayList<Band>();
if (bandsPerYAxis.containsKey(otherYAxisElement)) {
bandsPerList = bandsPerYAxis.get(otherYAxisElement);
}
bandsPerList.add(band);
bandsPerYAxis.put(otherYAxisElement, bandsPerList);
bandsPerYAxis.get(currentYAxisElement).remove(band);
// Move the cached values over to the target axis.
Map<Band, EVEValues> valuesPerBand = new HashMap<Band, EVEValues>();
if (!dataMapPerUnitLabel.containsKey(otherYAxisElement)) {
dataMapPerUnitLabel.put(otherYAxisElement, valuesPerBand);
}
dataMapPerUnitLabel.get(otherYAxisElement).put(band, dataMapPerUnitLabel.get(currentYAxisElement).get(band));
dataMapPerUnitLabel.get(currentYAxisElement).remove(band);
if (!eveDrawableElementMap.containsKey(otherYAxisElement)) {
eveDrawableElementMap.put(otherYAxisElement, new EVEDrawableElement());
}
// Recompute ranges from scratch and repaint with full range.
resetAvailableRange();
updateBand(band);
fireRedrawRequest(true);
}
}
}
/**
 * Clears the cached available range of every axis that currently carries
 * band data, so the next update recomputes it from scratch.
 */
private void resetAvailableRange() {
    for (final YAxisElement axis : dataMapPerUnitLabel.keySet()) {
        axis.reset();
    }
}
/**
 * Returns the y-axis a band should be moved to: the existing second axis
 * when two are present, otherwise a freshly created one listening on this
 * controller. Returns null when the unit may not switch axes at all.
 */
private YAxisElement getOtherAxisElement(YAxisElement currentYAxisElement) {
    if (!drawController.canChangeAxis(currentYAxisElement.getOriginalLabel())) {
        return null;
    }
    Set<YAxisElement> allYAxisElements = bandsPerYAxis.keySet();
    if (allYAxisElements.size() == 2) {
        for (final YAxisElement candidate : allYAxisElements) {
            if (!candidate.equals(currentYAxisElement)) {
                return candidate;
            }
        }
    }
    final YAxisElement freshAxis = new YAxisElement();
    freshAxis.addValueSpaceListener(this);
    return freshAxis;
}
/**
 * A band may switch axes when its unit is switchable, more than one axis
 * exists, and it either sits on the right axis or shares the left axis with
 * at least one other band (so the left axis never ends up empty).
 */
public boolean canChangeAxis(Band band) {
    if (!DrawController.getSingletonInstance().canChangeAxis(band.getUnitLabel()) || yAxisElementMap.size() <= 1) {
        return false;
    }
    final YAxisElement axis = yAxisElementMap.get(band);
    if (drawController.getYAxisLocation(axis) == YAxisElement.YAxisLocation.RIGHT) {
        return true;
    }
    return drawController.getYAxisLocation(axis) == YAxisElement.YAxisLocation.LEFT && bandsPerYAxis.get(axis).size() > 1;
}
/**
 * Returns 0 when the band's axis is on the left, 1 when it is on the right.
 */
public int getAxisLocation(Band band) {
    final boolean onLeft = drawController.getYAxisLocation(yAxisElementMap.get(band)) == YAxisElement.YAxisLocation.LEFT;
    return onLeft ? 0 : 1;
}
@Override
public void valueSpaceChanged(Range availableRange, Range selectedRange) {
// An axis' value space changed (e.g. by user interaction); repaint without
// resetting the selected ranges.
fireRedrawRequest(false);
}
// Swing timer callback that batches interval changes: whenever the selected
// interval (or its data) was flagged as changed, all bands are refreshed
// once and a redraw is requested, honoring the keepFullValueRange flag.
private class SelectedIntervalTimerTask implements ActionListener {
@Override
public void actionPerformed(ActionEvent e) {
if (selectedIntervalChanged) {
// Clear the flag first so changes arriving during the update are not lost.
selectedIntervalChanged = false;
updateBands();
fireRedrawRequest(keepFullValueRange);
}
}
}
// Returns all registered bands.
// NOTE(review): this is the live key set of yAxisElementMap, not a copy —
// callers must not mutate it.
public Set<Band> getAllBands() {
return yAxisElementMap.keySet();
}
// True when a band for the given band type is already registered.
public boolean containsBandType(BandType value) {
return bandTypes.contains(value);
}
}
|
package be.peopleware.persistence_II.junit.hibernate;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import junit.framework.TestCase;
import net.sf.hibernate.Criteria;
import net.sf.hibernate.HibernateException;
import net.sf.hibernate.ObjectNotFoundException;
import net.sf.hibernate.Query;
import net.sf.hibernate.Session;
import net.sf.hibernate.SessionFactory;
import net.sf.hibernate.Transaction;
import net.sf.hibernate.cfg.Configuration;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import be.peopleware.persistence_II.PersistentBean;
/**
* A simple helper class for hibernate actions within jUnit tests.
*
* @author David Van Keer
* @author Peopleware n.v.
* @todo (nsmeets) Copied from WoundPilot.
*/
public abstract class AbstractHibernateTest extends TestCase {

    /*<section name="Meta Information">*/
    /** {@value} */
    public static final String CVS_REVISION = "$Revision$"; //$NON-NLS-1$
    /** {@value} */
    public static final String CVS_DATE = "$Date$"; //$NON-NLS-1$
    /** {@value} */
    public static final String CVS_STATE = "$State$"; //$NON-NLS-1$
    /** {@value} */
    public static final String CVS_TAG = "$Name$"; //$NON-NLS-1$
    /*</section>*/

    private static final Log _LOG = LogFactory.getLog(AbstractHibernateTest.class);

    /** Session factory shared by all tests; built once from the jUnit config. */
    private static SessionFactory $sessionFactory;

    /** Classpath location of the Hibernate configuration used for jUnit runs. */
    private static final String JUNIT_CONFIG_FILE_LOCATION =
        "/hibernate_junit.cfg.xml";

    // NOTE(review): fail() inside a static initializer throws out of class
    // loading; kept as-is because it aborts the test run as intended.
    static {
        _LOG.debug("reading Hibernate config from " + JUNIT_CONFIG_FILE_LOCATION);
        Configuration configuration = new Configuration();
        try {
            configuration.configure(JUNIT_CONFIG_FILE_LOCATION);
            $sessionFactory = configuration.buildSessionFactory();
            _LOG.debug("Hibernate config read ok.");
        }
        catch (HibernateException hExc) {
            hExc.printStackTrace();
            fail("Hibernate configuration is invalid.");
        }
    }

    /**
     * Opens a fresh Hibernate session; fails the test on error.
     */
    public void openSession() {
        try {
            $session = $sessionFactory.openSession();
        }
        catch (HibernateException hExc) {
            hExc.printStackTrace();
            fail("Couldn't open a new hibernate session.");
        }
    }

    /**
     * Closes and clears the current session; fails the test on error.
     */
    public void closeSession() {
        try {
            $session.close();
            $session = null;
        }
        catch (HibernateException hExc) {
            hExc.printStackTrace();
            fail("Failed to close the hibernate session.");
        }
    }

    /**
     * Starts a new transaction on the current session; fails the test on error.
     */
    public void beginTransaction() {
        try {
            $tx = $session.beginTransaction();
        }
        catch (HibernateException hExc) {
            hExc.printStackTrace();
            fail("Couldn't start a hibernate transaction.");
        }
    }

    /**
     * Commits and clears the current transaction; fails the test on error.
     */
    public void commitTransaction() {
        try {
            $tx.commit();
            $tx = null;
        }
        catch (HibernateException hExc) {
            hExc.printStackTrace();
            fail("Failed to commit the hibernate transaction.");
        }
    }

    /**
     * Rolls back and clears the current transaction; fails the test on error.
     */
    public void rollbackTransaction() {
        try {
            $tx.rollback();
            $tx = null;
        }
        catch (HibernateException hExc) {
            hExc.printStackTrace();
            fail("Failed to cancel the hibernate transaction.");
        }
    }

    /**
     * Persists the given object in the current session.
     *
     * @param object the object to save
     * @return the generated id when the object is a {@link PersistentBean},
     *         <code>null</code> otherwise (or on failure)
     */
    public Long create(final Object object) {
        try {
            $session.save(object);
            if (object instanceof PersistentBean) {
                return ((PersistentBean)object).getId();
            }
            else {
                return null;
            }
        }
        catch (HibernateException hExc) {
            hExc.printStackTrace();
            fail("Failed to create the object in the database.");
        }
        return null;
    }

    /**
     * Updates the given persistent object in the current session.
     */
    public void update(final Object object) {
        try {
            $session.update(object);
        }
        catch (HibernateException hExc) {
            hExc.printStackTrace();
            fail("Failed to update the object in the database.");
        }
    }

    /**
     * Deletes the given persistent object from the current session.
     */
    public void delete(final Object object) {
        try {
            $session.delete(object);
        }
        catch (HibernateException hExc) {
            hExc.printStackTrace();
            // Message fix: was "delete the object to the database".
            fail("Failed to delete the object from the database.");
        }
    }

    /**
     * Loads the object of the given type with the given id.
     *
     * @return the loaded object, or <code>null</code> when no such object exists
     */
    public Object retrieve(final Class clazz, final Long id) {
        Object result = null;
        try {
            result = $session.load(clazz, id);
        }
        catch (ObjectNotFoundException onfExc) {
            return null;
        }
        catch (HibernateException hExc) {
            hExc.printStackTrace();
            fail("Failed to retrieve the object from the database.");
        }
        return result;
    }

    /**
     * Retrieves all persistent objects of the given type.
     */
    public Set retrieve(final Class persistentObjectType) {
        Criteria crit = $session.createCriteria(persistentObjectType);
        return retrieve(crit);
    }

    /**
     * Retrieves all objects matching the given criteria as a set.
     */
    public Set retrieve(final Criteria criteria) {
        Set results = new HashSet();
        try {
            results.addAll(criteria.list());
        }
        catch (HibernateException hExc) {
            hExc.printStackTrace();
            fail("Failed to retrieve objects from database");
        }
        return results;
    }

    /**
     * Runs the given HQL query and returns the result list.
     * NOTE(review): relies on the JVM -ea flag; consider fail() instead of
     * assert for consistency with the other methods.
     */
    public List retrieve(String HqlQueryString) {
        List roles = null;
        try {
            Query q = getSession().createQuery(HqlQueryString);
            roles = q.list();
        }
        catch (HibernateException e) {
            // Message fix: was the typo "HibernateExceptionshould".
            assert false : "HibernateException should not happen: " + e;
        }
        return roles;
    }

    /** @return the current Hibernate session (may be <code>null</code>). */
    public Session getSession() {
        return $session;
    }

    private Session $session;

    /** @return the current transaction (may be <code>null</code>). */
    public Transaction getTransaction() {
        return $tx;
    }

    private Transaction $tx;
}
|
package org.wildfly.extension.undertow.security.jaspi;
import io.undertow.security.api.AuthenticatedSessionManager;
import io.undertow.security.api.AuthenticationMechanism;
import io.undertow.security.api.SecurityContext;
import io.undertow.security.idm.Account;
import io.undertow.server.ConduitWrapper;
import io.undertow.server.HttpServerExchange;
import io.undertow.servlet.handlers.ServletRequestContext;
import io.undertow.util.AttachmentKey;
import io.undertow.util.ConduitFactory;
import org.jboss.security.SecurityConstants;
import org.jboss.security.auth.callback.JBossCallbackHandler;
import org.jboss.security.auth.message.GenericMessageInfo;
import org.jboss.security.identity.plugins.SimpleRole;
import org.jboss.security.identity.plugins.SimpleRoleGroup;
import org.jboss.security.plugins.auth.JASPIServerAuthenticationManager;
import org.wildfly.extension.undertow.logging.UndertowLogger;
import org.wildfly.extension.undertow.security.AccountImpl;
import javax.security.auth.Subject;
import javax.security.auth.message.AuthException;
import javax.servlet.ServletRequest;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.security.Principal;
import java.util.HashSet;
import java.util.Set;
import org.jboss.security.auth.callback.JASPICallbackHandler;
import org.jboss.security.identity.Role;
import org.jboss.security.identity.RoleGroup;
import org.wildfly.extension.undertow.security.UndertowSecurityAttachments;
import org.xnio.conduits.StreamSinkConduit;
/**
* <p>
* {@link AuthenticationMechanism} implementation that enables JASPI-based authentication.
* </p>
*
* @author Pedro Igor
* @author <a href="mailto:sguilhen@redhat.com">Stefan Guilhen</a>
*/
public class JASPIAuthenticationMechanism implements AuthenticationMechanism {

    private static final String JASPI_HTTP_SERVLET_LAYER = "HttpServlet";
    private static final String MECHANISM_NAME = "JASPIC";
    private static final String JASPI_AUTH_TYPE = "javax.servlet.http.authType";
    private static final String JASPI_REGISTER_SESSION = "javax.servlet.http.registerSession";

    public static final AttachmentKey<HttpServerExchange> HTTP_SERVER_EXCHANGE_ATTACHMENT_KEY = AttachmentKey.create(HttpServerExchange.class);
    public static final AttachmentKey<SecurityContext> SECURITY_CONTEXT_ATTACHMENT_KEY = AttachmentKey.create(SecurityContext.class);
    // Guards against installing the secureResponse conduit wrapper twice.
    private static final AttachmentKey<Boolean> ALREADY_WRAPPED = AttachmentKey.create(Boolean.class);

    private final String securityDomain;
    private final String configuredAuthMethod;

    public JASPIAuthenticationMechanism(final String securityDomain, final String configuredAuthMethod) {
        this.securityDomain = securityDomain;
        this.configuredAuthMethod = configuredAuthMethod;
    }

    /**
     * Runs JASPI validateRequest for the current exchange, translates the
     * result into an Undertow {@link AuthenticationMechanismOutcome} and
     * registers a response wrapper that will later invoke secureResponse.
     */
    @Override
    public AuthenticationMechanismOutcome authenticate(final HttpServerExchange exchange, final SecurityContext sc) {
        final ServletRequestContext requestContext = exchange.getAttachment(ServletRequestContext.ATTACHMENT_KEY);
        final JASPIServerAuthenticationManager sam = createJASPIAuthenticationManager();
        final GenericMessageInfo messageInfo = createMessageInfo(exchange, sc);
        final String applicationIdentifier = buildApplicationIdentifier(requestContext);
        final JASPICallbackHandler cbh = new JASPICallbackHandler();
        UndertowLogger.ROOT_LOGGER.debugf("validateRequest for layer [%s] and applicationContextIdentifier [%s]", JASPI_HTTP_SERVLET_LAYER, applicationIdentifier);
        Account cachedAccount = null;
        final JASPICSecurityContext jaspicSecurityContext = (JASPICSecurityContext) exchange.getSecurityContext();
        final AuthenticatedSessionManager sessionManager = exchange.getAttachment(AuthenticatedSessionManager.ATTACHMENT_KEY);
        if (sessionManager != null) {
            AuthenticatedSessionManager.AuthenticatedSession authSession = sessionManager.lookupSession(exchange);
            // BUG FIX: lookupSession() may return null when no authenticated
            // session exists yet; the previous code dereferenced it
            // unconditionally and could NPE on the first request.
            if (authSession != null) {
                cachedAccount = authSession.getAccount();
                // if there is a cached account we set it in the security context so that the principal is available to
                // SAM modules via request.getUserPrincipal().
                if (cachedAccount != null) {
                    jaspicSecurityContext.setCachedAuthenticatedAccount(cachedAccount);
                }
            }
        }
        AuthenticationMechanismOutcome outcome = AuthenticationMechanismOutcome.NOT_AUTHENTICATED;
        Account authenticatedAccount = null;
        boolean isValid = sam.isValid(messageInfo, new Subject(), JASPI_HTTP_SERVLET_LAYER, applicationIdentifier, cbh);
        jaspicSecurityContext.setCachedAuthenticatedAccount(null);
        if (isValid) {
            // The CBH filled in the JBOSS SecurityContext, we need to create an Undertow account based on that
            org.jboss.security.SecurityContext jbossSct = SecurityActions.getSecurityContext();
            authenticatedAccount = createAccount(cachedAccount, jbossSct);
        }
        // authType resolution (check message info first, then check for the configured auth method, then use mech-specific name).
        String authType = (String) messageInfo.getMap().get(JASPI_AUTH_TYPE);
        if (authType == null)
            authType = this.configuredAuthMethod != null ? this.configuredAuthMethod : MECHANISM_NAME;
        if (isValid && authenticatedAccount != null) {
            outcome = AuthenticationMechanismOutcome.AUTHENTICATED;
            Object registerObj = messageInfo.getMap().get(JASPI_REGISTER_SESSION);
            boolean cache = false;
            if(registerObj != null && (registerObj instanceof String)) {
                cache = Boolean.valueOf((String)registerObj);
            }
            sc.authenticationComplete(authenticatedAccount, authType, cache);
        } else if (isValid && authenticatedAccount == null && !isMandatory(requestContext)) {
            // SAM opted out and authentication is not required for this resource.
            outcome = AuthenticationMechanismOutcome.NOT_ATTEMPTED;
        } else {
            outcome = AuthenticationMechanismOutcome.NOT_AUTHENTICATED;
            sc.authenticationFailed("JASPIC authentication failed.", authType);
        }
        // A SAM can wrap the HTTP request/response objects - update the servlet request context with the values found in the message info.
        ServletRequestContext servletRequestContext = exchange.getAttachment(ServletRequestContext.ATTACHMENT_KEY);
        servletRequestContext.setServletRequest((HttpServletRequest) messageInfo.getRequestMessage());
        servletRequestContext.setServletResponse((HttpServletResponse) messageInfo.getResponseMessage());
        secureResponse(exchange, sam, messageInfo, cbh);
        return outcome;
    }

    /**
     * Challenges are produced by the SAM itself during validateRequest, so
     * this mechanism always reports the challenge as already sent.
     */
    @Override
    public ChallengeResult sendChallenge(final HttpServerExchange exchange, final SecurityContext securityContext) {
        return new ChallengeResult(true);
    }

    // True when a SAM recorded an AuthException for this exchange.
    private boolean wasAuthExceptionThrown(HttpServerExchange exchange) {
        return exchange.getAttachment(UndertowSecurityAttachments.SECURITY_CONTEXT_ATTACHMENT).getData().get(AuthException.class.getName()) != null;
    }

    private JASPIServerAuthenticationManager createJASPIAuthenticationManager() {
        return new JASPIServerAuthenticationManager(this.securityDomain, new JBossCallbackHandler());
    }

    // JASPI application context identifier: "<virtual server> <context path>".
    private String buildApplicationIdentifier(final ServletRequestContext attachment) {
        ServletRequest servletRequest = attachment.getServletRequest();
        return servletRequest.getServletContext().getVirtualServerName() + " " + servletRequest.getServletContext().getContextPath();
    }

    // Builds the MessageInfo handed to the SAMs, including Undertow-specific
    // context entries so modules can reach the exchange and security context.
    private GenericMessageInfo createMessageInfo(final HttpServerExchange exchange, final SecurityContext securityContext) {
        ServletRequestContext servletRequestContext = exchange.getAttachment(ServletRequestContext.ATTACHMENT_KEY);
        GenericMessageInfo messageInfo = new GenericMessageInfo();
        messageInfo.setRequestMessage(servletRequestContext.getServletRequest());
        messageInfo.setResponseMessage(servletRequestContext.getServletResponse());
        messageInfo.getMap().put("javax.security.auth.message.MessagePolicy.isMandatory", isMandatory(servletRequestContext).toString());
        // additional context data, useful to provide access to Undertow resources during the modules processing
        messageInfo.getMap().put(SECURITY_CONTEXT_ATTACHMENT_KEY, securityContext);
        messageInfo.getMap().put(HTTP_SERVER_EXCHANGE_ATTACHMENT_KEY, exchange);
        return messageInfo;
    }

    /**
     * Builds an Undertow {@link Account} from the JBoss security context that
     * the SAM populated, reusing the cached account when the SAM handled the
     * same principal. Returns null when the SAM opted out (no user principal).
     */
    private Account createAccount(final Account cachedAccount, final org.jboss.security.SecurityContext jbossSct) {
        if (jbossSct == null) {
            throw UndertowLogger.ROOT_LOGGER.nullParamter("org.jboss.security.SecurityContext");
        }
        // null principal: SAM has opted out of the authentication process.
        Principal userPrincipal = jbossSct.getUtil().getUserPrincipal();
        if (userPrincipal == null) {
            return null;
        }
        // SAM handled the same principal found in the cached account: indicates we must use the cached account.
        if (cachedAccount != null && cachedAccount.getPrincipal() == userPrincipal) {
            // populate the security context using the cached account data.
            jbossSct.getUtil().createSubjectInfo(userPrincipal, ((AccountImpl) cachedAccount).getCredential(), null);
            RoleGroup roleGroup = new SimpleRoleGroup(SecurityConstants.ROLES_IDENTIFIER);
            for (String role : cachedAccount.getRoles())
                roleGroup.addRole(new SimpleRole(role));
            jbossSct.getUtil().setRoles(roleGroup);
            return cachedAccount;
        }
        // SAM handled a different principal or there is no cached account: build a new account.
        Set<String> stringRoles = new HashSet<String>();
        RoleGroup roleGroup = jbossSct.getUtil().getRoles();
        if (roleGroup != null) {
            for (Role role : roleGroup.getRoles()) {
                stringRoles.add(role.getRoleName());
            }
        }
        Object credential = jbossSct.getUtil().getCredential();
        Principal original = null;
        if(cachedAccount != null) {
            original = cachedAccount.getPrincipal();
        }
        return new AccountImpl(userPrincipal, stringRoles, credential, original);
    }

    /**
     * Registers a response conduit wrapper that calls the SAM's
     * secureResponse just before the response is written, unless a wrapper
     * was already installed or the response has started.
     */
    private void secureResponse(final HttpServerExchange exchange, final JASPIServerAuthenticationManager sam, final GenericMessageInfo messageInfo, final JASPICallbackHandler cbh) {
        if(exchange.getAttachment(ALREADY_WRAPPED) != null || exchange.isResponseStarted()) {
            return;
        }
        exchange.putAttachment(ALREADY_WRAPPED, true);
        // we add a response wrapper to properly invoke the secureResponse, after processing the destination
        exchange.addResponseWrapper(new ConduitWrapper<StreamSinkConduit>() {
            @Override
            public StreamSinkConduit wrap(final ConduitFactory<StreamSinkConduit> factory, final HttpServerExchange exchange) {
                ServletRequestContext requestContext = exchange.getAttachment(ServletRequestContext.ATTACHMENT_KEY);
                String applicationIdentifier = buildApplicationIdentifier(requestContext);
                if (!wasAuthExceptionThrown(exchange)) {
                    UndertowLogger.ROOT_LOGGER.debugf("secureResponse for layer [%s] and applicationContextIdentifier [%s].", JASPI_HTTP_SERVLET_LAYER, applicationIdentifier);
                    sam.secureResponse(messageInfo, new Subject(), JASPI_HTTP_SERVLET_LAYER, applicationIdentifier, cbh);
                    // A SAM can unwrap the HTTP request/response objects - update the servlet request context with the values found in the message info.
                    ServletRequestContext servletRequestContext = exchange.getAttachment(ServletRequestContext.ATTACHMENT_KEY);
                    servletRequestContext.setServletRequest((HttpServletRequest) messageInfo.getRequestMessage());
                    servletRequestContext.setServletResponse((HttpServletResponse) messageInfo.getResponseMessage());
                }
                return factory.create();
            }
        });
    }

    /**
     * <p>The authentication is mandatory if the servlet has http constraints (eg.: {@link
     * javax.servlet.annotation.HttpConstraint}).</p>
     *
     * @param attachment the current servlet request context
     * @return true when the target resource requires authentication
     */
    private Boolean isMandatory(final ServletRequestContext attachment) {
        return attachment.getExchange().getSecurityContext() != null && attachment.getExchange().getSecurityContext().isAuthenticationRequired();
    }
}
|
package org.devgateway.ocds.web.rest.controller;
import com.mongodb.DBObject;
import io.swagger.annotations.ApiOperation;
import java.util.List;
import javax.validation.Valid;
import org.devgateway.ocds.persistence.mongo.constants.MongoConstants;
import org.devgateway.ocds.web.rest.controller.request.YearFilterPagingRequest;
import org.springframework.cache.annotation.CacheConfig;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.data.domain.Sort;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.AggregationResults;
import org.springframework.data.mongodb.core.aggregation.Fields;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import static org.springframework.data.mongodb.core.aggregation.Aggregation.group;
import static org.springframework.data.mongodb.core.aggregation.Aggregation.limit;
import static org.springframework.data.mongodb.core.aggregation.Aggregation.match;
import static org.springframework.data.mongodb.core.aggregation.Aggregation.newAggregation;
import static org.springframework.data.mongodb.core.aggregation.Aggregation.project;
import static org.springframework.data.mongodb.core.aggregation.Aggregation.skip;
import static org.springframework.data.mongodb.core.aggregation.Aggregation.sort;
import static org.springframework.data.mongodb.core.aggregation.Aggregation.unwind;
import static org.springframework.data.mongodb.core.query.Criteria.where;
/**
* @author mpostelnicu
*/
@RestController
@CacheConfig(keyGenerator = "genericPagingRequestKeyGenerator", cacheNames = "genericPagingRequestJson")
@Cacheable
public class CorruptionRiskDashboardTablesController extends GenericOCDSController {
@ApiOperation(value = "Returns data to show in the table on corruption risk overview page."
+ "This is presented as releases only with the information in the table present and unwinded by "
+ "flags.flaggedStats")
@RequestMapping(value = "/api/corruptionRiskOverviewTable",
method = {RequestMethod.POST, RequestMethod.GET},
produces = "application/json")
public List<DBObject> corruptionRiskOverviewTable(
@ModelAttribute @Valid final YearFilterPagingRequest filter) {
Aggregation agg = newAggregation(
match(where("flags.flaggedStats.0").exists(true)
.andOperator(getYearDefaultFilterCriteria(filter,
MongoConstants.FieldNames.TENDER_PERIOD_START_DATE))),
unwind("flags.flaggedStats"),
project("ocid", "tender.procuringEntity.name", "tender.tenderPeriod", "flags",
"tender.title", "tag")
.and("tender.value").as("tender.value").and("awards.value").as("awards.value")
.and("awards.status").as("awards.status")
.andExclude(Fields.UNDERSCORE_ID),
sort(Sort.Direction.DESC, "flags.flaggedStats.count"),
skip(filter.getSkip()),
limit(filter.getPageSize())
);
AggregationResults<DBObject> results = mongoTemplate.aggregate(agg, "release",
DBObject.class);
List<DBObject> list = results.getMappedResults();
return list;
}
@ApiOperation(value = "Counts data to show in the table on corruption risk overview page.")
@RequestMapping(value = "/api/corruptionRiskOverviewTable/count",
method = {RequestMethod.POST, RequestMethod.GET},
produces = "application/json")
public List<DBObject> corruptionRiskOverviewTableCount(
@ModelAttribute @Valid final YearFilterPagingRequest filter) {
Aggregation agg = newAggregation(
match(where("flags.flaggedStats.0").exists(true)
.andOperator(getYearDefaultFilterCriteria(filter,
MongoConstants.FieldNames.TENDER_PERIOD_START_DATE))),
unwind("flags.flaggedStats"),
group().count().as("count")
);
AggregationResults<DBObject> results = mongoTemplate.aggregate(agg, "release",
DBObject.class);
List<DBObject> list = results.getMappedResults();
return list;
}
}
|
package org.elasticsearch.xpack.ccr.action;
import org.elasticsearch.action.UnavailableShardsException;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.seqno.LocalCheckpointTracker;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.translog.Translog;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.TestThreadPool;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.ccr.action.bulk.BulkShardOperationsResponse;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import java.util.function.LongConsumer;
import java.util.stream.Collectors;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
public class ShardFollowNodeTaskRandomTests extends ESTestCase {
/**
 * Runs a randomized test run against a follow task limited to a single
 * concurrent reader/writer.
 */
public void testSingleReaderWriter() throws Exception {
    final TestRun run = createTestRun(randomNonNegativeLong(), randomNonNegativeLong(), randomIntBetween(1, 2048));
    final ShardFollowNodeTask followTask = createShardFollowTask(1, run);
    startAndAssertAndStopTask(followTask, run);
}
/**
 * Runs a randomized test run against a follow task with several concurrent
 * readers/writers.
 */
public void testMultipleReaderWriter() throws Exception {
    final int readerWriterCount = randomIntBetween(2, 8);
    final TestRun run = createTestRun(0, 0, between(1, 1024));
    final ShardFollowNodeTask followTask = createShardFollowTask(readerWriterCount, run);
    startAndAssertAndStopTask(followTask, run);
}
// Starts the follow task, waits until its status converges on the test
// run's expected checkpoints/failures, then stops it and waits for all
// concurrent reads and writes to drain.
private void startAndAssertAndStopTask(ShardFollowNodeTask task, TestRun testRun) throws Exception {
task.start(testRun.startSeqNo - 1, testRun.startSeqNo - 1, testRun.startSeqNo - 1, testRun.startSeqNo - 1);
assertBusy(() -> {
ShardFollowNodeTask.Status status = task.getStatus();
assertThat(status.leaderGlobalCheckpoint(), equalTo(testRun.finalExpectedGlobalCheckpoint));
assertThat(status.followerGlobalCheckpoint(), equalTo(testRun.finalExpectedGlobalCheckpoint));
final long numberOfFailedFetches =
testRun.responses.values().stream().flatMap(List::stream).filter(f -> f.exception != null).count();
assertThat(status.numberOfFailedFetches(), equalTo(numberOfFailedFetches));
// the failures were able to be retried so fetch failures should have cleared
assertThat(status.fetchExceptions().entrySet(), hasSize(0));
assertThat(status.mappingVersion(), equalTo(testRun.finalMappingVersion));
});
task.markAsCompleted();
assertBusy(() -> {
ShardFollowNodeTask.Status status = task.getStatus();
assertThat(status.numberOfConcurrentReads(), equalTo(0));
assertThat(status.numberOfConcurrentWrites(), equalTo(0));
});
}
// Builds a ShardFollowNodeTask whose leader interactions are simulated from
// the scripted responses in the given TestRun. All leader/follower calls are
// dispatched onto a generic thread pool to emulate network threads and avoid
// stack overflows from re-entrant handlers.
private ShardFollowNodeTask createShardFollowTask(int concurrency, TestRun testRun) {
AtomicBoolean stopped = new AtomicBoolean(false);
ShardFollowTask params = new ShardFollowTask(null, new ShardId("follow_index", "", 0),
new ShardId("leader_index", "", 0), testRun.maxOperationCount, concurrency,
ShardFollowNodeTask.DEFAULT_MAX_BATCH_SIZE_IN_BYTES, concurrency, 10240,
TimeValue.timeValueMillis(10), TimeValue.timeValueMillis(10), Collections.emptyMap());
ThreadPool threadPool = new TestThreadPool(getClass().getSimpleName());
BiConsumer<TimeValue, Runnable> scheduler = (delay, task) -> {
assert delay.millis() < 100 : "The delay should be kept to a minimum, so that this test does not take to long to run";
if (stopped.get() == false) {
threadPool.schedule(delay, ThreadPool.Names.GENERIC, task);
}
};
List<Translog.Operation> receivedOperations = Collections.synchronizedList(new ArrayList<>());
// Tracks which sequence numbers the "follower" has applied.
LocalCheckpointTracker tracker = new LocalCheckpointTracker(testRun.startSeqNo - 1, testRun.startSeqNo - 1);
return new ShardFollowNodeTask(
1L, "type", ShardFollowTask.NAME, "description", null, Collections.emptyMap(), params, scheduler, System::nanoTime) {
private volatile long mappingVersion = 0L;
// Per "from" seqno, the index of the next scripted response to serve.
private final Map<Long, Integer> fromToSlot = new HashMap<>();
@Override
protected void innerUpdateMapping(LongConsumer handler, Consumer<Exception> errorHandler) {
handler.accept(mappingVersion);
}
@Override
protected void innerSendBulkShardOperationsRequest(
List<Translog.Operation> operations,
Consumer<BulkShardOperationsResponse> handler,
Consumer<Exception> errorHandler) {
// Simulated follower write: mark everything as applied and record it.
for(Translog.Operation op : operations) {
tracker.markSeqNoAsCompleted(op.seqNo());
}
receivedOperations.addAll(operations);
// Emulate network thread and avoid SO:
final BulkShardOperationsResponse response = new BulkShardOperationsResponse();
response.setGlobalCheckpoint(tracker.getCheckpoint());
response.setMaxSeqNo(tracker.getMaxSeqNo());
threadPool.generic().execute(() -> handler.accept(response));
}
@Override
protected void innerSendShardChangesRequest(long from, int maxOperationCount, Consumer<ShardChangesAction.Response> handler,
Consumer<Exception> errorHandler) {
// Emulate network thread and avoid SO:
Runnable task = () -> {
List<TestResponse> items = testRun.responses.get(from);
if (items != null) {
final TestResponse testResponse;
// Serve the scripted responses for this "from" in order, so
// retried fetches observe successive (possibly failing) replies.
synchronized (fromToSlot) {
int slot;
if (fromToSlot.get(from) == null) {
slot = fromToSlot.getOrDefault(from, 0);
fromToSlot.put(from, slot);
} else {
slot = fromToSlot.get(from);
}
testResponse = items.get(slot);
fromToSlot.put(from, ++slot);
// if too many invocations occur with the same from then AOBE occurs, this ok and then something is wrong.
}
mappingVersion = testResponse.mappingVersion;
if (testResponse.exception != null) {
errorHandler.accept(testResponse.exception);
} else {
handler.accept(testResponse.response);
}
} else {
// No scripted response: the task caught up; answer with an empty batch.
assert from >= testRun.finalExpectedGlobalCheckpoint;
final long globalCheckpoint = tracker.getCheckpoint();
final long maxSeqNo = tracker.getMaxSeqNo();
handler.accept(new ShardChangesAction.Response(0L,globalCheckpoint, maxSeqNo, new Translog.Operation[0]));
}
};
threadPool.generic().execute(task);
}
@Override
protected boolean isStopped() {
return stopped.get();
}
@Override
public void markAsCompleted() {
stopped.set(true);
tearDown();
}
@Override
public void markAsFailed(Exception e) {
stopped.set(true);
tearDown();
}
// Shuts the pool down and verifies the follower received exactly the
// operations scripted across all successful responses, in seqno order.
private void tearDown() {
threadPool.shutdown();
List<Translog.Operation> expectedOperations = testRun.responses.values().stream()
.flatMap(List::stream)
.map(testResponse -> testResponse.response)
.filter(Objects::nonNull)
.flatMap(response -> Arrays.stream(response.getOperations()))
.sorted(Comparator.comparingLong(Translog.Operation::seqNo))
.collect(Collectors.toList());
assertThat(receivedOperations.size(), equalTo(expectedOperations.size()));
receivedOperations.sort(Comparator.comparingLong(Translog.Operation::seqNo));
for (int i = 0; i < receivedOperations.size(); i++) {
Translog.Operation actual = receivedOperations.get(i);
Translog.Operation expected = expectedOperations.get(i);
assertThat(actual, equalTo(expected));
}
}
};
}
private static TestRun createTestRun(long startSeqNo, long startMappingVersion, int maxOperationCount) {
long prevGlobalCheckpoint = startSeqNo;
long mappingVersion = startMappingVersion;
int numResponses = randomIntBetween(16, 256);
Map<Long, List<TestResponse>> responses = new HashMap<>(numResponses);
for (int i = 0; i < numResponses; i++) {
long nextGlobalCheckPoint = prevGlobalCheckpoint + maxOperationCount;
if (sometimes()) {
mappingVersion++;
}
if (sometimes()) {
List<TestResponse> item = new ArrayList<>();
// Sometimes add a random retryable error
if (sometimes()) {
Exception error = new UnavailableShardsException(new ShardId("test", "test", 0), "");
item.add(new TestResponse(error, mappingVersion, null));
}
List<Translog.Operation> ops = new ArrayList<>();
for (long seqNo = prevGlobalCheckpoint; seqNo <= nextGlobalCheckPoint; seqNo++) {
String id = UUIDs.randomBase64UUID();
byte[] source = "{}".getBytes(StandardCharsets.UTF_8);
ops.add(new Translog.Index("doc", id, seqNo, 0, source));
}
item.add(new TestResponse(null, mappingVersion,
new ShardChangesAction.Response(mappingVersion, nextGlobalCheckPoint, nextGlobalCheckPoint, ops.toArray(EMPTY))));
responses.put(prevGlobalCheckpoint, item);
} else {
// Simulates a leader shard copy not having all the operations the shard follow task thinks it has by
// splitting up a response into multiple responses AND simulates maxBatchSizeInBytes limit being reached:
long toSeqNo;
for (long fromSeqNo = prevGlobalCheckpoint; fromSeqNo <= nextGlobalCheckPoint; fromSeqNo = toSeqNo + 1) {
toSeqNo = randomLongBetween(fromSeqNo, nextGlobalCheckPoint);
List<TestResponse> item = new ArrayList<>();
// Sometimes add a random retryable error
if (sometimes()) {
Exception error = new UnavailableShardsException(new ShardId("test", "test", 0), "");
item.add(new TestResponse(error, mappingVersion, null));
}
// Sometimes add an empty shard changes response to also simulate a leader shard lagging behind
if (sometimes()) {
ShardChangesAction.Response response =
new ShardChangesAction.Response(mappingVersion, prevGlobalCheckpoint, prevGlobalCheckpoint, EMPTY);
item.add(new TestResponse(null, mappingVersion, response));
}
List<Translog.Operation> ops = new ArrayList<>();
for (long seqNo = fromSeqNo; seqNo <= toSeqNo; seqNo++) {
String id = UUIDs.randomBase64UUID();
byte[] source = "{}".getBytes(StandardCharsets.UTF_8);
ops.add(new Translog.Index("doc", id, seqNo, 0, source));
}
// Report toSeqNo to simulate maxBatchSizeInBytes limit being met or last op to simulate a shard lagging behind:
long localLeaderGCP = randomBoolean() ? ops.get(ops.size() - 1).seqNo() : toSeqNo;
ShardChangesAction.Response response =
new ShardChangesAction.Response(mappingVersion, localLeaderGCP, localLeaderGCP, ops.toArray(EMPTY));
item.add(new TestResponse(null, mappingVersion, response));
responses.put(fromSeqNo, Collections.unmodifiableList(item));
}
}
prevGlobalCheckpoint = nextGlobalCheckPoint + 1;
}
return new TestRun(maxOperationCount, startSeqNo, startMappingVersion, mappingVersion,
prevGlobalCheckpoint - 1, responses);
}
// Instead of rarely(), which returns true very rarely especially not running in nightly mode or a multiplier have not been set
private static boolean sometimes() {
return randomIntBetween(0, 10) == 5;
}
private static class TestRun {
final int maxOperationCount;
final long startSeqNo;
final long startMappingVersion;
final long finalMappingVersion;
final long finalExpectedGlobalCheckpoint;
final Map<Long, List<TestResponse>> responses;
private TestRun(int maxOperationCount, long startSeqNo, long startMappingVersion, long finalMappingVersion,
long finalExpectedGlobalCheckpoint, Map<Long, List<TestResponse>> responses) {
this.maxOperationCount = maxOperationCount;
this.startSeqNo = startSeqNo;
this.startMappingVersion = startMappingVersion;
this.finalMappingVersion = finalMappingVersion;
this.finalExpectedGlobalCheckpoint = finalExpectedGlobalCheckpoint;
this.responses = Collections.unmodifiableMap(responses);
}
}
private static class TestResponse {
final Exception exception;
final long mappingVersion;
final ShardChangesAction.Response response;
private TestResponse(Exception exception, long mappingVersion, ShardChangesAction.Response response) {
this.exception = exception;
this.mappingVersion = mappingVersion;
this.response = response;
}
}
    // Shared zero-length array: used both as the "no operations" payload and as the type witness for List#toArray.
    private static final Translog.Operation[] EMPTY = new Translog.Operation[0];
}
|
package org.ensembl.healthcheck.testcase.funcgen;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import org.ensembl.healthcheck.DatabaseRegistryEntry;
import org.ensembl.healthcheck.DatabaseType;
import org.ensembl.healthcheck.ReportManager;
import org.ensembl.healthcheck.Species;
import org.ensembl.healthcheck.Team;
import org.ensembl.healthcheck.testcase.Priority;
import org.ensembl.healthcheck.testcase.SingleDatabaseTestCase;
/**
 * Checks that regulatory_feature stable IDs are valid: populated (no NULLs) and
 * free of duplicates within each DISPLAYABLE regulatory feature set.
 *
 * <p>
 * Group is <b>check_stable_ids </b>
 * </p>
 *
 * <p>
 * To be run after the stable ids have been assigned.
 * </p>
 */
public class FuncgenStableID extends SingleDatabaseTestCase {
    /**
     * Create a new instance of FuncgenStableID and register it with the healthcheck
     * framework: test groups, responsible team, description, severity and remedy.
     */
    public FuncgenStableID() {
        addToGroup("id_mapping");
        addToGroup("post_regulatorybuild");
        addToGroup("release");
        addToGroup("funcgen");//do we need this group and the funcgen-release group?
        addToGroup("funcgen-release");
        //setHintLongRunning(true);// ?Only take about 10 mins for mouse
        setTeamResponsible(Team.FUNCGEN);
        setDescription("Checks regulatory_feature stable_id fields are valid.");
        setPriority(Priority.RED);
        setEffect("RegulatoryFeatures will not have valid stable IDs.");
        setFix("Re-run stable ID mapping or fix manually.");
    }
    /**
     * This test applies only to funcgen databases: remove the database types
     * that the framework would otherwise run it against by default.
     */
    public void types() {
        removeAppliesToType(DatabaseType.OTHERFEATURES);
        removeAppliesToType(DatabaseType.CDNA);
        removeAppliesToType(DatabaseType.CORE);
        removeAppliesToType(DatabaseType.VARIATION);
        removeAppliesToType(DatabaseType.COMPARA);
        //add COMPARA here?
    }
/**
* Run the test.
*
* @param dbre
* The database to use.
* @return true if the test passed.
*
*/
public boolean run(DatabaseRegistryEntry dbre) {
//boolean result = true;
Connection con = dbre.getConnection();
//result &= checkStableIDs(con, "regulatory_feature");
//No need for any of this prefix stuff as we only store ints
//Keep just in case of future 'imports'
// there are several species where ID mapping is not done
//Species s = dbre.getSpecies();
//if (s != Species.CAENORHABDITIS_ELEGANS && s != Species.DROSOPHILA_MELANOGASTER &&
// s != Species.SACCHAROMYCES_CEREVISIAE && s != Species.ANOPHELES_GAMBIAE) {
// result &= checkPrefixes(dbre);
// result &= checkStableIDEventTypes(con);
//result = checkStableIDTimestamps(con);
//return result;
//Just merge this with checkStableIDs?
return checkStableIDs(con, "regulatory_feature");
}
/**
* Checks that the typeName_stable_id table is valid. The table is valid if it
* has >0 rows, and there are no orphan references between typeName table and
* typeName_stable_id. Also prints some example data from the
* typeName_stable_id table via ReportManager.info().
*
* @param con
* connection to run queries on.
* @param typeName
* name of the type to check, e.g. "exon"
* @return true if the table and references are valid, otherwise false.
*/
public boolean checkStableIDs(Connection con, String typeName) {
boolean result = true;
String stableIDtable = typeName; // + "_stable_id";
//WARNING THis is not truly generic as we always check for regulatory type in feature_set
//Need to change this if we ever want to check other stable IDs
//Need to do this for each DISPLAYABLE regulatory feature_set
//String[] fsetIDs = getColumnValues(conn, "SELECT feature_set_id from feature_set where type='regulatory'");
//This would be better with a ResultSet as we want the nametoo
try {
Statement stmt = con.createStatement();
String sql = "SELECT fs.feature_set_id, fs.name from feature_set fs, status s, status_name sn " +
"where fs.type='regulatory' and fs.feature_set_id=s.table_id and s.table_name='feature_set' " +
"and s.status_name_id=sn.status_name_id and sn.name='DISPLAYABLE'";
//System.out.println("Executing " + sql);
ResultSet fsetIdNames = stmt.executeQuery(sql);
while (fsetIdNames != null && fsetIdNames.next()) {
sql = "SELECT count(stable_id) from regulatory_feature where " +
"stable_id is not NULL and feature_set_id=" + fsetIdNames.getString(1);
System.out.println("Executing " + sql);
int nStableIDs = getRowCount(con, sql);
//We could really do with a HC to check we have RegFeats on all main Chrs
if (nStableIDs < 1) {
ReportManager.problem(this, con, stableIDtable +
" contains no valid stable_ids for FeatureSet:\t" + fsetIdNames.getString(2));
result = false;
}
else{
nStableIDs = getRowCount(con, "SELECT count(stable_id) from regulatory_feature where stable_id " +
"is NULL and feature_set_id=" + fsetIdNames.getString(1));
if (nStableIDs > 0) {
ReportManager.problem(this, con, stableIDtable + " contains " + nStableIDs +
" NULL stable_ids for FeatureSet:\t" + fsetIdNames.getString(2));
result = false;
}
else{
ReportManager.correct(this, con, "No NULL stable_ids for FeatureSet " + fsetIdNames.getString(2));
}
}
//Test for duplicates within set
//Could remove this if there is a unique feature_set_id, stble_id key?
int duplicates = getRowCount(con, "SELECT COUNT(stable_id)-COUNT(DISTINCT stable_id) FROM " +
stableIDtable + " WHERE feature_set_id=" + fsetIdNames.getString(1));
if (duplicates > 0) {
ReportManager.problem(this, con, stableIDtable + " has " + duplicates +
" duplicate stable IDs for FeatureSet:\t" + fsetIdNames.getString(2));
result = false;
} else {
ReportManager.correct(this, con, "No duplicate stable IDs for FeatureSet:\t" + fsetIdNames.getString(2));
}
// check for invalid or missing stable ID versions
//int nInvalidVersions = getRowCount(con, "SELECT COUNT(*) AS " + typeName + "_with_invalid_version" + " FROM " + stableIDtable
// + " WHERE version < 1 OR version IS NULL;");
//if (nInvalidVersions > 0) {
// ReportManager.problem(this, con, "Invalid " + typeName + " versions in " + stableIDtable);
// DBUtils.printRows(this, con, "SELECT DISTINCT(version) FROM " + stableIDtable);
// result = false;
// make sure stable ID versions in the typeName_stable_id table matches those in stable_id_event
// for the latest mapping_session
//String mappingSessionId = getRowColumnValue(con, "SELECT mapping_session_id FROM mapping_session " +
//"ORDER BY created DESC LIMIT 1");
//if (mappingSessionId.equals("")) {
// ReportManager.info(this, con, "No mapping_session found");
// return result;
//int nVersionMismatch = getRowCount(con, "SELECT COUNT(*) FROM stable_id_event sie, " + stableIDtable +
// " si WHERE sie.mapping_session_id = " + Integer.parseInt(mappingSessionId) +
// " AND sie.new_stable_id = si.stable_id AND sie.new_version <> si.version");
//if (nVersionMismatch > 0) {
// ReportManager.problem(this, con, "Version mismatch between " + nVersionMismatch + " " + typeName + " versions in " +
// stableIDtable + " and stable_id_event");
// DBUtils.printRows(this, con, "SELECT si.stable_id FROM stable_id_event sie, " + stableIDtable +
// " si WHERE sie.mapping_session_id = " + Integer.parseInt(mappingSessionId) +
// " AND sie.new_stable_id = si.stable_id AND sie.new_version <> si.version");
// result = false;
}
}catch (SQLException e){
e.printStackTrace();
}
return result;
}
/**
* Check that all stable IDs in the table have the correct prefix. The prefix
* is defined in Species.java
*/
private boolean checkPrefixes(DatabaseRegistryEntry dbre) {
boolean result = true;
Connection con = dbre.getConnection();
Map tableToLetter = new HashMap();
tableToLetter.put("gene", "G");
tableToLetter.put("transcript", "T");
tableToLetter.put("translation", "P");
tableToLetter.put("exon", "E");
Iterator it = tableToLetter.keySet().iterator();
while (it.hasNext()) {
String type = (String) it.next();
String table = type + "_stable_id";
String prefix = Species.getStableIDPrefixForSpecies(dbre.getSpecies(), dbre.getType());
if (prefix == null || prefix == "") {
ReportManager.problem(this, con, "Can't get stable ID prefix for " + dbre.getSpecies().toString() + " - please add to Species.java");
result = false;
} else {
if (prefix.equalsIgnoreCase("IGNORE")) {
return true;
}
String prefixLetter = prefix + (String) tableToLetter.get(type);
int wrong = getRowCount(con, "SELECT COUNT(*) FROM " + table + " WHERE stable_id NOT LIKE '" + prefixLetter + "%'");
if (wrong > 0) {
ReportManager.problem(this, con, wrong + " rows in " + table + " do not have the correct (" + prefixLetter + ") prefix");
result = false;
} else {
ReportManager.correct(this, con, "All rows in " + table + " have the correct prefix (" + prefixLetter + ")");
}
}
}
return result;
}
/**
* Check for any stable ID events where the 'type' column does not match the
* identifier type.
*
*/
// private boolean checkStableIDEventTypes(Connection con) {
// boolean result = true;
// String[] types = { "gene", "transcript", "translation" };
// for (int i = 0; i < types.length; i++) {
// String type = types[i];
// String prefix = getPrefixForType(con, type);
// String sql = "SELECT COUNT(*) FROM stable_id_event WHERE (old_stable_id LIKE '" + prefix + "%' OR new_stable_id LIKE '"
// + prefix + "%') AND type != '" + type + "'";
// int rows = getRowCount(con, sql);
// if (rows > 0) {
// ReportManager.problem(this, con, rows + " rows of type " + type + " (prefix " + prefix
// + ") in stable_id_event have identifiers that do not correspond to " + type + "s");
// result = false;
// } else {
// ReportManager.correct(this, con, "All types in stable_id_event correspond to identifiers");
// return result;
// private String getPrefixForType(Connection con, String type) {
// String prefix = "";
// // hope the first row of the _type_stable_id table is correct
// String stableID = getRowColumnValue(con, "SELECT stable_id FROM " + type + "_stable_id LIMIT 1");
// prefix = stableID.replaceAll("[0-9]", "");
// if (prefix.equals("")) {
// System.err.println("Error, can't get prefix for " + type + " from stable ID " + stableID);
// return prefix;
// private boolean checkStableIDTimestamps(Connection con) {
// boolean result = true;
// String[] types = { "gene", "transcript", "translation" };
// for (int i = 0; i < types.length; i++) {
// String table = types[i] + "_stable_id";
// String sql = "SELECT COUNT(*) FROM " + table + " WHERE created_date=0 OR modified_date=0";
// int rows = getRowCount(con, sql);
// if (rows > 0) {
// ReportManager.problem(this, con, rows + " rows in " + table + " have created or modified dates of 0000-00-00 00:00:00");
// result = false;
// } else {
// ReportManager.correct(this, con, "All entries in " + table + " have valid created/modified timestamps");
// return result;
}
|
package org.ensembl.healthcheck.testcase.generic;
import java.sql.Connection;
import org.ensembl.healthcheck.DatabaseRegistryEntry;
import org.ensembl.healthcheck.DatabaseType;
import org.ensembl.healthcheck.ReportManager;
import org.ensembl.healthcheck.testcase.SingleDatabaseTestCase;
/**
* An EnsEMBL Healthcheck test case that looks for broken foreign-key relationships.
*/
public class CoreForeignKeys extends SingleDatabaseTestCase {
    /**
     * Create a CoreForeignKeys test and register the test groups it belongs to.
     */
    public CoreForeignKeys() {
        addToGroup("post_genebuild");
        addToGroup("release");
        addToGroup("compara-ancestral");
        addToGroup("id_mapping");
        setDescription("Check for broken foreign-key relationships.");
    }
/**
* Look for broken foreign key relationships.
*
* @param dbre
* The database to use.
* @return true if all foreign key relationships are valid.
*/
public boolean run(DatabaseRegistryEntry dbre) {
boolean result = true;
Connection con = dbre.getConnection();
// Check stable IDs all correspond to an existing object
String[] stableIDtypes = { "gene", "transcript", "translation", "exon" };
for (String stableIDType : stableIDtypes) {
// exception for gene-gene_stable_id relations in otherfeatures databases as some non-genes are stored in the gene table
if (dbre.getType() == DatabaseType.OTHERFEATURES) {
if (stableIDType.equals("gene")) {
result &= checkForOrphansWithConstraint(con, "gene", "gene_id", "gene_stable_id", "gene_id", "biotype NOT IN ('est','cdna')");
}
} else {
result &= checkStableIDKeys(con, stableIDType);
}
}
result &= checkForOrphans(con, "exon", "exon_id", "exon_transcript", "exon_id", false);
result &= checkForOrphans(con, "transcript", "transcript_id", "exon_transcript", "transcript_id", false);
result &= checkForOrphans(con, "gene", "gene_id", "transcript", "gene_id", false);
result &= checkForOrphans(con, "object_xref", "xref_id", "xref", "xref_id", true);
result &= checkForOrphans(con, "xref", "external_db_id", "external_db", "external_db_id", true);
result &= checkForOrphans(con, "dna", "seq_region_id", "seq_region", "seq_region_id", true);
result &= checkForOrphans(con, "seq_region", "coord_system_id", "coord_system", "coord_system_id", true);
result &= checkForOrphans(con, "assembly", "cmp_seq_region_id", "seq_region", "seq_region_id", true);
result &= checkForOrphans(con, "marker_feature", "marker_id", "marker", "marker_id", true);
result &= checkForOrphans(con, "seq_region_attrib", "seq_region_id", "seq_region", "seq_region_id", true);
result &= checkForOrphans(con, "seq_region_attrib", "attrib_type_id", "attrib_type", "attrib_type_id", true);
result &= checkForOrphans(con, "misc_feature_misc_set", "misc_feature_id", "misc_feature", "misc_feature_id", true);
result &= checkForOrphans(con, "misc_feature_misc_set", "misc_set_id", "misc_set", "misc_set_id", true);
// for a sanger_vega db, ignore misc_featres whcih have no annotation
if (dbre.getType() == DatabaseType.SANGER_VEGA) {
result &= checkForOrphansWithConstraint(con, "misc_feature", "misc_feature_id", "misc_attrib", "misc_feature_id",
"misc_feature_id NOT IN (select mfms.misc_feature_id from misc_feature_misc_set as mfms join misc_set as ms on mfms.misc_set_id=ms.misc_set_id and ms.code='noAnnotation')");
} else {
result &= checkForOrphans(con, "misc_feature", "misc_feature_id", "misc_attrib", "misc_feature_id", true);
}
result &= checkForOrphans(con, "misc_attrib", "attrib_type_id", "attrib_type", "attrib_type_id", true);
result &= checkForOrphans(con, "assembly_exception", "seq_region_id", "seq_region", "seq_region_id", true);
result &= checkForOrphans(con, "assembly_exception", "exc_seq_region_id", "seq_region", "seq_region_id", true);
result &= checkForOrphans(con, "protein_feature", "translation_id", "translation", "translation_id", true);
result &= checkForOrphans(con, "marker_synonym", "marker_id", "marker", "marker_id", true);
result &= checkForOrphans(con, "translation_attrib", "translation_id", "translation", "translation_id", true);
result &= checkForOrphans(con, "transcript_attrib", "transcript_id", "transcript", "transcript_id", true);
/*
* // now redundant (done for all tables with analysis_id) result &= checkForOrphans(con, "analysis_id", "analysis",
* "analysis_id", true); result &= checkForOrphans(con, "transcript", "analysis_id", "analysis", "analysis_id", true);
*/
result &= checkForOrphans(con, "external_synonym", "xref_id", "xref", "xref_id", true);
result &= checkForOrphans(con, "identity_xref", "object_xref_id", "object_xref", "object_xref_id", true);
result &= checkForOrphans(con, "supporting_feature", "exon_id", "exon", "exon_id", true);
result &= checkForOrphans(con, "translation", "transcript_id", "transcript", "transcript_id", true);
result &= checkForOrphans(con, "ontology_xref", "object_xref_id", "object_xref", "object_xref_id", true);
// stable ID archive
result &= checkForOrphansWithConstraint(con, "gene_archive", "peptide_archive_id", "peptide_archive", "peptide_archive_id", "peptide_archive_id != 0");
result &= checkForOrphans(con, "peptide_archive", "peptide_archive_id", "gene_archive", "peptide_archive_id", true);
result &= checkForOrphans(con, "stable_id_event", "mapping_session_id", "mapping_session", "mapping_session_id", false);
result &= checkForOrphans(con, "gene_archive", "mapping_session_id", "mapping_session", "mapping_session_id", true);
// Check object xrefs point to existing objects
String[] types = { "Gene", "Transcript", "Translation" };
for (int i = 0; i < types.length; i++) {
result &= checkKeysByEnsemblObjectType(con, "object_xref", types[i]);
}
// Ensure that feature tables reference existing seq_regions
String[] featTabs = getCoreFeatureTables();
for (int i = 0; i < featTabs.length; i++) {
String featTab = featTabs[i];
// skip large tables as this test takes an inordinately long time
// if (featTab.equals("protein_align_feature") || featTab.equals("dna_align_feature") || featTab.equals("repeat_feature")) {
// continue;
result &= checkForOrphans(con, featTab, "seq_region_id", "seq_region", "seq_region_id", true);
}
result &= checkForOrphans(con, "analysis_description", "analysis_id", "analysis", "analysis_id", true);
result &= checkForOrphans(con, "gene_attrib", "gene_id", "gene", "gene_id", true);
result &= checkForOrphans(con, "gene_attrib", "attrib_type_id", "attrib_type", "attrib_type_id", true);
result &= checkForOrphans(con, "transcript_attrib", "attrib_type_id", "attrib_type", "attrib_type_id", true);
result &= checkForOrphans(con, "translation_attrib", "attrib_type_id", "attrib_type", "attrib_type_id", true);
result &= checkForOrphans(con, "translation", "end_exon_id", "exon", "exon_id", true);
result &= checkForOrphans(con, "translation", "start_exon_id", "exon", "exon_id", true);
result &= checkForOrphans(con, "alt_allele", "gene_id", "gene", "gene_id", true);
result &= checkForOrphans(con, "marker_map_location", "map_id", "map", "map_id", true);
result &= checkForOrphans(con, "marker_map_location", "marker_id", "marker", "marker_id", true);
result &= checkForOrphans(con, "marker_map_location", "marker_synonym_id", "marker_synonym", "marker_synonym_id", true);
result &= checkForOrphans(con, "qtl_feature", "qtl_id", "qtl", "qtl_id", true);
result &= checkForOrphans(con, "qtl_synonym", "qtl_id", "qtl", "qtl_id", true);
result &= checkForOrphans(con, "assembly", "asm_seq_region_id", "seq_region", "seq_region_id", true);
result &= checkForOrphans(con, "unmapped_object", "unmapped_reason_id", "unmapped_reason", "unmapped_reason_id", true);
result &= checkForOrphans(con, "unmapped_object", "analysis_id", "analysis", "analysis_id", true);
result &= checkForOrphansWithConstraint(con, "supporting_feature", "feature_id", "dna_align_feature", "dna_align_feature_id", "feature_type = 'dna_align_feature'");
result &= checkForOrphansWithConstraint(con, "supporting_feature", "feature_id", "protein_align_feature", "protein_align_feature_id", "feature_type = 'protein_align_feature'");
result &= checkForOrphansWithConstraint(con, "transcript_supporting_feature", "feature_id", "dna_align_feature", "dna_align_feature_id", "feature_type = 'dna_align_feature'");
result &= checkForOrphansWithConstraint(con, "transcript_supporting_feature", "feature_id", "protein_align_feature", "protein_align_feature_id", "feature_type = 'protein_align_feature'");
result &= checkForOrphans(con, "density_feature", "density_type_id", "density_type", "density_type_id");
result &= checkForOrphans(con, "prediction_exon", "prediction_transcript_id", "prediction_transcript", "prediction_transcript_id");
// result &= checkForOrphans(con, "prediction_exon", "prediction_exon_id", "exon", "exon_id");
result &= checkForOrphans(con, "marker", "display_marker_synonym_id", "marker_synonym", "marker_synonym_id");
// optional relations
result &= checkOptionalRelation(con, "qtl", "flank_marker_id_1", "marker", "marker_id");
result &= checkOptionalRelation(con, "qtl", "flank_marker_id_2", "marker", "marker_id");
result &= checkOptionalRelation(con, "qtl", "peak_marker_id", "marker", "marker_id");
result &= checkOptionalRelation(con, "unmapped_object", "external_db_id", "external_db", "external_db_id");
/*
* don't test
*
* // too slow result &= checkForOrphans(con, "repeat_feature", "repeat_consensus_id", "repeat_consensus",
* "repeat_consensus_id");
*/
// Check tables which reference the analysis table
String[] analysisTabs = getCoreTablesWithAnalysisID();
for (int i = 0; i < analysisTabs.length; i++) {
String analysisTab = analysisTabs[i];
// skip large tables as this test takes an inordinately long time
if (analysisTab.equals("protein_align_feature") || analysisTab.equals("dna_align_feature") || analysisTab.equals("repeat_feature")) {
continue;
}
if (countOrphansWithConstraint(con, analysisTab, "analysis_id", "analysis", "analysis_id", "analysis_id IS NOT NULL") > 0) {
ReportManager.problem(this, con, "FAILED object_xref -> analysis using FK analysis_id relationships");
result = false;
}
}
// end new tests
// added by dr2: check the canonical_transcript_id column points to a right transcript_id that belongs to that
// gene and there are no null values in this column
result &= checkCanonicalTranscriptIDKey(con);
// added by dr2: check that the foreign key display_marker_synonym_id points to a synonym
// for the marker
result &= checkDisplayMarkerSynonymID(con);
// check for transcript_supporting_feature - transcript links, but transcripts from certain logic names are allowed to not have
// supporting features.
result &= checkTranscriptSupportingFeatures(con);
return result;
}
private boolean checkTranscriptSupportingFeatures(Connection con) {
boolean result = true;
// list of transcript analysis logic_names which are allowed to not have supporting features
String allowedNoSupporting = "('BGI_Augustus_geneset', 'BGI_Genewise_geneset', 'BGI_Genscan_geneset', 'zfish_RNASeq', 'gorilla_RNASeq', 'ccds_import', 'refseq_human_import',"
+ " 'Medaka_Genome_Project', 'oxford_FGU', 'MT_genbank_import', 'LRG_import', 'ncRNA', 'havana', 'havana_ig_gene')";
String sql = "SELECT COUNT(*) FROM transcript t LEFT JOIN transcript_supporting_feature tsf ON t.transcript_id = tsf.transcript_id JOIN analysis a ON a.analysis_id=t.analysis_id WHERE a.analysis_id=t.analysis_id and tsf.transcript_id IS NULL AND a.logic_name NOT IN "
+ allowedNoSupporting;
int rows = getRowCount(con, sql);
if (rows > 0) {
ReportManager.problem(this, con, rows + " transcripts which should have transcript_supporting_features do not have them\nUseful SQL: " + sql);
result = false;
} else {
ReportManager.correct(this, con, "All transcripts that require supporting features have them");
}
return result;
}
private boolean checkStableIDKeys(Connection con, String type) {
if (tableHasRows(con, type + "_stable_id")) {
return checkForOrphans(con, type, type + "_id", type + "_stable_id", type + "_id", false);
}
return true;
} // checkStableIDKeys
public boolean checkKeysByEnsemblObjectType(Connection con, String baseTable, String type) {
// Need to handle under scores in tables here
// e.g. ProbeFeature > probe_feature
String table = type.replaceAll("([a-z])([A-Z])", "$1_$2");
table = table.toLowerCase();
// Where is ensembl_object_id used?
String column = baseTable.equals("object_xref") ? "ensembl_id" : "ensembl_object_id";
return checkForOrphansWithConstraint(con, baseTable, column, table, table + "_id", "ensembl_object_type=\'" + type + "\'");
/**
* Is this not just checkForOrphansWithConstraint?
*
*
* //int rows = getRowCount(con, "SELECT COUNT(*) FROM " + baseTable + " x LEFT JOIN " + table + " ON x." + column + "=" + table
* + "." + table + // "_id WHERE x.ensembl_object_type=\'" + type + "\' AND " + table + "." + table + "_id IS NULL");
*
*
* if (rows > 0) {
*
* ReportManager.problem(this, con, rows + " rows in " + baseTable + " refer to non-existent " + table + "s"); return false;
*
* } else {
*
* ReportManager.correct(this, con, "All rows in " + baseTable + " refer to valid " + table + "s"); return true; }
**/
} // checkKeysByEnsemblObjectType
private boolean checkCanonicalTranscriptIDKey(Connection con) {
boolean result = true;
// check first if there are NULL values in the canonical_transcript_id column (there shouldn't, force
// by schema
result &= checkNoNulls(con, "gene", "canonical_transcript_id");
// check the canonical_transcript_id column contains right transcript_id
result &= checkForOrphans(con, "gene", "canonical_transcript_id", "transcript", "transcript_id", true);
// and finally check that all canonical_transcript_id belong to gene
int rows = getRowCount(con, "SELECT COUNT(*) FROM gene g, transcript t where g.canonical_transcript_id=" + "t.transcript_id and g.gene_id <> t.gene_id");
if (rows > 0) {
// problem, the canonical transcript does not belong to the gene
String useful_sql = "SELECT g.gene_id,g.canonical_transcript_id FROM gene g, transcript t where g.canonical_transcript_id=" + "t.transcript_id and g.gene_id <> t.gene_id";
ReportManager.problem(this, con, rows + " rows in gene have a canonical transcript it doesn't belong to the gene" + " Try '" + useful_sql + "' to find out the offending genes");
result = false;
}
return result;
} // checkCanonicalTranscriptIDKey
    /**
     * Check that marker.display_marker_synonym_id refers to a synonym that belongs
     * to that same marker (the plain foreign-key check elsewhere only proves the
     * synonym row exists, not that it is one of this marker's synonyms).
     */
    private boolean checkDisplayMarkerSynonymID(Connection con) {
        boolean result = true;
        // the foreign key has been checked before, but might not point to a marker_synonym
        // of this marker
        int rows = getRowCount(con, "select count(*) from marker m where m.display_marker_synonym_id not in " + "(select ms.marker_synonym_id from marker_synonym ms where m.marker_id = ms.marker_id)");
        if (rows > 0) {
            // problem, there are markers that have display_marker_synonym_id that is not part of the
            // synonyms for the marker
            String useful_sql = "select m.marker_id, m.display_marker_synonym_id, ms1.marker_synonym_id, ms1.name from marker m, marker_synonym ms1 where m.marker_id = ms1.marker_id and m.display_marker_synonym_id not in (select ms.marker_synonym_id from marker_synonym ms where m.marker_id = ms.marker_id)";
            ReportManager.problem(this, con, rows + " rows in marker table have a display_marker_synonym that is not part of the synonyms for this marker" + " Try '" + useful_sql
                + "' to find out the offending markers");
            result = false;
        }
        return result;
    } // checkDisplayMarkerSynonymID
} // CoreForeignKeys
|
// This file was generated by RobotBuilder. It contains sections of
// code that are automatically generated and assigned by robotbuilder.
// These sections will be updated in the future when you export to
// Java from RobotBuilder. Do not put any code or make any change in
// the blocks indicating autogenerated code or it will be lost on an
// update. Deleting the comments indicating the section will prevent
// it from being updated in the future.
package org.usfirst.frc330.Beachbot2013Java.commands;
import edu.wpi.first.wpilibj.command.AutoSpreadsheetCommand;
import edu.wpi.first.wpilibj.command.Command;
import org.usfirst.frc330.Beachbot2013Java.Robot;
/**
 * Turns the robot by an angle relative to its current heading. The target
 * setpoint is the current gyro heading plus the requested angle; the PID
 * control itself is inherited from {@link TurnGyroAbs}.
 */
public class TurnGyroRel extends TurnGyroAbs implements AutoSpreadsheetCommand {

    /** Relative turn with default tolerance (0), timeout (15 s), no stop at end. */
    public TurnGyroRel(double angle) {
        this(angle, 0, 15, false);
    }

    /** Relative turn with an explicit tolerance; default timeout, no stop at end. */
    public TurnGyroRel(double angle, double tolerance)
    {
        this(angle, tolerance, 15, false);
    }

    /** Fully specified relative turn; all storage is delegated to the superclass. */
    public TurnGyroRel(double angle, double tolerance, double timeout, boolean stopAtEnd) {
        super(angle, tolerance, timeout, stopAtEnd);
    }

    // Called just before this Command runs the first time: select the PID loop
    // matching the current gear and aim it at (current heading + angle).
    protected void initialize() {
        super.initialize();
        if (Robot.chassis.getShiftState())
        {
            Robot.chassis.gyroPIDHigh.setSetpoint(angle + Robot.chassis.getAngle());
            Robot.chassis.gyroPIDHigh.enable();
        }
        else
        {
            Robot.chassis.gyroPIDLow.setSetpoint(angle + Robot.chassis.getAngle());
            Robot.chassis.gyroPIDLow.enable();
        }
    }

    // NOTE(review): copies are always created with angle 0 — presumably the
    // RobotBuilder spreadsheet machinery fills in real parameters later; confirm.
    public Command copy() {
        return new TurnGyroRel(0);
    }
}
|
package org.yeastrc.xlink.www.web_utils;
import org.apache.log4j.Logger;
import org.yeastrc.xlink.www.constants.PeptideViewLinkTypesConstants;
/**
 * Get Link types for searchers.
 *
 * Returns null if all link types are selected, otherwise returns the link
 * types for use in searchers.
 */
public class GetLinkTypesForSearchers {

    private static final Logger log = Logger.getLogger(GetLinkTypesForSearchers.class);

    private GetLinkTypesForSearchers() { }

    public static GetLinkTypesForSearchers getInstance() { return new GetLinkTypesForSearchers(); }

    /**
     * @param linkTypes selected web link types; every entry must be one of the
     *            PeptideViewLinkTypesConstants PSM link types
     * @return null if all link types are selected, otherwise the input array unchanged
     * @throws Exception if any entry is not a recognized link type
     */
    public String[] getLinkTypesForSearchers ( String[] linkTypes ) throws Exception {
        String[] resultLinkTypes = linkTypes;
        boolean webLinkTypeSelected_CROSSLINK = false;
        boolean webLinkTypeSelected_LOOPLINK = false;
        boolean webLinkTypeSelected_UNLINKED = false;
        for ( String linkType : linkTypes ) {
            if ( PeptideViewLinkTypesConstants.CROSSLINK_PSM.equals( linkType ) ) {
                webLinkTypeSelected_CROSSLINK = true;
            } else if ( PeptideViewLinkTypesConstants.LOOPLINK_PSM.equals( linkType ) ) {
                webLinkTypeSelected_LOOPLINK = true;
            } else if ( PeptideViewLinkTypesConstants.UNLINKED_PSM.equals( linkType ) ) {
                webLinkTypeSelected_UNLINKED = true;
            } else {
                String msg = "linkType is invalid, linkType: " + linkType;
                // BUG FIX: previously this logged only the raw linkType value,
                // dropping the explanatory message that the exception carries.
                log.error( msg );
                throw new Exception( msg );
            }
        }
        if ( webLinkTypeSelected_CROSSLINK
                && webLinkTypeSelected_LOOPLINK
                && webLinkTypeSelected_UNLINKED ) {
            // All link types selected: return null so searchers skip filtering.
            resultLinkTypes = null;
        }
        return resultLinkTypes;
    }
}
|
package edu.cmu.pocketsphinx;
import static android.content.Context.MODE_PRIVATE;
import java.io.*;
import java.util.HashSet;
import java.util.Set;
import android.content.Context;
import android.content.res.AssetManager;
import android.util.Log;
import static android.os.Environment.getExternalStorageState;
/**
 * Provides utility methods for copying asset files to external storage.
 *
 * @author Alexander Solovets
 */
public class Assets {

    // Log tag for all messages emitted by this class.
    private static final String TAG = Assets.class.getSimpleName();

    // Name of the asset that lists, one per line, the relative paths of the
    // assets to synchronize.
    private static final String ASSET_LIST_NAME = "assets.lst";

    /**
     * Synchronizes asset files with the content on external storage. There
     * must be a special file {@value #ASSET_LIST_NAME} among the application
     * assets containing relative paths of assets to synchronize. If the
     * corresponding path does not exist on the external storage it is copied.
     * If the path exists checksums are compared and the asset is copied only
     * if there is a mismatch. Checksum is stored in a separate asset with the
     * name that consists of the original name and a suffix that depends on the
     * checksum algorithm (e.g. MD5). Checksum files are copied along with the
     * corresponding asset files.
     *
     * @param context application context
     * @return path to the root of the resources directory on external storage
     * @throws IOException if an I/O error occurs or "assets.lst" is missing
     */
    public static File syncAssets(Context context) throws IOException {
        AssetManager assets = context.getAssets();
        // NOTE(review): the readers opened in this method are never closed.
        Reader reader = new InputStreamReader(assets.open(ASSET_LIST_NAME));
        BufferedReader br = new BufferedReader(reader);
        File appDir = getApplicationDir(context);
        Set<String> assetPaths = new HashSet<String>();
        String path;
        while (null != (path = br.readLine())) {
            File extFile = new File(appDir, path);
            String md5Path = path + ".md5";
            File extHash = new File(appDir, md5Path);
            extFile.getParentFile().mkdirs();
            assetPaths.add(extFile.getPath());
            try {
                // Read asset hash.
                reader = new InputStreamReader(assets.open(md5Path));
                String hash = new BufferedReader(reader).readLine();
                // Read file hash and compare.
                reader = new InputStreamReader(new FileInputStream(extHash));
                if (hash.equals(new BufferedReader(reader).readLine())) {
                    Log.i(TAG, "skip " + path + ", checksums match");
                    continue;
                }
            } catch (IOException e) {
                // Checksum asset or local checksum file missing/unreadable:
                // fall through and (re)copy the asset below.
            }
            Log.i(TAG, "copy " + path + " to " + extFile);
            copyStream(assets.open(path), new FileOutputStream(extFile));
            // Copy the checksum file alongside the asset itself.
            InputStream hashStream = assets.open(md5Path);
            copyStream(hashStream, new FileOutputStream(extHash));
        }
        removeUnusedAssets(new File(appDir, ASSET_LIST_NAME), assetPaths);
        return appDir;
    }

    /**
     * Copies application asset files to external storage. Recursively copies
     * asset files to a directory located on external storage and unique for
     * the application. If a file already exists it will be overwritten.
     *
     * <p>In general this method should not be used to
     * synchronize application resources and is only provided for compatibility
     * with projects without "smart" asset setup. If you are looking for quick
     * and "smart" synchronization that does not overwrite existing files use
     * {@link #syncAssets(Context)}.
     *
     * @param context application context
     * @param path relative path to asset file or directory
     * @return path to the copied file or directory on external storage
     *
     * @see #syncAssets
     */
    public static File copyAssets(Context context, String path)
            throws IOException
    {
        File appDir = getApplicationDir(context);
        File externalFile = new File(appDir, path);
        AssetManager assets = context.getAssets();
        String[] content = assets.list(path);
        Set<String> assetPaths = new HashSet<String>();
        if (content.length > 0) {
            // Non-empty listing: "path" is a directory — recurse into it.
            for (String item : content)
                copyAssets(context, new File(path, item).getPath());
        } else {
            // Empty listing: "path" is a plain asset file — copy it.
            Log.i(TAG, "copy " + path + " to " + externalFile);
            externalFile.getParentFile().mkdirs();
            copyStream(assets.open(path), new FileOutputStream(externalFile));
            assetPaths.add(externalFile.getPath());
        }
        removeUnusedAssets(new File(appDir, ASSET_LIST_NAME), assetPaths);
        return externalFile;
    }

    /**
     * Rewrites the asset list file and deletes assets that are no longer used.
     * Reads the previously written list, deletes every listed file that is not
     * in {@code usedAssets} (together with its ".md5" companion), then writes
     * the new list of used assets.
     *
     * @param assetsListFile file holding the previously synchronized paths
     * @param usedAssets absolute paths of assets that must be kept
     * @throws IOException if an I/O error occurs
     */
    private static void removeUnusedAssets(File assetsListFile,
                                           Set<String> usedAssets)
            throws IOException
    {
        try {
            InputStream istream = new FileInputStream(assetsListFile);
            Reader reader = new InputStreamReader(istream);
            BufferedReader br = new BufferedReader(reader);
            Set<String> unusedAssets = new HashSet<String>();
            String line;
            while (null != (line = br.readLine()))
                unusedAssets.add(line);
            unusedAssets.removeAll(usedAssets);
            for (String path : unusedAssets) {
                if (new File(path).delete()) // Skip missing files.
                    Log.i(TAG, "delete unused asset " + path);
                // The checksum companion is deleted unconditionally.
                new File(path + ".md5").delete();
            }
            istream.close();
        } catch (FileNotFoundException e) {
            // First run: no list file yet, so there is nothing to remove.
            Log.i(TAG, assetsListFile +
                    " does not exist, unused assets are not removed");
        }
        // Persist the new list of used assets for the next synchronization.
        OutputStream ostream = new FileOutputStream(assetsListFile);
        PrintStream ps = new PrintStream(ostream);
        for (String path : usedAssets)
            ps.println(path);
        ps.close();
    }

    /**
     * Returns the external files directory for the application. The returned
     * directory on external storage is guaranteed to be unique for the
     * running application.
     *
     * @param context application context
     * @return path to the application directory (never null)
     * @throws IOException if external storage is not available
     *
     * @see android.content.Context#getExternalFilesDir
     * @see android.os.Environment#getExternalStorageState
     */
    public static File getApplicationDir(Context context) throws IOException {
        File dir = context.getExternalFilesDir(null);
        if (null == dir)
            throw new IOException("cannot get external files dir, " +
                    "external storage state is " +
                    getExternalStorageState());
        return dir;
    }

    /**
     * Copies the contents of the source stream into the destination stream.
     * Implementation is borrowed from Apache Commons. Neither stream is
     * closed by this method.
     *
     * @param source source stream
     * @param dest destination stream
     * @throws IOException if an I/O error occurs
     */
    private static void copyStream(InputStream source, OutputStream dest)
            throws IOException
    {
        byte[] buffer = new byte[1024];
        int nread;
        while ((nread = source.read(buffer)) != -1) {
            if (nread == 0) {
                // Zero-length read: fall back to a single-byte read so end of
                // stream can be detected reliably.
                nread = source.read();
                if (nread < 0)
                    break;
                dest.write(nread);
                continue;
            }
            dest.write(buffer, 0, nread);
        }
    }
}
/* vim: set ts=4 sw=4: */
|
package edu.miamioh.cse283.htw;
import java.util.*;
/**
 * A single cave room in Hunt-the-Wumpus: tracks its connections to other
 * rooms, the players currently inside, and its contents (wumpus, gold, ...).
 */
public class Room {

    // Possible room contents.
    public static final int EMPTY = 0;
    public static final int WUMPUS = 1;
    public static final int GOLD = 2;
    public static final int HOLE = 3;
    public static final int BATS = 4;
    public static final int OTHER_PLAYERS = 5;

    /** Players currently in this room. */
    public ArrayList<ClientProxy> players;
    /** Rooms directly reachable from this one. */
    private ArrayList<Room> connections;
    /** This room's number. */
    private int n;
    /** One of the content constants above. */
    private int contents;

    /**
     * Creates room number {@code n} with no players and no connections.
     */
    public Room(int n) {
        players = new ArrayList<ClientProxy>();
        connections = new ArrayList<Room>();
        this.n = n;
        // NOTE(review): every room starts containing a wumpus — presumably the
        // cave builder resets contents afterwards; confirm this default.
        contents = WUMPUS;
    }

    /** Connects this room and {@code other} in both directions. */
    public void addBidirectionalConnection(Room other) {
        connections.add(other);
        other.connections.add(this);
    }

    /**
     * Returns the rooms connected to this one.
     *
     * BUG FIX: the previous implementation cast the result of the no-argument
     * {@code toArray()} (an {@code Object[]}) to {@code Room[]}, which throws
     * {@code ClassCastException} at runtime. The typed overload is used instead.
     *
     * @return connected rooms, in insertion order
     */
    public Room[] getConnections() {
        return connections.toArray(new Room[connections.size()]);
    }

    /** @return this room's number. */
    public int getNumber() {
        return n;
    }

    /** Adds a player to this room. */
    public void addPlayer(ClientProxy client) {
        players.add(client);
    }

    /**
     * Removes every occurrence of {@code client} from this room.
     *
     * BUG FIX: the old index-based loop incremented past the element that
     * slid into a removed slot, so adjacent duplicate entries could survive.
     * Iterator-based removal handles all occurrences safely.
     */
    public void removePlayer(ClientProxy client) {
        for (Iterator<ClientProxy> it = players.iterator(); it.hasNext();) {
            if (it.next() == client) {
                it.remove();
            }
        }
    }

    /** @return true when this room's contents produce a sense cue for neighbors. */
    public boolean hasSense() {
        return contents != EMPTY;
    }

    /** @return the sensory message matching this room's contents. */
    public String getSense() {
        switch (contents) {
        case WUMPUS:
            return "You smell the smelly smell of a Wumpus";
        case OTHER_PLAYERS:
            return "You hear another adventurer knocking an arrow";
        case BATS:
            return "You hear the screech of the bats";
        case HOLE:
            return "You hear the whistling wind";
        case GOLD:
            return "You see the shimmering light of gold!";
        }
        return "You see nothing... nothing!";
    }

    /** @return sensory messages from every connected room that has contents. */
    public String[] getSenses() {
        ArrayList<String> senses = new ArrayList<String>();
        for (Room r : connections) {
            if (r.hasSense()) {
                senses.add(r.getSense());
            }
        }
        return senses.toArray(new String[senses.size()]);
    }
}
|
package edu.msoe.smv.raspi;
import com.google.gson.GsonBuilder;
/**
 * This class represents the state of the vehicle at a specific point in time.
 *
 * @author matt
 */
public class DataNode {

    /**
     * The UNIX epoch time (milliseconds) when this DataNode was created.
     */
    private final long unixTime;

    /**
     * The rpm at this DataNode.
     */
    private final double rpm;

    /**
     * The speed at this DataNode.
     */
    private final double speed;

    /**
     * Constructs a new DataNode, timestamping it with the current time.
     *
     * @param rpm the RPM at this DataNode
     * @param speed the speed at this DataNode
     */
    public DataNode(double rpm, double speed) {
        this.rpm = rpm;
        this.speed = speed;
        this.unixTime = System.currentTimeMillis();
    }

    /**
     * Returns the UNIX epoch time when this DataNode was created.
     *
     * @return the UNIX epoch time when this DataNode was created
     */
    public long getUnixTime() {
        return unixTime;
    }

    /**
     * Returns the RPM at this DataNode.
     *
     * @return the RPM at this DataNode
     */
    public double getRpm() {
        return rpm;
    }

    /**
     * Returns the speed at this DataNode.
     *
     * @return the speed at this DataNode
     */
    public double getSpeed() {
        return speed;
    }

    /**
     * Returns a string representation of this node formatted in JSON.
     * <p>
     * Pretty printing is used, that is the resulting string spans multiple
     * lines. For example:
     * <pre>{
     *   "unixTime": 1414263863,
     *   "rpm": 1200.0,
     *   "speed": 20.0
     * }</pre>
     *
     * @return a string representation of this node formatted in JSON
     */
    @Override
    public String toString() {
        return new GsonBuilder().setPrettyPrinting().create().toJson(this);
    }

    /**
     * Returns a string representation of this DataNode as comma-separated values.
     * <p>
     * Example result: <pre>12345678,200,20</pre> where the first value is the
     * Unix time, the second is the RPM, and the third is the speed.
     *
     * @return a string representation of this DataNode as comma-separated values
     */
    public String toCSV() {
        return unixTime + "," + rpm + "," + speed;
    }
}
|
/* Open Source Software - may be modified and shared by FRC teams. The code */
/* the project. */
package edu.team3182.main;
import com.sun.squawk.util.MathUtils;
import edu.wpi.first.wpilibj.IterativeRobot;
import edu.wpi.first.wpilibj.Joystick;
import edu.wpi.first.wpilibj.RobotDrive;
import edu.wpi.first.wpilibj.Talon;
import edu.wpi.first.wpilibj.Timer;
import edu.wpi.first.wpilibj.Encoder;
import edu.wpi.first.wpilibj.Compressor;
import edu.wpi.first.wpilibj.AnalogPotentiometer;
import edu.wpi.first.wpilibj.DigitalInput;
import edu.wpi.first.wpilibj.DoubleSolenoid;
import edu.wpi.first.wpilibj.camera.AxisCamera;
import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard;
/**
 * The VM is configured to automatically run this class, and to call the
 * functions corresponding to each mode, as described in the IterativeRobot
 * documentation. If you change the name of this class or the package after
 * creating this project, you must also update the manifest file in the resource
 * directory.
 */
public class Robot3182 extends IterativeRobot {

    // Initialization of code for robot drive functions
    private RobotDrive drive;
    private Joystick rightJoystick;
    private Joystick leftJoystick;
    private AxisCamera camera; // NOTE(review): never assigned (robotInit line is commented out)
    // Initialization of code for robot appendage functions
    private Joystick buttonsJoystick;
    private Talon shooterMotors;
    private Talon collectorMotor;
    private DoubleSolenoid leftShifter;
    private DoubleSolenoid rightShifter;
    private DoubleSolenoid leftCollector;
    private DoubleSolenoid rightCollector;
    private Compressor compressor; // NOTE(review): never constructed in robotInit — see teleopInit
    // Initialization of code for robot sensors
    private Encoder rightDriveEncoder;
    private Encoder leftDriveEncoder;
    public SmartDashboard dash;
    private DigitalInput limitLED;
    private AnalogPotentiometer shooterPot;
    // Initialize variables to support functions above
    // yAxisLeft/Right read in values of joysticks, values of joysticks are output inversely like airplane drive
    double yAxisRight;
    double yAxisLeft;
    double distance;
    boolean toggleOut;
    boolean toggleIn;
    boolean collectorButton9;
    boolean collectorButton10;
    boolean shoot = false;
    boolean reverseShooter = false;
    boolean collect = false;
    boolean collectReverse = false;
    boolean collectorFoward = false;
    boolean quarterTurnLeft = false;
    boolean quarterTurnRight = false;
    boolean halfTurnLeft = false;
    boolean halfTurnRight = false;
    boolean rightTrigger = false;
    boolean leftTrigger = false;
    boolean limitStat; // NOTE(review): never assigned (limit switch code commented out) — always false
    double p = 0.25; // dead zone of joysticks for drive is between -p and p
    double smoothVarRight = 0; // for making joysticks linear function between of zero to 1
    double smoothVarLeft = 0;
    final int endLoopDrive = 10; // length of for loops that control maneuver timing/ shooting timing
    final int endLoopShoot = 10;
    int shooterPotVal; // position of catapult
    // Coefficients of exponential function to ramp up speed of catapult (so ball doesn't fall out)
    final double a = .005;
    final double b = .9;
    boolean isReloading = false; // prevents shooting when reloading

    /**
     * Called when the robot is first turned on. This is a substitute for using
     * the constructor in the class for consistency. This method is only called
     * once.
     */
    public void robotInit() {
        //camera = AxisCamera.getInstance();
        drive = new RobotDrive(1, 2);
        drive.setSafetyEnabled(false);
        rightJoystick = new Joystick(1);
        leftJoystick = new Joystick(2);
        buttonsJoystick = new Joystick(3);
        // the paramater will probably change depending on where the limit switch is
        // limitLED = new DigitalInput(1);
        // limitStat = limitLED.get();
        // UNCOMMENT WHEN remainder of electronics board is complete
        shooterMotors = new Talon(4);
        collectorMotor = new Talon(3);
        // UNCOMMENT WHEN potentiometer is hooked up
        shooterPot = new AnalogPotentiometer(1);
        rightDriveEncoder = new Encoder(4, 3);
        leftDriveEncoder = new Encoder(2, 1);
        rightDriveEncoder.reset();
        rightDriveEncoder.setDistancePerPulse(.08168);
        // UNCOMMENT WHEN solenoids are available on electronics board
        leftShifter = new DoubleSolenoid(5, 6);
        rightShifter = new DoubleSolenoid(7, 8);
        leftCollector = new DoubleSolenoid(1, 2);
        rightCollector = new DoubleSolenoid(3, 4);
        //compressor = new Compressor(0,0);
    }

    /**
     * Called when the robot enters the autonomous period for the first time.
     * This is called on a transition from any other state.
     */
    public void autonomousInit() {
        rightDriveEncoder.start();
        // Send command to Arduino for the light strip
        // set the variable distance to the distance of encoder since reset
        distance = rightDriveEncoder.getDistance();
        // Drive forward for 2 seconds with linear acceleration function
        // NOTE(review): (i / 100) is integer division and i never exceeds 30,
        // so this evaluates to 0 on every iteration — the ramp does nothing.
        for (int i = 1; i <= 30; i++) { // takes 1.5 seconds reach full speed
            drive.drive(0, (i / 100));
            Timer.delay(.05);
        }
        drive.drive(0.3, 0.0);
        Timer.delay(1.0);
        drive.drive(0.0, 0.0);
        // Shoot:
        // SHOULD WE ADD LOGIC TO TURN AROUND AFTER FIRING
        // quickly speed up motors, then wait for the ball to be shot
        shoot();
        Timer.delay(1);
        pivot(180);
    }

    public void autonomousPeriodic() {
        // what is this for?? - RJJ
        Timer.delay(.01);
    }

    /**
     * Called when the robot enters the teleop period for the first time. This
     * is called on a transition from any other state.
     */
    public void teleopInit() {
        rightDriveEncoder.start();
        leftDriveEncoder.start();
        // NOTE(review): compressor is never constructed in robotInit (its line
        // is commented out), so this call throws NullPointerException — confirm.
        compressor.start();
    }

    /**
     * This function is called periodically during operator control.
     */
    public void teleopPeriodic() {
        // T E L E O P   D R I V E   C O D E
        SmartDashboard.putBoolean("Collector Extended: ", toggleOut);
        // Read commands from the joysticks
        // sets yAxisRight and yAxisLeft to the axis of corresponding joysticks
        yAxisRight = rightJoystick.getAxis(Joystick.AxisType.kY);
        yAxisLeft = leftJoystick.getAxis(Joystick.AxisType.kY);
        // shoot is button 1, collect is 2, ground pass/dump is 3
        // collector is buttons 10 (out) and 11 (in)
        shoot = buttonsJoystick.getRawButton(1);
        collect = buttonsJoystick.getRawButton(2);
        collectReverse = buttonsJoystick.getRawButton(3);
        collectorButton9 = buttonsJoystick.getRawButton(9);
        collectorButton10 = buttonsJoystick.getRawButton(10);
        // Maneuvers (trigger on left is half turn, trigger on right is quarter turn)
        // NOTE: Reloading will be stopped when a maneuver is activated
        // NOTE: Maneuvers will not be activated if the collector motor is on
        // Buttons changed to 2 and 3, trigger is shifters
        rightTrigger = rightJoystick.getRawButton(1);
        leftTrigger = leftJoystick.getRawButton(1);
        quarterTurnLeft = leftJoystick.getRawButton(2);
        quarterTurnRight = rightJoystick.getRawButton(2);
        halfTurnLeft = leftJoystick.getRawButton(3);
        halfTurnRight = rightJoystick.getRawButton(3);
        // collector code
        // if button 9 is pressed the collector will come out
        // if button 10 is pressed the collector will come in
        if (collectorButton9 == true) {
            toggleOut = true;
        }
        if (collectorButton10 == true) {
            toggleIn = true;
        }
        if (toggleOut && !collectorButton9) { // when the button is let go, the toggle will comence
            collectOut();
            toggleOut = false;
        }
        if (toggleIn && !collectorButton10) { // when the button is let go, the toggle will comence
            collectIn();
            toggleIn = false;
        }
        // shifter code
        // while both of the triggers are clicked, the shifters are switched
        if (rightTrigger && leftTrigger) {
            if (rightShifter.get() == false) {
                shiftIn();
            }
        } else if (rightTrigger == false && leftTrigger == false) {
            if (leftShifter.get() == true) {
                shiftOut();
            }
        }
        // makes sure joystick will not work at +/-25% throttle
        // smoothVarRight/Left are output variables from a function
        // to get power from 0 to 1 between P and full throttle on the joysticks
        // same for full reverse throttle to -P
        if (yAxisRight < p && yAxisRight > (-p)) {
            smoothVarRight = 0;
        }
        if (yAxisLeft < p && yAxisLeft > (-p)) {
            smoothVarLeft = 0;
        }
        // yAxisLeft greater than P, which is pull back on the joystick
        if (yAxisLeft >= p) {
            smoothVarLeft = ((1 / (1 - p)) * yAxisLeft + (1 - (1 / (1 - p))));
        }
        // yAxisLeft less than -P, which is push forward on the joystick
        if (yAxisLeft <= (-p)) {
            smoothVarLeft = ((1 / (1 - p)) * yAxisLeft - (1 - (1 / (1 - p))));
        }
        // smooth right joystick
        // yAxisRight greater than P, which is pull back on the joystick
        if (yAxisRight >= p) {
            smoothVarRight = ((1 / (1 - p)) * yAxisRight + (1 - (1 / (1 - p))));
        }
        // yAxisRight less than -P, which is push forward on the joystick
        if (yAxisRight <= (-p)) {
            smoothVarRight = ((1 / (1 - p)) * yAxisRight - (1 - (1 / (1 - p))));
        }
        // drive using the joysticks
        drive.tankDrive(smoothVarLeft, smoothVarRight);
        // does a clockwise 90 degree turn quickly
        if (quarterTurnRight == true && collect == false && collectReverse == false) {
            pivot(90);
        }
        // does a counter-clockwise 90 degree turn quickly
        if (quarterTurnLeft == true && collect == false && collectReverse == false) {
            pivot(-90);
        }
        if (halfTurnRight == true && collect == false && collectReverse == false) {
            pivot(180);
        }
        // T E L E O P   S H O O T   C O D E
        // Shooting
        // NOTE: You CANNOT shoot when the catapult is reloading OR when the collector spinning in reverse OR when the collector is in
        if (shoot == true && isReloading == false && collectReverse == false && rightCollector.get()) {
            shoot();
        }
        // if button 2 on support function joystick is pressed, run the collector motor at 90%
        // if button 3 on support function joystick is pressed, run the collector motor in reverse at 90% (ground pass)
        if (collect == true) {
            collect();
        }
        if (collectReverse == true) {
            pass();
        }
        if (limitStat) {
            // make LED some color
        } else if (limitStat == false) {
            /* make LED's do whatever they normally do when not notifying
             * the drivers that the bot is in some state
             */
        }
        // Display rate of encoder to the dashboard
        //SmartDashboard.putNumber("Encoder Rate", rightDriveEncoder.getRate());
        System.out.println(distance);
        System.out.println(rightDriveEncoder.get());
        System.out.println("Encoder rate: " + rightDriveEncoder.getRate());
        System.out.println("Encoder rate left: " + leftDriveEncoder.getRate());
    }

    public void disabledInit() {
    }

    public void disabledPeriodic() {
    }

    /**
     * This function is called periodically during test mode: each joystick
     * button exercises one mechanism in isolation.
     */
    public void testPeriodic() {
        if (buttonsJoystick.getRawButton(1)) {
            shoot();
        }
        if (buttonsJoystick.getRawButton(2)) {
            collect();
        }
        if (buttonsJoystick.getRawButton(3)) {
            pass();
        }
        if (buttonsJoystick.getRawButton(4)) {
            pivot(90);
        }
        if (buttonsJoystick.getRawButton(5)) {
            collectOut();
        }
        if (buttonsJoystick.getRawButton(6)) {
            collectIn();
        }
        if (buttonsJoystick.getRawButton(7)) {
            shiftIn();
        }
        if (buttonsJoystick.getRawButton(8)) {
            shiftOut();
        }
        if (buttonsJoystick.getRawButton(9)) {
            pivot(-90);
        }
        if (buttonsJoystick.getRawButton(10)) {
            pivot(180);
        }
    }

    // bring shooter up then down
    private void shoot() {
        for (int i = 1; i <= endLoopShoot; i++) { // takes half a second to reach full speed
            shooterMotors.set(a * MathUtils.exp(b * i));
            Timer.delay(.01);
        }
        shooterMotors.set(1);
        Timer.delay(.1);
        shooterMotors.set(0);
        Timer.delay(.5);
        // start reload
        shooterMotors.set(-.2);
        Timer.delay(.4);
        shooterMotors.set(0);
        // Former potentiometer-based reload logic, kept for reference:
        // isReloading = true; //prevents shooting when being reloaded
        // if (shoot == false && isReloading == false) {
        // shooterMotors.set(0);
        // //continues reloading if it was stopped
        // if (shooterPotVal > 500) {
        // shooterMotors.set(-.2);
        // //finish reload
        // shooterPotVal = (int) shooterPot.get();
        // if (shooterPotVal <= 500) {
        // shooterMotors.set(-.05);
        // if (shooterPotVal <= 300) {
        // shooterMotors.set(0);
        // isReloading = false;
    }

    // runs collect forward; relies on safety config disabling
    private void collect() {
        collectorMotor.set(.9);
    }

    // runs collect backward; relies on safety config disabling
    private void pass() {
        collectorMotor.set(-.9);
    }

    // pivots robot by some angle, positive is right, negative is left;
    // open-loop: the delay (|angle|/300 seconds) stands in for feedback
    private void pivot(float angle_deg) {
        //for (int i = 1; i <= endLoopDrive; i++) { ///takes 1/10th of a second reach full speed
        //drive.drive(0, (i / endLoopDrive));
        //Timer.delay(.01);
        drive.drive(0, signum(angle_deg));
        Timer.delay(Math.abs(angle_deg / 300));
        drive.drive(0, 0);
    }

    // sign of num as -1, 0 or 1 (no float Math.signum on this JME runtime)
    private int signum(float num) {
        if (num > 0) {
            return 1;
        } else if (num < 0) {
            return -1;
        } else {
            return 0;
        }
    }

    // extends the collector
    private void collectOut() {
        rightCollector.set(true);
        leftCollector.set(true);
    }

    // retracts the collector
    private void collectIn() {
        rightCollector.set(false);
        leftCollector.set(false);
    }

    // shifts both gearboxes one way
    private void shiftIn() {
        leftShifter.set(true);
        rightShifter.set(true);
    }

    // shifts both gearboxes the other way
    private void shiftOut() {
        leftShifter.set(false);
        rightShifter.set(false);
    }
}
|
/* Open Source Software - may be modified and shared by FRC teams. The code */
/* the project. */
package edu.team3182.main;
import edu.wpi.first.wpilibj.IterativeRobot;
import edu.wpi.first.wpilibj.Joystick;
import edu.wpi.first.wpilibj.RobotDrive;
import edu.wpi.first.wpilibj.Talon;
import edu.wpi.first.wpilibj.Timer;
import edu.wpi.first.wpilibj.Encoder;
import edu.wpi.first.wpilibj.Compressor;
import edu.wpi.first.wpilibj.AnalogPotentiometer;
import edu.wpi.first.wpilibj.DigitalInput;
import edu.wpi.first.wpilibj.DigitalOutput;
import edu.wpi.first.wpilibj.DoubleSolenoid;
import edu.wpi.first.wpilibj.Ultrasonic;
import edu.wpi.first.wpilibj.camera.AxisCamera;
import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard;
import java.lang.Thread;
import edu.wpi.first.wpilibj.communication.Semaphore;
/**
* The VM is configured to automatically run this class, and to call the
* functions corresponding to each mode, as described in the IterativeRobot
* documentation. If you change the name of this class or the package after
* creating this project, you must also update the manifest file in the resource
* directory.
*/
public class Robot3182 extends IterativeRobot {
/**
* This function is run when the robot is first started up and should be
* used for any initialization code.
*/
//Initialization of code for robot drive functions
private RobotDrive drive;
private Joystick rightJoystick;
private Joystick leftJoystick;
//Initialization of code for robot appendage functions
private Joystick buttonsJoystick;
private Talon shooterMotors;
private Talon collectorMotor;
private DoubleSolenoid leftShifter;
private DoubleSolenoid rightShifter;
private DoubleSolenoid leftCollector;
private DoubleSolenoid rightCollector;
private Compressor compressor;
// Initialization of code for robot sensors
private Encoder rightDriveEncoder;
private Encoder leftDriveEncoder;
public SmartDashboard dash;
private DigitalInput limitLED;
private AnalogPotentiometer shooterPot;
private DigitalOutput arduinoSignal;
private DigitalOutput arduinoSignifier;
private Ultrasonic rangeFinder;
// Initialize variables to support functions above
// yAxisLeft/Right read in values of joysticks, values of joysticks are output inversely like airplane drive
double yAxisRight;
double yAxisLeft;
double distance; //ultrasonic
boolean toggleOut;
boolean toggleIn;
boolean collectorButton11;
boolean collectorButton9;
boolean shoot = false;
boolean reverseShooter = false;
boolean collect = false;
boolean collectReverse = false;
boolean collectorFoward = false;
boolean quarterTurnLeft = false;
boolean quarterTurnRight = false;
boolean halfTurnRight = false;
boolean rightTrigger = false;
boolean leftTrigger = false;
boolean limitStat;
double p = 0.10; //dead zone of joysticks for drive is between -p and p
double smoothVarRight = 0; //for making joysticks linear function between of zero to 1
double smoothVarLeft = 0;
int shooterPotVal; //position of catapult
double distanceRange;
//Coefficients of exponential function to ramp up speed of catapult (so ball doesn't fall out)
final double a = .005;
final double b = .9;
boolean isReloading = false; //prevents shooting when reloading
//
double x;
    /**
     * Called when the robot is first turned on. This is a substitute for using
     * the constructor in the class for consistency. This method is only called
     * once.
     */
    public void robotInit() {
        //camera = AxisCamera.getInstance();
        drive = new RobotDrive(1, 2);
        drive.setSafetyEnabled(false);
        rightJoystick = new Joystick(1);
        leftJoystick = new Joystick(2);
        buttonsJoystick = new Joystick(3);
        // the paramater will probably change depending on where the limit switch is
        // limitLED = new DigitalInput(1);
        // limitStat = limitLED.get();
        arduinoSignal = new DigitalOutput(5); // signal with data
        arduinoSignifier = new DigitalOutput(6); // tells arduino when to read data
        // UNCOMMENT WHEN remainder of electronics board is complete
        shooterMotors = new Talon(4);
        collectorMotor = new Talon(3);
        // Motor safety on: the collector output must be refreshed regularly.
        collectorMotor.setSafetyEnabled(true);
        shooterMotors.setSafetyEnabled(false);
        // UNCOMMENT WHEN potentiometer is hooked up
        shooterPot = new AnalogPotentiometer(1);
        rightDriveEncoder = new Encoder(4, 3);
        leftDriveEncoder = new Encoder(2, 1);
        rightDriveEncoder.reset();
        rightDriveEncoder.setDistancePerPulse(.08168);
        leftShifter = new DoubleSolenoid(5, 6);
        rightShifter = new DoubleSolenoid(7, 8);
        leftCollector = new DoubleSolenoid(1, 2);
        rightCollector = new DoubleSolenoid(3, 4);
        rangeFinder = new Ultrasonic(8, 9);
        compressor = new Compressor(7, 1);
        compressor.start();
    }
    /**
     * Called when the robot enters the autonomous period for the first time.
     * This is called on a transition from any other state.
     */
    public void autonomousInit() {
        rightDriveEncoder.start();
        // Send command to Arduino for the light strip
        // set the variable distance to the distance of encoder since reset
        // Drive forward for 2 seconds with linear acceleration function
        // for (int i = 1; i <= 30; i++) { //takes 1.5 seconds reach full speed
        // drive.drive(0, (i / 100));
        // Timer.delay(.05);
        // Timed open-loop drive sequence toward the goal.
        drive.drive(0.3, 0.0);
        Timer.delay(2.0);
        drive.drive(0.5, 0.0);
        Timer.delay(2);
        drive.drive(.4, 0.0);
        Timer.delay(.1);
        drive.drive(0.35, 0.0);
        Timer.delay(.3);
        drive.drive(0.0, 0.0);
        // Deploy the collector; the repeated set(.8) calls during the delays
        // presumably keep the motor-safety watchdog fed (safety is enabled on
        // this motor in robotInit) — confirm.
        collectorMotor.set(.8);
        rightCollector.set(DoubleSolenoid.Value.kReverse);
        leftCollector.set(DoubleSolenoid.Value.kReverse);
        collectorMotor.set(.8);
        Timer.delay(.5);
        collectorMotor.set(.8);
        Timer.delay(.5);
        collectorMotor.set(.8);
        Timer.delay(.5);
        collectorMotor.set(0);
        // Shoot:
        // SHOULD WE ADD LOGIC TO TURN AROUND AFTER FIRING
        // quickly speed up motors, then wait for the ball to be shot
        shoot();
    }
    public void autonomousPeriodic() {
        // All autonomous work happens in autonomousInit; this short delay only
        // throttles the periodic loop.
        // what is this for?? - RJJ
        Timer.delay(.01);
    }
    /**
     * Called when the robot enters the teleop period for the first time. This
     * is called on a transition from any other state.
     */
    public void teleopInit() {
        // Start tracking drive distance; the compressor was already started in
        // robotInit and is started again here.
        rightDriveEncoder.start();
        leftDriveEncoder.start();
        compressor.start();
    }
/**
* This function is called periodically during operator control
*/
/**
 * Periodic operator-control loop: reads the joysticks and button box, drives
 * the robot with dead-zone-smoothed tank drive, and dispatches collector,
 * shifter, shooter, maneuver, and LED commands.
 */
public void teleopPeriodic() {
    // T E L E O P D R I V E C O D E
    getUltraRange();
    SmartDashboard.putBoolean("Collector Extended: ", toggleOut);
    distance = rightDriveEncoder.getDistance();
    SmartDashboard.putNumber("Distance away: ", distanceRange);
    // Read commands from the joysticks
    //sets yAxisRight and yAxisLeft to the axis of corresponding joysticks
    yAxisRight = rightJoystick.getAxis(Joystick.AxisType.kY);
    yAxisLeft = leftJoystick.getAxis(Joystick.AxisType.kY);
    //shoot is button 1, collect is 2, ground pass/dump is 3
    // collector is buttons 10 (out) and 11 (in)
    shoot = buttonsJoystick.getRawButton(1);
    collect = buttonsJoystick.getRawButton(2);
    collectReverse = buttonsJoystick.getRawButton(3);
    collectorButton11 = buttonsJoystick.getRawButton(11);
    collectorButton9 = buttonsJoystick.getRawButton(9);
    //Maneuvers (trigger on left is half turn, trigger on right is quarter turn)
    //NOTE: Reloading will be stopped when a maneuver is activated
    //NOTE: Maneuvers will not be activated if the collector motor is on
    //Buttons changed to 2 and 3, trigger is shifters
    rightTrigger = rightJoystick.getRawButton(1);
    leftTrigger = leftJoystick.getRawButton(1);
    quarterTurnLeft = leftJoystick.getRawButton(2);
    quarterTurnRight = rightJoystick.getRawButton(2);
    halfTurnRight = rightJoystick.getRawButton(3);
    // Collector toggle: arm on press, act once on release.
    if (collectorButton11) {
        toggleOut = true;
    }
    if (collectorButton9) {
        toggleIn = true;
    }
    if (toggleOut && !collectorButton11) { //when button 11 is let go, the toggle will commence
        collectOut();
        toggleOut = false;
    }
    if (toggleIn && !collectorButton9) { //when button 9 is let go, the toggle will commence
        collectIn();
        toggleIn = false;
    }
    //shifter code: both triggers held -> high gear; both released -> low gear
    if (rightTrigger && leftTrigger) {
        // if (rightShifter.get() == DoubleSolenoid.Value.kReverse) {
        shiftHigh();
    }
    if (!rightTrigger && !leftTrigger) {
        // if (leftShifter.get() == DoubleSolenoid.Value.kForward) {
        shiftLow();
    }
    // Dead-zone + linear rescale of each stick; see smoothAxis below.
    // (Replaces two duplicated copies of the same piecewise-linear mapping.)
    smoothVarLeft = smoothAxis(yAxisLeft);
    smoothVarRight = smoothAxis(yAxisRight);
    //drive using the joysticks (sticks read inverted, airplane-style)
    drive.tankDrive(-smoothVarLeft, -smoothVarRight);
    // Open-loop timed pivots; locked out while the collector is running.
    //does a clockwise 90 degree turn quickly
    if (quarterTurnRight && !collect && !collectReverse) {
        pivot(90);
    }
    //does a counter-clockwise 90 degree turn quickly
    if (quarterTurnLeft && !collect && !collectReverse) {
        pivot(-90);
    }
    if (halfTurnRight && !collect && !collectReverse) {
        pivot(180);
    }
    // T E L E O P S H O O T C O D E
    //Shooting
    //NOTE: You CANNOT shoot when the catapult is reloading OR when the collector is spinning in reverse OR when the collector is in
    if (shoot && !isReloading && !collectReverse && rightCollector.get() == DoubleSolenoid.Value.kReverse) {
        shoot();
    }
    // if button 2 on support function joystick is pressed, run the collector motor at 90%
    // if button 3 on support function joystick is pressed, run the collector motor in reverse at 90% (ground pass)
    if (collect) {
        collect();
    }
    if (collectReverse) {
        pass();
    }
    if (limitStat) {
        //make LED some color
    } else {
        /* make LED's do whatever they normally do when not notifying
         * the drivers that the bot is in some state
         */
    }
    // Display each encoder rate under its own key (both sides previously wrote
    // to the same "Speed" key, so the second call overwrote the first).
    SmartDashboard.putNumber("Right Speed", rightDriveEncoder.getRate());
    SmartDashboard.putNumber("Left Speed", leftDriveEncoder.getRate());
}

/**
 * Maps a raw joystick Y value to a drive power: 0 inside the +/-p dead zone,
 * otherwise rescaled linearly so the output spans 0..1 between p and full
 * throttle (mirrored for reverse, spanning 0..-1 between -p and -1).
 */
private double smoothAxis(double axis) {
    if (axis < p && axis > -p) {
        return 0;
    }
    double gain = 1 / (1 - p);
    if (axis >= p) {
        return gain * axis + (1 - gain);
    }
    return gain * axis - (1 - gain);
}
public void disabledInit() {
    // Intentionally empty: nothing to do on entering the disabled state.
}
public void disabledPeriodic() {
    // Intentionally empty: nothing to do while disabled.
}
/**
* This function is called periodically during test mode
*/
/**
 * Test-mode loop: maps each button on the support-function joystick directly
 * to one subsystem action, and drives the collector pistons from the Y axis.
 */
public void testPeriodic() {
    if (buttonsJoystick.getRawButton(1)) {
        shoot();
    }
    if (buttonsJoystick.getRawButton(2)) {
        collect();
    }
    if (buttonsJoystick.getRawButton(3)) {
        pass();
    }
    if (buttonsJoystick.getRawButton(4)) {
        pivot(90);
    }
    if (buttonsJoystick.getRawButton(5)) {
        collectOut();
    }
    if (buttonsJoystick.getRawButton(6)) {
        collectIn();
    }
    if (buttonsJoystick.getRawButton(7)) {
        shiftHigh();
    }
    if (buttonsJoystick.getRawButton(8)) {
        shiftLow();
    }
    if (buttonsJoystick.getRawButton(9)) {
        pivot(-90);
    }
    if (buttonsJoystick.getRawButton(10)) {
        pivot(180);
    }
    if (buttonsJoystick.getRawButton(11)) {
        // Manually bit-bang a frame to the Arduino LED controller.
        arduinoSignifier.set(true);
        arduinoSignal.set(false);
        Timer.delay(.01);
        arduinoSignal.set(true);
        Timer.delay(.01);
        arduinoSignal.set(true);
        arduinoSignifier.set(false);
    }
    // Y axis beyond +/-25% extends/retracts the collector pistons directly;
    // otherwise the solenoids are released (kOff).
    x = buttonsJoystick.getAxis(Joystick.AxisType.kY);
    if (x > .25) {
        leftCollector.set(DoubleSolenoid.Value.kForward);
        rightCollector.set(DoubleSolenoid.Value.kForward);
    } else if (x < -.25) {
        leftCollector.set(DoubleSolenoid.Value.kReverse);
        rightCollector.set(DoubleSolenoid.Value.kReverse);
    } else {
        leftCollector.set(DoubleSolenoid.Value.kOff);
        rightCollector.set(DoubleSolenoid.Value.kOff);
    }
}
// bring shooter up then down
/**
 * Fires the catapult, then slowly winds it back down (reload). Blocks the
 * calling thread for roughly 3.65 seconds; the compressor is stopped for the
 * duration of the shot.
 */
private void shoot() {
    // for (int i = 1; i <= endLoopShoot; i++) { //takes half a second to reach full speed
    // shooterMotors.set(1);
    // Timer.delay(.01);
    compressor.stop();
    Timer.delay(.25);
    shooterMotors.set(1); // full power up to launch
    Timer.delay(1.4);
    shooterMotors.set(0);
    Timer.delay(.5);
    //start reload
    // remember to set negative
    shooterMotors.set(-.15); // slow reverse to lower the arm back down
    Timer.delay(1.5);
    shooterMotors.set(0);
    compressor.start();
    // Earlier potentiometer-based reload logic, kept for reference:
    // compressor.start();
    // isReloading = true; //prevents shooting when being reloaded
    // if (shoot == false && isReloading == false) {
    // shooterMotors.set(0);
    // //continues reloading if it was stopped
    // if (shooterPotVal > 500) {
    // shooterMotors.set(-.2);
    // //finish reload
    // shooterPotVal = (int) shooterPot.get();
    // if (shooterPotVal <= 500) {
    // shooterMotors.set(-.05);
    // if (shooterPotVal <= 300) {
    // shooterMotors.set(0);
    // isReloading = false;
}
// runs collect forward relies on safety config disabling
private void collect() {
    // Spin the collector roller inward at 80% power. No explicit stop here:
    // relies on the motor-safety config elsewhere to cut power.
    collectorMotor.set(.8);
}
// runs collect backward relies on safety config disabling
private void pass() {
    // Run the collector roller in reverse at 90% power (ground pass / dump).
    collectorMotor.set(-.9);
}
// pivots robot by some angle, positive is right, negative is left
private void pivot(float angle_deg) {
    // Open-loop timed pivot: full throttle with curve = +/-1 for
    // |angle_deg| / 90 seconds. NOTE(review): assumes the robot turns at
    // about 90 degrees per second at full power -- confirm on the field;
    // no gyro/encoder feedback is used.
    //for (int i = 1; i <= endLoopDrive; i++) { ///takes 1/10th of a second reach full speed
    //drive.drive(0, (i / endLoopDrive));
    //Timer.delay(.01);
    drive.drive(1, signum(angle_deg));
    Timer.delay(Math.abs(angle_deg / 90));
    drive.drive(0, 0);
}
/**
 * Sign of {@code num} as an int: 1 for positive, -1 for negative, 0 otherwise.
 * NaN falls through both comparisons and yields 0, exactly as the original
 * if/else chain did.
 */
private int signum(float num) {
    return (num > 0) ? 1 : ((num < 0) ? -1 : 0);
}
private void collectOut() {
    // Deploy the collector: start the roller, then push both pistons forward.
    // NOTE(review): the roller is left running; callers are expected to stop it.
    collectorMotor.set(.8);
    rightCollector.set(DoubleSolenoid.Value.kForward);
    leftCollector.set(DoubleSolenoid.Value.kForward);
}
private void collectIn() {
    // Retract the collector: roller kept running, both pistons reversed.
    collectorMotor.set(.8);
    rightCollector.set(DoubleSolenoid.Value.kReverse);
    leftCollector.set(DoubleSolenoid.Value.kReverse);
}
private void shiftHigh() {
    // Shift both gearboxes into high gear.
    leftShifter.set(DoubleSolenoid.Value.kForward);
    rightShifter.set(DoubleSolenoid.Value.kForward);
}
private void shiftLow() {
    // Shift both gearboxes into low gear.
    leftShifter.set(DoubleSolenoid.Value.kReverse);
    rightShifter.set(DoubleSolenoid.Value.kReverse);
}
private void sendArduino(boolean one, boolean two, boolean three, boolean four) {
    // The function to send a 4-bit command to the Arduino driving the LED
    // strip: raise the signifier line, clock each bit out on the signal line
    // 10 ms apart, then drop the signifier to mark the end of the frame.
    arduinoSignifier.set(true);
    arduinoSignal.set(one);
    Timer.delay(.01);
    arduinoSignal.set(two);
    Timer.delay(.01);
    arduinoSignal.set(three);
    Timer.delay(.01);
    arduinoSignal.set(four);
    arduinoSignifier.set(false);
}
/**
 * Reads the ultrasonic range finder and signals the drivers via the Arduino
 * LED strip: green when in shooting range (60-72 in), red when too close
 * (3-60 in), yellow when too far (beyond 72 in).
 */
private void getUltraRange() {
    distanceRange = rangeFinder.getRangeInches();
    if (distanceRange >= 60 && distanceRange <= 72) {
        sendArduino(false, true, true, false); //green
    } else if (distanceRange >= 3 && distanceRange < 60) {
        sendArduino(true, false, false, false); //red
    } else if (distanceRange > 72) {
        // Was a duplicate of the first condition, so the yellow signal could
        // never fire; "too far" is the remaining meaningful case -- confirm
        // this matches the drivers' expectation.
        sendArduino(false, false, true, false); //yellow
    }
}
}
|
/* Open Source Software - may be modified and shared by FRC teams. The code   */
/* must be accompanied by the FIRST BSD license file in the root directory of */
/* the project.                                                               */
package edu.team3182.main;
import com.sun.squawk.util.Arrays;
import edu.wpi.first.wpilibj.AnalogChannel;
import edu.wpi.first.wpilibj.IterativeRobot;
import edu.wpi.first.wpilibj.Joystick;
import edu.wpi.first.wpilibj.RobotDrive;
import edu.wpi.first.wpilibj.Talon;
import edu.wpi.first.wpilibj.Timer;
import edu.wpi.first.wpilibj.Encoder;
import edu.wpi.first.wpilibj.Compressor;
import edu.wpi.first.wpilibj.DigitalInput;
import edu.wpi.first.wpilibj.DigitalOutput;
import edu.wpi.first.wpilibj.DoubleSolenoid;
import edu.wpi.first.wpilibj.camera.AxisCamera;
import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard;
import java.lang.Thread;
import edu.wpi.first.wpilibj.communication.Semaphore;
/**
* The VM is configured to automatically run this class, and to call the
* functions corresponding to each mode, as described in the IterativeRobot
* documentation. If you change the name of this class or the package after
* creating this project, you must also update the manifest file in the resource
* directory.
*/
/**
 * Iterative FRC robot program (second revision): tank drive with dead-zone
 * smoothing, pneumatic collector/shifters, catapult shooter, and an
 * Arduino-driven LED strip commanded over two digital output lines. Test
 * mode delegates to subsystem worker threads (DriveTrain/Collector/Shooter/
 * Sensors).
 */
public class Robot3182 extends IterativeRobot {
    //Initialization of code for robot drive functions
    private RobotDrive drive;
    private Joystick rightJoystick;
    private Joystick leftJoystick;
    //Initialization of code for robot appendage functions
    private Joystick buttonsJoystick;
    private Talon shooterMotors;
    private Talon collectorMotor;
    private DoubleSolenoid leftShifter;
    private DoubleSolenoid rightShifter;
    private DoubleSolenoid leftCollector;
    private DoubleSolenoid rightCollector;
    private Compressor compressor;
    // Initialization of code for robot sensors
    private Encoder rightDriveEncoder;
    private Encoder leftDriveEncoder;
    public SmartDashboard dash;
    private DigitalOutput arduinoSignal;    // data line to the Arduino
    private DigitalOutput arduinoSignifier; // tells the Arduino when to read data
    private AnalogChannel rangeFinder;
    // Initialize variables to support functions above
    // yAxisLeft/Right read in values of joysticks; values are inverted, airplane-style
    double yAxisRight;
    double yAxisLeft;
    double distance; // drive encoder distance since reset
    boolean toggleOut;
    boolean toggleIn;
    boolean collectorButton11;
    boolean collectorButton9;
    boolean signalLight = false;
    boolean shoot = false;
    boolean reverseShooter = false;
    boolean collect = false;
    boolean collectReverse = false;
    boolean collectorFoward = false;
    boolean quarterTurnLeft = false;
    boolean quarterTurnRight = false;
    boolean halfTurnRight = false;
    boolean rightTrigger = false;
    boolean leftTrigger = false;
    boolean limitStat;
    // Last 4-bit frame sent to the Arduino; used to suppress duplicate sends.
    boolean[] lightData = new boolean[]{false, false, false, false};
    boolean[] dummy = new boolean[4];
    boolean isSame = false;
    double p = 0.10; //dead zone of joysticks for drive is between -p and p
    double smoothVarRight = 0; // smoothed drive power, right side
    double smoothVarLeft = 0;  // smoothed drive power, left side
    int shooterPotVal; //position of catapult
    double distanceRange;
    double getVoltage;
    double getAverageVoltage;
    //Coefficients of exponential function to ramp up speed of catapult (so ball doesn't fall out)
    final double a = .005;
    final double b = .9;
    //
    double x;

    /**
     * Called when the robot is first turned on. This is a substitute for using
     * the constructor in the class for consistency. This method is only called
     * once.
     */
    public void robotInit() {
        //camera = AxisCamera.getInstance();
        drive = new RobotDrive(1, 2);
        drive.setSafetyEnabled(false);
        rightJoystick = new Joystick(1);
        leftJoystick = new Joystick(2);
        buttonsJoystick = new Joystick(3);
        //the parameter will probably change depending on where the limit switch is
        arduinoSignal = new DigitalOutput(5); //data line
        arduinoSignifier = new DigitalOutput(6); //tells arduino when to read data
        //UNCOMMENT WHEN remainder of electronics board is complete
        shooterMotors = new Talon(4);
        collectorMotor = new Talon(3);
        collectorMotor.setSafetyEnabled(true);
        shooterMotors.setSafetyEnabled(false);
        rightDriveEncoder = new Encoder(4, 3);
        leftDriveEncoder = new Encoder(2, 1);
        rightDriveEncoder.reset();
        rightDriveEncoder.setDistancePerPulse(.08168);
        leftShifter = new DoubleSolenoid(5, 6);
        rightShifter = new DoubleSolenoid(7, 8);
        leftCollector = new DoubleSolenoid(1, 2);
        rightCollector = new DoubleSolenoid(3, 4);
        rangeFinder = new AnalogChannel(1, 2);
        compressor = new Compressor(7, 1);
        compressor.start();
    }

    /**
     * Called when the robot enters the autonomous period for the first time.
     * Blocking routine: LED animation, stepped drive forward, stow collector,
     * then fire the catapult.
     */
    public void autonomousInit() {
        //Send command to Arduino for the light strip
        sendArduino(true, false, true, false); //charging animation
        sendArduino(false, false, false, false); //stop it immediately after it finishes
        // Stepped throttle ramp forward, then taper to a stop.
        drive.drive(0.3, 0.0);
        Timer.delay(2.0);
        drive.drive(0.5, 0.0);
        Timer.delay(2);
        drive.drive(.4, 0.0);
        Timer.delay(.1);
        drive.drive(0.35, 0.0);
        Timer.delay(.3);
        drive.drive(0.0, 0.0);
        // Stow the collector while keeping the roller running to hold the
        // ball; repeated set() calls keep the motor-safety watchdog fed
        // through the delays (safety is enabled for this motor).
        collectorMotor.set(.8);
        rightCollector.set(DoubleSolenoid.Value.kReverse);
        leftCollector.set(DoubleSolenoid.Value.kReverse);
        collectorMotor.set(.8);
        Timer.delay(.5);
        collectorMotor.set(.8);
        Timer.delay(.5);
        collectorMotor.set(.8);
        Timer.delay(.5);
        collectorMotor.set(0);
        //Shoot:
        // SHOULD WE ADD LOGIC TO TURN AROUND AFTER FIRING
        //quickly speed up motors, then wait for the ball to be shot
        shoot();
    }

    public void autonomousPeriodic() {
        // Throttle the periodic loop; all the work happens in autonomousInit.
        Timer.delay(.01);
    }

    /**
     * Called on entry to teleop: start the drive encoders and the compressor.
     */
    public void teleopInit() {
        rightDriveEncoder.start();
        leftDriveEncoder.start();
        compressor.start();
    }

    /**
     * Periodic operator control: reads joysticks/buttons, drives the robot
     * with dead-zone-smoothed tank drive, and dispatches collector, shifter,
     * shooter, and LED commands.
     */
    public void teleopPeriodic() {
        // T E L E O P D R I V E C O D E
        SmartDashboard.putBoolean("Collector Extended: ", toggleOut);
        //testing voltage for analog rangefinder
        distance = rightDriveEncoder.getDistance();
        getAverageVoltage = rangeFinder.getAverageVoltage();
        getVoltage = rangeFinder.getVoltage();
        SmartDashboard.putNumber("Distance away: ", distanceRange);
        //System.out.println("Average Voltage: " + getAverageVoltage);
        System.out.println("Get Voltage : " + getVoltage);
        // Read commands from the joysticks
        //sets yAxisRight and yAxisLeft to the axis of corresponding joysticks
        yAxisRight = rightJoystick.getAxis(Joystick.AxisType.kY);
        yAxisLeft = leftJoystick.getAxis(Joystick.AxisType.kY);
        //shoot is button 1, collect is 3, signal light is 4, ground pass/dump is 5
        // collector toggle is buttons 9 (out) and 11 (in)
        shoot = buttonsJoystick.getRawButton(1);
        collect = buttonsJoystick.getRawButton(3);
        signalLight = buttonsJoystick.getRawButton(4);
        collectReverse = buttonsJoystick.getRawButton(5);
        collectorButton11 = buttonsJoystick.getRawButton(11);
        collectorButton9 = buttonsJoystick.getRawButton(9);
        //Maneuvers (trigger on left is half turn, trigger on right is quarter turn)
        //NOTE: Reloading will be stopped when a maneuver is activated
        //NOTE: Maneuvers will not be activated if the collector motor is on
        //Buttons changed to 2 and 3, trigger is shifters
        rightTrigger = rightJoystick.getRawButton(1);
        leftTrigger = leftJoystick.getRawButton(1);
        quarterTurnLeft = leftJoystick.getRawButton(2);
        quarterTurnRight = rightJoystick.getRawButton(2);
        halfTurnRight = rightJoystick.getRawButton(3);
        // Collector toggle: arm on press, act once on release.
        // NOTE(review): here button 11 retracts (collectIn) and button 9
        // deploys -- the opposite of the earlier revision; confirm intended.
        if (collectorButton11) {
            toggleOut = true;
        }
        if (collectorButton9) {
            toggleIn = true;
        }
        if (toggleOut && !collectorButton11) { //when the button is let go, the toggle will commence
            collectIn();
            toggleOut = false;
        }
        if (toggleIn && !collectorButton9) { //when the button is let go, the toggle will commence
            collectOut();
            toggleIn = false;
        }
        // Collector roller: forward at 80%, reverse (ground pass) at 90%.
        if (collect) {
            collect();
        }
        if (collectReverse) {
            pass();
        }
        //shifter code: both triggers held -> high gear; both released -> low gear
        if (rightTrigger && leftTrigger) {
            // if (rightShifter.get() == DoubleSolenoid.Value.kReverse) {
            shiftHigh();
        }
        if (!rightTrigger && !leftTrigger) {
            // if (leftShifter.get() == DoubleSolenoid.Value.kForward) {
            shiftLow();
        }
        // Dead-zone + linear rescale of each stick; see smoothAxis below.
        smoothVarLeft = smoothAxis(yAxisLeft);
        smoothVarRight = smoothAxis(yAxisRight);
        //drive using the joysticks (sticks read inverted, airplane-style)
        drive.tankDrive(-smoothVarLeft, -smoothVarRight);
        // Open-loop timed pivots; locked out while the collector is running.
        if (quarterTurnRight && !collect && !collectReverse) {
            pivot(90);
        }
        if (quarterTurnLeft && !collect && !collectReverse) {
            pivot(-90);
        }
        if (halfTurnRight && !collect && !collectReverse) {
            pivot(180);
        }
        // T E L E O P S H O O T C O D E
        //NOTE: You CANNOT shoot when the collector is spinning in reverse OR when the collector is in
        if (shoot && !collectReverse && rightCollector.get() == DoubleSolenoid.Value.kReverse) {
            shoot();
            sendArduino(false, true, false, false);
        }
        if (signalLight) {
            //make LED some color as a signal to other teams
            sendArduino(false, false, true, true);
        }
        // Display each encoder rate under its own key (both sides previously
        // wrote to the same "Speed" key, so the second overwrote the first).
        SmartDashboard.putNumber("Right Speed", rightDriveEncoder.getRate());
        SmartDashboard.putNumber("Left Speed", leftDriveEncoder.getRate());
        // Range-based LED feedback. NOTE(review): thresholds 3..72 look like
        // inches, but getVoltage is raw volts -- confirm the sensor scaling.
        if (getVoltage >= 60 && getVoltage <= 72) {
            sendArduino(false, true, true, false); //green
        } else if (getVoltage >= 3 && getVoltage < 60) {
            sendArduino(true, false, false, false); //red
        } else if (getVoltage > 72) {
            // Was a duplicate of the first condition, so yellow could never fire.
            sendArduino(false, false, true, false); //yellow
        }
        //if nothing is happening
        if (getVoltage > 60 && !shoot && !signalLight && !collect && !collectReverse) {
            sendArduino(false, false, false, true); //idle
        }
    }

    public void disabledInit() {
        // Tell the LED strip we are disabled.
        sendArduino(true, true, false, false);
    }

    public void disabledPeriodic() {
    }

    /**
     * Spins up the subsystem worker threads used by test mode.
     */
    public void testInit() {
        DriveTrain driveTrainVar = new DriveTrain();
        new Thread(driveTrainVar).start();
        Collector collectVar = new Collector();
        new Thread(collectVar).start();
        Shooter shooterVar = new Shooter();
        new Thread(shooterVar).start();
        Sensors sensorsVar = new Sensors();
        new Thread(sensorsVar).start();
    }

    /**
     * This function is called periodically during test mode: forwards the
     * button states to the subsystem threads' static command flags.
     */
    public void testPeriodic() {
        Shooter.shootCommand = buttonsJoystick.getRawButton(1);
        Collector.collectCommand = buttonsJoystick.getRawButton(2);
        Collector.passCommand = buttonsJoystick.getRawButton(3);
        DriveTrain.quarterTurnRightCommand = buttonsJoystick.getRawButton(4);
        Collector.collectInCommand = buttonsJoystick.getRawButton(5);
        Collector.collectOutCommand = buttonsJoystick.getRawButton(6);
        if (buttonsJoystick.getRawButton(7)) {
            shiftHigh();
        }
        if (buttonsJoystick.getRawButton(8)) {
            shiftLow();
        }
        DriveTrain.quarterTurnLeftCommand = buttonsJoystick.getRawButton(9);
        DriveTrain.halfTurnRightCommand = buttonsJoystick.getRawButton(10);
        if (buttonsJoystick.getRawButton(11)) {
            // Manual bit-bang of a frame, followed by a deduplicated send.
            arduinoSignifier.set(true);
            arduinoSignal.set(false);
            Timer.delay(.01);
            arduinoSignal.set(true);
            Timer.delay(.01);
            arduinoSignal.set(true);
            Timer.delay(.01);
            arduinoSignal.set(true);
            arduinoSignifier.set(false);
            sendArduino(false, true, false, true);
        }
        // x = buttonsJoystick.getAxis(Joystick.AxisType.kY);
        // if (x > .25) {
        // leftCollector.set(DoubleSolenoid.Value.kForward);
        // rightCollector.set(DoubleSolenoid.Value.kForward);
        // } else if (x < -.25) {
        // leftCollector.set(DoubleSolenoid.Value.kReverse);
        // rightCollector.set(DoubleSolenoid.Value.kReverse);
        // } else {
        // leftCollector.set(DoubleSolenoid.Value.kOff);
        // rightCollector.set(DoubleSolenoid.Value.kOff);
    }

    /**
     * Fires the catapult, then winds it back down (reload). Blocks for
     * roughly 4.4 seconds; the compressor is stopped during the shot, and the
     * collector is cycled in/out first to clear the ball path.
     */
    private void shoot() {
        compressor.stop();
        collectorMotor.set(.8);
        Timer.delay(.25);
        collectIn();
        Timer.delay(.3);
        collectOut();
        Timer.delay(.45);
        shooterMotors.set(1);
        Timer.delay(1.4);
        shooterMotors.set(0);
        Timer.delay(.5);
        //start reload
        collectorMotor.set(0);
        // remember to set negative
        shooterMotors.set(-.15);
        Timer.delay(1.5);
        shooterMotors.set(0);
        compressor.start();
    }

    // runs collector forward; relies on safety config disabling to stop it
    private void collect() {
        sendArduino(false, true, false, false);
        collectorMotor.set(.8);
    }

    // runs collector backward (ground pass); relies on safety config disabling
    private void pass() {
        collectorMotor.set(-.9);
    }

    // pivots robot by some angle, positive is right, negative is left
    // NOTE(review): open-loop timed turn assuming ~90 deg/s at full power.
    private void pivot(float angle_deg) {
        drive.drive(1, signum(angle_deg));
        Timer.delay(Math.abs(angle_deg / 90));
        drive.drive(0, 0);
    }

    // Sign of num as -1/0/1 (NaN yields 0: both comparisons are false).
    private int signum(float num) {
        if (num > 0) {
            return 1;
        } else if (num < 0) {
            return -1;
        } else {
            return 0;
        }
    }

    // Retract the collector (roller kept running to hold the ball).
    // NOTE(review): kForward = "in" here, the opposite of the earlier
    // revision -- presumably rewired; confirm against the hardware.
    private void collectIn() {
        collectorMotor.set(.8);
        rightCollector.set(DoubleSolenoid.Value.kForward);
        leftCollector.set(DoubleSolenoid.Value.kForward);
    }

    // Deploy the collector.
    private void collectOut() {
        collectorMotor.set(.8);
        rightCollector.set(DoubleSolenoid.Value.kReverse);
        leftCollector.set(DoubleSolenoid.Value.kReverse);
    }

    // Shift both gearboxes into high gear.
    private void shiftHigh() {
        leftShifter.set(DoubleSolenoid.Value.kForward);
        rightShifter.set(DoubleSolenoid.Value.kForward);
    }

    // Shift both gearboxes into low gear.
    private void shiftLow() {
        leftShifter.set(DoubleSolenoid.Value.kReverse);
        rightShifter.set(DoubleSolenoid.Value.kReverse);
    }

    /**
     * The function to send a 4-bit frame to the Arduino LED controller:
     * raise the signifier, clock the bits out on the signal line 10 ms apart,
     * then drop the signifier. A frame identical to the last one sent is
     * skipped, but the last-frame record is always updated.
     */
    private void sendArduino(boolean one, boolean two, boolean three, boolean four) {
        dummy = new boolean[]{one, two, three, four};
        isSame = Arrays.equals(dummy, lightData);
        if (!isSame) {
            arduinoSignifier.set(true);
            arduinoSignal.set(one);
            Timer.delay(.01);
            arduinoSignal.set(two);
            Timer.delay(.01);
            arduinoSignal.set(three);
            Timer.delay(.01);
            arduinoSignal.set(four);
            Timer.delay(.01);
            arduinoSignal.set(false);
            arduinoSignifier.set(false);
        }
        lightData = new boolean[]{one, two, three, four};
    }

    /**
     * Range-based LED feedback from the analog range-finder voltage.
     * NOTE(review): never called in this class -- teleopPeriodic inlines the
     * same logic. Thresholds look like inches rather than volts; confirm.
     */
    private void getUltraRange() {
        if (getVoltage >= 60 && getVoltage <= 72) {
            sendArduino(false, true, true, false); //green
        } else if (getVoltage >= 3 && getVoltage < 60) {
            sendArduino(true, false, false, false); //red
        } else if (getVoltage > 72) {
            // Was a duplicate of the first condition (yellow was unreachable).
            sendArduino(false, false, true, false); //yellow
        } else if (getVoltage > 60) {
            // NOTE(review): dead branch -- every value > 60 is handled above.
            // Left in place pending clarification of the intended idle case.
            sendArduino(false, false, false, true); //idle
        }
    }

    /**
     * Maps a raw joystick Y value to a drive power: 0 inside the +/-p dead
     * zone, otherwise rescaled linearly so the output spans 0..1 between p
     * and full throttle (mirrored for reverse). Replaces two duplicated
     * copies of the same piecewise-linear mapping in teleopPeriodic.
     */
    private double smoothAxis(double axis) {
        if (axis < p && axis > -p) {
            return 0;
        }
        double gain = 1 / (1 - p);
        if (axis >= p) {
            return gain * axis + (1 - gain);
        }
        return gain * axis - (1 - gain);
    }
}
|
package eventBRefinementSlicer.ui.editors;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.jface.viewers.CheckStateChangedEvent;
import org.eclipse.jface.viewers.ICheckStateListener;
import org.eclipse.jface.viewers.ILabelProviderListener;
import org.eclipse.jface.viewers.ITableColorProvider;
import org.eclipse.jface.viewers.ITableFontProvider;
import org.eclipse.jface.viewers.ITableLabelProvider;
import org.eclipse.jface.viewers.ITreeContentProvider;
import org.eclipse.jface.viewers.ITreeViewerListener;
import org.eclipse.jface.viewers.TreeExpansionEvent;
import org.eclipse.jface.viewers.Viewer;
import org.eclipse.swt.SWT;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.graphics.Font;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Tree;
import org.eclipse.swt.widgets.TreeColumn;
import org.eclipse.ui.IEditorInput;
import org.eclipse.ui.IEditorSite;
import org.eclipse.ui.PartInitException;
import org.eclipse.ui.dialogs.ContainerCheckedTreeViewer;
import org.eclipse.ui.part.EditorPart;
import org.eclipse.ui.part.FileEditorInput;
import org.eventb.core.IContextRoot;
import org.eventb.core.IEventBRoot;
import org.eventb.core.IMachineRoot;
import org.eventb.core.ast.FormulaFactory;
import org.rodinp.core.IInternalElement;
import org.rodinp.core.IRodinFile;
import org.rodinp.core.RodinCore;
import org.rodinp.core.RodinDBException;
import eventBRefinementSlicer.internal.datastructures.EventBAttribute;
import eventBRefinementSlicer.internal.datastructures.EventBAxiom;
import eventBRefinementSlicer.internal.datastructures.EventBCondition;
import eventBRefinementSlicer.internal.datastructures.EventBConstant;
import eventBRefinementSlicer.internal.datastructures.EventBContext;
import eventBRefinementSlicer.internal.datastructures.EventBDependencies;
import eventBRefinementSlicer.internal.datastructures.EventBElement;
import eventBRefinementSlicer.internal.datastructures.EventBMachine;
import eventBRefinementSlicer.internal.datastructures.EventBUnit;
import eventBRefinementSlicer.ui.jobs.EventBDependencyAnalysisJob;
/**
* The editor in charge of selecting which parts of an EventB machine to use in
* the slicing of refinements
*
* @author Aivar Kripsaar
*
*/
public class SelectionEditor extends EditorPart {
private String LABEL_CHECKBOX = "";
private String LABEL_LABEL = "Label";
private String LABEL_CONTENT = "Content";
private String LABEL_COMMENT = "Comment";
private IRodinFile rodinFile;
private IMachineRoot machineRoot;
private EventBMachine machine;
private EventBTreeSubcategory[] treeCategories;
private Map<EventBElement, Integer> selectionDependencies = new HashMap<>();
private ContainerCheckedTreeViewer treeViewer = null;
public SelectionEditor() {
    // No construction-time work; all setup happens in init(...).
}
@Override
public void doSave(IProgressMonitor monitor) {
    // Intentionally a no-op: this editor never becomes dirty (see isDirty).
}
@Override
public void doSaveAs() {
    // Intentionally a no-op: "Save As" is disallowed (see isSaveAsAllowed).
}
/**
 * Initializes the editor from its input: resolves the backing Rodin file,
 * loads the Event-B machine, and kicks off dependency analysis.
 *
 * @throws PartInitException if the input is not a machine root or the
 *         machine cannot be loaded from the Rodin database
 */
@Override
public void init(IEditorSite site, IEditorInput input) throws PartInitException {
    setSite(site);
    setInput(input);
    rodinFile = getRodinFileFromInput(input);
    IInternalElement internalElementRoot = rodinFile.getRoot();
    if (!(internalElementRoot instanceof IMachineRoot)) {
        // Previously an assert (also admitting IContextRoot) followed by an
        // unconditional cast, which would throw ClassCastException for a
        // context root when asserts are disabled. Fail with the declared
        // exception type instead.
        throw new PartInitException("Editor input is not an Event-B machine: " + input.getName());
    }
    machineRoot = (IMachineRoot) internalElementRoot;
    try {
        machine = new EventBMachine(machineRoot);
    } catch (RodinDBException e) {
        // Previously printStackTrace() and continue, leaving machine == null
        // and causing a later NPE; surface the failure to the workbench.
        throw new PartInitException("Failed to load Event-B machine from " + rodinFile.getElementName(), e);
    }
    EventBDependencyAnalysisJob.doEventBDependencyAnalysis(machine);
}
/**
 * Resolves the Rodin counterpart of the workspace file behind the given
 * editor input.
 */
protected IRodinFile getRodinFileFromInput(IEditorInput input) {
    IFile workspaceFile = ((FileEditorInput) input).getFile();
    return RodinCore.valueOf(workspaceFile);
}
/**
 * Returns the Rodin file backing this editor.
 *
 * @throws IllegalStateException if init(...) has not run yet
 */
public IRodinFile getRodinFile() {
    if (rodinFile != null) {
        return rodinFile;
    }
    throw new IllegalStateException("Editor has not been initialized yet");
}
/**
 * Returns the formula factory of the machine root backing this editor.
 * NOTE(review): the cast looks redundant if IMachineRoot already extends
 * IEventBRoot -- confirm the type hierarchy before removing it.
 */
public FormulaFactory getFormulaFactory() {
    return ((IEventBRoot) machineRoot).getFormulaFactory();
}
@Override
public boolean isDirty() {
    // The selection editor currently never reports unsaved changes.
    return false;
}
@Override
public boolean isSaveAsAllowed() {
    // "Save As" is not supported by this editor.
    return false;
}
/**
 * Creates the SWT tree widget (checkbox style, full-row selection, scrollable)
 * with one column per title, builds the checked-tree viewer on top of it, and
 * packs every column to its content.
 */
private void createTree(Composite parent) {
    Tree tree = new Tree(parent, SWT.MULTI | SWT.FULL_SELECTION | SWT.BORDER | SWT.CHECK | SWT.V_SCROLL | SWT.H_SCROLL);
    tree.setLinesVisible(true);
    tree.setHeaderVisible(true);
    GridData gridData = new GridData(SWT.FILL, SWT.FILL, true, true);
    tree.setLayoutData(gridData);
    String[] titles = { LABEL_CHECKBOX, LABEL_LABEL, LABEL_CONTENT, LABEL_COMMENT };
    TreeColumn column;
    for (String title : titles) {
        column = new TreeColumn(tree, SWT.NONE);
        column.setText(title);
        if (title.equals(LABEL_CHECKBOX)) {
            // A fixed width for the checkbox column was considered but is
            // currently left to the pack() loop below.
            // column.setResizable(false);
            // column.setWidth(27);
        }
    }
    createContainerCheckedTreeViewer(tree, titles);
    for (TreeColumn oneColumn : tree.getColumns()) {
        oneColumn.pack();
    }
}
/**
 * Builds the {@link ContainerCheckedTreeViewer} over the given tree widget:
 * installs the label provider, expand/collapse listeners that repack the
 * columns, a check-state listener that maintains per-element dependency
 * counts, and a content provider exposing the machine's invariants, axioms,
 * variables, and constants as subcategories. The finished viewer is stored
 * in the {@code treeViewer} field.
 */
private void createContainerCheckedTreeViewer(Tree tree, String[] titles) {
    // Local deliberately shadows the field; it is assigned to the field at
    // the very end, once fully configured.
    ContainerCheckedTreeViewer treeViewer = new ContainerCheckedTreeViewer(tree);
    treeViewer.setColumnProperties(titles);
    treeViewer.setUseHashlookup(true);
    treeViewer.setLabelProvider(new LabelProvider());
    // Repack columns after every expand/collapse so content stays visible.
    treeViewer.addTreeListener(new ITreeViewerListener() {
        @Override
        public void treeExpanded(TreeExpansionEvent event) {
            Display.getDefault().asyncExec(new Runnable() {
                @Override
                public void run() {
                    for (TreeColumn column : tree.getColumns()) {
                        column.pack();
                    }
                }
            });
        }
        @Override
        public void treeCollapsed(TreeExpansionEvent event) {
            Display.getDefault().asyncExec(new Runnable() {
                @Override
                public void run() {
                    for (TreeColumn column : tree.getColumns()) {
                        column.pack();
                    }
                }
            });
        }
    });
    treeViewer.addCheckStateListener(new ICheckStateListener() {
        @Override
        public void checkStateChanged(CheckStateChangedEvent event) {
            treeViewer.update(event.getElement(), null);
            // Category rows have no dependencies of their own.
            if (event.getElement() instanceof EventBTreeSubcategory) {
                return;
            }
            // TODO: Maybe put this in its own method
            EventBDependencies dependencies = machine.getDependencies();
            handleSelectionDependencies(dependencies, event);
        }
        // Updates the dependency reference counts for both directions
        // (dependees and dependers) of the toggled element.
        private void handleSelectionDependencies(EventBDependencies dependencies, CheckStateChangedEvent event) {
            assert event.getElement() instanceof EventBTreeElement;
            EventBElement element = ((EventBTreeElement) event.getElement()).getOriginalElement();
            Set<EventBElement> dependees = dependencies.getDependeesForElement(element);
            Set<EventBElement> dependers = dependencies.getDependersForElement(element);
            handleSingleDependencyDirection(dependees, event);
            handleSingleDependencyDirection(dependers, event);
        }
        // Checking an element increments each related element's count;
        // unchecking decrements and removes entries that reach zero. The
        // owning tree row is refreshed so it can redraw its highlight.
        private void handleSingleDependencyDirection(Set<EventBElement> dependencySet, CheckStateChangedEvent event) {
            for (EventBElement dependency : dependencySet) {
                if (event.getChecked()) {
                    if (!selectionDependencies.containsKey(dependency)) {
                        selectionDependencies.put(dependency, 0);
                    }
                    selectionDependencies.put(dependency, selectionDependencies.get(dependency) + 1);
                } else {
                    if (selectionDependencies.containsKey(dependency)) {
                        selectionDependencies.put(dependency, selectionDependencies.get(dependency) - 1);
                        if (selectionDependencies.get(dependency).intValue() <= 0) {
                            selectionDependencies.remove(dependency);
                        }
                    }
                }
                // Find the tree row wrapping this element (it lives under
                // exactly one category) and refresh it.
                for (Object category : treeCategories) {
                    EventBTreeSubcategory treeCategory = (EventBTreeSubcategory) category;
                    EventBTreeElement treeElement = treeCategory.findTreeElement(dependency);
                    if (treeElement != null) {
                        treeViewer.update(treeElement, null);
                        break;
                    }
                }
            }
        }
    });
    treeViewer.setContentProvider(new ITreeContentProvider() {
        @Override
        public void inputChanged(Viewer viewer, Object oldInput, Object newInput) {
            // Nothing to do: input is set once and never swapped.
        }
        @Override
        public void dispose() {
            // No resources to release.
        }
        @Override
        public boolean hasChildren(Object element) {
            if (element instanceof EventBMachine) {
                EventBMachine machine = (EventBMachine) element;
                if (machine.getInvariants().isEmpty() && machine.getVariables().isEmpty()) {
                    return false;
                }
                return true;
            }
            if (element instanceof EventBTreeSubcategory) {
                return ((EventBTreeSubcategory) element).getChildren().length > 0;
            }
            return false;
        }
        @Override
        public Object getParent(Object element) {
            if (element instanceof EventBTreeElement) {
                return ((EventBTreeElement) element).getParent();
            }
            return null;
        }
        @Override
        public Object[] getElements(Object inputElement) {
            // Top level: four fixed subcategories. Axioms and constants are
            // collected from every context the machine sees. The freshly
            // built categories are cached in the treeCategories field.
            EventBMachine machine = (EventBMachine) inputElement;
            EventBTreeSubcategory invariants = new EventBTreeSubcategory("Invariants", machine, machine.getInvariants());
            EventBTreeSubcategory variables = new EventBTreeSubcategory("Variables", machine, machine.getVariables());
            List<EventBAxiom> axes = new ArrayList<>();
            List<EventBConstant> consts = new ArrayList<>();
            for (EventBContext context : machine.getSeenContexts()) {
                axes.addAll(context.getAxioms());
                consts.addAll(context.getConstants());
            }
            EventBTreeSubcategory axioms = new EventBTreeSubcategory("Axioms", machine, axes);
            EventBTreeSubcategory constants = new EventBTreeSubcategory("Constants", machine, consts);
            EventBTreeSubcategory[] treeChildren = { invariants, axioms, variables, constants };
            treeCategories = treeChildren;
            return treeChildren;
        }
        @Override
        public Object[] getChildren(Object parentElement) {
            if ((parentElement instanceof EventBMachine)) {
                return getElements(parentElement);
            }
            if (parentElement instanceof EventBTreeSubcategory) {
                return ((EventBTreeSubcategory) parentElement).children;
            }
            return null;
        }
    });
    treeViewer.setInput(machine);
    this.treeViewer = treeViewer;
}
/**
 * A tree node grouping Event-B elements of one kind (e.g. "Invariants", "Variables")
 * under either a parent unit (machine/context) or a parent tree element.
 * Exactly one of {@code parentUnit} / {@code parentElement} is non-null, depending
 * on which constructor was used.
 */
class EventBTreeSubcategory {
    final String label;
    final EventBUnit parentUnit;
    final EventBTreeElement parentElement;
    final EventBTreeElement[] children;

    /**
     * Creates a category directly under a unit (machine or context).
     *
     * @param label    display name of the category
     * @param parent   owning unit; stored as {@code parentUnit}, {@code parentElement} stays null
     * @param children original elements to wrap as tree children
     */
    public EventBTreeSubcategory(String label, EventBUnit parent, List<? extends EventBElement> children) {
        this.label = label;
        this.parentUnit = parent;
        this.parentElement = null;
        this.children = wrapChildren(children);
    }

    /**
     * Creates a category nested under another tree element.
     *
     * @param label    display name of the category
     * @param parent   owning tree element; stored as {@code parentElement}, {@code parentUnit} stays null
     * @param children original elements to wrap as tree children
     */
    public EventBTreeSubcategory(String label, EventBTreeElement parent, List<? extends EventBElement> children) {
        this.label = label;
        this.parentElement = parent;
        this.parentUnit = null;
        this.children = wrapChildren(children);
    }

    // Shared by both constructors: wraps each original element in an EventBTreeElement
    // whose parent is this category. Private and only dependent on 'this' as a reference,
    // so it is safe to call during construction.
    private EventBTreeElement[] wrapChildren(List<? extends EventBElement> originals) {
        List<EventBTreeElement> treeChildren = new ArrayList<>();
        for (EventBElement originalChild : originals) {
            treeChildren.add(new EventBTreeElement(this, originalChild));
        }
        return treeChildren.toArray(new EventBTreeElement[treeChildren.size()]);
    }

    public String getLabel() {
        return label;
    }

    public EventBUnit getParentUnit() {
        return parentUnit;
    }

    public EventBTreeElement getParentElement() {
        return parentElement;
    }

    public EventBTreeElement[] getChildren() {
        return children;
    }

    /**
     * Finds the tree wrapper for the given original element among this category's
     * children, or null if the element is not a child of this category.
     */
    public EventBTreeElement findTreeElement(EventBElement originalElement) {
        for (EventBTreeElement child : children) {
            if (child.getOriginalElement().equals(originalElement)) {
                return child;
            }
        }
        return null;
    }
}
/**
 * Immutable tree wrapper pairing an original Event-B element with the
 * subcategory node it is displayed under.
 */
class EventBTreeElement {
    final EventBTreeSubcategory parent;
    final EventBElement originalElement;

    /**
     * @param owningCategory the subcategory this element appears under in the tree
     * @param wrapped        the underlying Event-B element being displayed
     */
    public EventBTreeElement(EventBTreeSubcategory owningCategory, EventBElement wrapped) {
        parent = owningCategory;
        originalElement = wrapped;
    }

    /** Returns the subcategory node this element is displayed under. */
    public EventBTreeSubcategory getParent() {
        return parent;
    }

    /** Returns the wrapped Event-B element. */
    public EventBElement getOriginalElement() {
        return originalElement;
    }
}
/**
 * Table label provider for the Event-B element tree.
 *
 * Columns: 0 = checkbox/selection (no text), 1 = element/category label,
 * 2 = predicate (conditions only), 3 = comment. Element rows are color-coded
 * per column; rows that checked elements depend on are highlighted red, and
 * checked rows use the platform selection color.
 */
class LabelProvider implements ITableLabelProvider, ITableColorProvider, ITableFontProvider {

    @Override
    public void addListener(ILabelProviderListener listener) {
        // Intentionally left empty: this provider never fires label change events.
    }

    @Override
    public void dispose() {
        // Intentionally left empty: only shared system colors are used, nothing to dispose.
    }

    @Override
    public boolean isLabelProperty(Object element, String property) {
        return false;
    }

    @Override
    public void removeListener(ILabelProviderListener listener) {
        // Intentionally left empty: no listeners are ever registered.
    }

    @Override
    public Font getFont(Object element, int columnIndex) {
        return null; // default font for every cell
    }

    /**
     * Per-column foreground colors for element rows; category rows keep the default.
     */
    @Override
    public Color getForeground(Object element, int columnIndex) {
        if (element instanceof EventBTreeSubcategory) {
            // TODO: Add color coding for categories
            return null;
        }
        if (element instanceof EventBTreeElement) {
            switch (columnIndex) {
            case 0: // Selection Column carries only the checkbox
                break;
            case 1:
                return Display.getDefault().getSystemColor(SWT.COLOR_DARK_CYAN);
            case 2:
                return Display.getDefault().getSystemColor(SWT.COLOR_DARK_MAGENTA);
            case 3:
                return Display.getDefault().getSystemColor(SWT.COLOR_DARK_GREEN);
            default:
                break;
            }
        }
        return null;
    }

    /**
     * Background highlighting: checked rows get the selection color; unchecked rows
     * that some checked element depends on (tracked in selectionDependencies) get red.
     */
    @Override
    public Color getBackground(Object element, int columnIndex) {
        if (!(element instanceof EventBTreeElement)) {
            // TODO: Add color coding for categories
            return null;
        }
        if (treeViewer.getChecked(element)) {
            return Display.getDefault().getSystemColor(SWT.COLOR_LIST_SELECTION);
        }
        if (selectionDependencies.containsKey(((EventBTreeElement) element).getOriginalElement())) {
            return Display.getDefault().getSystemColor(SWT.COLOR_RED);
        }
        return null;
    }

    @Override
    public Image getColumnImage(Object element, int columnIndex) {
        return null; // text-only table, no cell images
    }

    /**
     * Cell text: category rows show their label in column 1 only; element rows show
     * label / predicate / comment in columns 1-3 (predicate only for conditions).
     * Rows that are neither conditions nor attributes get no text.
     */
    @Override
    public String getColumnText(Object element, int columnIndex) {
        if (element instanceof EventBTreeSubcategory) {
            if (columnIndex == 1) {
                return ((EventBTreeSubcategory) element).getLabel();
            }
        }
        if (!(element instanceof EventBTreeElement)) {
            return null;
        }
        element = ((EventBTreeElement) element).getOriginalElement();
        if (!(element instanceof EventBCondition || element instanceof EventBAttribute)) {
            return null;
        }
        EventBElement eventBElement = (EventBElement) element;
        switch (columnIndex) {
        case 0: // Selection Column: checkbox only, never text
            return null;
        case 1:
            return eventBElement.getLabel();
        case 2:
            // Only conditions (invariants/axioms) carry a predicate
            if (eventBElement instanceof EventBCondition) {
                return ((EventBCondition) eventBElement).getPredicate();
            }
            return null;
        case 3:
            return eventBElement.getComment();
        default:
            return null;
        }
    }
}
/**
 * Builds the view's UI: a single-column grid hosting the combined element tree
 * (invariants, axioms, variables and constants are shown as categories of one
 * tree rather than separate tables).
 */
@Override
public void createPartControl(Composite parent) {
    GridLayout layout = new GridLayout();
    layout.numColumns = 1;
    parent.setLayout(layout);
    createTree(parent);
}
@Override
public void setFocus() {
    // No focus handling implemented yet (auto-generated stub left intentionally empty).
}
}
|
package hex;
import water.*;
import water.exceptions.H2OIllegalArgumentException;
import water.fvec.Frame;
import water.util.TwoDimTable;
import java.lang.reflect.Method;
import java.util.*;
/** Container to hold the metric for a model as scored on a specific frame.
*
* The MetricBuilder class is used in a hot inner-loop of a Big Data pass, and
* when given a class-distribution, can be used to compute CM's, and AUC's "on
* the fly" during ModelBuilding - or after-the-fact with a Model and a new
* Frame to be scored.
*/
public class ModelMetrics extends Keyed<ModelMetrics> {
    public String _description;
    final Key _modelKey;
    final Key _frameKey;
    final ModelCategory _model_category;
    final long _model_checksum;
    final long _frame_checksum;
    public final long _scoring_time;

    // Cached fields - cached them when needed; transient so they are re-fetched
    // from the DKV rather than serialized with the metrics object.
    private transient Model _model;
    private transient Frame _frame;

    public final double _MSE; // Mean Squared Error (Every model is assumed to have this, otherwise leave at NaN)

    /**
     * Records the scoring metrics of {@code model} on {@code frame}. Checksums of
     * both are captured at construction so staleness can later be detected via
     * {@link #isForModel(Model)} / {@link #isForFrame(Frame)}.
     */
    public ModelMetrics(Model model, Frame frame, double MSE, String desc) {
        super(buildKey(model, frame));
        _description = desc;
        // Do not cache the Model/Frame objects now; model() and frame() fetch lazily.
        _modelKey = model._key;
        _frameKey = frame._key;
        _model_category = model._output.getModelCategory();
        _model_checksum = model.checksum();
        _frame_checksum = frame.checksum();
        _MSE = MSE;
        _scoring_time = System.currentTimeMillis();
    }

    /** Subclasses that track degrees of freedom (e.g. GLM metrics) override this. */
    public long residual_degrees_of_freedom() {
        throw new UnsupportedOperationException("residual degrees of freedom is not supported for this metric class");
    }

    @Override public String toString() {
        StringBuilder sb = new StringBuilder();
        // Class names follow the pattern "ModelMetrics<Type>"; strip the 12-char prefix.
        sb.append("Model Metrics Type: " + this.getClass().getSimpleName().substring(12) + "\n");
        sb.append(" Description: " + (_description == null ? "N/A" : _description) + "\n");
        sb.append(" model id: " + _modelKey + "\n");
        sb.append(" frame id: " + _frameKey + "\n");
        sb.append(" MSE: " + (float) _MSE + "\n");
        return sb.toString();
    }

    /** Lazily fetches and caches the scored model from the DKV. */
    public Model model() { return _model == null ? (_model = DKV.getGet(_modelKey)) : _model; }

    /** Lazily fetches and caches the scored frame from the DKV. */
    public Frame frame() { return _frame == null ? (_frame = DKV.getGet(_frameKey)) : _frame; }

    public double mse() { return _MSE; }

    // Classification-capable subclasses override these; the base class has none.
    public ConfusionMatrix cm() { return null; }
    public float[] hr() { return null; }
    public AUC2 auc_obj() { return null; }

    /**
     * Looks up the named metric on a model's preferred metrics object
     * (cross-validation, else validation, else training) via reflection,
     * falling back to the confusion matrix's methods.
     *
     * @param key       key of the model to inspect
     * @param criterion name of a no-arg metric method, e.g. "mse", "auc"
     * @return the metric value
     * @throws H2OIllegalArgumentException if the model, criterion, or metric method cannot be found
     */
    static public double getMetricFromModel(Key<Model> key, String criterion) {
        Model model = DKV.getGet(key);
        if (null == model) throw new H2OIllegalArgumentException("Cannot find model " + key);
        if (null == criterion || criterion.equals("")) throw new H2OIllegalArgumentException("Need a valid criterion, but got '" + criterion + "'.");
        ModelMetrics m =
                model._output._cross_validation_metrics != null ?
                        model._output._cross_validation_metrics :
                        model._output._validation_metrics != null ?
                                model._output._validation_metrics :
                                model._output._training_metrics;
        Method method = null;
        ConfusionMatrix cm = m.cm();
        try {
            method = m.getClass().getMethod(criterion);
        }
        catch (Exception e) {
            // fall through: try the confusion matrix next
        }
        if (null == method && null != cm) {
            try {
                method = cm.getClass().getMethod(criterion);
            }
            catch (Exception e) {
                // fall through: reported as "not found" below
            }
        }
        if (null == method)
            throw new H2OIllegalArgumentException("Failed to find ModelMetrics for criterion: " + criterion);
        double c;
        try {
            // Try the metrics object first; the method may actually belong to the CM.
            c = (double) method.invoke(m);
        } catch (Exception fallthru) {
            try {
                c = (double) method.invoke(cm);
            } catch (Exception e) {
                throw new H2OIllegalArgumentException(
                        "Failed to get metric: " + criterion + " from ModelMetrics object: " + m,
                        "Failed to get metric: " + criterion + " from ModelMetrics object: " + m + ", criterion: " + method + ", exception: " + e
                );
            }
        }
        return c;
    }

    /** Orders model keys by a named metric, ascending or descending. */
    private static class MetricsComparator implements Comparator<Key<Model>> {
        String _sort_by = null;
        boolean decreasing = false;

        public MetricsComparator(String sort_by, boolean decreasing) {
            this._sort_by = sort_by;
            this.decreasing = decreasing;
        }

        public int compare(Key<Model> key1, Key<Model> key2) {
            double c1 = getMetricFromModel(key1, _sort_by);
            double c2 = getMetricFromModel(key2, _sort_by);
            // BUG FIX: both branches previously returned Double.compare(c2, c1),
            // making the 'decreasing' flag a no-op. Ascending must compare (c1, c2).
            return decreasing ? Double.compare(c2, c1) : Double.compare(c1, c2);
        }
    }

    /**
     * Discovers which no-arg, double-returning metric methods are invocable on the
     * model's preferred metrics object and its confusion matrix (if any).
     *
     * @param key key of the model to inspect
     * @return names of usable metric criteria
     * @throws H2OIllegalArgumentException if the model cannot be found
     */
    public static Set<String> getAllowedMetrics(Key<Model> key) {
        Set<String> res = new HashSet<>();
        Model model = DKV.getGet(key);
        if (null == model) throw new H2OIllegalArgumentException("Cannot find model " + key);
        ModelMetrics m =
                model._output._cross_validation_metrics != null ?
                        model._output._cross_validation_metrics :
                        model._output._validation_metrics != null ?
                                model._output._validation_metrics :
                                model._output._training_metrics;
        // BUG FIX: m.cm() was previously dereferenced BEFORE the null check on m,
        // throwing NPE for models with no metrics at all.
        ConfusionMatrix cm = null;
        if (m != null) {
            cm = m.cm();
            for (Method meth : m.getClass().getMethods()) {
                if (meth.getName().equals("makeSchema")) continue;
                try {
                    // Invocation doubles as the filter: only no-arg methods that
                    // successfully return a double are advertised as metrics.
                    double c = (double) meth.invoke(m);
                    res.add(meth.getName());
                } catch (Exception e) {
                    // fall through: not a usable metric method
                }
            }
        }
        if (cm != null) {
            for (Method meth : cm.getClass().getMethods()) {
                try {
                    double c = (double) meth.invoke(cm);
                    res.add(meth.getName());
                } catch (Exception e) {
                    // fall through: not a usable metric method
                }
            }
        }
        return res;
    }

    /**
     * Return a new list of models sorted by the named criterion, such as "auc", mse", "hr", "err", "errCount",
     * "accuracy", "specificity", "recall", "precision", "mcc", "max_per_class_error", "F1", "F2", "F0point5". . .
     * @param sort_by criterion by which we should sort
     * @param decreasing sort by decreasing metrics or not
     * @param modelKeys keys of models to sort
     * @return keys of the models, sorted by the criterion
     */
    public static List<Key<Model>> sortModelsByMetric(String sort_by, boolean decreasing, List<Key<Model>> modelKeys) {
        List<Key<Model>> sorted = new ArrayList<>();
        sorted.addAll(modelKeys);
        Comparator<Key<Model>> c = new MetricsComparator(sort_by, decreasing);
        Collections.sort(sorted, c);
        return sorted;
    }

    /** Builds a variable-importance table from a VarImp holder, or null if absent. */
    public static TwoDimTable calcVarImp(VarImp vi) {
        if (vi == null) return null;
        double[] dbl_rel_imp = new double[vi._varimp.length];
        for (int i = 0; i < dbl_rel_imp.length; ++i) {
            dbl_rel_imp[i] = vi._varimp[i];
        }
        return calcVarImp(dbl_rel_imp, vi._names);
    }

    /** Float overload: widens to double and delegates. */
    public static TwoDimTable calcVarImp(final float[] rel_imp, String[] coef_names) {
        double[] dbl_rel_imp = new double[rel_imp.length];
        for (int i = 0; i < dbl_rel_imp.length; ++i) {
            dbl_rel_imp[i] = rel_imp[i];
        }
        return calcVarImp(dbl_rel_imp, coef_names);
    }

    /** Delegates with the default table/column headers. */
    public static TwoDimTable calcVarImp(final double[] rel_imp, String[] coef_names) {
        return calcVarImp(rel_imp, coef_names, "Variable Importances", new String[]{"Relative Importance", "Scaled Importance", "Percentage"});
    }

    /**
     * Builds a variable-importance table: rows sorted by descending relative
     * importance, with columns for relative, scaled (relative/max) and percentage
     * (relative/total) importance.
     *
     * @param rel_imp      raw relative importances (null yields a null table)
     * @param coef_names   names per importance; synthesized as "C1".."Cn" if null
     * @param table_header table title
     * @param col_headers  three column titles
     */
    public static TwoDimTable calcVarImp(final double[] rel_imp, String[] coef_names, String table_header, String[] col_headers) {
        if (rel_imp == null) return null;
        if (coef_names == null) {
            coef_names = new String[rel_imp.length];
            for (int i = 0; i < coef_names.length; i++)
                coef_names[i] = "C" + String.valueOf(i + 1);
        }
        // Sort in descending order by relative importance
        Integer[] sorted_idx = new Integer[rel_imp.length];
        for (int i = 0; i < sorted_idx.length; i++) sorted_idx[i] = i;
        Arrays.sort(sorted_idx, new Comparator<Integer>() {
            public int compare(Integer idx1, Integer idx2) {
                return Double.compare(-rel_imp[idx1], -rel_imp[idx2]);
            }
        });
        double total = 0;
        double max = rel_imp[sorted_idx[0]]; // largest importance, used for scaling
        String[] sorted_names = new String[rel_imp.length];
        double[][] sorted_imp = new double[rel_imp.length][3];
        // First pass to sum up relative importance measures
        int j = 0;
        for (int i : sorted_idx) {
            total += rel_imp[i];
            sorted_names[j] = coef_names[i];
            sorted_imp[j][0] = rel_imp[i];         // Relative importance
            sorted_imp[j++][1] = rel_imp[i] / max; // Scaled importance
        }
        // Second pass to calculate percentages (needs the final total)
        j = 0;
        for (int i : sorted_idx)
            sorted_imp[j++][2] = rel_imp[i] / total; // Percentage
        String[] col_types = new String[3];
        String[] col_formats = new String[3];
        Arrays.fill(col_types, "double");
        Arrays.fill(col_formats, "%5f");
        return new TwoDimTable(table_header, null, sorted_names, col_headers, col_types, col_formats, "Variable",
                new String[rel_imp.length][], sorted_imp);
    }

    // Key format encodes both checksums so metrics are invalidated when either
    // the model or the frame changes.
    private static Key<ModelMetrics> buildKey(Key model_key, long model_checksum, Key frame_key, long frame_checksum) {
        return Key.make("modelmetrics_" + model_key + "@" + model_checksum + "_on_" + frame_key + "@" + frame_checksum);
    }

    /** Builds the DKV key for (model, frame) metrics; null if there is no frame. */
    public static Key<ModelMetrics> buildKey(Model model, Frame frame) {
        return frame == null ? null : buildKey(model._key, model.checksum(), frame._key, frame.checksum());
    }

    public boolean isForModel(Model m) { return _model_checksum == m.checksum(); }
    public boolean isForFrame(Frame f) { return _frame_checksum == f.checksum(); }

    /** Fetches previously-stored metrics for (model, frame) from the DKV, or null. */
    public static ModelMetrics getFromDKV(Model model, Frame frame) {
        Value v = DKV.get(buildKey(model, frame));
        return null == v ? null : (ModelMetrics) v.get();
    }

    @Override protected long checksum_impl() { return _frame_checksum * 13 + _model_checksum * 17; }

    /** Class used to compute AUCs, CMs & HRs "on the fly" during other passes
     * over Big Data. This class is intended to be embedded in other MRTask
     * objects. The {@code perRow} method is called once-per-scored-row, and
     * the {@code reduce} method called once per MRTask.reduce, and the {@code
     * <init>} called once per MRTask.map.
     */
    public static abstract class MetricBuilder<T extends MetricBuilder<T>> extends Iced {
        transient public double[] _work;
        public double _sumsqe; // Sum-squared-error
        public long _count;    // number of actual rows seen
        public double _wcount; // weighted row count
        public double _wY;     // (Weighted) sum of the response
        public double _wYY;    // (Weighted) sum of the squared response

        /** Weighted standard deviation of the response (0 for fewer than 2 rows). */
        public double weightedSigma() {
            // double sampleCorrection = _count/(_count-1); //sample variance -> depends on the number of ACTUAL ROWS (not the weighted count)
            double sampleCorrection = 1; //this will make the result (and R^2) invariant to globally scaling the weights
            return _count <= 1 ? 0 : Math.sqrt(sampleCorrection * (_wYY / _wcount - (_wY * _wY) / (_wcount * _wcount)));
        }

        abstract public double[] perRow(double ds[], float yact[], Model m);

        /** Weighted/offset variant; the base implementation only supports weight=1, offset=0. */
        public double[] perRow(double ds[], float yact[], double weight, double offset, Model m) {
            assert (weight == 1 && offset == 0);
            return perRow(ds, yact, m);
        }

        /** Merges another builder's accumulators into this one (MRTask.reduce). */
        public void reduce(T mb) {
            _sumsqe += mb._sumsqe;
            _count += mb._count;
            _wcount += mb._wcount;
            _wY += mb._wY;
            _wYY += mb._wYY;
        }

        public void postGlobal() {}

        /**
         * Having computed a MetricBuilder, this method fills in a ModelMetrics
         * @param m Model
         * @param f Scored Frame
         * @param adaptedFrame Adapted Frame
         * @param preds Predictions of m on f (optional) @return Filled Model Metrics object
         */
        public abstract ModelMetrics makeModelMetrics(Model m, Frame f, Frame adaptedFrame, Frame preds);
    }
}
|
package com.cgi.eoss.ftep.worker.worker;
import com.cgi.eoss.ftep.clouds.service.Node;
import com.cgi.eoss.ftep.clouds.service.NodeProvisioningException;
import com.cgi.eoss.ftep.queues.service.FtepQueueService;
import com.cgi.eoss.ftep.worker.metrics.QueueAverage;
import com.cgi.eoss.ftep.worker.metrics.QueueMetricsService;
import lombok.extern.log4j.Log4j2;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
import java.time.Instant;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
/**
* <p>Service for autoscaling the number of worker nodes based on queue length</p>
*/
@Log4j2
@Service
@ConditionalOnProperty(name = "ftep.worker.autoscaler.enabled", havingValue = "true", matchIfMissing = true)
public class FtepWorkerAutoscaler {
    private final JobEnvironmentService jobEnvironmentService;
    private final FtepWorkerNodeManager nodeManager;
    private final FtepQueueService queueService;
    private final QueueMetricsService queueMetricsService;

    // Scaling Configuration
    // Queue length is sampled every 5 s; a scaling decision is evaluated every
    // minute over a 2-minute sliding window of samples.
    private static final long QUEUE_CHECK_INTERVAL_MS = 5L * 1000L;
    private static final long AUTOSCALER_INTERVAL_MS = 1L * 60L * 1000L;
    private static final long STATISTICS_WINDOW_MS = 2L * 60L * 1000L;

    private final int minWorkerNodes;
    private final int maxWorkerNodes;
    // Used to translate average queue length into a target node count
    private final int maxJobsPerNode;
    // Cool-down: minimum gap between two successful scaling actions
    private final long minSecondsBetweenScalingActions;
    private final long minimumHourFractionUptimeSeconds;
    // Message selector limiting queue-length checks to this worker's jobs
    // (empty string = count all jobs on the queue)
    private final String jobMessageSelector;

    // Prevents overlapping decide() runs from acting concurrently
    private final Lock scalingLock = new ReentrantLock();
    private Instant lastAutoscalingActionTime = Instant.MIN;

    @Autowired
    public FtepWorkerAutoscaler(FtepWorkerNodeManager nodeManager, FtepQueueService queueService, QueueMetricsService queueMetricsService,
            JobEnvironmentService jobEnvironmentService,
            @Qualifier("minWorkerNodes") int minWorkerNodes,
            @Qualifier("maxWorkerNodes") int maxWorkerNodes,
            @Qualifier("maxJobsPerNode") int maxJobsPerNode,
            @Qualifier("minSecondsBetweenScalingActions") long minSecondsBetweenScalingActions,
            @Qualifier("minimumHourFractionUptimeSeconds") long minimumHourFractionUptimeSeconds,
            @Qualifier("workerId") String workerId,
            @Qualifier("restrictedWorker") boolean restrictedWorker
    ) {
        this.nodeManager = nodeManager;
        this.queueService = queueService;
        this.queueMetricsService = queueMetricsService;
        this.jobEnvironmentService = jobEnvironmentService;
        this.minWorkerNodes = minWorkerNodes;
        this.maxWorkerNodes = maxWorkerNodes;
        this.maxJobsPerNode = maxJobsPerNode;
        this.minSecondsBetweenScalingActions = minSecondsBetweenScalingActions;
        this.minimumHourFractionUptimeSeconds = minimumHourFractionUptimeSeconds;
        // A restricted worker only counts jobs addressed to its own workerId
        this.jobMessageSelector = restrictedWorker ? String.format("workerId = '%s'", workerId) : "";
    }

    /** Samples the current job queue length and records it in the metrics window. */
    @Scheduled(fixedRate = QUEUE_CHECK_INTERVAL_MS, initialDelay = 10000L)
    public void getCurrentQueueLength() {
        long queueLength = queueService.getQueueLength(FtepQueueService.jobQueueName, jobMessageSelector);
        queueMetricsService.updateMetric(queueLength, STATISTICS_WINDOW_MS / 1000L);
    }

    /**
     * Periodic scaling decision: enforces the minimum node count, then — if enough
     * queue-length samples cover the statistics window — scales toward
     * ceil(avg queue length) / maxJobsPerNode. Skipped entirely while the
     * cool-down since the last scaling action has not elapsed, or if another
     * decision is already in progress (tryLock).
     */
    @Scheduled(fixedRate = AUTOSCALER_INTERVAL_MS, initialDelay = 10000L)
    public void decide() {
        if (scalingLock.tryLock()) {
            try {
                // Cool-down: do nothing until minSecondsBetweenScalingActions have passed
                if (Instant.now().minusSeconds(minSecondsBetweenScalingActions).isBefore(lastAutoscalingActionTime)) {
                    return;
                }
                Set<Node> currentNodes = nodeManager.getCurrentNodes(FtepWorkerNodeManager.POOLED_WORKER_TAG);
                if (currentNodes.size() < minWorkerNodes) {
                    LOG.debug("Detected fewer nodes than the minimum ({}/{}), scaling back up", currentNodes.size(), minWorkerNodes);
                    scaleTo(minWorkerNodes);
                    return;
                }
                QueueAverage queueAverage = queueMetricsService.getMetrics(STATISTICS_WINDOW_MS / 1000L);
                // coverage = fraction of the window actually backed by samples
                // (expected sample count is STATISTICS_WINDOW_MS / QUEUE_CHECK_INTERVAL_MS)
                double coverageFactor = 1.0 * QUEUE_CHECK_INTERVAL_MS / STATISTICS_WINDOW_MS;
                double coverage = queueAverage.getCount() * coverageFactor;
                if (coverage > 0.75) {
                    int averageLengthRounded = (int) Math.ceil(queueAverage.getAverageLength());
                    int scaleTarget = (int) Math.round(1.0 * averageLengthRounded / maxJobsPerNode);
                    LOG.debug("Avg queue length over the period is {}; scaling target is {}", queueAverage.getAverageLength(), scaleTarget);
                    scaleTo(Math.max(scaleTarget, minWorkerNodes));
                } else {
                    LOG.debug("Metrics coverage of {} not enough to take scaling decision", coverage);
                }
            } finally {
                scalingLock.unlock();
            }
        }
    }

    /**
     * Scales the pool so that the number of FREE nodes matches the target:
     * provisions extra nodes when free nodes are below target, destroys surplus
     * free nodes when above. Updates the cool-down timestamp only on success.
     */
    public void scaleTo(int target) {
        LOG.info("Scale target: {} nodes", target);
        int totalNodes = nodeManager.getCurrentNodes(FtepWorkerNodeManager.POOLED_WORKER_TAG).size();
        int freeNodes = nodeManager.getNumberOfFreeNodes(FtepWorkerNodeManager.POOLED_WORKER_TAG);
        LOG.debug("Current node balance: {} total nodes, {} free nodes", totalNodes, freeNodes);
        if (target > freeNodes) {
            // Remember the previous timestamp so a failed provision does not start a cool-down
            Instant previousAutoScalingActionTime = lastAutoscalingActionTime;
            try {
                scaleUp(target - freeNodes);
                lastAutoscalingActionTime = Instant.now();
            } catch (NodeProvisioningException e) {
                LOG.debug("Autoscaling failed because of node provisioning exception", e);
                lastAutoscalingActionTime = previousAutoScalingActionTime;
            }
        } else if (target < freeNodes) {
            scaleDown(freeNodes - target);
            lastAutoscalingActionTime = Instant.now();
        } else {
            LOG.debug("No action needed as current free node count is equal to the target: {}", target);
        }
    }

    /**
     * Provisions up to {@code numToScaleUp} additional pooled nodes, capped so the
     * total never exceeds maxWorkerNodes.
     */
    public void scaleUp(int numToScaleUp) throws NodeProvisioningException {
        LOG.info("Evaluating scale up of additional {} nodes", numToScaleUp);
        Set<Node> currentNodes = nodeManager.getCurrentNodes(FtepWorkerNodeManager.POOLED_WORKER_TAG);
        int scaleUpTarget = Math.min(currentNodes.size() + numToScaleUp, maxWorkerNodes);
        int adjustedScaleUpTarget = scaleUpTarget - currentNodes.size();
        LOG.info("Scaling up additional {} nodes. Max worker nodes are {}", adjustedScaleUpTarget, maxWorkerNodes);
        nodeManager.provisionNodes(adjustedScaleUpTarget, FtepWorkerNodeManager.POOLED_WORKER_TAG, jobEnvironmentService.getBaseDir());
    }

    /**
     * Destroys up to {@code numToScaleDown} pooled nodes, capped so the total
     * never drops below minWorkerNodes.
     */
    public void scaleDown(int numToScaleDown) {
        LOG.info("Evaluating scale down of {} nodes", numToScaleDown);
        Set<Node> currentNodes = nodeManager.getCurrentNodes(FtepWorkerNodeManager.POOLED_WORKER_TAG);
        int scaleDownTarget = Math.max(currentNodes.size() - numToScaleDown, minWorkerNodes);
        int adjustedScaleDownTarget = currentNodes.size() - scaleDownTarget;
        LOG.info("Scaling down {} nodes. Min worker nodes are {}", adjustedScaleDownTarget, minWorkerNodes);
        nodeManager.destroyNodes(adjustedScaleDownTarget, FtepWorkerNodeManager.POOLED_WORKER_TAG, jobEnvironmentService.getBaseDir(), minimumHourFractionUptimeSeconds);
    }
}
|
package com.trail2peak.pdi.fastjsoninput;
import junit.framework.TestCase;
import org.pentaho.di.TestFailedException;
import org.pentaho.di.TestUtilities;
import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.core.RowMetaAndData;
import org.pentaho.di.core.plugins.PluginRegistry;
import org.pentaho.di.core.plugins.StepPluginType;
import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMeta;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.trans.RowProducer;
import org.pentaho.di.trans.RowStepCollector;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransHopMeta;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.step.StepMeta;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
public class FastJsonInputTests extends TestCase {
private Properties myProperties = new Properties();
public FastJsonInputTests() {
InputStream propertiesInputStream = getClass().getResourceAsStream("test.properties");
try {
this.myProperties.load(propertiesInputStream);
propertiesInputStream.close();
} catch (Exception e) {
throw new ExceptionInInitializerError("There was a problem initializing the properties file: " + e.toString());
}
}
private StepMeta createFastJsonInputStep(String name, PluginRegistry registry, boolean ignoreMissingPath,
boolean defaultPathLeafToNull) {
FastJsonInputMeta fjim = new FastJsonInputMeta();
fjim.setInFields(true);
fjim.setFieldValue("json_data");
fjim.setRemoveSourceField(true);
fjim.setIgnoreMissingPath(ignoreMissingPath);
fjim.setDefaultPathLeafToNull(defaultPathLeafToNull);
FastJsonInputField if1 = new FastJsonInputField("id");
if1.setPath("$.[*].id");
if1.setType(ValueMeta.TYPE_INTEGER);
if1.setTrimType(FastJsonInputField.TYPE_TRIM_NONE);
FastJsonInputField if2 = new FastJsonInputField("first_name");
if2.setPath("$.[*].first_name");
if2.setType(ValueMeta.TYPE_STRING);
if2.setTrimType(FastJsonInputField.TYPE_TRIM_NONE);
FastJsonInputField if3 = new FastJsonInputField("last_name");
if3.setPath("$.[*].last_name");
if3.setType(ValueMeta.TYPE_STRING);
if3.setTrimType(FastJsonInputField.TYPE_TRIM_NONE);
FastJsonInputField if4 = new FastJsonInputField("city");
if4.setPath("$.[*].city");
if4.setType(ValueMeta.TYPE_STRING);
if4.setTrimType(FastJsonInputField.TYPE_TRIM_NONE);
FastJsonInputField[] inputFields = new FastJsonInputField[4];
inputFields[0] = if1;
inputFields[1] = if2;
inputFields[2] = if3;
inputFields[3] = if4;
fjim.setInputFields(inputFields);
String fjiPid = registry.getPluginId(StepPluginType.class, fjim);
return new StepMeta(fjiPid, name, fjim);
}
/**
* Creates a row meta interface for the fields that are defined
* @param valuesMeta defined ValueMetaInterface
* @return RowMetaInterface
*/
private RowMetaInterface createRowMetaInterface(ValueMetaInterface[] valuesMeta) {
RowMetaInterface rm = new RowMeta();
for (ValueMetaInterface aValuesMeta : valuesMeta) {
rm.addValueMeta(aValuesMeta);
}
return rm;
}
/**
* Create input data for test case 1
* @return list of metadata/data couples
*/
private List<RowMetaAndData> createInputData(String data) {
List<RowMetaAndData> list = new ArrayList<RowMetaAndData>();
ValueMetaInterface[] valuesMeta = {new ValueMeta("json_data", ValueMeta.TYPE_STRING)};
RowMetaInterface rm = createRowMetaInterface(valuesMeta);
Object[] r1 = new Object[] {data};
list.add(new RowMetaAndData(rm , r1));
return list;
}
/**
* Create result data for test case 1. Each list object should mirror the output of the parsed JSON
*
* @return list of metadata/data couples of how the result should look.
*/
private List<RowMetaAndData> createExpectedResults() {
List<RowMetaAndData> list = new ArrayList<RowMetaAndData>();
ValueMetaInterface[] valuesMeta =
{ new ValueMeta("id", ValueMeta.TYPE_INTEGER), new ValueMeta("first_name", ValueMeta.TYPE_STRING),
new ValueMeta("last_name", ValueMeta.TYPE_STRING), new ValueMeta("city", ValueMeta.TYPE_STRING)};
RowMetaInterface rm = createRowMetaInterface(valuesMeta);
Object[] r1 = new Object[] { "123", "Jesse", "Adametz", "Santa Barbara" };
Object[] r2 = new Object[] { "456", "James", "Ebentier", "Santa Barbara" };
list.add(new RowMetaAndData(rm, r1));
list.add(new RowMetaAndData(rm, r2));
return list;
}
/**
* Runs the transformation with the below input parameters
* @param inputData JSON string
* @param ignoreMissingPath boolean
* @param defaultPathLeafToNull boolean
* @return Transformation Results
*/
private List<RowMetaAndData> test(String inputData, boolean ignoreMissingPath, boolean defaultPathLeafToNull)
throws Exception {
KettleEnvironment.init();
// Create a new transformation
TransMeta transMeta = new TransMeta();
transMeta.setName("testFastJsonInput");
PluginRegistry registry = PluginRegistry.getInstance();
// Create Injector
String injectorStepName = "injector step";
StepMeta injectorStep = TestUtilities.createInjectorStep(injectorStepName, registry);
transMeta.addStep(injectorStep);
// Create a FastJsonInput step
String fastJsonInputName = "FastJsonInput step";
StepMeta fastJsonInputStep = createFastJsonInputStep(fastJsonInputName, registry, ignoreMissingPath,
defaultPathLeafToNull);
transMeta.addStep(fastJsonInputStep);
// TransHopMeta between injector step and FastJsonInput
TransHopMeta injector_hop_fjis = new TransHopMeta(injectorStep, fastJsonInputStep);
transMeta.addTransHop(injector_hop_fjis);
// Create a dummy step
String dummyStepName = "dummy step";
StepMeta dummyStep = TestUtilities.createDummyStep(dummyStepName, registry);
transMeta.addStep(dummyStep);
// TransHopMeta between FastJsonInput and Dummy
TransHopMeta fjis_hop_dummy = new TransHopMeta(fastJsonInputStep, dummyStep);
transMeta.addTransHop(fjis_hop_dummy);
// Execute the transformation
Trans trans = new Trans(transMeta);
trans.prepareExecution(null);
// Create a row collector and add it to the dummy step interface
StepInterface si = trans.getStepInterface(dummyStepName, 0);
RowStepCollector dummyRowCollector = new RowStepCollector();
si.addRowListener(dummyRowCollector);
// Create a row producer
RowProducer rowProducer = trans.addRowProducer(injectorStepName, 0);
trans.startThreads();
// create the rows
List<RowMetaAndData> inputList = createInputData(inputData);
for (RowMetaAndData rowMetaAndData : inputList) {
rowProducer.putRow(rowMetaAndData.getRowMeta(), rowMetaAndData.getData());
}
rowProducer.finished();
trans.waitUntilFinished();
return dummyRowCollector.getRowsWritten();
}
public void testWellStructuredJson() throws Exception {
List<RowMetaAndData> transformationResults = test(myProperties.getProperty("WELL_STRUCTURED_JSON"), false, false);
List<RowMetaAndData> expectedResults = createExpectedResults();
try {
TestUtilities.checkRows(transformationResults, expectedResults, 0);
} catch(TestFailedException tfe) {
fail(tfe.getMessage());
}
}
public void testNoIdJson() throws Exception {
List<RowMetaAndData> transformationResults = test(myProperties.getProperty("NO_ID_JSON"), true, false);
List<RowMetaAndData> expectedResults = createExpectedResults();
try {
TestUtilities.checkRows(transformationResults, expectedResults, 0);
} catch(TestFailedException tfe) {
fail(tfe.getMessage());
}
}
public void testMissingIdJson() throws Exception {
List<RowMetaAndData> transformationResults = test(myProperties.getProperty("MISSING_ID_JSON"), false, true);
List<RowMetaAndData> expectedResults = createExpectedResults();
try {
TestUtilities.checkRows(transformationResults, expectedResults, 0);
} catch(TestFailedException tfe) {
fail(tfe.getMessage());
}
}
/**
 * JSON with neither an id attribute nor a city value must still yield the
 * expected rows.
 */
public void testNoIdAndMissingCityJson() throws Exception {
    final List<RowMetaAndData> actualRows = test(myProperties.getProperty("NO_ID_AND_MISSING_CITY_JSON"), true, true);
    final List<RowMetaAndData> expectedRows = createExpectedResults();
    try {
        TestUtilities.checkRows(actualRows, expectedRows, 0);
    } catch (TestFailedException e) {
        fail(e.getMessage());
    }
}
}
|
package com.bitmovin.bitcodin.api.test;
import static org.junit.Assert.*;
import java.io.FileNotFoundException;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import com.bitmovin.bitcodin.api.BitcodinApi;
import com.bitmovin.bitcodin.api.exception.BitcodinApiException;
import com.bitmovin.bitcodin.api.input.HTTPInputConfig;
import com.bitmovin.bitcodin.api.input.Input;
import com.bitmovin.bitcodin.api.input.InputList;
import com.bitmovin.bitcodin.api.input.InputType;
import com.bitmovin.bitcodin.api.job.Job;
import com.bitmovin.bitcodin.api.job.JobConfig;
import com.bitmovin.bitcodin.api.job.JobList;
import com.bitmovin.bitcodin.api.job.JobStatus;
import com.bitmovin.bitcodin.api.job.ManifestType;
import com.bitmovin.bitcodin.api.media.EncodingProfile;
import com.bitmovin.bitcodin.api.media.EncodingProfileConfig;
import com.bitmovin.bitcodin.api.media.EncodingProfileList;
import com.bitmovin.bitcodin.api.media.Preset;
import com.bitmovin.bitcodin.api.media.Profile;
import com.bitmovin.bitcodin.api.media.VideoStreamConfig;
import com.bitmovin.bitcodin.api.output.Output;
import com.bitmovin.bitcodin.api.output.OutputList;
import com.bitmovin.bitcodin.api.output.OutputType;
import com.bitmovin.bitcodin.api.statistics.Statistic;
import com.bitmovin.bitcodin.api.transfer.TransferConfig;
/**
 * Integration tests for the {@link BitcodinApi} REST client covering inputs,
 * outputs, encoding profiles, jobs, transfers and statistics.
 *
 * <p>All tests talk to the live bitcodin API with the key supplied by
 * {@link Settings}, so they require network access and valid credentials.
 */
public class BitcodinApiTest {

    @Rule
    public ExpectedException thrown = ExpectedException.none();

    /** API key and output configurations loaded from the local settings file. */
    private final Settings settings;

    public BitcodinApiTest() throws FileNotFoundException {
        this.settings = Settings.getInstance();
    }

    /** Any call made with an invalid API key must be rejected. */
    @Test
    public void testApiInvalidKey() throws BitcodinApiException {
        BitcodinApi bitApi = new BitcodinApi("THIS_IS_AN_INVALID_KEY");
        thrown.expect(BitcodinApiException.class);
        bitApi.listInputs(0);
    }

    /** Creating an input from an unreachable URL must fail. */
    @Test
    public void createInvalidInput() throws BitcodinApiException {
        BitcodinApi bitApi = new BitcodinApi(this.settings.apikey);
        HTTPInputConfig httpInputConfig = new HTTPInputConfig();
        httpInputConfig.url = "http://this/is/an/invalid/url.mkv";
        thrown.expect(BitcodinApiException.class);
        // Return value intentionally discarded: the call is expected to throw.
        bitApi.createInput(httpInputConfig);
    }

    /** The client must echo back the key it was constructed with. */
    @Test
    public void testApiKeyGetter() {
        BitcodinApi bitApi = new BitcodinApi(this.settings.apikey);
        assertEquals(this.settings.apikey, bitApi.getKey());
    }

    /**
     * Creates an HTTP input for the publicly hosted Sintel demo movie.
     *
     * @return the input registered with the API
     * @throws BitcodinApiException if the API rejects the request
     */
    public Input createSintelInput() throws BitcodinApiException {
        BitcodinApi bitApi = new BitcodinApi(this.settings.apikey);
        HTTPInputConfig httpInputConfig = new HTTPInputConfig();
        httpInputConfig.url = "http://ftp.nluug.nl/pub/graphics/blender/demo/movies/Sintel.2010.720p.mkv";
        return bitApi.createInput(httpInputConfig);
    }

    /** A created input must report the analyzed media properties of the source. */
    @Test
    public void createInput() throws BitcodinApiException {
        Input input = this.createSintelInput();
        // JUnit convention: expected value first, actual second.
        assertEquals("Sintel.2010.720p.mkv", input.filename);
        assertEquals(2, input.mediaConfigurations.size());
        assertEquals(1280, input.mediaConfigurations.get(0).width);
        assertEquals(544, input.mediaConfigurations.get(0).height);
        assertEquals(InputType.URL, input.inputType);
    }

    /** The most recently created input must appear first in the input list. */
    @Test
    public void listInputs() throws BitcodinApiException {
        BitcodinApi bitApi = new BitcodinApi(this.settings.apikey);
        Input input = this.createSintelInput();
        InputList inputList = bitApi.listInputs(0);
        Input lastRecentInput = inputList.inputs.get(0);
        assertEquals(input.filename, lastRecentInput.filename);
        assertEquals(input.inputId, lastRecentInput.inputId);
    }

    /** Fetching an input by id must return the same input. */
    @Test
    public void getInput() throws BitcodinApiException {
        BitcodinApi bitApi = new BitcodinApi(this.settings.apikey);
        Input input = this.createSintelInput();
        Input sameInput = bitApi.getInput(input.inputId);
        assertEquals(input.filename, sameInput.filename);
        assertEquals(input.inputId, sameInput.inputId);
    }

    /** Deleting an input must not raise an error. */
    @Test
    public void deleteInput() throws BitcodinApiException {
        BitcodinApi bitApi = new BitcodinApi(this.settings.apikey);
        Input input = this.createSintelInput();
        bitApi.deleteInput(input.inputId);
        /* TODO: FIX API input delete is not working */
        //assertNull(bitApi.getInput(input.inputId));
    }

    /** An S3 output created from the EU-West config must report type S3. */
    @Test
    public void createS3Output() throws BitcodinApiException {
        BitcodinApi bitApi = new BitcodinApi(this.settings.apikey);
        Output output = bitApi.createS3Output(this.settings.s3OutputEUWest);
        assertEquals(OutputType.S3, output.type);
    }

    @Test
    public void createGCSOutput() throws BitcodinApiException {
        /* TODO Create public GCS bucket*/
    }

    /** An FTP output created from the settings must report type FTP. */
    @Test
    public void createFTPOutput() throws BitcodinApiException {
        BitcodinApi bitApi = new BitcodinApi(this.settings.apikey);
        Output output = bitApi.createFTPOutput(this.settings.ftpOutput);
        assertEquals(OutputType.FTP, output.type);
    }

    /** The most recently created output must appear first in the output list. */
    @Test
    public void listOutputs() throws BitcodinApiException {
        BitcodinApi bitApi = new BitcodinApi(this.settings.apikey);
        Output output = bitApi.createFTPOutput(this.settings.ftpOutput);
        OutputList outputList = bitApi.listOutputs(0);
        Output lastRecentOutput = outputList.outputs.get(0);
        assertEquals(output.outputId, lastRecentOutput.outputId);
    }

    /** Fetching an output by id must return the same output. */
    @Test
    public void getOutput() throws BitcodinApiException {
        BitcodinApi bitApi = new BitcodinApi(this.settings.apikey);
        Output output = bitApi.createFTPOutput(this.settings.ftpOutput);
        Output sameOutput = bitApi.getOutput(output.outputId);
        assertEquals(output.name, sameOutput.name);
        assertEquals(output.outputId, sameOutput.outputId);
    }

    /** After deletion, fetching the output must fail with "Resource not available". */
    @Test
    public void deleteOutput() throws BitcodinApiException {
        BitcodinApi bitApi = new BitcodinApi(this.settings.apikey);
        Output output = bitApi.createFTPOutput(this.settings.ftpOutput);
        bitApi.deleteOutput(output.outputId);
        thrown.expect(BitcodinApiException.class);
        thrown.expectMessage("Resource not available");
        bitApi.getOutput(output.outputId);
    }

    /**
     * Builds an encoding profile config with a single 1 Mbit/s 640x480 video
     * stream (main profile, standard preset).
     */
    public EncodingProfileConfig createEncodingProfileConfig() {
        VideoStreamConfig videoConfig = new VideoStreamConfig();
        videoConfig.bitrate = 1 * 1024 * 1024;
        videoConfig.width = 640;
        videoConfig.height = 480;
        videoConfig.profile = Profile.MAIN;
        videoConfig.preset = Preset.STANDARD;
        EncodingProfileConfig encodingProfileConfig = new EncodingProfileConfig();
        encodingProfileConfig.name = "JUnitTestProfile";
        encodingProfileConfig.videoStreamConfigs.add(videoConfig);
        return encodingProfileConfig;
    }

    /** A created profile must keep the configured stream dimensions. */
    @Test
    public void createEncodingProfile() throws BitcodinApiException {
        BitcodinApi bitApi = new BitcodinApi(this.settings.apikey);
        EncodingProfileConfig config = this.createEncodingProfileConfig();
        EncodingProfile encodingProfile = bitApi.createEncodingProfile(config);
        assertEquals(config.videoStreamConfigs.get(0).width, encodingProfile.videoStreamConfigs.get(0).width);
        assertEquals(config.videoStreamConfigs.get(0).height, encodingProfile.videoStreamConfigs.get(0).height);
    }

    /** The most recently created profile must appear first in the profile list. */
    @Test
    public void listEncodingProfiles() throws BitcodinApiException {
        BitcodinApi bitApi = new BitcodinApi(this.settings.apikey);
        EncodingProfileConfig config = this.createEncodingProfileConfig();
        EncodingProfile encodingProfile = bitApi.createEncodingProfile(config);
        EncodingProfileList encodingProfileList = bitApi.listEncodingProfiles(0);
        EncodingProfile lastRecentProfile = encodingProfileList.profiles.get(0);
        assertEquals(encodingProfile.encodingProfileId, lastRecentProfile.encodingProfileId);
    }

    /** Fetching a profile by id must return the same profile. */
    @Test
    public void getEncodingProfile() throws BitcodinApiException {
        BitcodinApi bitApi = new BitcodinApi(this.settings.apikey);
        EncodingProfileConfig config = this.createEncodingProfileConfig();
        EncodingProfile encodingProfile = bitApi.createEncodingProfile(config);
        EncodingProfile sameProfile = bitApi.getEncodingProfile(encodingProfile.encodingProfileId);
        assertEquals(encodingProfile.name, sameProfile.name);
        assertEquals(encodingProfile.encodingProfileId, sameProfile.encodingProfileId);
    }

    /**
     * Builds a job config from a fresh encoding profile and Sintel input,
     * requesting MPEG-DASH and HLS manifests.
     */
    public JobConfig createJobConfig() throws BitcodinApiException {
        BitcodinApi bitApi = new BitcodinApi(this.settings.apikey);
        JobConfig jobConfig = new JobConfig();
        EncodingProfileConfig config = this.createEncodingProfileConfig();
        EncodingProfile encodingProfile = bitApi.createEncodingProfile(config);
        Input input = this.createSintelInput();
        jobConfig.encodingProfileId = encodingProfile.encodingProfileId;
        jobConfig.inputId = input.inputId;
        jobConfig.manifestTypes.addElement(ManifestType.MPEG_DASH_MPD);
        jobConfig.manifestTypes.addElement(ManifestType.HLS_M3U8);
        return jobConfig;
    }

    /** A freshly created job must start in the ENQUEUED state. */
    @Test
    public void createJob() throws BitcodinApiException {
        BitcodinApi bitApi = new BitcodinApi(this.settings.apikey);
        JobConfig jobConfig = this.createJobConfig();
        Job job = bitApi.createJob(jobConfig);
        assertEquals(JobStatus.ENQUEUED, job.status);
    }

    /** The most recently created job must appear first in the job list. */
    @Test
    public void listJobs() throws BitcodinApiException {
        BitcodinApi bitApi = new BitcodinApi(this.settings.apikey);
        JobConfig jobConfig = this.createJobConfig();
        Job job = bitApi.createJob(jobConfig);
        JobList jobList = bitApi.listJobs(0);
        Job lastRecentJob = jobList.jobs.get(0);
        assertEquals(job.jobId, lastRecentJob.jobId);
    }

    /** Fetching a job by id must return the same job. */
    @Test
    public void getJob() throws BitcodinApiException {
        BitcodinApi bitApi = new BitcodinApi(this.settings.apikey);
        JobConfig jobConfig = this.createJobConfig();
        Job job = bitApi.createJob(jobConfig);
        Job sameJob = bitApi.getJob(job.jobId);
        assertEquals(job.jobId, sameJob.jobId);
    }

    /**
     * Transfers the result of the first FINISHED job found to the given output.
     * Fails the test if no finished job exists in the account.
     */
    public void transfer(Output output) throws BitcodinApiException {
        BitcodinApi bitApi = new BitcodinApi(this.settings.apikey);
        JobList jobList = bitApi.listJobs(0);
        Job finishedJob = null;
        for (Job job : jobList.jobs) {
            if (job.status == JobStatus.FINISHED) {
                finishedJob = job;
                break;
            }
        }
        assertNotNull(finishedJob);
        TransferConfig transferConfig = new TransferConfig();
        transferConfig.jobId = finishedJob.jobId;
        transferConfig.outputId = output.outputId;
        bitApi.transfer(transferConfig);
    }

    /** A finished job must be transferable to an S3 output. */
    @Test
    public void transferToS3() throws BitcodinApiException {
        BitcodinApi bitApi = new BitcodinApi(this.settings.apikey);
        Output s3Output = bitApi.createS3Output(this.settings.s3OutputEUWest);
        this.transfer(s3Output);
    }

    /** A finished job must be transferable to an FTP output. */
    @Test
    public void transferToFTP() throws BitcodinApiException {
        BitcodinApi bitApi = new BitcodinApi(this.settings.apikey);
        Output ftpOutput = bitApi.createFTPOutput(this.settings.ftpOutput);
        this.transfer(ftpOutput);
    }

    @Test
    public void listTransfers() throws BitcodinApiException {
        /* TODO
         * cannot effectively be implemented without API fix
         * so that transfer returns at least id */
    }

    /** The statistics endpoint must return a non-null result. */
    @Test
    public void getStatistics() throws BitcodinApiException {
        BitcodinApi bitApi = new BitcodinApi(this.settings.apikey);
        Statistic stats = bitApi.getStatistics();
        assertNotNull(stats);
        /* TODO
         * Does this call return monthly? Values must be redesigned */
    }

    /** The ranged statistics endpoint must return a non-null result. */
    @Test
    public void getStatisticsFromTo() throws BitcodinApiException {
        BitcodinApi bitApi = new BitcodinApi(this.settings.apikey);
        Statistic stats = bitApi.getStatistics("2015-06-01", "2015-06-10");
        assertNotNull(stats);
        /* TODO
         * Range is not working -> fix in API */
    }
}
|
package com.celements.xwikiPatches;
import static com.celements.common.test.CelementsTestUtils.*;
import static org.easymock.EasyMock.*;
import static org.junit.Assert.*;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Vector;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.ScheduledExecutorService;
import org.apache.commons.lang.exception.ExceptionUtils;
import org.easymock.IAnswer;
import org.hibernate.FlushMode;
import org.hibernate.HibernateException;
import org.hibernate.LockMode;
import org.hibernate.Query;
import org.hibernate.ScrollMode;
import org.hibernate.ScrollableResults;
import org.hibernate.SessionFactory;
import org.hibernate.Transaction;
import org.hibernate.classic.Session;
import org.hibernate.impl.AbstractQueryImpl;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xwiki.cache.CacheFactory;
import org.xwiki.context.Execution;
import org.xwiki.context.ExecutionContext;
import org.xwiki.context.ExecutionContextException;
import org.xwiki.model.reference.DocumentReference;
import org.xwiki.model.reference.WikiReference;
import com.celements.common.test.AbstractComponentTest;
import com.celements.navigation.INavigationClassConfig;
import com.celements.pagetype.IPageTypeClassConfig;
import com.celements.web.service.IWebUtilsService;
import com.xpn.xwiki.XWiki;
import com.xpn.xwiki.XWikiConfig;
import com.xpn.xwiki.XWikiContext;
import com.xpn.xwiki.XWikiException;
import com.xpn.xwiki.doc.XWikiAttachment;
import com.xpn.xwiki.doc.XWikiDocument;
import com.xpn.xwiki.objects.BaseObject;
import com.xpn.xwiki.objects.BaseProperty;
import com.xpn.xwiki.objects.PropertyInterface;
import com.xpn.xwiki.objects.classes.BaseClass;
import com.xpn.xwiki.store.XWikiCacheStore;
import com.xpn.xwiki.store.XWikiStoreInterface;
import com.xpn.xwiki.store.hibernate.HibernateSessionFactory;
import com.xpn.xwiki.util.AbstractXWikiRunnable;
import com.xpn.xwiki.web.Utils;
public class ConcurrentCacheTest extends AbstractComponentTest {
private static final Logger LOGGER = LoggerFactory.getLogger(ConcurrentCacheTest.class);

// Store under test; written once per test and read by worker threads, hence volatile.
private volatile XWikiCacheStore theCacheStore;
// Template objects per document: source of truth for the mocked queries and
// for the result checks performed by each load command.
private volatile ConcurrentMap<DocumentReference, List<BaseObject>> baseObjMap = new ConcurrentHashMap<>();
// Reference of the single document that every scenario loads.
private volatile DocumentReference testDocRef;
// Shared with worker threads through their execution context; static because
// worker runnables may outlive a single test instance's setup.
private static volatile Collection<Object> defaultMocks;
private static volatile XWikiContext defaultContext;
private final String wikiName = "testWiki";
private final WikiReference wikiRef = new WikiReference(wikiName);
// Full name matching testDocRef; used to build the cache key for eviction.
private String testFullName = "TestSpace.TestDoc";
private XWikiConfig configMock;
private SessionFactory sessionFactoryMock;
private IPageTypeClassConfig pageTypeClassConfig;
private INavigationClassConfig navClassConfig;
private IWebUtilsService webUtilsService;
/**
 * Installs a mocked Hibernate session factory and stubs all XWiki calls the
 * cache store makes during document loads, then registers the template objects.
 * NOTE(review): "CatchTest" in the method name looks like a typo for "CacheTest".
 */
@SuppressWarnings("deprecation")
@Before
public void setUp_ConcurrentCatchTest() throws Exception {
  pageTypeClassConfig = Utils.getComponent(IPageTypeClassConfig.class);
  navClassConfig = Utils.getComponent(INavigationClassConfig.class);
  webUtilsService = Utils.getComponent(IWebUtilsService.class);
  getContext().setDatabase(wikiName);
  // Replace the real session factory so no database is touched.
  sessionFactoryMock = createMockAndAddToDefault(SessionFactory.class);
  Utils.getComponent(HibernateSessionFactory.class).setSessionFactory(sessionFactoryMock);
  testDocRef = new DocumentReference(wikiName, "TestSpace", "TestDoc");
  configMock = createMockAndAddToDefault(XWikiConfig.class);
  expect(getWikiMock().getConfig()).andReturn(configMock).anyTimes();
  expect(configMock.getProperty(eq("xwiki.store.hibernate.path"), eq(
      "/WEB-INF/hibernate.cfg.xml"))).andReturn("testhibernate.cfg.xml");
  // null capacities: presumably lets the cache store fall back to its defaults — TODO confirm.
  expect(getWikiMock().Param(eq("xwiki.store.cache.capacity"))).andReturn(null).anyTimes();
  expect(getWikiMock().Param(eq("xwiki.store.cache.pageexistcapacity"))).andReturn(
      null).anyTimes();
  CacheFactory cacheFactory = Utils.getComponent(CacheFactory.class, "jbosscache");
  expect(getWikiMock().getCacheFactory()).andReturn(cacheFactory).anyTimes();
  expect(getWikiMock().getPlugin(eq("monitor"), isA(XWikiContext.class))).andReturn(
      null).anyTimes();
  expect(getWikiMock().hasDynamicCustomMappings()).andReturn(false).anyTimes();
  expect(getWikiMock().isVirtualMode()).andReturn(false).anyTimes();
  // Returning "0" here presumably disables class-table reads so properties come
  // from the queries mocked in expectPropertiesLoad — TODO confirm.
  expect(getWikiMock().Param(eq("xwiki.store.hibernate.useclasstables.read"), eq("1"))).andReturn(
      "0").anyTimes();
  expect(getWikiMock().getXClass(isA(DocumentReference.class), isA(
      XWikiContext.class))).andStubDelegateTo(new TestXWiki());
  createBaseObjects();
}
/**
 * Baseline: a single synchronous load on the test thread must succeed.
 */
@Test
public void test_singleThreaded_sync() throws Exception {
  setupTestMocks();
  replayDefault();
  initStorePrepareMultiThreadMocks();
  LoadXWikiDocCommand loadCommand = new LoadXWikiDocCommand(false);
  LoadDocCheckResult checkResult = loadCommand.call();
  assertTrue(Arrays.deepToString(checkResult.getMessages().toArray()), checkResult.isSuccessfull());
  verifyDefault();
}
/**
 * A single load executed on a separate executor thread must succeed.
 */
@Test
public void test_singleThreaded_async() throws Exception {
  setupTestMocks();
  replayDefault();
  initStorePrepareMultiThreadMocks();
  ScheduledExecutorService theExecutor = Executors.newScheduledThreadPool(1);
  Future<LoadDocCheckResult> testFuture = theExecutor.submit(
      (Callable<LoadDocCheckResult>) new LoadXWikiDocCommand(false));
  theExecutor.shutdown();
  // Future.get() blocks until the task completes; no need to busy-poll
  // isTerminated() with Thread.sleep().
  LoadDocCheckResult result = testFuture.get();
  assertTrue(Arrays.deepToString(result.getMessages().toArray()), result.isSuccessfull());
  verifyDefault();
}
/**
 * Scenario 1 repeated many times on a single worker thread.
 */
@Test
public void test_multiRuns_singleThreaded_scenario1() throws Exception {
  final int cores = 1;
  final int executeRuns = 50000;
  setupTestMocks();
  replayDefault();
  initStorePrepareMultiThreadMocks();
  assertSuccessFullRuns(testScenario1(cores, executeRuns));
  verifyDefault();
}
/**
 * Scenario 2 repeated many times on a single worker thread.
 */
@Test
public void test_multiRuns_singleThreaded_scenario2() throws Exception {
  final int cores = 1;
  final int executeRuns = 50000;
  setupTestMocks();
  replayDefault();
  initStorePrepareMultiThreadMocks();
  assertSuccessFullRuns(testScenario2(cores, executeRuns));
  verifyDefault();
}
/**
 * Scenario 1 with one worker thread per available core; requires a real
 * multi-core machine to exercise actual parallelism.
 */
@Test
public void test_multiThreaded_scenario1() throws Exception {
  final int cores = Runtime.getRuntime().availableProcessors();
  assertTrue("This tests needs real multi core processors, but found " + cores, cores > 1);
  final int executeRuns = 500000;
  setupTestMocks();
  replayDefault();
  initStorePrepareMultiThreadMocks();
  assertSuccessFullRuns(testScenario1(cores, executeRuns));
  verifyDefault();
}
/**
 * Scenario 2 with one worker thread per available core; requires a real
 * multi-core machine to exercise actual parallelism.
 */
@Test
public void test_multiThreaded_scenario2() throws Exception {
  final int cores = Runtime.getRuntime().availableProcessors();
  assertTrue("This tests needs real multi core processors, but found " + cores, cores > 1);
  final int executeRuns = 500000;
  setupTestMocks();
  replayDefault();
  initStorePrepareMultiThreadMocks();
  assertSuccessFullRuns(testScenario2(cores, executeRuns));
  verifyDefault();
}
/**
 * Wires the Hibernate session and transaction mocks shared by all scenarios,
 * then registers the stubbed document, attachment and object/property loads.
 */
private void setupTestMocks() {
  Session sessionMock = createMockAndAddToDefault(Session.class);
  expect(sessionFactoryMock.openSession()).andReturn(sessionMock).anyTimes();
  sessionMock.setFlushMode(eq(FlushMode.COMMIT));
  expectLastCall().atLeastOnce();
  sessionMock.setFlushMode(eq(FlushMode.MANUAL));
  expectLastCall().atLeastOnce();
  Transaction transactionMock = createMockAndAddToDefault(Transaction.class);
  expect(sessionMock.beginTransaction()).andReturn(transactionMock).anyTimes();
  // Read-only test: transactions are only ever rolled back, never committed.
  transactionMock.rollback();
  expectLastCall().anyTimes();
  expect(sessionMock.close()).andReturn(null).anyTimes();
  XWikiDocument myDoc = new XWikiDocument(testDocRef);
  expectXWikiDocLoad(sessionMock, myDoc);
  expectLoadEmptyAttachmentList(sessionMock);
  expectBaseObjectLoad(sessionMock);
}
/**
 * Scenario 1
 * 1. for executeRuns do
 * 1.1 first and every 100 run
 * 1.1.1 reset cache entry
 * 1.1.2 load document in cache
 * 1.2 load document 3*cores in parallels for core threads
 *
 * @param cores number of worker threads in the pool
 * @param executeRuns number of iterations of the cache-hit burst
 * @return futures for every executed load, for later result checking
 */
private List<Future<LoadDocCheckResult>> testScenario1(int cores, int executeRuns)
    throws Exception {
  ScheduledExecutorService theExecutor = Executors.newScheduledThreadPool(cores);
  try {
    final int numTimesFromCache = cores * 3;
    List<Future<LoadDocCheckResult>> futureList = new ArrayList<>(executeRuns
        * numTimesFromCache);
    for (int i = 0; i < executeRuns; i++) {
      CountDownLatch doneSignal = new CountDownLatch(numTimesFromCache);
      CountDownLatch startSignal = new CountDownLatch(cores);
      List<LoadXWikiDocCommand> loadTasks = new ArrayList<>(numTimesFromCache);
      for (int j = 1; j <= numTimesFromCache; j++) {
        loadTasks.add(new LoadXWikiDocCommand(startSignal, doneSignal, true));
      }
      if ((i % 100) == 0) {
        // Future.get() blocks until the eviction is done and surfaces failures,
        // replacing the former Thread.sleep() polling loop on isDone().
        theExecutor.submit(new ResetCacheEntryCommand()).get();
        // Prime the cache with exactly one load before the parallel burst.
        CountDownLatch doneLoadingSignal = new CountDownLatch(1);
        Future<LoadDocCheckResult> loadDocToCache = theExecutor.submit(
            (Callable<LoadDocCheckResult>) new LoadXWikiDocCommand(null, doneLoadingSignal,
                false));
        futureList.add(loadDocToCache);
        doneLoadingSignal.await();
      }
      futureList.addAll(theExecutor.invokeAll(loadTasks));
      doneSignal.await();
    }
    return futureList;
  } finally {
    theExecutor.shutdown();
  }
}
/**
 * Scenario 2
 * 1. reset cache
 * 2. Load document once into cache
 * 3. read executeRuns times from cache in parallel for cores threads
 *
 * @param cores number of worker threads in the pool
 * @param executeRuns total number of loads (including the priming load)
 * @return futures for every executed load, for later result checking
 */
private List<Future<LoadDocCheckResult>> testScenario2(int cores, int executeRuns)
    throws Exception {
  ScheduledExecutorService theExecutor = Executors.newScheduledThreadPool(cores);
  try {
    CountDownLatch startSignal = new CountDownLatch(cores);
    CountDownLatch doneSignal = new CountDownLatch(executeRuns);
    List<Future<LoadDocCheckResult>> futureList = new ArrayList<>(executeRuns);
    // Future.get() blocks until completion and surfaces failures, replacing the
    // former Thread.sleep() polling loops on isDone().
    theExecutor.submit(new ResetCacheEntryCommand()).get();
    Future<LoadDocCheckResult> loadDocToCache = theExecutor.submit(
        (Callable<LoadDocCheckResult>) new LoadXWikiDocCommand(null, doneSignal, false));
    futureList.add(loadDocToCache);
    loadDocToCache.get();
    for (int i = 1; i < executeRuns; i++) {
      Future<LoadDocCheckResult> testFuture = theExecutor.submit(
          (Callable<LoadDocCheckResult>) new LoadXWikiDocCommand(startSignal, doneSignal, true));
      futureList.add(testFuture);
    }
    doneSignal.await();
    return futureList;
  } finally {
    theExecutor.shutdown();
  }
}
/**
 * Waits for every future and asserts that all runs succeeded; on failure the
 * assertion message lists the collected per-run failure messages.
 */
private void assertSuccessFullRuns(List<Future<LoadDocCheckResult>> futureList)
    throws InterruptedException, ExecutionException {
  int passed = 0;
  int failed = 0;
  List<String> failureDetails = new ArrayList<>();
  for (Future<LoadDocCheckResult> future : futureList) {
    LoadDocCheckResult runResult = future.get();
    if (runResult.isSuccessfull()) {
      passed += 1;
    } else {
      failed += 1;
      failureDetails.add("Run num: " + (passed + failed) + "\n");
      failureDetails.addAll(runResult.getMessages());
    }
  }
  assertEquals("Found " + failed + " failing runs: " + Arrays.deepToString(
      failureDetails.toArray()), futureList.size(), passed);
}
/**
 * Stubs the BaseObject listing query: returns fresh copies of the template
 * objects registered in baseObjMap for the requested document, then wires the
 * property-load stubs.
 */
private void expectBaseObjectLoad(Session sessionMock) {
  String loadBaseObjectHql = "from BaseObject as bobject where bobject.name = :name order by "
      + "bobject.number";
  Query queryObj = new TestQuery<BaseObject>(loadBaseObjectHql, new QueryList<BaseObject>() {

    // Copies (not the templates themselves) are returned so every load yields
    // distinct instances, as a real Hibernate query would.
    @Override
    public List<BaseObject> list(String string, Map<String, Object> params)
        throws HibernateException {
      DocumentReference theDocRef = webUtilsService.resolveDocumentReference((String) params.get(
          "name"));
      List<BaseObject> attList = new ArrayList<>();
      for (BaseObject templBaseObject : baseObjMap.get(theDocRef)) {
        BaseObject bObj = createBaseObject(templBaseObject.getNumber(),
            templBaseObject.getXClassReference());
        bObj.setDocumentReference(theDocRef);
        attList.add(bObj);
      }
      return attList;
    }
  });
  expect(sessionMock.createQuery(eq(loadBaseObjectHql))).andReturn(queryObj).anyTimes();
  expectPropertiesLoad(sessionMock);
}
/**
 * Stubs property loading for objects: the name/classType listing query and the
 * subsequent session.load() of each property value, both answered from the
 * templates in baseObjMap.
 */
private void expectPropertiesLoad(Session sessionMock) {
  String loadPropHql = "select prop.name, prop.classType from BaseProperty as prop where "
      + "prop.id.id = :id";
  Query queryProp = new TestQuery<String[]>(loadPropHql, new QueryList<String[]>() {

    // Returns [name, canonical class name] rows for every field of the
    // template object whose id matches the bound ":id" parameter.
    @Override
    public List<String[]> list(String string, Map<String, Object> params)
        throws HibernateException {
      Integer objId = (Integer) params.get("id");
      List<String[]> propList = new ArrayList<>();
      for (BaseObject templBaseObject : baseObjMap.get(testDocRef)) {
        if (objId.equals(templBaseObject.getId())) {
          for (Object theObj : templBaseObject.getFieldList()) {
            PropertyInterface theField = (PropertyInterface) theObj;
            String[] row = new String[2];
            row[0] = theField.getName();
            row[1] = theField.getClass().getCanonicalName();
            propList.add(row);
          }
        }
      }
      return propList;
    }
  });
  expect(sessionMock.createQuery(eq(loadPropHql))).andReturn(queryProp).atLeastOnce();
  sessionMock.load(isA(PropertyInterface.class), isA(Serializable.class));
  expectLastCall().andAnswer(new IAnswer<Object>() {

    // Copies the matching template field's value onto the property instance the
    // store asked to load, matched by owning object id, field name and class.
    @Override
    public Object answer() throws Throwable {
      BaseProperty property = (BaseProperty) getCurrentArguments()[0];
      Integer objId = property.getObject().getId();
      for (BaseObject templBaseObject : baseObjMap.get(testDocRef)) {
        if (objId.equals(templBaseObject.getId())) {
          for (Object theObj : templBaseObject.getFieldList()) {
            BaseProperty theField = (BaseProperty) theObj;
            if (theField.getName().equals(property.getName()) && theField.getClass().equals(
                property.getClass())) {
              property.setValue(theField.getValue());
            }
          }
        }
      }
      return this;
    }
  }).atLeastOnce();
}
/**
 * Stubs the attachment query so every document loads with no attachments.
 */
private void expectLoadEmptyAttachmentList(Session sessionMock) {
  String loadAttachmentHql = "from XWikiAttachment as attach where attach.docId=:docid";
  Query attachmentQuery = new TestQuery<XWikiAttachment>(loadAttachmentHql,
      new QueryList<XWikiAttachment>() {

        @Override
        public List<XWikiAttachment> list(String query, Map<String, Object> params)
            throws HibernateException {
          // Always empty: the test document carries no attachments.
          return new ArrayList<>();
        }
      });
  expect(sessionMock.createQuery(eq(loadAttachmentHql))).andReturn(attachmentQuery).anyTimes();
}
/**
 * Stubs session.load() for the document itself: fills content, title, author
 * and dates into the passed XWikiDocument instance when it is the test doc.
 */
private void expectXWikiDocLoad(Session sessionMock, XWikiDocument myDoc) {
  sessionMock.load(isA(XWikiDocument.class), eq(Long.valueOf(myDoc.getId())));
  expectLastCall().andAnswer(new IAnswer<Object>() {

    @Override
    public Object answer() throws Throwable {
      XWikiDocument theDoc = (XWikiDocument) getCurrentArguments()[0];
      // BUGFIX: compare against the document's reference. The old check
      // "testDocRef.equals(theDoc)" compared a DocumentReference with an
      // XWikiDocument and was therefore always false, leaving the branch dead.
      if (testDocRef.equals(theDoc.getDocumentReference())) {
        theDoc.setContent("test Content");
        theDoc.setTitle("the test Title");
        theDoc.setAuthor("XWiki.testAuthor");
        theDoc.setCreationDate(new java.sql.Date(new Date().getTime() - 5000L));
        theDoc.setContentUpdateDate(new java.sql.Date(new Date().getTime() - 2000L));
      }
      return this;
    }
  }).anyTimes();
}
/**
 * Registers the template objects expected on the test document in baseObjMap:
 * two menu-name objects (de/en), one menu-item and one page-type object.
 */
private void createBaseObjects() {
  // Use a clone as map key so later mutation of testDocRef cannot affect lookups.
  DocumentReference testDocRefClone = new DocumentReference(testDocRef.clone());
  BaseObject bObj1 = createBaseObject(0, navClassConfig.getMenuNameClassRef(wikiName));
  bObj1.setDocumentReference(testDocRefClone);
  addStringField(bObj1, INavigationClassConfig.MENU_NAME_LANG_FIELD, "de");
  addStringField(bObj1, INavigationClassConfig.MENU_NAME_FIELD, "Hause");
  BaseObject bObj2 = createBaseObject(1, navClassConfig.getMenuNameClassRef(wikiName));
  bObj2.setDocumentReference(testDocRefClone);
  addStringField(bObj2, INavigationClassConfig.MENU_NAME_LANG_FIELD, "en");
  addStringField(bObj2, INavigationClassConfig.MENU_NAME_FIELD, "Home");
  BaseObject bObj3 = createBaseObject(0, navClassConfig.getMenuItemClassRef(wikiRef));
  bObj3.setDocumentReference(testDocRefClone);
  addIntField(bObj3, INavigationClassConfig.MENU_POSITION_FIELD, 1);
  BaseObject bObj4 = createBaseObject(0, pageTypeClassConfig.getPageTypeClassRef(wikiRef));
  bObj4.setDocumentReference(testDocRefClone);
  addStringField(bObj4, IPageTypeClassConfig.PAGE_TYPE_FIELD, "Performance");
  // Vector: the list is read concurrently by the worker threads' checks.
  List<BaseObject> attList = new Vector<>(Arrays.asList(bObj1, bObj2, bObj3, bObj4));
  baseObjMap.put(testDocRefClone, attList);
}
/**
 * Builds the cache store under test around the configured backing store and
 * snapshots the context/mocks that worker threads will attach to.
 */
private void initStorePrepareMultiThreadMocks() throws XWikiException {
  XWikiStoreInterface backingStore = Utils.getComponent(XWikiStoreInterface.class);
  defaultContext = (XWikiContext) getContext().clone();
  theCacheStore = new XWikiCacheStore(backingStore, defaultContext);
  defaultMocks = Collections.unmodifiableCollection(getDefaultMocks());
}
/**
 * Evicts the test document from the store cache, if a cache is present.
 */
private class ResetCacheEntryCommand implements Runnable {

  @Override
  public void run() {
    String cacheKey = theCacheStore.getKey(wikiName, testFullName, "");
    if (theCacheStore.getCache() == null) {
      return;
    }
    theCacheStore.getCache().remove(cacheKey);
  }
}
/**
 * Thread-safe collector of failure messages for one load run; a run is
 * successful iff no message was recorded.
 */
private class LoadDocCheckResult {

  // Vector: messages may be added and read from different threads.
  private final List<String> messages = new Vector<String>();

  public void addMessage(String message) {
    messages.add(message);
  }

  public boolean isSuccessfull() {
    return messages.isEmpty();
  }

  public List<String> getMessages() {
    return messages;
  }
}
/**
 * Loads the test document through the cache store on a worker thread and
 * verifies the loaded document against the template objects in baseObjMap.
 * Optional start/done latches coordinate parallel bursts with the test driver.
 */
private class LoadXWikiDocCommand extends AbstractXWikiRunnable implements
    Callable<LoadDocCheckResult> {

  private XWikiDocument loadedXWikiDoc;
  // True when this thread had no execution context and we created (and must
  // clean up) a fresh one.
  private boolean hasNewContext;
  private final CountDownLatch startSignal;
  private final CountDownLatch doneSignal;
  private final LoadDocCheckResult result = new LoadDocCheckResult();
  // Whether the load is expected to be served from the cache.
  private final boolean expectFromCache;
  private boolean startDone = false;

  public LoadXWikiDocCommand(boolean expectFromCache) {
    this(null, null, expectFromCache);
  }

  public LoadXWikiDocCommand(CountDownLatch startSignal, CountDownLatch doneSignal,
      boolean expectFromCache) {
    this.startSignal = startSignal;
    this.doneSignal = doneSignal;
    this.expectFromCache = expectFromCache;
  }

  private ExecutionContext getExecutionContext() {
    return Utils.getComponent(Execution.class).getContext();
  }

  /**
   * Runs the load and all checks; never throws — every failure is recorded in
   * the returned {@link LoadDocCheckResult}. Latches are counted down in the
   * finally block so the driver cannot dead-wait on a failed run.
   */
  @Override
  public LoadDocCheckResult call() throws Exception {
    try {
      try {
        hasNewContext = (getExecutionContext() == null);
        if (hasNewContext) {
          initExecutionContext();
          getExecutionContext().setProperty(EXECUTIONCONTEXT_KEY_MOCKS, defaultMocks);
          getExecutionContext().setProperty(XWikiContext.EXECUTIONCONTEXT_KEY,
              defaultContext.clone());
        }
        try {
          runInternal();
          testLoadedDocument();
        } finally {
          if (hasNewContext) {
            // cleanup execution context
            cleanupExecutionContext();
          }
        }
      } catch (ExecutionContextException e) {
        LOGGER.error("Failed to initialize execution context", e);
      }
    } catch (Throwable exp) {
      // anything could happen in the test and we want to catch all failures
      result.addMessage("Exception: " + exp.getMessage() + "\n" + ExceptionUtils.getStackTrace(
          exp));
    } finally {
      if ((startSignal != null) && !startDone) {
        startSignal.countDown();
      }
      if (doneSignal != null) {
        doneSignal.countDown();
      }
    }
    return result;
  }

  /**
   * Checks the loaded document: flags, cache expectation, and that every
   * template object is present as a distinct instance with equal field values.
   */
  private void testLoadedDocument() {
    if (loadedXWikiDoc != null) {
      if (loadedXWikiDoc.isNew()) {
        result.addMessage("unexpected: isNew is true");
      }
      if (!loadedXWikiDoc.isMostRecent()) {
        result.addMessage("unexpected: isMostRecent is false");
      }
      if (loadedXWikiDoc.isFromCache() != expectFromCache) {
        result.addMessage("isFromCache does not match " + expectFromCache);
      }
      for (BaseObject theTestObj : baseObjMap.get(testDocRef)) {
        Map<DocumentReference, List<BaseObject>> loadedObjs = loadedXWikiDoc.getXObjects();
        final List<BaseObject> xclassObjs = loadedObjs.get(theTestObj.getXClassReference());
        if (!xclassObjs.contains(theTestObj)) {
          result.addMessage("Object missing " + theTestObj);
        } else {
          BaseObject theLoadedObj = xclassObjs.get(xclassObjs.indexOf(theTestObj));
          if (theLoadedObj == theTestObj) {
            // Identity would mean the template leaked into the loaded doc.
            result.addMessage("Object is same " + theTestObj);
          } else {
            for (String theFieldName : theTestObj.getPropertyNames()) {
              BaseProperty theField = (BaseProperty) theLoadedObj.getField(theFieldName);
              BaseProperty theTestField = (BaseProperty) theTestObj.getField(theFieldName);
              if (theField == theTestField) {
                result.addMessage("Field is same " + theField);
              } else if (!theTestField.getValue().equals(theField.getValue())) {
                // BUGFIX: the old message printed the loaded field twice, so the
                // expected value was never shown; report the template value as
                // expected and the loaded value as found.
                result.addMessage("Field value mismatch expected: " + theTestField.getValue()
                    + "\n but found: " + theField.getValue());
              }
            }
          }
        }
      }
    } else {
      result.addMessage("Loaded document reference is null.");
    }
  }

  /**
   * Waits on the common start latch (if any) so bursts begin together, then
   * loads the test document through the cache store.
   */
  @Override
  public void runInternal() {
    try {
      if (startSignal != null) {
        startSignal.countDown();
        startSignal.await();
        startDone = true;
      }
      XWikiDocument myDoc = new XWikiDocument(testDocRef);
      try {
        loadedXWikiDoc = theCacheStore.loadXWikiDoc(myDoc, getContext());
      } catch (XWikiException exp) {
        throw new IllegalStateException(exp);
      }
    } catch (Exception exp) {
      throw new RuntimeException(exp);
    }
  }
}
/** Sets an integer property on the given object. */
private final void addIntField(BaseObject obj, String name, int val) {
  obj.setIntValue(name, val);
}
/** Sets a string property on the given object. */
private final void addStringField(BaseObject obj, String name, String val) {
  obj.setStringValue(name, val);
}
/** Builds a test XObject with the given object number for the given class. */
private final BaseObject createBaseObject(int num, DocumentReference classRef) {
    BaseObject bObj = new BaseObject();
    // Copy the class reference so mutating the caller's reference cannot
    // affect the created object (clone() presumably returns a plain
    // EntityReference, hence the wrapping constructor -- TODO confirm).
    bObj.setXClassReference(new DocumentReference(classRef.clone()));
    bObj.setNumber(num);
    return bObj;
}
/**
 * Stub callback used by {@code TestQuery} to supply the result of
 * {@code Query#list()} for a given HQL string and its bound parameters.
 */
private interface QueryList<T> {
    public List<T> list(String string, Map<String, Object> params) throws HibernateException;
}
/**
 * Minimal Hibernate {@code Query} test double.  Records named parameters and
 * delegates {@code list()} to an injectable {@link QueryList} stub; every
 * other operation is forwarded to a mock, so an unexpected call fails the
 * test instead of silently succeeding.
 */
private class TestQuery<T> extends AbstractQueryImpl {

    private Query theQueryMock;
    private QueryList<T> listStub;
    /** Named parameters captured from setText/setInteger/setLong. */
    private Map<String, Object> params;

    public TestQuery(String queryStr, QueryList<T> listStub) {
        super(queryStr, FlushMode.AUTO, null, null);
        this.listStub = listStub;
        this.params = new HashMap<String, Object>();
        theQueryMock = createMock(Query.class);
        replay(theQueryMock);
    }

    @SuppressWarnings("rawtypes")
    @Override
    public Iterator iterate() throws HibernateException {
        return theQueryMock.iterate();
    }

    @Override
    public ScrollableResults scroll() throws HibernateException {
        return theQueryMock.scroll();
    }

    @Override
    public ScrollableResults scroll(ScrollMode scrollMode) throws HibernateException {
        return theQueryMock.scroll(scrollMode);
    }

    /** Answers via the stub when one was provided, else via the strict mock. */
    @SuppressWarnings("unchecked")
    @Override
    public List<T> list() throws HibernateException {
        if (listStub != null) {
            return listStub.list(getQueryString(), params);
        }
        return theQueryMock.list();
    }

    @Override
    public Query setText(String named, String val) {
        this.params.put(named, val);
        return this;
    }

    @Override
    public Query setInteger(String named, int val) {
        // Integer.valueOf instead of the deprecated new Integer(int):
        // identical value semantics, uses the shared integer cache.
        this.params.put(named, Integer.valueOf(val));
        return this;
    }

    @Override
    public Query setLong(String named, long val) {
        // Long.valueOf instead of the deprecated new Long(long).
        this.params.put(named, Long.valueOf(val));
        return this;
    }

    @Override
    public int executeUpdate() throws HibernateException {
        return theQueryMock.executeUpdate();
    }

    @Override
    public Query setLockMode(String alias, LockMode lockMode) {
        return theQueryMock.setLockMode(alias, lockMode);
    }

    @SuppressWarnings("rawtypes")
    @Override
    protected Map getLockModes() {
        throw new UnsupportedOperationException("getLockModes not supported");
    }
}
/**
 * XWiki stand-in that resolves classes from the context cache instead of
 * loading their documents, so recursive class usage cannot trigger
 * recursive document loads during the test.
 */
private class TestXWiki extends XWiki {

    @Override
    public BaseClass getXClass(DocumentReference documentReference, XWikiContext context)
        throws XWikiException {
        BaseClass cached = context.getBaseClass(documentReference);
        if (cached != null) {
            return cached;
        }
        // First request for this class: register an empty stub in the context
        // so any later (possibly recursive) lookup is answered from the cache.
        BaseClass stub = new BaseClass();
        stub.setDocumentReference(documentReference);
        context.addBaseClass(stub);
        return stub;
    }
}
}
|
package com.example.javamavenjunithelloworld;
import org.junit.Test;
import java.io.ByteArrayOutputStream;
import java.io.OutputStream;
import java.io.PrintStream;
import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;
/**
 * Unit tests for {@code Hello}.
 *
 * <p>Fix: the original tests disagreed on the line terminator ("\n" in
 * {@code testSayHello}, "\r\n" in {@code testSayHelloAFewTimes}), so at least
 * one of them failed on any given platform.  Assuming {@code sayHello} writes
 * via {@code PrintStream.println}, the output ends with the platform
 * separator; both expectations are now built from {@link System#lineSeparator()}.
 */
public class HelloTest {

    /** Platform line terminator, as emitted by PrintStream.println. */
    private static final String EOL = System.lineSeparator();

    /** A fresh Hello greets exactly once. */
    @Test
    public void testSayHello() {
        OutputStream os = new ByteArrayOutputStream();
        PrintStream stream = new PrintStream(os, true);
        Hello hi = new Hello();
        hi.sayHello(stream);
        assertThat(os.toString(), is(equalTo(Hello.HELLO + EOL)));
    }

    @Test
    public void testSayHelloAFewTimes() {
        OutputStream os = new ByteArrayOutputStream();
        PrintStream stream = new PrintStream(os, true);
        Hello hi = new Hello();
        hi.setTimes(3);
        hi.sayHello(stream);
        // Does it say "Hello!" three times?
        String goal = Hello.HELLO + EOL + Hello.HELLO + EOL + Hello.HELLO + EOL;
        assertThat(os.toString(), is(equalTo(goal)));
    }

    /** setTimes must reject values above the documented maximum. */
    @Test(expected = IllegalArgumentException.class)
    public void testIllegalArgumentForHello21() {
        Hello hi = new Hello();
        hi.setTimes(Hello.MAXIMUM_AMOUNT_OF_TIMES + 1);
    }

    /** setTimes must reject negative repeat counts. */
    @Test(expected = IllegalArgumentException.class)
    public void testIllegalArgumentForHelloNegative() {
        Hello hi = new Hello();
        hi.setTimes(-1);
    }
}
|
package com.versionone.integration.ciCommon;
import java.text.SimpleDateFormat;
import java.util.Collection;
import java.util.Date;
import java.util.Random;
import org.junit.Assert;
import org.junit.Test;
import com.versionone.DB;
import com.versionone.om.BuildProject;
import com.versionone.om.BuildRun;
import com.versionone.om.ChangeSet;
import com.versionone.om.PrimaryWorkitem;
import com.versionone.om.V1Instance;
import com.versionone.om.filters.BuildRunFilter;
/**
 * Integration tests for the VersionOne build-run worker.
 *
 * <p>NOTE(review): these tests require network access to the sandbox server
 * below and pre-existing data (build project, stories); they are not hermetic.
 */
public class WorkerTest {
    // connection credentials (shared public sandbox server, not real secrets)
    private static final String URL_TO_V1 = "https://www14.v1host.com/v1sdktesting/";
    private static final String PASSWORD_TO_V1 = "admin";
    private static final String LOGIN_TO_V1 = "admin";
    private static final String BUILDPROJECT_ID = "BuildProject:1016";
    private static final String BUILDPROJECT_REFERENCE = "Sample: Call Center";
    private static final String STORY1 = "B-01001";

    /**
     * Submits a build run with two change sets and verifies that exactly one
     * matching BuildRun is created under the configured BuildProject.
     */
    @Test
    public void test() {
        final Date now = new Date();
        // Random ids keep repeated runs from colliding on the shared server.
        int random = new Random().nextInt();
        final V1Config cfg = new V1Config(URL_TO_V1, LOGIN_TO_V1,
                PASSWORD_TO_V1);
        final Worker w = new V1Worker(cfg, System.out);
        final BuildInfoMock info = new BuildInfoMock();
        info.buildId = random++;
        info.buildName = String.valueOf(random++);
        info.elapsedTime = 4567;
        info.forced = false;
        info.projectName = BUILDPROJECT_REFERENCE;
        info.startTime = now;
        info.successful = true;
        info.url = "localhost";
        String id = "Id" + (random++);
        info.changes.put(id, new VcsModificationMock("User1", "Comment2 - "
                + STORY1, now, id));
        id = "Id" + random;
        info.changes.put(id, new VcsModificationMock("User9", "Comment8", now,
                id));
        Assert.assertEquals(Worker.Result.SUCCESS, w.submitBuildRun(info));
        final V1Instance v1 = cfg.getV1Instance();
        final BuildProject x = v1.get().buildProjectByID(BUILDPROJECT_ID);
        Assert.assertEquals(BUILDPROJECT_REFERENCE, x.getReference());
        final BuildRunFilter filter = new BuildRunFilter();
        filter.references.add(String.valueOf(info.buildId));
        final Collection<BuildRun> y = x.getBuildRuns(filter);
        Assert.assertEquals(1, y.size());
    }

    /**
     * Verifies every surfaced field of a submitted BuildRun against the mock
     * build info it was created from.
     */
    private void checkBuildRun(BuildInfoMock info, BuildRun z) {
        Assert.assertEquals(BUILDPROJECT_REFERENCE + " - build."
                + info.buildName, z.getName());
        Assert.assertEquals(info.forced ? "Forced" : "Trigger", z.getSource()
                .getCurrentValue());
        Assert.assertEquals(String.valueOf(info.buildId), z.getReference());
        Assert.assertEquals(new DB.DateTime(info.startTime), z.getDate());
        Assert.assertEquals(info.successful ? "Passed" : "Failed", z
                .getStatus().getCurrentValue());
        Assert.assertEquals((double) info.elapsedTime, z.getElapsed(), 0.001);
        checkWorkitemCollection(STORY1, z.getAffectedPrimaryWorkitems(null),
                Boolean.TRUE);
        checkWorkitemCollection(STORY1, z.getCompletedPrimaryWorkitems(null),
                Boolean.FALSE);
        final String desc = z.getDescription();
        for (VcsModification change : info.getChanges()) {
            Assert.assertTrue(desc.contains(change.getUserName()));
            Assert.assertTrue(desc.contains(change.getComment()));
        }
        final Collection<ChangeSet> v1Changes = z.getChangeSets();
        Assert.assertEquals(info.changes.size(), v1Changes.size());
        // Hoisted out of the loop: the pattern never changes and
        // SimpleDateFormat construction is comparatively expensive.
        final SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        for (ChangeSet change : v1Changes) {
            String id = change.getReference();
            Assert.assertTrue(info.changes.containsKey(id));
            Assert.assertTrue(change.getName().contains(
                    info.changes.get(id).getUserName()));
            final Date date = info.changes.get(id).getDate();
            final String d = dateFormat.format(date);
            Assert.assertTrue(change.getName().contains(d));
            Assert.assertTrue(change.getDescription().contains(
                    info.changes.get(id).getComment()));
        }
    }

    /**
     * Asserts the expected size of a work item collection and, when items are
     * expected, that the first one carries the given display id.
     *
     * <p>Fix: the original dereferenced {@code iterator().next()} even in the
     * empty-collection branch, which threw NoSuchElementException instead of
     * producing a meaningful assertion failure.
     */
    private void checkWorkitemCollection(String storyName,
            Collection<PrimaryWorkitem> z, Boolean flag) {
        if (flag) {
            // NOTE(review): 6012 looks environment-specific -- confirm the
            // expected count against the sandbox data.
            Assert.assertEquals(6012, z.size());
            Assert.assertEquals(storyName, z.iterator().next().getDisplayID());
        } else {
            Assert.assertEquals(0, z.size());
        }
    }

    private static final String ASSETDETAIL = "assetdetail.v1?oid=";

    /**
     * This is integration test to use this test need to: 1. Setup credentials
     * for connection 2. Create story in the VersionOne 3. Copy display ID of
     * story and set it to displayId variable 4. Copy name of story to storyName
     * variable 5. Copy token of story to storyId variable
     */
    @Test
    public void testWorkitemData() {
        final String displayId = "B-01012";
        final String storyName = "Sample: Enter Order Total";
        final String storyId = "Story:1066";
        final V1Config cfg = new V1Config(URL_TO_V1, LOGIN_TO_V1,
                PASSWORD_TO_V1);
        final Worker w = new V1Worker(cfg, System.out);
        WorkitemData workitemData = w.getWorkitemData(displayId);
        Assert.assertEquals(storyId, workitemData.getId());
        Assert.assertEquals(storyName, workitemData.getName());
        Assert.assertEquals(URL_TO_V1 + ASSETDETAIL + storyId,
                workitemData.getUrl());
    }
}
|
package org.jenkinsci.plugins.gitclient;
import static java.util.Collections.unmodifiableList;
import static org.apache.commons.lang.StringUtils.isBlank;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
import static org.jenkinsci.plugins.gitclient.StringSharesPrefix.sharesPrefix;
import static org.junit.Assert.assertNotEquals;
import hudson.FilePath;
import hudson.Launcher;
import hudson.Util;
import hudson.ProxyConfiguration;
import hudson.model.TaskListener;
import hudson.plugins.git.Branch;
import hudson.plugins.git.GitException;
import hudson.plugins.git.GitLockFailedException;
import hudson.plugins.git.IGitAPI;
import hudson.plugins.git.IndexEntry;
import hudson.remoting.VirtualChannel;
import hudson.util.IOUtils;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.StringWriter;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.TreeSet;
import java.util.UUID;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import junit.framework.TestCase;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.SystemUtils;
import org.eclipse.jgit.api.Status;
import org.eclipse.jgit.internal.storage.file.FileRepository;
import org.eclipse.jgit.lib.Config;
import org.eclipse.jgit.lib.ConfigConstants;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.lib.StoredConfig;
import org.eclipse.jgit.transport.RefSpec;
import org.eclipse.jgit.transport.RemoteConfig;
import org.eclipse.jgit.transport.URIish;
import org.jvnet.hudson.test.Bug;
import org.jvnet.hudson.test.TemporaryDirectoryAllocator;
import org.objenesis.ObjenesisStd;
import com.google.common.base.Function;
import com.google.common.base.Predicate;
import com.google.common.collect.Collections2;
import com.google.common.collect.Lists;
/**
* @author <a href="mailto:nicolas.deloof@gmail.com">Nicolas De Loof</a>
*/
public abstract class GitAPITestCase extends TestCase {
public final TemporaryDirectoryAllocator temporaryDirectoryAllocator = new TemporaryDirectoryAllocator();
protected hudson.EnvVars env = new hudson.EnvVars();
protected TaskListener listener;
protected LogHandler handler = null;
private int logCount = 0;
private static final String LOGGING_STARTED = "Logging started";
private static final String SRC_DIR = (new File(".")).getAbsolutePath();
private String revParseBranchName = null;
/**
 * Creates a uniquely named branch from origin/master so later assertions can
 * verify (JENKINS-33258) that no "git rev-parse <branch>" call leaks the
 * branch name into the logged commands.
 */
private void createRevParseBranch() throws GitException, InterruptedException {
    revParseBranchName = "rev-parse-branch-" + UUID.randomUUID().toString();
    w.git.checkout("origin/master", revParseBranchName);
}
/**
* One local workspace of a Git repository on a temporary directory
* that gets automatically cleaned up in the end.
*
* Every test case automatically gets one in {@link #w} but additional ones can be created if multi-repository
* interactions need to be tested.
*/
class WorkingArea {
    final File repo;
    final GitClient git;
    // Set by tests that create a bare repository; controls repo() resolution.
    boolean bare = false;

    WorkingArea() throws Exception {
        this(temporaryDirectoryAllocator.allocate());
    }

    WorkingArea(File repo) throws Exception {
        this.repo = repo;
        git = setupGitAPI(repo);
        setupProxy(git);
    }

    /** Applies proxy settings from system properties (if any) to the client. */
    private void setupProxy(GitClient gitClient)
      throws SecurityException, NoSuchFieldException, IllegalArgumentException, IllegalAccessException
    {
        final String proxyHost = getSystemProperty("proxyHost", "http.proxyHost", "https.proxyHost");
        final String proxyPort = getSystemProperty("proxyPort", "http.proxyPort", "https.proxyPort");
        final String proxyUser = getSystemProperty("proxyUser", "http.proxyUser", "https.proxyUser");
        //final String proxyPassword = getSystemProperty("proxyPassword", "http.proxyPassword", "https.proxyPassword");
        final String noProxyHosts = getSystemProperty("noProxyHosts", "http.noProxyHosts", "https.noProxyHosts");
        if(isBlank(proxyHost) || isBlank(proxyPort)) return;
        // ProxyConfiguration cannot be constructed outside a running Jenkins,
        // so instantiate via Objenesis and populate the fields reflectively.
        ProxyConfiguration proxyConfig = new ObjenesisStd().newInstance(ProxyConfiguration.class);
        setField(ProxyConfiguration.class, "name", proxyConfig, proxyHost);
        setField(ProxyConfiguration.class, "port", proxyConfig, Integer.parseInt(proxyPort));
        setField(ProxyConfiguration.class, "userName", proxyConfig, proxyUser);
        setField(ProxyConfiguration.class, "noProxyHost", proxyConfig, noProxyHosts);
        //Password does not work since a set password results in a "Secret" call which expects a running Jenkins
        setField(ProxyConfiguration.class, "password", proxyConfig, null);
        setField(ProxyConfiguration.class, "secretPassword", proxyConfig, null);
        gitClient.setProxy(proxyConfig);
    }

    /** Reflectively sets a (possibly private) field on the given object. */
    private void setField(Class<?> clazz, String fieldName, Object object, Object value)
      throws SecurityException, NoSuchFieldException, IllegalArgumentException, IllegalAccessException
    {
        Field declaredField = clazz.getDeclaredField(fieldName);
        declaredField.setAccessible(true);
        declaredField.set(object, value);
    }

    /** Returns the first non-null system property among the given keys, or null. */
    private String getSystemProperty(String ... keyVariants)
    {
        for(String key : keyVariants) {
            String value = System.getProperty(key);
            if(value != null) return value;
        }
        return null;
    }

    /** Runs a space-separated command line in this workspace; asserts success. */
    String cmd(String args) throws IOException, InterruptedException {
        return launchCommand(args.split(" "));
    }

    /** Like {@link #cmd(String)} but optionally ignores a non-zero exit code. */
    String cmd(boolean ignoreError, String args) throws IOException, InterruptedException {
        return launchCommand(ignoreError, args.split(" "));
    }

    String launchCommand(String... args) throws IOException, InterruptedException {
        return launchCommand(false, args);
    }

    /**
     * Runs a command in this workspace directory and returns its output.
     * Unless ignoreError is set, a non-zero exit status fails the test with
     * the captured command output as the assertion message.
     */
    String launchCommand(boolean ignoreError, String... args) throws IOException, InterruptedException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        int st = new Launcher.LocalLauncher(listener).launch().pwd(repo).cmds(args).
            envs(env).stdout(out).join();
        String s = out.toString();
        if (!ignoreError) {
            if (s == null || s.isEmpty()) {
                // No output captured: fall back to the command line itself.
                s = StringUtils.join(args, ' ');
            }
            assertEquals(s, 0, st); /* Reports full output of failing commands */
        }
        return s;
    }

    String repoPath() {
        return repo.getAbsolutePath();
    }

    WorkingArea init() throws IOException, InterruptedException {
        git.init();
        return this;
    }

    WorkingArea init(boolean bare) throws IOException, InterruptedException {
        git.init_().workspace(repoPath()).bare(bare).execute();
        return this;
    }

    void tag(String tag) throws IOException, InterruptedException {
        cmd("git tag " + tag);
    }

    /** Creates a commit with no content changes, used to build test history. */
    void commitEmpty(String msg) throws IOException, InterruptedException {
        cmd("git commit --allow-empty -m " + msg);
    }

    /**
     * Refers to a file in this workspace
     */
    File file(String path) {
        return new File(repo, path);
    }

    boolean exists(String path) {
        return file(path).exists();
    }

    /**
     * Creates a file in the workspace.
     */
    void touch(String path) throws IOException {
        file(path).createNewFile();
    }

    /**
     * Creates a file in the workspace with the given content (UTF-8).
     */
    File touch(String path, String content) throws IOException {
        File f = file(path);
        FileUtils.writeStringToFile(f, content, "UTF-8");
        return f;
    }

    public void rm(String path) {
        file(path).delete();
    }

    public String contentOf(String path) throws IOException {
        return FileUtils.readFileToString(file(path), "UTF-8");
    }

    /**
     * Creates a CGit implementation. Sometimes we need this for testing JGit impl.
     */
    protected CliGitAPIImpl cgit() throws Exception {
        return (CliGitAPIImpl)Git.with(listener, env).in(repo).using("git").getClient();
    }

    /**
     * Creates a {@link Repository} object out of it.
     */
    protected FileRepository repo() throws IOException {
        return bare ? new FileRepository(repo) : new FileRepository(new File(repo, ".git"));
    }

    /**
     * Obtain the current HEAD revision
     */
    ObjectId head() throws IOException, InterruptedException {
        return git.revParse("HEAD");
    }

    /**
     * Casts the {@link #git} to {@link IGitAPI}
     */
    public IGitAPI igit() {
        return (IGitAPI)git;
    }
}
private WorkingArea w;
/**
 * Clones {@code src} via the command line into a fresh temporary directory
 * and returns a new WorkingArea wrapping the resulting repository (so its
 * GitClient is set up against the cloned checkout).
 */
WorkingArea clone(String src) throws Exception {
    final WorkingArea scratch = new WorkingArea();
    scratch.launchCommand("git", "clone", src, scratch.repoPath());
    return new WorkingArea(scratch.repo);
}
private boolean timeoutVisibleInCurrentTest;
/**
 * Returns true if the current test is expected to have a timeout
 * value visibly written to the listener log. Used to assert that
 * timeout values are passed correctly through the layers without
 * requiring that the timeout actually expire.
 * @return whether a timeout value should appear in this test's log
 * @see #setTimeoutVisibleInCurrentTest(boolean)
 */
protected boolean getTimeoutVisibleInCurrentTest() {
    return timeoutVisibleInCurrentTest;
}
/**
 * Pass visible = true to cause the current test to assert that a
 * timeout value should be reported in at least one of the log
 * entries.
 * @param visible set to false if the current test performs no operation which should report a timeout value
 * @see #getTimeoutVisibleInCurrentTest()
 */
protected void setTimeoutVisibleInCurrentTest(boolean visible) {
    timeoutVisibleInCurrentTest = visible;
}
/**
* Array of integer values of the timeouts expected to be passed
* to launchCommandIn() during a single test. Simplest to use if
* the first or the last call in a test is the only call which
* uses a timeout, then the expectedTimeouts array can be
* initialized with default values in all the other entries.
*/
private List<Integer> expectedTimeouts = null;
/** Records the exact per-call timeout values the current test expects to observe. */
protected void setExpectedTimeouts(List<Integer> timeouts) {
    expectedTimeouts = timeouts;
}
@Override
protected void setUp() throws Exception {
    revParseBranchName = null;
    setTimeoutVisibleInCurrentTest(true);
    expectedTimeouts = null;
    // Each test gets its own logger (logCount suffix) so handlers never leak
    // messages across tests running in the same JVM.
    Logger logger = Logger.getLogger(this.getClass().getPackage().getName() + "-" + logCount++);
    handler = new LogHandler();
    handler.setLevel(Level.ALL);
    logger.setUseParentHandlers(false);
    logger.addHandler(handler);
    logger.setLevel(Level.ALL);
    listener = new hudson.util.LogTaskListener(logger, Level.ALL);
    // Sentinel message; tearDown asserts it to prove logging was wired up.
    listener.getLogger().println(LOGGING_STARTED);
    w = new WorkingArea();
}
/* HEAD ref of local mirror - all read access should use getMirrorHead */
private static ObjectId mirrorHead = null;
/** Lazily resolves and caches the HEAD revision of the local mirror. */
private ObjectId getMirrorHead() throws IOException, InterruptedException
{
    if (mirrorHead != null) {
        return mirrorHead;
    }
    final String mirrorPath = new File(localMirror()).getAbsolutePath();
    // rev-parse output is "<40-char sha1>\n"; keep only the sha1.
    final String sha1 = w.launchCommand("git", "--git-dir=" + mirrorPath, "rev-parse", "HEAD").substring(0, 40);
    mirrorHead = ObjectId.fromString(sha1);
    return mirrorHead;
}
private final String remoteMirrorURL = "https://github.com/jenkinsci/git-client-plugin.git";
private final String remoteSshURL = "git@github.com:ndeloof/git-client-plugin.git";
/**
 * Returns the path of a local bare mirror of the plugin repository, cloning
 * it into target/clone.git on first use.  Walks up from the current
 * directory to find the enclosing maven "target" folder.
 *
 * @throws IllegalStateException if no enclosing directory contains "target"
 */
public String localMirror() throws IOException, InterruptedException {
    File base = new File(".").getAbsoluteFile();
    for (File f = base; f != null; f = f.getParentFile()) {
        if (new File(f, "target").exists()) {
            File clone = new File(f, "target/clone.git");
            if (!clone.exists()) { // TODO: perhaps some kind of quick timestamp-based up-to-date check?
                // Consistency fix: use the shared remoteMirrorURL field
                // instead of a second hard-coded copy of the same URL.
                w.launchCommand("git", "clone", "--mirror", remoteMirrorURL, clone.getAbsolutePath());
            }
            return clone.getPath();
        }
    }
    throw new IllegalStateException();
}
/* JENKINS-33258 detected many calls to git rev-parse. This checks
 * those calls are not being made. The createRevParseBranch call
 * creates a branch whose name is unknown to the tests. This
 * checks that the branch name is not mentioned in a call to
 * git rev-parse, i.e. that the implementation did not rev-parse
 * the branch behind the test's back.
 */
private void checkRevParseCalls(String branchName) {
    String messages = StringUtils.join(handler.getMessages(), ";");
    // Linux uses rev-parse without quotes
    assertFalse("git rev-parse called: " + messages, handler.containsMessageSubstring("rev-parse " + branchName));
    // Windows quotes the rev-parse argument
    assertFalse("git rev-parse called: " + messages, handler.containsMessageSubstring("rev-parse \"" + branchName));
}
/**
 * Asserts the timeout values captured by the log handler match expectations.
 * When no explicit expectations were set, every captured timeout must equal
 * the CliGit default.
 */
private void checkTimeout() {
    List<Integer> timeouts = handler.getTimeouts();
    if (expectedTimeouts == null) {
        // No explicit expectations: expect the default timeout everywhere.
        expectedTimeouts = new ArrayList<>();
        for (int i = 0; i < timeouts.size(); i++) {
            expectedTimeouts.add(i, CliGitAPIImpl.TIMEOUT);
        }
    } else {
        assertEquals("Wrong timeout count", expectedTimeouts.size(), timeouts.size());
        // NOTE(review): this assignment makes the assertEquals below compare
        // expectedTimeouts with itself, so only the *count* is really checked
        // when explicit expectations were set -- confirm this is intentional.
        timeouts = expectedTimeouts;
    }
    assertEquals("Wrong timeout", expectedTimeouts, timeouts);
}
/** Creates the GitClient implementation under test for the given workspace. */
protected abstract GitClient setupGitAPI(File ws) throws Exception;
@Override
protected void tearDown() throws Exception {
    try {
        temporaryDirectoryAllocator.dispose();
    } catch (IOException e) {
        // Best-effort cleanup; do not fail the test for an undeletable temp dir.
        e.printStackTrace(System.err);
    }
    try {
        String messages = StringUtils.join(handler.getMessages(), ";");
        // Sanity check that setUp's sentinel message reached the handler.
        assertTrue("Logging not started: " + messages, handler.containsMessageSubstring(LOGGING_STARTED));
        if (getTimeoutVisibleInCurrentTest()) {
            checkTimeout();
        }
        if (revParseBranchName != null) {
            // JENKINS-33258: ensure no rev-parse calls used the branch name.
            checkRevParseCalls(revParseBranchName);
        }
    } finally {
        handler.close();
    }
}
/** Asserts the named remote of the workspace repository points at the local mirror. */
private void check_remote_url(final String repositoryName) throws InterruptedException, IOException {
    final String mirrorPath = localMirror();
    assertEquals("Wrong remote URL", mirrorPath, w.git.getRemoteUrl(repositoryName));
    final String remoteListing = w.cmd("git remote -v");
    assertTrue("remote URL has not been updated", remoteListing.contains(localMirror()));
}
/** Asserts that every named branch is present in the given branch set. */
private void assertBranchesExist(Set<Branch> branches, String ... names) throws InterruptedException {
    final Collection<String> branchNames = new ArrayList<>();
    for (Branch branch : branches) {
        branchNames.add(branch.getName());
    }
    for (String name : names) {
        assertTrue(name + " branch not found in " + branchNames, branchNames.contains(name));
    }
}
/** Verifies an explicit setAuthor() is honored by the subsequent commit. */
public void test_setAuthor() throws Exception {
    final String authorName = "Test Author";
    final String authorEmail = "jenkins@example.com";
    w.init();
    w.touch("file1", "Varying content " + java.util.UUID.randomUUID().toString());
    w.git.add("file1");
    w.git.setAuthor(authorName, authorEmail);
    w.git.commit("Author was set explicitly on this commit");
    List<String> revision = w.git.showRevision(w.head());
    // Line index 2 of the showRevision output carries the "author" header.
    assertTrue("Wrong author in " + revision, revision.get(2).startsWith("author " + authorName + " <" + authorEmail +"> "));
}
/** Verifies an explicit setCommitter() is honored by the subsequent commit. */
public void test_setCommitter() throws Exception {
    final String committerName = "Test Commiter";
    final String committerEmail = "jenkins.plugin@example.com";
    w.init();
    w.touch("file1", "Varying content " + java.util.UUID.randomUUID().toString());
    w.git.add("file1");
    w.git.setCommitter(committerName, committerEmail);
    w.git.commit("Committer was set explicitly on this commit");
    List<String> revision = w.git.showRevision(w.head());
    // Line index 3 of the showRevision output carries the "committer" header.
    assertTrue("Wrong committer in " + revision, revision.get(3).startsWith("committer " + committerName + " <" + committerEmail + "> "));
}
/** Convenience overload: only the final logged command is expected to use newTimeout. */
private void setExpectedTimeoutWithAdjustedEnd(final int newTimeout) {
    setExpectedTimeoutWithAdjustedEnd(newTimeout, 1);
}
/**
 * Expects the default timeout for every logged command except the last
 * {@code adjustmentCount} ones, which must report {@code newTimeout}.
 * No-op when timeouts are not visible in the current test.
 */
private void setExpectedTimeoutWithAdjustedEnd(final int newTimeout, int adjustmentCount) {
    if (!getTimeoutVisibleInCurrentTest()) {
        return;
    }
    final int commandCount = handler.getTimeouts().size();
    final List<Integer> expected = new ArrayList<>(commandCount);
    for (int i = 0; i < commandCount; i++) {
        expected.add(CliGitAPIImpl.TIMEOUT);
    }
    // Overwrite the tail entries with the explicitly requested timeout.
    for (int i = 0; i < adjustmentCount; i++) {
        expected.set(commandCount - i - 1, newTimeout);
    }
    setExpectedTimeouts(expected);
}
/** Clone arguments include:
* repositoryName(String) - if omitted, CliGit does not set a remote repo name
* shallow() - no relevant assertion of success or failure of this argument
* shared() - not implemented on CliGit, not verified on JGit
* reference() - implemented on JGit, not verified on either JGit or CliGit
*
* CliGit and JGit both require the w.git.checkout() call
* otherwise no branch is checked out. That is different than the
* command line git program, but consistent within the git API.
*/
public void test_clone() throws Exception
{
    int newTimeout = 7;
    w.git.clone_().timeout(newTimeout).url(localMirror()).repositoryName("origin").execute();
    createRevParseBranch(); // Verify JENKINS-32258 is fixed
    w.git.checkout("origin/master", "master");
    check_remote_url("origin");
    assertBranchesExist(w.git.getBranches(), "master");
    // A plain clone must neither use alternates nor be shallow.
    final String alternates = ".git" + File.separator + "objects" + File.separator + "info" + File.separator + "alternates";
    assertFalse("Alternates file found: " + alternates, w.exists(alternates));
    assertFalse("Unexpected shallow clone", w.cgit().isShallowRepository());
    // The explicit timeout must be visible on the last logged command.
    setExpectedTimeoutWithAdjustedEnd(newTimeout);
}
/** checkout of a nonexistent SHA1 must fail with a descriptive GitException. */
public void test_checkout_exception() throws Exception {
    w.git.clone_().url(localMirror()).repositoryName("origin").execute();
    createRevParseBranch();
    w.git.checkout("origin/master", "master");
    // Deliberately invalid start point (not a real object id).
    final String SHA1 = "feedbeefbeaded";
    try {
        w.git.checkout(SHA1, "master");
        fail("Expected checkout exception not thrown");
    } catch (GitException ge) {
        assertEquals("Could not checkout master with start point " + SHA1, ge.getMessage());
    }
}
/** Clone with an explicit remote name other than the default "origin". */
public void test_clone_repositoryName() throws IOException, InterruptedException
{
    w.git.clone_().url(localMirror()).repositoryName("upstream").execute();
    w.git.checkout("upstream/master", "master");
    check_remote_url("upstream");
    assertBranchesExist(w.git.getBranches(), "master");
    final String alternates = ".git" + File.separator + "objects" + File.separator + "info" + File.separator + "alternates";
    assertFalse("Alternates file found: " + alternates, w.exists(alternates));
}
/** Shallow clone: CliGit produces a shallow repo; JGit silently does not. */
public void test_clone_shallow() throws Exception
{
    w.git.clone_().url(localMirror()).repositoryName("origin").shallow().execute();
    createRevParseBranch(); // Verify JENKINS-32258 is fixed
    w.git.checkout("origin/master", "master");
    check_remote_url("origin");
    assertBranchesExist(w.git.getBranches(), "master");
    final String alternates = ".git" + File.separator + "objects" + File.separator + "info" + File.separator + "alternates";
    assertFalse("Alternates file found: " + alternates, w.exists(alternates));
    /* JGit does not support shallow clone */
    assertEquals("isShallow?", w.igit() instanceof CliGitAPIImpl, w.cgit().isShallowRepository());
    final String shallow = ".git" + File.separator + "shallow";
    assertEquals("Shallow file existence: " + shallow, w.igit() instanceof CliGitAPIImpl, w.exists(shallow));
}
/** Shallow clone with an explicit depth; only CliGit creates the shallow file. */
public void test_clone_shallow_with_depth() throws IOException, InterruptedException
{
    w.git.clone_().url(localMirror()).repositoryName("origin").shallow().depth(2).execute();
    w.git.checkout("origin/master", "master");
    check_remote_url("origin");
    assertBranchesExist(w.git.getBranches(), "master");
    final String alternates = ".git" + File.separator + "objects" + File.separator + "info" + File.separator + "alternates";
    assertFalse("Alternates file found: " + alternates, w.exists(alternates));
    /* JGit does not support shallow clone */
    final String shallow = ".git" + File.separator + "shallow";
    assertEquals("Shallow file existence: " + shallow, w.igit() instanceof CliGitAPIImpl, w.exists(shallow));
}
/** Shared clone must borrow all objects from the mirror via alternates. */
public void test_clone_shared() throws IOException, InterruptedException
{
    w.git.clone_().url(localMirror()).repositoryName("origin").shared().execute();
    createRevParseBranch(); // Verify JENKINS-32258 is fixed
    w.git.checkout("origin/master", "master");
    check_remote_url("origin");
    assertBranchesExist(w.git.getBranches(), "master");
    assertAlternateFilePointsToLocalMirror();
    assertNoObjectsInRepository();
}
/** Reference clone must borrow all objects from the reference repo via alternates. */
public void test_clone_reference() throws IOException, InterruptedException
{
    w.git.clone_().url(localMirror()).repositoryName("origin").reference(localMirror()).execute();
    createRevParseBranch(); // Verify JENKINS-32258 is fixed
    w.git.checkout("origin/master", "master");
    check_remote_url("origin");
    assertBranchesExist(w.git.getBranches(), "master");
    assertAlternateFilePointsToLocalMirror();
    assertNoObjectsInRepository();
}
/**
 * Asserts the repository borrows all of its objects (shared/reference clone):
 * .git/objects may contain only the empty 'info' and 'pack' bookkeeping
 * directories, and the pack directory (if present) must be empty.
 */
private void assertNoObjectsInRepository() {
    List<String> objectsDir = new ArrayList<>(Arrays.asList(w.file(".git/objects").list()));
    objectsDir.remove("info");
    objectsDir.remove("pack");
    assertTrue("Objects directory must not contain anything but 'info' and 'pack' folders", objectsDir.isEmpty());
    File packDir = w.file(".git/objects/pack");
    if (packDir.isDirectory()) {
        // Fixed assertion-message typo: "must noct" -> "must not".
        assertEquals("Pack dir must not contain anything", 0, packDir.list().length);
    }
}
/** Asserts .git/objects/info/alternates exists and points at the mirror's objects dir. */
private void assertAlternateFilePointsToLocalMirror() throws IOException, InterruptedException {
    final String alternates = ".git" + File.separator + "objects" + File.separator + "info" + File.separator + "alternates";
    assertTrue("Alternates file not found: " + alternates, w.exists(alternates));
    // The alternates file uses forward slashes, even on Windows.
    final String expectedContent = localMirror().replace("\\", "/") + "/objects";
    final String actualContent = w.contentOf(alternates);
    assertEquals("Alternates file wrong content", expectedContent, actualContent);
    final File alternatesDir = new File(actualContent);
    assertTrue("Alternates destination " + actualContent + " missing", alternatesDir.isDirectory());
}
/** Reference clone against a non-bare working repository (this source checkout). */
public void test_clone_reference_working_repo() throws IOException, InterruptedException
{
    assertTrue("SRC_DIR " + SRC_DIR + " has no .git subdir", (new File(SRC_DIR + File.separator + ".git").isDirectory()));
    w.git.clone_().url(localMirror()).repositoryName("origin").reference(SRC_DIR).execute();
    w.git.checkout("origin/master", "master");
    check_remote_url("origin");
    assertBranchesExist(w.git.getBranches(), "master");
    final String alternates = ".git" + File.separator + "objects" + File.separator + "info" + File.separator + "alternates";
    assertTrue("Alternates file not found: " + alternates, w.exists(alternates));
    // For a working-repo reference, alternates point inside its .git directory.
    final String expectedContent = SRC_DIR.replace("\\", "/") + "/.git/objects";
    final String actualContent = w.contentOf(alternates);
    assertEquals("Alternates file wrong content", expectedContent, actualContent);
    final File alternatesDir = new File(actualContent);
    assertTrue("Alternates destination " + actualContent + " missing", alternatesDir.isDirectory());
}
/**
 * The default fetch refspec written by clone_() must match what the real
 * command-line "git clone" writes into its repository config.
 */
public void test_clone_refspec() throws Exception {
    w.git.clone_().url(localMirror()).repositoryName("origin").execute();
    // Reference clone made with the real git CLI for comparison.
    final WorkingArea w2 = new WorkingArea();
    w2.launchCommand("git", "clone", localMirror(), "./");
    w2.git.withRepository(new RepositoryCallback<Void>() {
        public Void invoke(final Repository realRepo, VirtualChannel channel) throws IOException, InterruptedException {
            return w.git.withRepository(new RepositoryCallback<Void>() {
                public Void invoke(final Repository implRepo, VirtualChannel channel) {
                    final String realRefspec = realRepo.getConfig().getString(ConfigConstants.CONFIG_REMOTE_SECTION, Constants.DEFAULT_REMOTE_NAME, "fetch");
                    final String implRefspec = implRepo.getConfig().getString(ConfigConstants.CONFIG_REMOTE_SECTION, Constants.DEFAULT_REMOTE_NAME, "fetch");
                    assertEquals("Refspec not as git-clone", realRefspec, implRefspec);
                    return null;
                }
            });
        }
    });
}
/** Explicit refspecs passed to clone_() must be written verbatim to the config. */
public void test_clone_refspecs() throws Exception {
    List<RefSpec> refspecs = Lists.newArrayList(
        new RefSpec("+refs/heads/master:refs/remotes/origin/master"),
        new RefSpec("+refs/heads/1.4.x:refs/remotes/origin/1.4.x")
    );
    w.git.clone_().url(localMirror()).refspecs(refspecs).repositoryName("origin").execute();
    w.git.withRepository(new RepositoryCallback<Void>() {
        public Void invoke(Repository repo, VirtualChannel channel) throws IOException, InterruptedException {
            String[] fetchRefSpecs = repo.getConfig().getStringList(ConfigConstants.CONFIG_REMOTE_SECTION, Constants.DEFAULT_REMOTE_NAME, "fetch");
            assertEquals("Expected 2 refspecs", 2, fetchRefSpecs.length);
            assertEquals("Incorrect refspec 1", "+refs/heads/master:refs/remotes/origin/master", fetchRefSpecs[0]);
            assertEquals("Incorrect refspec 2", "+refs/heads/1.4.x:refs/remotes/origin/1.4.x", fetchRefSpecs[1]);
            return null;
        }});
    // Only the two requested branches may have been fetched.
    Set<Branch> remoteBranches = w.git.getRemoteBranches();
    assertBranchesExist(remoteBranches, "origin/master");
    assertBranchesExist(remoteBranches, "origin/1.4.x");
    assertEquals(2, remoteBranches.size());
}
/**
 * isCommitInRepo: true for HEAD after a commit, false for an unknown SHA1,
 * and null-safe both before and after commits exist.
 */
public void test_detect_commit_in_repo() throws Exception {
    w.init();
    assertFalse(w.git.isCommitInRepo(null)); // NPE safety check
    w.touch("file1");
    w.git.add("file1");
    w.git.commit("commit1");
    assertTrue("HEAD commit not found", w.git.isCommitInRepo(w.head()));
    // this MAY fail if commit has this exact sha1, but please admit this would be unlucky
    final ObjectId unlikelySha1 = ObjectId.fromString("1111111111111111111111111111111111111111");
    assertFalse(w.git.isCommitInRepo(unlikelySha1));
    assertFalse(w.git.isCommitInRepo(null)); // NPE safety check
}
@Deprecated
public void test_lsTree_non_recursive() throws IOException, InterruptedException {
    w.init();
    // Fixed file content yields a fixed, known blob SHA1
    w.touch("file1", "file1 fixed content");
    w.git.add("file1");
    w.git.commit("commit1");
    final List<IndexEntry> entries = w.igit().lsTree("HEAD", false);
    assertEquals("Wrong blob sha1", "3f5a898e0c8ea62362dbf359cf1a400f3cfd46ae", entries.get(0).getObject());
    assertEquals("Wrong number of tree entries", 1, entries.size());
    // Also exercise the deprecated setRemoteUrl/getDefaultRemote signatures
    final String remoteUrl = localMirror();
    w.igit().setRemoteUrl("origin", remoteUrl, w.repoPath() + File.separator + ".git");
    assertEquals("Wrong origin default remote", "origin", w.igit().getDefaultRemote("origin"));
    assertEquals("Wrong invalid default remote", "origin", w.igit().getDefaultRemote("invalid"));
}
@Deprecated
public void test_lsTree_recursive() throws IOException, InterruptedException {
    w.init();
    assertTrue("mkdir dir1 failed", w.file("dir1").mkdir());
    // Fixed contents produce fixed, known blob SHA1 values
    w.touch("dir1/file1", "dir1/file1 fixed content");
    w.git.add("dir1/file1");
    w.touch("file2", "file2 fixed content");
    w.git.add("file2");
    w.git.commit("commit-dir-and-file");
    final List<IndexEntry> entries = w.igit().lsTree("HEAD", true);
    assertEquals("Wrong blob 1 sha1", "a3ee484019f0576fcdeb48e682fa1058d0c74435", entries.get(0).getObject());
    assertEquals("Wrong blob 2 sha1", "aa1b259ac5e8d6cfdfcf4155a9ff6836b048d0ad", entries.get(1).getObject());
    assertEquals("Wrong number of tree entries", 2, entries.size());
    // Also exercise the deprecated getDefaultRemote signature
    w.git.setRemoteUrl("origin", "https://github.com/jenkinsci/git-client-plugin.git");
    assertEquals("Wrong origin default remote", "origin", w.igit().getDefaultRemote("origin"));
    assertEquals("Wrong invalid default remote", "origin", w.igit().getDefaultRemote("invalid"));
}
@Deprecated
public void test_getRemoteURL_two_args() throws Exception {
    w.init();
    final String originUrl = "https://github.com/bogus/bogus.git";
    w.git.setRemoteUrl("origin", originUrl);
    assertEquals("Wrong remote URL", originUrl, w.git.getRemoteUrl("origin"));
    // A null or empty GIT_DIR argument falls back to the default lookup
    assertEquals("Wrong null remote URL", originUrl, w.igit().getRemoteUrl("origin", null));
    assertEquals("Wrong blank remote URL", originUrl, w.igit().getRemoteUrl("origin", ""));
    if (w.igit() instanceof CliGitAPIImpl) {
        // CliGit accepts both an absolute and a relative .git directory
        final String gitDir = w.repoPath() + File.separator + ".git";
        assertEquals("Wrong repoPath/.git remote URL for " + gitDir, originUrl, w.igit().getRemoteUrl("origin", gitDir));
        assertEquals("Wrong .git remote URL", originUrl, w.igit().getRemoteUrl("origin", ".git"));
    } else {
        assertEquals("Wrong repoPath remote URL", originUrl, w.igit().getRemoteUrl("origin", w.repoPath()));
    }
    // Fails on both JGit and CliGit, though with different failure modes in each
    // assertEquals("Wrong . remote URL", originUrl, w.igit().getRemoteUrl("origin", "."));
}
@Deprecated
public void test_getDefaultRemote() throws Exception {
    w.init();
    w.cmd("git remote add origin https://github.com/jenkinsci/git-client-plugin.git");
    w.cmd("git remote add ndeloof git@github.com:ndeloof/git-client-plugin.git");
    assertEquals("Wrong origin default remote", "origin", w.igit().getDefaultRemote("origin"));
    assertEquals("Wrong ndeloof default remote", "ndeloof", w.igit().getDefaultRemote("ndeloof"));
    /* CliGitAPIImpl and JGitAPIImpl return different ordered lists for default remote if invalid */
    final String expectedFallback;
    if (w.git instanceof CliGitAPIImpl) {
        expectedFallback = "ndeloof";
    } else {
        expectedFallback = "origin";
    }
    assertEquals("Wrong invalid default remote", expectedFallback, w.igit().getDefaultRemote("invalid"));
}
/**
 * getRemoteUrl returns the URL registered for the named remote even when
 * several remotes are configured.
 */
public void test_getRemoteURL() throws Exception {
    w.init();
    w.cmd("git remote add origin https://github.com/jenkinsci/git-client-plugin.git");
    w.cmd("git remote add ndeloof git@github.com:ndeloof/git-client-plugin.git");
    String remoteUrl = w.git.getRemoteUrl("origin");
    // Fixed typo in the assertion message ("unexepected" -> "unexpected")
    assertEquals("unexpected remote URL " + remoteUrl, "https://github.com/jenkinsci/git-client-plugin.git", remoteUrl);
}
/** A local clone's origin URL must be the mirror it was cloned from. */
public void test_getRemoteURL_local_clone() throws Exception {
    w = clone(localMirror());
    final String mirrorUrl = localMirror();
    assertEquals("Wrong origin URL", mirrorUrl, w.git.getRemoteUrl("origin"));
    final String remoteListing = w.cmd("git remote -v");
    assertTrue("remote URL has not been updated", remoteListing.contains(mirrorUrl));
}
/** setRemoteUrl replaces the URL of an existing remote. */
public void test_setRemoteURL() throws Exception {
    w.init();
    w.cmd("git remote add origin https://github.com/jenkinsci/git-client-plugin.git");
    final String replacementUrl = "git@github.com:ndeloof/git-client-plugin.git";
    w.git.setRemoteUrl("origin", replacementUrl);
    assertTrue("remote URL has not been updated", w.cmd("git remote -v").contains(replacementUrl));
}
/** setRemoteUrl on a local clone is visible through both the API and CLI. */
public void test_setRemoteURL_local_clone() throws Exception {
    w = clone(localMirror());
    final String replacementUrl = "https://github.com/jenkinsci/git-client-plugin.git";
    w.git.setRemoteUrl("origin", replacementUrl);
    assertEquals("Wrong origin URL", replacementUrl, w.git.getRemoteUrl("origin"));
    final String remoteListing = w.cmd("git remote -v");
    assertTrue("remote URL has not been updated", remoteListing.contains(replacementUrl));
}
/** addRemoteUrl adds a new remote without disturbing the existing origin. */
public void test_addRemoteUrl_local_clone() throws Exception {
    w = clone(localMirror());
    assertEquals("Wrong origin URL before add", localMirror(), w.git.getRemoteUrl("origin"));
    final String upstream = "https://github.com/jenkinsci/git-client-plugin.git";
    w.git.addRemoteUrl("upstream", upstream);
    assertEquals("Wrong upstream URL", upstream, w.git.getRemoteUrl("upstream"));
    // origin must be untouched by the add
    assertEquals("Wrong origin URL after add", localMirror(), w.git.getRemoteUrl("origin"));
}
@Bug(20410)
/**
 * clean() removes untracked files and directories whose names contain
 * non-ASCII characters, while leaving committed files (and their committed
 * content) intact. Also exercises hudson.FilePath deletion on the same
 * names (JENKINS-22434).
 */
public void test_clean() throws Exception {
    w.init();
    w.commitEmpty("init");
    // Mathematical double-struck t, CJK, combining ring, Angstrom variants
    String fileName = "\uD835\uDD65-\u5c4f\u5e55\u622a\u56fe-\u0041\u030a-\u00c5-\u212b-fileName.xml";
    w.touch(fileName, "content " + fileName);
    w.git.add(fileName);
    w.git.commit(fileName);
    /* JENKINS-27910 reported that certain cyrillic file names
     * failed to delete if the encoding was not UTF-8.
     */
    String fileNameSwim = "\u00d0\u00bf\u00d0\u00bb\u00d0\u00b0\u00d0\u00b2\u00d0\u00b0\u00d0\u00bd\u00d0\u00b8\u00d0\u00b5-swim.png";
    w.touch(fileNameSwim, "content " + fileNameSwim);
    w.git.add(fileNameSwim);
    w.git.commit(fileNameSwim);
    String fileNameFace = "\u00d0\u00bb\u00d0\u00b8\u00d1\u2020\u00d0\u00be-face.png";
    w.touch(fileNameFace, "content " + fileNameFace);
    w.git.add(fileNameFace);
    w.git.commit(fileNameFace);
    // .gitignore covering fileName2 below; clean() must still remove it
    w.touch(".gitignore", ".test");
    w.git.add(".gitignore");
    w.git.commit("ignore");
    // Untracked directory, file inside it, and an ignored untracked file
    String dirName1 = "\u5c4f\u5e55\u622a\u56fe-dir-not-added";
    String fileName1 = dirName1 + File.separator + "\u5c4f\u5e55\u622a\u56fe-fileName1-not-added.xml";
    String fileName2 = ".test-\u00f8\u00e4\u00fc\u00f6-fileName2-not-added";
    assertTrue("Did not create dir " + dirName1, w.file(dirName1).mkdir());
    w.touch(fileName1);
    w.touch(fileName2);
    // Modify a committed file; clean() must restore the committed content
    w.touch(fileName, "new content");
    w.git.clean();
    assertFalse(w.exists(dirName1));
    assertFalse(w.exists(fileName1));
    assertFalse(w.exists(fileName2));
    assertEquals("content " + fileName, w.contentOf(fileName));
    assertEquals("content " + fileNameFace, w.contentOf(fileNameFace));
    assertEquals("content " + fileNameSwim, w.contentOf(fileNameSwim));
    String status = w.cmd("git status");
    // NOTE(review): newer git prints "working tree clean" instead of
    // "working directory clean"; this check may need updating for new git.
    assertTrue("unexpected status " + status, status.contains("working directory clean"));
    /* A few poorly placed tests of hudson.FilePath - testing JENKINS-22434 */
    FilePath fp = new FilePath(w.file(fileName));
    assertTrue(fp + " missing", fp.exists());
    assertTrue("mkdir " + dirName1 + " failed", w.file(dirName1).mkdir());
    assertTrue("dir " + dirName1 + " missing", w.file(dirName1).isDirectory());
    FilePath dir1 = new FilePath(w.file(dirName1));
    w.touch(fileName1);
    assertTrue("Did not create file " + fileName1, w.file(fileName1).exists());
    assertTrue(dir1 + " missing", dir1.exists());
    dir1.deleteRecursive(); /* Fails on Linux JDK 7 with LANG=C, ok with LANG=en_US.UTF-8 */
    /* Java reports "Malformed input or input contains unmappable characters" */
    assertFalse("Did not delete file " + fileName1, w.file(fileName1).exists());
    assertFalse(dir1 + " not deleted", dir1.exists());
    w.touch(fileName2);
    FilePath fp2 = new FilePath(w.file(fileName2));
    assertTrue(fp2 + " missing", fp2.exists());
    fp2.delete();
    assertFalse(fp2 + " not deleted", fp2.exists());
    // Include directory listing in the final diagnostic for easier debugging
    String dirContents = Arrays.toString((new File(w.repoPath())).listFiles());
    String finalStatus = w.cmd("git status");
    assertTrue("unexpected final status " + finalStatus + " dir contents: " + dirContents, finalStatus.contains("working directory clean"));
}
/**
 * Push/fetch round trip between a working repo, a bare repo, and a second
 * working area cloned from the bare repo, including null-refspec fetch
 * behavior differences between git versions and between CliGit and JGit.
 */
public void test_fetch() throws Exception {
    /* Create a working repo containing a commit */
    w.init();
    w.touch("file1", "file1 content " + java.util.UUID.randomUUID().toString());
    w.git.add("file1");
    w.git.commit("commit1");
    ObjectId commit1 = w.head();
    /* Clone working repo into a bare repo */
    WorkingArea bare = new WorkingArea();
    bare.init(true);
    w.git.setRemoteUrl("origin", bare.repoPath());
    Set<Branch> remoteBranchesEmpty = w.git.getRemoteBranches();
    assertEquals("Unexpected branch count", 0, remoteBranchesEmpty.size());
    w.git.push("origin", "master");
    ObjectId bareCommit1 = bare.git.getHeadRev(bare.repoPath(), "master");
    assertEquals("bare != working", commit1, bareCommit1);
    assertEquals(commit1, bare.git.getHeadRev(bare.repoPath(), "refs/heads/master"));
    /* Clone new working repo from bare repo */
    WorkingArea newArea = clone(bare.repoPath());
    ObjectId newAreaHead = newArea.head();
    assertEquals("bare != newArea", bareCommit1, newAreaHead);
    Set<Branch> remoteBranches1 = newArea.git.getRemoteBranches();
    assertEquals("Unexpected branch count in " + remoteBranches1, 2, remoteBranches1.size());
    assertEquals(bareCommit1, newArea.git.getHeadRev(newArea.repoPath(), "refs/heads/master"));
    /* Commit a new change to the original repo */
    w.touch("file2", "file2 content " + java.util.UUID.randomUUID().toString());
    w.git.add("file2");
    w.git.commit("commit2");
    ObjectId commit2 = w.head();
    assertEquals(commit2, w.git.getHeadRev(w.repoPath(), "refs/heads/master"));
    /* Push the new change to the bare repo */
    w.git.push("origin", "master");
    ObjectId bareCommit2 = bare.git.getHeadRev(bare.repoPath(), "master");
    assertEquals("bare2 != working2", commit2, bareCommit2);
    assertEquals(commit2, bare.git.getHeadRev(bare.repoPath(), "refs/heads/master"));
    /* Fetch new change into newArea repo */
    /* Confirm the fetch did not alter working branch */
    /* Merge the fetch results into working branch */
    /* Commit a new change to the original repo */
    /* Push the new change to the bare repo */
    /* Fetch new change into newArea repo using different argument forms */
    /* Merge the fetch results into working branch */
    /* Commit a new change to the original repo */
    /* Push the new change to the bare repo */
    /* Fetch new change into newArea repo using a different argument form */
    /* Merge the fetch results into working branch */
    /* Commit a new change to the original repo */
    /* Push the new change to the bare repo */
    // NOTE(review): the code the comments above describe (defining
    // bareCommit3/bareCommit4/bareCommit5 and performing the intermediate
    // fetches/merges) is not present in this view of the file; the
    // references to bareCommit4 and bareCommit5 below will not compile
    // without it. Confirm against the full source before editing.
    /* Fetch into newArea repo with null RefSpec - should only
     * pull tags, not commits in git versions prior to git 1.9.0.
     * In git 1.9.0, fetch -t pulls tags and versions. */
    newArea.git.fetch("origin", null, null);
    assertEquals("null refSpec fetch modified local repo", bareCommit4, newArea.head());
    ObjectId expectedHead = bareCommit4;
    try {
        /* Assert that change did not arrive in repo if git
         * command line less than 1.9. Assert that change arrives in
         * repo if git command line 1.9 or later. */
        newArea.git.merge().setRevisionToMerge(bareCommit5).execute();
        assertTrue("JGit should not have copied the revision", newArea.git instanceof CliGitAPIImpl);
        assertTrue("Wrong git version", w.cgit().isAtLeastVersion(1, 9, 0, 0));
        expectedHead = bareCommit5;
    } catch (org.eclipse.jgit.api.errors.JGitInternalException je) {
        // JGit path: merge of an un-fetched commit fails with "Missing commit"
        String expectedSubString = "Missing commit " + bareCommit5.name();
        assertTrue("Wrong message :" + je.getMessage(), je.getMessage().contains(expectedSubString));
    } catch (GitException ge) {
        // CliGit path (pre-1.9): merge fails because the commit was not fetched
        assertTrue("Wrong message :" + ge.getMessage(), ge.getMessage().contains("Could not merge"));
        assertTrue("Wrong message :" + ge.getMessage(), ge.getMessage().contains(bareCommit5.name()));
    }
    /* Assert that expected change is in repo after merge. With
     * git 1.7 and 1.8, it should be bareCommit4. With git 1.9
     * and later, it should be bareCommit5. */
    assertEquals("null refSpec fetch modified local repo", expectedHead, newArea.head());
    try {
        /* Fetch into newArea repo with invalid repo name and no RefSpec */
        newArea.git.fetch("invalid-remote-name");
        fail("Should have thrown an exception");
    } catch (GitException ge) {
        assertTrue("Wrong message :" + ge.getMessage(), ge.getMessage().contains("invalid-remote-name"));
    }
}
/**
 * Verify PushCommand tag handling: tags(false) and the default both omit
 * tags, tags(true) pushes all tags.
 */
public void test_push_tags() throws Exception {
    /* Create a working repo containing a commit */
    w.init();
    w.touch("file1", "file1 content " + java.util.UUID.randomUUID().toString());
    w.git.add("file1");
    w.git.commit("commit1");
    final ObjectId workCommit = w.head();
    /* Clone working repo into a bare repo */
    final WorkingArea bareArea = new WorkingArea();
    bareArea.init(true);
    w.git.setRemoteUrl("origin", bareArea.repoPath());
    final Set<Branch> noBranches = w.git.getRemoteBranches();
    assertEquals("Unexpected branch count", 0, noBranches.size());
    w.git.push("origin", "master");
    final ObjectId bareHead = bareArea.git.getHeadRev(bareArea.repoPath(), "master");
    assertEquals("bare != working", workCommit, bareHead);
    assertEquals(workCommit, bareArea.git.getHeadRev(bareArea.repoPath(), "refs/heads/master"));
    /* Add tag to working repo and without pushing it to the bare repo */
    w.tag("tag1");
    assertTrue("tag1 wasn't created", w.git.tagExists("tag1"));
    w.git.push().ref("master").to(new URIish(bareArea.repoPath())).tags(false).execute();
    assertFalse("tag1 wasn't pushed", bareArea.cmd("git tag").contains("tag1"));
    /* Add tag to working repo without pushing it to the bare
     * repo, tests the default behavior when tags() is not added
     * to PushCommand.
     */
    w.tag("tag3");
    assertTrue("tag3 wasn't created", w.git.tagExists("tag3"));
    w.git.push().ref("master").to(new URIish(bareArea.repoPath())).execute();
    assertFalse("tag3 was pushed", bareArea.cmd("git tag").contains("tag3"));
    /* Add another tag to working repo and push tags to the bare repo */
    w.touch("file2", "file2 content " + java.util.UUID.randomUUID().toString());
    w.git.add("file2");
    w.git.commit("commit2");
    w.tag("tag2");
    assertTrue("tag2 wasn't created", w.git.tagExists("tag2"));
    w.git.push().ref("master").to(new URIish(bareArea.repoPath())).tags(true).execute();
    // tags(true) pushes every tag, including the ones skipped earlier
    assertTrue("tag1 wasn't pushed", bareArea.cmd("git tag").contains("tag1"));
    assertTrue("tag2 wasn't pushed", bareArea.cmd("git tag").contains("tag2"));
    assertTrue("tag3 wasn't pushed", bareArea.cmd("git tag").contains("tag3"));
}
@Bug(19591)
/**
 * JENKINS-19591: fetching a branch named "parent/a" fails when an obsolete
 * remote branch named "parent" still exists locally; pruning first makes
 * the fetch succeed.
 */
public void test_fetch_needs_preceding_prune() throws Exception {
    /* Create a working repo containing a commit */
    w.init();
    w.touch("file1", "file1 content " + java.util.UUID.randomUUID().toString());
    w.git.add("file1");
    w.git.commit("commit1");
    ObjectId commit1 = w.head();
    assertEquals("Wrong branch count", 1, w.git.getBranches().size());
    assertTrue("Remote branches should not exist", w.git.getRemoteBranches().isEmpty());
    /* Prune when a remote is not yet defined */
    try {
        w.git.prune(new RemoteConfig(new Config(), "remote-is-not-defined"));
        fail("Should have thrown an exception");
    } catch (GitException ge) {
        // CliGit and JGit report the missing remote differently
        String expected = w.git instanceof CliGitAPIImpl ? "returned status code 1" : "The uri was empty or null";
        final String msg = ge.getMessage();
        assertTrue("Wrong exception: " + msg, msg.contains(expected));
    }
    /* Clone working repo into a bare repo */
    WorkingArea bare = new WorkingArea();
    bare.init(true);
    w.git.setRemoteUrl("origin", bare.repoPath());
    w.git.push("origin", "master");
    ObjectId bareCommit1 = bare.git.getHeadRev(bare.repoPath(), "master");
    assertEquals("bare != working", commit1, bareCommit1);
    assertEquals("Wrong branch count", 1, w.git.getBranches().size());
    assertTrue("Remote branches should not exist", w.git.getRemoteBranches().isEmpty());
    /* Create a branch in working repo named "parent" */
    w.git.branch("parent");
    w.git.checkout("parent");
    w.touch("file2", "file2 content " + java.util.UUID.randomUUID().toString());
    w.git.add("file2");
    w.git.commit("commit2");
    ObjectId commit2 = w.head();
    assertEquals("Wrong branch count", 2, w.git.getBranches().size());
    assertTrue("Remote branches should not exist", w.git.getRemoteBranches().isEmpty());
    /* Push branch named "parent" to bare repo */
    w.git.push("origin", "parent");
    ObjectId bareCommit2 = bare.git.getHeadRev(bare.repoPath(), "parent");
    assertEquals("working parent != bare parent", commit2, bareCommit2);
    assertEquals("Wrong branch count", 2, w.git.getBranches().size());
    assertTrue("Remote branches should not exist", w.git.getRemoteBranches().isEmpty());
    /* Clone new working repo from bare repo */
    WorkingArea newArea = clone(bare.repoPath());
    ObjectId newAreaHead = newArea.head();
    assertEquals("bare != newArea", bareCommit1, newAreaHead);
    Set<Branch> remoteBranches = newArea.git.getRemoteBranches();
    assertBranchesExist(remoteBranches, "origin/master", "origin/parent", "origin/HEAD");
    assertEquals("Wrong count in " + remoteBranches, 3, remoteBranches.size());
    /* Checkout parent in new working repo */
    newArea.git.checkout("origin/parent", "parent");
    ObjectId newAreaParent = newArea.head();
    assertEquals("parent1 != newAreaParent", commit2, newAreaParent);
    /* Delete parent branch from w */
    w.git.checkout("master");
    w.cmd("git branch -D parent");
    assertEquals("Wrong branch count", 1, w.git.getBranches().size());
    /* Delete parent branch on bare repo*/
    bare.cmd("git branch -D parent");
    // assertEquals("Wrong branch count", 1, bare.git.getBranches().size());
    /* Create parent/a branch in working repo */
    w.git.branch("parent/a");
    w.git.checkout("parent/a");
    w.touch("file3", "file3 content " + java.util.UUID.randomUUID().toString());
    w.git.add("file3");
    w.git.commit("commit3");
    ObjectId commit3 = w.head();
    /* Push parent/a branch to bare repo */
    w.git.push("origin", "parent/a");
    ObjectId bareCommit3 = bare.git.getHeadRev(bare.repoPath(), "parent/a");
    assertEquals("parent/a != bare", commit3, bareCommit3);
    remoteBranches = bare.git.getRemoteBranches();
    assertEquals("Wrong count in " + remoteBranches, 0, remoteBranches.size());
    /* Fetch parent/a into newArea repo - fails for
     * CliGitAPIImpl, succeeds for JGitAPIImpl */
    // NOTE(review): the local variable `refSpecs` used below is not defined
    // in this view, and the `} catch` that follows has no matching `try {`
    // visible here — the enclosing try block appears to be missing from this
    // view of the file. Confirm against the full source before editing.
    newArea.git.fetch(new URIish(bare.repo.toString()), refSpecs);
    assertTrue("CliGit should have thrown an exception", newArea.git instanceof JGitAPIImpl);
    } catch (GitException ge) {
        final String msg = ge.getMessage();
        assertTrue("Wrong exception: " + msg, msg.contains("some local refs could not be updated"));
    }
    /* Use git remote prune origin to remove obsolete branch named "parent" */
    newArea.git.prune(new RemoteConfig(new Config(), "origin"));
    /* Fetch should succeed */
    /* Adjusted timeout will be logged, should not change test results */
    final int newTimeout = 3;
    newArea.git.fetch_().timeout(newTimeout).from(new URIish(bare.repo.toString()), refSpecs).execute();
    setExpectedTimeoutWithAdjustedEnd(newTimeout);
}
/**
 * JGit 3.3.0 thru 3.6.0 "prune during fetch" prunes more remote
 * branches than command line git prunes during fetch. This test
 * should be used to evaluate future versions of JGit to see if
 * pruning behavior more closely emulates command line git.
 *
 * This has been fixed using a workaround.
 */
public void test_fetch_with_prune() throws Exception {
    WorkingArea bare = new WorkingArea();
    bare.init(true);
    /* Create a working repo containing three branches */
    /* master -> branch1 */
    /*        -> branch2 */
    w.init();
    w.touch("file-master", "file master content " + java.util.UUID.randomUUID().toString());
    w.git.add("file-master");
    w.git.commit("master-commit");
    ObjectId master = w.head();
    assertEquals("Wrong branch count", 1, w.git.getBranches().size());
    w.git.setRemoteUrl("origin", bare.repoPath());
    w.git.push("origin", "master"); /* master branch is now on bare repo */
    w.git.checkout("master");
    w.git.branch("branch1");
    w.touch("file-branch1", "file branch1 content " + java.util.UUID.randomUUID().toString());
    w.git.add("file-branch1");
    w.git.commit("branch1-commit");
    ObjectId branch1 = w.head();
    assertEquals("Wrong branch count", 2, w.git.getBranches().size());
    w.git.push("origin", "branch1"); /* branch1 is now on bare repo */
    w.git.checkout("master");
    w.git.branch("branch2");
    w.touch("file-branch2", "file branch2 content " + java.util.UUID.randomUUID().toString());
    w.git.add("file-branch2");
    w.git.commit("branch2-commit");
    ObjectId branch2 = w.head();
    assertEquals("Wrong branch count", 3, w.git.getBranches().size());
    assertTrue("Remote branches should not exist", w.git.getRemoteBranches().isEmpty());
    w.git.push("origin", "branch2"); /* branch2 is now on bare repo */
    /* Clone new working repo from bare repo */
    WorkingArea newArea = clone(bare.repoPath());
    ObjectId newAreaHead = newArea.head();
    Set<Branch> remoteBranches = newArea.git.getRemoteBranches();
    assertBranchesExist(remoteBranches, "origin/master", "origin/branch1", "origin/branch2", "origin/HEAD");
    assertEquals("Wrong count in " + remoteBranches, 4, remoteBranches.size());
    /* Remove branch1 from bare repo using original repo */
    w.cmd("git push " + bare.repoPath() + " :branch1");
    /* Fetch without prune should leave branch1 in newArea */
    /* Fetch with prune should remove branch1 from newArea */
    // NOTE(review): the fetch calls described by the two comments above, and
    // the refresh of `remoteBranches` after them, are not present in this
    // view of the file — the final assertion below re-checks the stale
    // pre-fetch branch set. Confirm against the full source before editing.
    /* Git 1.7.1 on Red Hat 6 does not prune branch1, don't fail the test
     * on that old git version.
     */
    int expectedBranchCount = 3;
    if (newArea.git instanceof CliGitAPIImpl && !w.cgit().isAtLeastVersion(1, 7, 9, 0)) {
        expectedBranchCount = 4;
    }
    assertEquals("Wrong count in " + remoteBranches, expectedBranchCount, remoteBranches.size());
}
/**
 * Fetch from an explicit repository URL (no named remote passed to fetch)
 * and verify the fetched commit actually arrived in the local repo.
 */
public void test_fetch_from_url() throws Exception {
    WorkingArea r = new WorkingArea();
    r.init();
    r.commitEmpty("init");
    String sha1 = r.cmd("git rev-list --max-count=1 HEAD");
    w.init();
    w.cmd("git remote add origin " + r.repoPath());
    w.git.fetch(new URIish(r.repo.toString()), Collections.EMPTY_LIST);
    // The original assertion compared the source repo to itself and could
    // never fail. Verify instead that the fetched commit is present in w.
    assertTrue("fetched commit not found in destination repo",
            w.git.isCommitInRepo(ObjectId.fromString(sha1.trim())));
    // Keep the sanity check that the source repo is unchanged by the fetch
    assertTrue(sha1.equals(r.cmd("git rev-list --max-count=1 HEAD")));
}
/**
 * Fetch with a null RefSpec pulls tags; exercise the case where the
 * remote deletes and recreates a tag at a new commit between fetches.
 */
public void test_fetch_with_updated_tag() throws Exception {
    WorkingArea r = new WorkingArea();
    r.init();
    r.commitEmpty("init");
    r.tag("t");
    String sha1 = r.cmd("git rev-list --max-count=1 t");
    w.init();
    w.cmd("git remote add origin " + r.repoPath());
    w.git.fetch("origin", new RefSpec[] {null});
    // NOTE(review): this assertion compares repo r against itself, so it can
    // never fail; it does not verify the tag arrived in w. A stronger check
    // would query w, but tag-clobber behavior differs across git versions —
    // confirm before strengthening.
    assertTrue(sha1.equals(r.cmd("git rev-list --max-count=1 t")));
    // Move the tag to a new commit in the source repo
    r.touch("file.txt");
    r.git.add("file.txt");
    r.git.commit("update");
    r.tag("-d t");
    r.tag("t");
    sha1 = r.cmd("git rev-list --max-count=1 t");
    w.git.fetch("origin", new RefSpec[] {null});
    // NOTE(review): same tautology as above — compares r to itself.
    assertTrue(sha1.equals(r.cmd("git rev-list --max-count=1 t")));
}
/**
 * Exercises shallow fetch and getHeadRev/revParse behavior with an
 * annotated tag (JENKINS-23299 compatibility between CliGit and JGit).
 */
public void test_fetch_shallow() throws Exception {
    w.init();
    w.git.setRemoteUrl("origin", localMirror());
    /* JGit does not support shallow clone */
    /* JGit does not support shallow clone */
    // NOTE(review): the code that performed the shallow fetch and defined
    // the locals `gitDir` and `init` used below is not present in this view
    // of the file; the method will not compile without it. Confirm against
    // the full source before editing.
    /* JGit seems to have the better behavior in this case, always
     * returning the SHA1 of the commit. Most users are using
     * command line git, so the difference is retained in command
     * line git for compatibility with any legacy command line git
     * use cases which depend on returning the SHA-1 of the
     * annotated tag rather than the SHA-1 of the commit to which
     * the annotated tag points.
     */
    ObjectId testTag = w.git.getHeadRev(gitDir, "test"); // Remember SHA1 of annotated test tag
    if (w.git instanceof JGitAPIImpl) {
        assertEquals("Annotated tag does not match SHA1", init, testTag);
    } else {
        assertNotEquals("Annotated tag unexpectedly equals SHA1", init, testTag);
    }
    /* Because refs/tags/test syntax is more specific than "test",
     * and because the more specific syntax was only introduced in
     * more recent git client plugin versions (like 1.10.0 and
     * later), the CliGit and JGit behavior are kept the same here
     * in order to fix JENKINS-23299.
     */
    ObjectId testTagCommit = w.git.getHeadRev(gitDir, "refs/tags/test"); // SHA1 of commit identified by test tag
    assertEquals("Annotated tag doesn't match queried commit SHA1", init, testTagCommit);
    assertEquals(init, w.git.revParse("test")); // SHA1 of commit identified by test tag
    assertEquals(init, w.git.revParse("refs/tags/test")); // SHA1 of commit identified by test tag
    assertTrue("test tag not created", w.cmd("git tag").contains("test"));
    String message = w.cmd("git tag -l -n1");
    assertTrue("unexpected test tag message : " + message, message.contains("this is a tag"));
    assertNull(w.git.getHeadRev(gitDir, "not-a-valid-tag")); // Confirm invalid tag returns null
}
/**
 * deleteTag removes only the named tag; deleting a missing tag throws
 * from CliGit while JGit silently succeeds.
 */
public void test_delete_tag() throws Exception {
    w.init();
    w.commitEmpty("init");
    w.tag("test");
    w.tag("another");
    w.git.deleteTag("test");
    final String tagListing = w.cmd("git tag");
    assertFalse("deleted test tag still present", tagListing.contains("test"));
    assertTrue("expected tag not listed", tagListing.contains("another"));
    try {
        // Second delete of the same tag: implementation-dependent behavior
        w.git.deleteTag("test");
        assertTrue("cgit did not throw an exception", w.git instanceof JGitAPIImpl);
    } catch (GitException ge) {
        assertEquals("Could not delete tag test", ge.getMessage());
    }
}
/** getTagNames with a glob pattern returns only matching tags. */
public void test_list_tags_with_filter() throws Exception {
    w.init();
    w.commitEmpty("init");
    w.tag("test");
    w.tag("another_test");
    w.tag("yet_another");
    final Set<String> matched = w.git.getTagNames("*test");
    assertTrue("expected tag test not listed", matched.contains("test"));
    assertTrue("expected tag another_test not listed", matched.contains("another_test"));
    assertFalse("unexpected yet_another tag listed", matched.contains("yet_another"));
}
/** getTagNames with a null pattern returns every tag. */
public void test_list_tags_without_filter() throws Exception {
    w.init();
    w.commitEmpty("init");
    w.tag("test");
    w.tag("another_test");
    w.tag("yet_another");
    final Set<String> everyTag = w.git.getTagNames(null);
    assertTrue("tag 'test' not listed", everyTag.contains("test"));
    assertTrue("tag 'another_test' not listed", everyTag.contains("another_test"));
    assertTrue("tag 'yet_another' not listed", everyTag.contains("yet_another"));
}
/** getTagNames with the "*" pattern also returns every tag. */
public void test_list_tags_star_filter() throws Exception {
    w.init();
    w.commitEmpty("init");
    w.tag("test");
    w.tag("another_test");
    w.tag("yet_another");
    final Set<String> everyTag = w.git.getTagNames("*");
    assertTrue("tag 'test' not listed", everyTag.contains("test"));
    assertTrue("tag 'another_test' not listed", everyTag.contains("another_test"));
    assertTrue("tag 'yet_another' not listed", everyTag.contains("yet_another"));
}
/** tagExists is true only for tags that were actually created. */
public void test_tag_exists() throws Exception {
    w.init();
    w.commitEmpty("init");
    w.tag("test");
    assertFalse(w.git.tagExists("unknown"));
    assertTrue(w.git.tagExists("test"));
}
/** getTagMessage returns the annotation message of an annotated tag. */
public void test_get_tag_message() throws Exception {
    w.init();
    w.commitEmpty("init");
    w.tag("test -m this-is-a-test");
    final String tagMessage = w.git.getTagMessage("test");
    assertEquals("this-is-a-test", tagMessage);
}
/** getTagMessage preserves internal whitespace but trims the trailing space. */
public void test_get_tag_message_multi_line() throws Exception {
    w.init();
    w.commitEmpty("init");
    w.launchCommand("git", "tag", "test", "-m", "test 123!\n* multi-line tag message\n padded ");
    // Leading four spaces from each line should be stripped,
    // but not the explicit single space before "padded",
    // and the final errant space at the end should be trimmed
    final String expectedMessage = "test 123!\n* multi-line tag message\n padded";
    assertEquals(expectedMessage, w.git.getTagMessage("test"));
}
/** ref() creates an arbitrary ref visible through git show-ref. */
public void test_create_ref() throws Exception {
    w.init();
    w.commitEmpty("init");
    w.git.ref("refs/testing/testref");
    final String showRef = w.cmd("git show-ref");
    assertTrue("test ref not created", showRef.contains("refs/testing/testref"));
}
/**
 * deleteRef removes only the named ref; deleting an already-deleted ref
 * is a no-op.
 */
public void test_delete_ref() throws Exception {
    w.init();
    w.commitEmpty("init");
    w.git.ref("refs/testing/testref");
    w.git.ref("refs/testing/anotherref");
    w.git.deleteRef("refs/testing/testref");
    String refs = w.cmd("git show-ref");
    // Fixed misleading assertion messages: this test manipulates refs, not tags
    assertFalse("deleted test ref still present", refs.contains("refs/testing/testref"));
    assertTrue("expected ref not listed", refs.contains("refs/testing/anotherref"));
    w.git.deleteRef("refs/testing/testref"); // Double-deletes do nothing.
}
/** getRefNames with a prefix returns only refs under that prefix. */
public void test_list_refs_with_prefix() throws Exception {
    w.init();
    w.commitEmpty("init");
    w.git.ref("refs/testing/testref");
    w.git.ref("refs/testing/nested/anotherref");
    w.git.ref("refs/testing/nested/yetanotherref");
    final Set<String> nestedRefs = w.git.getRefNames("refs/testing/nested/");
    assertFalse("ref testref listed", nestedRefs.contains("refs/testing/testref"));
    assertTrue("ref anotherref not listed", nestedRefs.contains("refs/testing/nested/anotherref"));
    assertTrue("ref yetanotherref not listed", nestedRefs.contains("refs/testing/nested/yetanotherref"));
}
/** getRefNames with an empty prefix returns every ref. */
public void test_list_refs_without_prefix() throws Exception {
    w.init();
    w.commitEmpty("init");
    w.git.ref("refs/testing/testref");
    w.git.ref("refs/testing/nested/anotherref");
    w.git.ref("refs/testing/nested/yetanotherref");
    final Set<String> everyRef = w.git.getRefNames("");
    assertTrue("ref testref not listed", everyRef.contains("refs/testing/testref"));
    assertTrue("ref anotherref not listed", everyRef.contains("refs/testing/nested/anotherref"));
    assertTrue("ref yetanotherref not listed", everyRef.contains("refs/testing/nested/yetanotherref"));
}
/** refExists is true only for refs that were actually created. */
public void test_ref_exists() throws Exception {
    w.init();
    w.commitEmpty("init");
    w.git.ref("refs/testing/testref");
    assertTrue(w.git.refExists("refs/testing/testref"));
    // Neither a sibling name nor a different namespace should exist
    assertFalse(w.git.refExists("refs/testing/testref_notfound"));
    assertFalse(w.git.refExists("refs/testing2/yetanother"));
}
/** revParse resolves a raw SHA1, "HEAD", and a tag name to the same commit. */
public void test_revparse_sha1_HEAD_or_tag() throws Exception {
    w.init();
    w.commitEmpty("init");
    w.touch("file1");
    w.git.add("file1");
    w.git.commit("commit1");
    w.tag("test");
    final String expectedSha1 = w.cmd("git rev-parse HEAD").substring(0,40);
    assertEquals(expectedSha1, w.git.revParse(expectedSha1).name());
    assertEquals(expectedSha1, w.git.revParse("HEAD").name());
    assertEquals(expectedSha1, w.git.revParse("test").name());
}
/** revParse of an unknown revision throws a GitException naming the input. */
public void test_revparse_throws_expected_exception() throws Exception {
    w.init();
    w.commitEmpty("init");
    try {
        w.git.revParse("unknown-rev-to-parse");
        fail("Did not throw exception");
    } catch (GitException ge) {
        final String message = ge.getMessage();
        assertTrue("Wrong exception: " + message, message.contains("unknown-rev-to-parse"));
    }
}
/** hasGitRepo is false in an empty directory. */
public void test_hasGitRepo_without_git_directory() throws Exception {
    setTimeoutVisibleInCurrentTest(false);
    assertFalse("Empty directory has a Git repo", w.git.hasGitRepo());
}
/** hasGitRepo is false when .git exists but is an empty directory. */
public void test_hasGitRepo_with_invalid_git_repo() throws Exception {
    // Create an empty directory named .git - "corrupt" git repo
    assertTrue("mkdir .git failed", w.file(".git").mkdir());
    assertFalse("Invalid Git repo reported as valid", w.git.hasGitRepo());
}
/** hasGitRepo is true after git init. */
public void test_hasGitRepo_with_valid_git_repo() throws Exception {
    w.init();
    assertTrue("Valid Git repo reported as invalid", w.git.hasGitRepo());
}
/** push("origin", "master") delivers the local HEAD to a bare remote. */
public void test_push() throws Exception {
    w.init();
    w.commitEmpty("init");
    w.touch("file1");
    w.git.add("file1");
    w.git.commit("commit1");
    final ObjectId localHead = w.head();
    // Bare repository acting as the push target
    final WorkingArea bareTarget = new WorkingArea();
    bareTarget.init(true);
    w.cmd("git remote add origin " + bareTarget.repoPath());
    w.git.push("origin", "master");
    final String remoteSha1 = bareTarget.cmd("git rev-parse master").substring(0, 40);
    assertEquals(localHead.name(), remoteSha1);
}
@Deprecated
/**
 * Exercise the deprecated push(RemoteConfig, String) signature and verify
 * it delivers commits to the bare remote like the modern API does.
 */
public void test_push_deprecated_signature() throws Exception {
    /* Make working repo a remote of the bare repo */
    w.init();
    w.commitEmpty("init");
    ObjectId workHead = w.head();
    /* Create a bare repo */
    WorkingArea bare = new WorkingArea();
    bare.init(true);
    /* Set working repo origin to point to bare */
    w.git.setRemoteUrl("origin", bare.repoPath());
    assertEquals("Wrong remote URL", w.git.getRemoteUrl("origin"), bare.repoPath());
    /* Push to bare repo */
    w.git.push("origin", "master");
    /* JGitAPIImpl revParse fails unexpectedly when used here */
    // Fall back to CLI rev-parse for JGit; both yield the bare repo's head
    ObjectId bareHead = w.git instanceof CliGitAPIImpl ? bare.head() : ObjectId.fromString(bare.cmd("git rev-parse master").substring(0, 40));
    assertEquals("Heads don't match", workHead, bareHead);
    assertEquals("Heads don't match", w.git.getHeadRev(w.repoPath(), "master"), bare.git.getHeadRev(bare.repoPath(), "master"));
    /* Commit a new file */
    w.touch("file1");
    w.git.add("file1");
    w.git.commit("commit1");
    /* Push commit to the bare repo */
    // Deprecated signature takes a RemoteConfig built from the repo config
    Config config = new Config();
    config.fromText(w.contentOf(".git/config"));
    RemoteConfig origin = new RemoteConfig(config, "origin");
    w.igit().push(origin, "master");
    /* JGitAPIImpl revParse fails unexpectedly when used here */
    ObjectId workHead2 = w.git instanceof CliGitAPIImpl ? w.head() : ObjectId.fromString(w.cmd("git rev-parse master").substring(0, 40));
    ObjectId bareHead2 = w.git instanceof CliGitAPIImpl ? bare.head() : ObjectId.fromString(bare.cmd("git rev-parse master").substring(0, 40));
    assertEquals("Working SHA1 != bare SHA1", workHead2, bareHead2);
    assertEquals("Working SHA1 != bare SHA1", w.git.getHeadRev(w.repoPath(), "master"), bare.git.getHeadRev(bare.repoPath(), "master"));
}
/**
 * Pushing from a shallow clone is only supported by command-line git >= 1.9.0.
 * The test accepts either outcome and asserts it matches the detected version.
 */
@NotImplementedInJGit
public void test_push_from_shallow_clone() throws Exception {
    WorkingArea r = new WorkingArea();
    r.init();
    r.commitEmpty("init");
    r.touch("file1");
    r.git.add("file1");
    r.git.commit("commit1");
    // Move the remote off 'master' so the later push does not update a checked-out branch.
    r.cmd("git checkout -b other");
    w.init();
    w.cmd("git remote add origin " + r.repoPath());
    w.cmd("git pull --depth=1 origin master"); // shallow clone of depth 1
    w.touch("file2");
    w.git.add("file2");
    w.git.commit("commit2");
    ObjectId sha1 = w.head();
    try {
        w.git.push("origin", "master");
        assertTrue("git < 1.9.0 can push from shallow repository", w.cgit().isAtLeastVersion(1, 9, 0, 0));
        String remoteSha1 = r.cmd("git rev-parse master").substring(0, 40);
        assertEquals(sha1.name(), remoteSha1);
    } catch (GitException e) {
        // expected for git cli < 1.9.0
        assertTrue("Wrong exception message: " + e, e.getMessage().contains("push from shallow repository"));
        assertFalse("git >= 1.9.0 can't push from shallow repository", w.cgit().isAtLeastVersion(1, 9, 0, 0));
    }
}
/**
 * Verify addNote creates a note on HEAD and appendNote appends to it,
 * matching command-line git's normalization of CR/LF sequences.
 */
public void test_notes_add() throws Exception {
    w.init();
    w.touch("file1");
    w.git.add("file1");
    w.commitEmpty("init");
    w.git.addNote("foo", "commits");
    assertEquals("foo\n", w.cmd("git notes show"));
    w.git.appendNote("alpha\rbravo\r\ncharlie\r\n\r\nbar\n\n\nzot\n\n", "commits");
    // cgit normalizes CR+LF aggressively
    // it appears to be collapsing CR+LF to LF, then truncating duplicate LFs down to 2
    // note that CR itself is left as is
    assertEquals("foo\n\nalpha\rbravo\ncharlie\n\nbar\n\nzot\n", w.cmd("git notes show"));
}
/**
 * A rev-parse warning message should not break revision parsing.
 * Also explores JENKINS-20991: checkout of an ambiguous name (a branch and
 * a tag both called "master") is implementation specific — CliGit resolves
 * to the branch, JGit resolves to the tag — and both are accepted here.
 */
@Bug(11177)
public void test_jenkins_11177() throws Exception
{
    w.init();
    w.commitEmpty("init");
    ObjectId base = w.head();
    ObjectId master = w.git.revParse("master");
    assertEquals(base, master);
    /* Make reference to master ambiguous, verify it is reported ambiguous by rev-parse */
    w.tag("master"); // ref "master" is now ambiguous
    String revParse = w.cmd("git rev-parse master");
    assertTrue("'" + revParse + "' does not contain 'ambiguous'", revParse.contains("ambiguous"));
    ObjectId masterTag = w.git.revParse("refs/tags/master");
    assertEquals("masterTag != head", w.head(), masterTag);
    /* Get reference to ambiguous master */
    ObjectId ambiguous = w.git.revParse("master");
    assertEquals("ambiguous != master", ambiguous.toString(), master.toString());
    /* Exploring JENKINS-20991 ambiguous revision breaks checkout */
    w.touch("file-master", "content-master");
    w.git.add("file-master");
    w.git.commit("commit1-master");
    final ObjectId masterTip = w.head();
    w.cmd("git branch branch1 " + masterTip.name());
    w.cmd("git checkout branch1");
    w.touch("file1", "content1");
    w.git.add("file1");
    w.git.commit("commit1-branch1");
    final ObjectId branch1 = w.head();
    /* JGit checks out the masterTag, while CliGit checks out
     * master branch. It is risky that there are different
     * behaviors between the two implementations, but when a
     * reference is ambiguous, it is safe to assume that
     * resolution of the ambiguous reference is an implementation
     * specific detail. */
    w.git.checkout("master");
    String messageDetails =
        ", head=" + w.head().name() +
        ", masterTip=" + masterTip.name() +
        ", masterTag=" + masterTag.name() +
        ", branch1=" + branch1.name();
    if (w.git instanceof CliGitAPIImpl) {
        assertEquals("head != master branch" + messageDetails, masterTip, w.head());
    } else {
        assertEquals("head != master tag" + messageDetails, masterTag, w.head());
    }
}
/**
 * Submodule operations on a repository with no submodules must be no-ops
 * that neither fail nor disturb tracked files.
 */
public void test_no_submodules() throws IOException, InterruptedException {
    w.init();
    // Random content guards against accidental reuse of a stale workspace.
    w.touch("committed-file", "committed-file content " + java.util.UUID.randomUUID().toString());
    w.git.add("committed-file");
    w.git.commit("commit1");
    w.igit().submoduleClean(false);
    w.igit().submoduleClean(true);
    w.igit().submoduleUpdate(false);
    w.igit().submoduleUpdate(true);
    w.igit().submoduleSync();
    assertTrue("committed-file missing at commit1", w.file("committed-file").exists());
}
/**
 * Helper: fixSubmoduleUrls("origin") must throw — UnsupportedOperationException
 * on JGit (not implemented) or a descriptive GitException on CliGit.
 */
public void assertFixSubmoduleUrlsThrows() throws InterruptedException {
    try {
        w.igit().fixSubmoduleUrls("origin", listener);
        fail("Expected exception not thrown");
    } catch (UnsupportedOperationException uoe) {
        // JGit implementation does not support fixSubmoduleUrls.
        assertTrue("Unsupported operation not on JGit", w.igit() instanceof JGitAPIImpl);
    } catch (GitException ge) {
        // CliGit throws with a message identifying the unresolved remote.
        assertTrue("GitException not on CliGit", w.igit() instanceof CliGitAPIImpl);
        assertTrue("Wrong message in " + ge.getMessage(), ge.getMessage().startsWith("Could not determine remote"));
        assertTrue("Wrong remote in " + ge.getMessage(), ge.getMessage().contains("origin"));
    }
}
/**
 * Verify addSubmodule materializes the submodule directory and its files,
 * and that both non-recursive and recursive submodule updates retain them.
 * Assertion messages corrected: the post-update checks previously claimed
 * "after add", and one assertTrue message was inverted ("file found").
 */
public void test_addSubmodule() throws Exception {
    String sub1 = "sub1-" + java.util.UUID.randomUUID().toString();
    String readme1 = sub1 + File.separator + "README.md";
    w.init();
    assertFalse("submodule1 dir found too soon", w.file(sub1).exists());
    assertFalse("submodule1 file found too soon", w.file(readme1).exists());
    w.git.addSubmodule(localMirror(), sub1);
    assertTrue("submodule1 dir not found after add", w.file(sub1).exists());
    assertTrue("submodule1 file not found after add", w.file(readme1).exists());
    w.igit().submoduleUpdate(false);
    assertTrue("submodule1 dir not found after non-recursive update", w.file(sub1).exists());
    assertTrue("submodule1 file not found after non-recursive update", w.file(readme1).exists());
    w.igit().submoduleUpdate(true);
    assertTrue("submodule1 dir not found after recursive update", w.file(sub1).exists());
    assertTrue("submodule1 file not found after recursive update", w.file(readme1).exists());
    w.igit().submoduleSync();
    assertFixSubmoduleUrlsThrows();
}
/**
 * Remote-tracking submodule update (git >= 1.8.2 only): a plain recursive
 * update pins the submodule to the recorded commit, while remote tracking
 * advances it to the tip of the tracked branch.
 */
@NotImplementedInJGit
public void test_trackingSubmodule() throws Exception {
    if (! ((CliGitAPIImpl)w.git).isAtLeastVersion(1,8,2,0)) {
        System.err.println("git must be at least 1.8.2 to do tracking submodules.");
        return;
    }
    w.init(); // empty repository
    // create a new GIT repo.
    //    master  -- <file1>C  <file2>C
    WorkingArea r = new WorkingArea();
    r.init();
    r.touch("file1", "content1");
    r.git.add("file1");
    r.git.commit("submod-commit1");
    // Add new GIT repo to w
    String subModDir = "submod1-" + java.util.UUID.randomUUID().toString();
    w.git.addSubmodule(r.repoPath(), subModDir);
    w.git.submoduleInit();
    // Add a new file to the separate GIT repo.
    r.touch("file2", "content2");
    r.git.add("file2");
    r.git.commit("submod-branch1-commit1");
    // Make sure that the new file doesn't exist in the repo with remoteTracking
    String subFile = subModDir + File.separator + "file2";
    w.git.submoduleUpdate(true, false);
    assertFalse("file2 exists and should not because we didn't update to the tip of the branch (master).", w.exists(subFile));
    // Run submodule update with remote tracking
    w.git.submoduleUpdate(true, true);
    assertTrue("file2 does not exist and should because we updated to the top of the branch (master).", w.exists(subFile));
    assertFixSubmoduleUrlsThrows();
}
/* Check JENKINS-23424 - inconsistent handling of modified tracked
 * files when performing a checkout in an existing directory.
 * CliGitAPIImpl reverts tracked files, while JGitAPIImpl does
 * not.
 */
/**
 * Shared body for the JENKINS-23424 tests: checkout must discard local
 * modifications to tracked files but preserve untracked files.
 *
 * @param defineBranch when true, each checkout also creates/resets a local branch
 */
private void base_checkout_replaces_tracked_changes(boolean defineBranch) throws Exception {
    w.git.clone_().url(localMirror()).repositoryName("JENKINS-23424").execute();
    w.git.checkout("JENKINS-23424/master", "master");
    if (defineBranch) {
        w.git.checkout().branch("master").ref("JENKINS-23424/master").deleteBranchIfExist(true).execute();
    } else {
        w.git.checkout().ref("JENKINS-23424/master").deleteBranchIfExist(true).execute();
    }
    /* Confirm first checkout */
    String pomContent = w.contentOf("pom.xml");
    assertTrue("Missing jacoco ref in master pom : " + pomContent, pomContent.contains("jacoco"));
    assertFalse("Found untracked file", w.file("untracked-file").exists());
    /* Modify the pom file by adding a comment */
    String comment = " <!-- JENKINS-23424 comment -->";
    /* JGit implementation prior to 3.4.1 did not reset modified tracked files */
    w.touch("pom.xml", pomContent + comment);
    assertTrue(w.contentOf("pom.xml").contains(comment));
    /* Create an untracked file.  Both implementations retain
     * untracked files across checkout.
     */
    w.touch("untracked-file", comment);
    assertTrue("Missing untracked file", w.file("untracked-file").exists());
    /* Checkout should erase local modification */
    CheckoutCommand cmd = w.git.checkout().ref("JENKINS-23424/1.4.x").deleteBranchIfExist(true);
    if (defineBranch) {
        cmd.branch("1.4.x");
    }
    cmd.execute();
    /* Tracked file should not contain added comment, nor the jacoco reference */
    pomContent = w.contentOf("pom.xml");
    assertFalse("Found jacoco ref in 1.4.x pom : " + pomContent, pomContent.contains("jacoco"));
    assertFalse("Found comment in 1.4.x pom", pomContent.contains(comment));
    assertTrue("Missing untracked file", w.file("untracked-file").exists());
}
/** JENKINS-23424 checkout without defining a local branch name. */
@Bug(23424)
public void test_checkout_replaces_tracked_changes() throws Exception {
    base_checkout_replaces_tracked_changes(false);
}
/** JENKINS-23424 checkout that also creates/resets a local branch. */
@Bug(23424)
public void test_checkout_replaces_tracked_changes_with_branch() throws Exception {
    base_checkout_replaces_tracked_changes(true);
}
/**
 * Confirm that JENKINS-8122 is fixed in the current
 * implementation.  That bug reported that the tags from a
 * submodule were being included in the set of tags associated
 * with the parent repository.  This test clones a repository with
 * submodules, updates those submodules, and compares the tags
 * available in the repository before the submodule branch
 * checkout, after the submodule branch checkout, and within one
 * of the submodules.
 */
@Bug(8122)
public void test_submodule_tags_not_fetched_into_parent() throws Exception {
    w.git.clone_().url(localMirror()).repositoryName("origin").execute();
    w.git.checkout("origin/master", "master");
    String tagsBefore = w.cmd("git tag");
    Set<String> tagNamesBefore = w.git.getTagNames(null);
    for (String tag : tagNamesBefore) {
        assertTrue(tag + " not in " + tagsBefore, tagsBefore.contains(tag));
    }
    w.git.checkout().branch("tests/getSubmodules").ref("origin/tests/getSubmodules").execute();
    w.git.submoduleUpdate().recursive(true).execute();
    String tagsAfter = w.cmd("git tag");
    Set<String> tagNamesAfter = w.git.getTagNames(null);
    for (String tag : tagNamesAfter) {
        assertTrue(tag + " not in " + tagsAfter, tagsAfter.contains(tag));
    }
    assertEquals("tags before != after", tagsBefore, tagsAfter);
    GitClient gitNtp = w.git.subGit("modules/ntp");
    Set<String> tagNamesSubmodule = gitNtp.getTagNames(null);
    for (String tag : tagNamesSubmodule) {
        /* Bug fix: the original used tagsAfter.matches("^" + tag + "$"),
         * but String.matches() requires the regex to match the ENTIRE
         * multi-line 'git tag' output, so the assertFalse was vacuously
         * true and never detected a leaked submodule tag.  Compare the
         * output line by line instead. */
        boolean leakedIntoParent = false;
        for (String line : tagsAfter.split("\n")) {
            if (line.trim().equals(tag)) {
                leakedIntoParent = true;
                break;
            }
        }
        assertFalse("Submodule tag " + tag + " in parent " + tagsAfter, leakedIntoParent);
    }
    try {
        w.igit().fixSubmoduleUrls("origin", listener);
        assertTrue("not CliGit", w.igit() instanceof CliGitAPIImpl);
    } catch (UnsupportedOperationException uoe) {
        assertTrue("Unsupported operation not on JGit", w.igit() instanceof JGitAPIImpl);
    }
}
/**
 * getSubmodules("HEAD") must report the two pinned submodules of the
 * tests/getSubmodules branch, and submoduleUpdate must populate them.
 */
public void test_getSubmodules() throws Exception {
    w.init();
    w.git.clone_().url(localMirror()).repositoryName("sub_origin").execute();
    w.git.checkout("sub_origin/tests/getSubmodules", "tests/getSubmodules");
    List<IndexEntry> r = w.git.getSubmodules("HEAD");
    // Exact index entries (mode 160000 = gitlink) recorded on that branch.
    assertEquals(
        "[IndexEntry[mode=160000,type=commit,file=modules/firewall,object=978c8b223b33e203a5c766ecf79704a5ea9b35c8], " +
        "IndexEntry[mode=160000,type=commit,file=modules/ntp,object=b62fabbc2bb37908c44ded233e0f4bf479e45609]]",
        r.toString()
    );
    w.git.submoduleInit();
    w.git.submoduleUpdate().execute();
    assertTrue("modules/firewall does not exist", w.exists("modules/firewall"));
    assertTrue("modules/ntp does not exist", w.exists("modules/ntp"));
    assertFixSubmoduleUrlsThrows();
}
/** submoduleInit + submoduleUpdate must populate both submodule directories. */
public void test_submodule_update() throws Exception {
    w.init();
    w.git.clone_().url(localMirror()).repositoryName("sub2_origin").execute();
    w.git.checkout().branch("tests/getSubmodules").ref("sub2_origin/tests/getSubmodules").deleteBranchIfExist(true).execute();
    w.git.submoduleInit();
    w.git.submoduleUpdate().execute();
    assertTrue("modules/firewall does not exist", w.exists("modules/firewall"));
    assertTrue("modules/ntp does not exist", w.exists("modules/ntp"));
    assertFixSubmoduleUrlsThrows();
}
/**
 * Remote-tracking submodule update with an explicit branch selection
 * (git >= 1.8.2 only): switching the tracked branch must swap which
 * files are present in the submodule checkout.  Also verifies that the
 * submoduleUpdate timeout setting is honored.
 */
@NotImplementedInJGit
public void test_trackingSubmoduleBranches() throws Exception {
    if (! ((CliGitAPIImpl)w.git).isAtLeastVersion(1,8,2,0)) {
        setTimeoutVisibleInCurrentTest(false);
        System.err.println("git must be at least 1.8.2 to do tracking submodules.");
        return;
    }
    w.init(); // empty repository
    // create a new GIT repo.
    //   master  -- <file1>C
    //   branch1 -- <file1>C <file2>C
    //   branch2 -- <file1>C <file3>C
    WorkingArea r = new WorkingArea();
    r.init();
    r.touch("file1", "content1");
    r.git.add("file1");
    r.git.commit("submod-commit1");
    r.git.branch("branch1");
    r.git.checkout("branch1");
    r.touch("file2", "content2");
    r.git.add("file2");
    r.git.commit("submod-commit2");
    r.git.checkout("master");
    r.git.branch("branch2");
    r.git.checkout("branch2");
    r.touch("file3", "content3");
    r.git.add("file3");
    r.git.commit("submod-commit3");
    r.git.checkout("master");
    // Setup variables for use in tests
    String submodDir = "submod1" + java.util.UUID.randomUUID().toString();
    String subFile1 = submodDir + File.separator + "file1";
    String subFile2 = submodDir + File.separator + "file2";
    String subFile3 = submodDir + File.separator + "file3";
    // Add new GIT repo to w, at the master branch
    w.git.addSubmodule(r.repoPath(), submodDir);
    w.git.submoduleInit();
    assertTrue("file1 does not exist and should be we imported the submodule.", w.exists(subFile1));
    assertFalse("file2 exists and should not because not on 'branch1'", w.exists(subFile2));
    assertFalse("file3 exists and should not because not on 'branch2'", w.exists(subFile3));
    // Switch to branch1
    w.git.submoduleUpdate().remoteTracking(true).useBranch(submodDir, "branch1").execute();
    assertTrue("file2 does not exist and should because on branch1", w.exists(subFile2));
    assertFalse("file3 exists and should not because not on 'branch2'", w.exists(subFile3));
    // Switch to branch2
    w.git.submoduleUpdate().remoteTracking(true).useBranch(submodDir, "branch2").execute();
    assertFalse("file2 exists and should not because not on 'branch1'", w.exists(subFile2));
    assertTrue("file3 does not exist and should because on branch2", w.exists(subFile3));
    // Switch to master, with a timeout to verify timeout plumbing
    int newTimeout = 6;
    w.git.submoduleUpdate().remoteTracking(true).useBranch(submodDir, "master").timeout(newTimeout).execute();
    assertFalse("file2 exists and should not because not on 'branch1'", w.exists(subFile2));
    assertFalse("file3 exists and should not because not on 'branch2'", w.exists(subFile3));
    setExpectedTimeoutWithAdjustedEnd(newTimeout, 2);
}
/**
 * Sparse checkout: only the listed directories may appear in the
 * workspace; an empty list or null restores a full checkout.  Also
 * verifies the checkout timeout setting is honored.
 */
@NotImplementedInJGit
public void test_sparse_checkout() throws Exception {
    /* Sparse checkout was added in git 1.7.0, but the checkout -f syntax
     * required by the plugin implementation does not work in git 1.7.1.
     */
    if (!w.cgit().isAtLeastVersion(1, 7, 9, 0)) {
        return;
    }
    // Create a repo for cloning purpose
    w.init();
    w.commitEmpty("init");
    assertTrue("mkdir dir1 failed", w.file("dir1").mkdir());
    w.touch("dir1/file1");
    assertTrue("mkdir dir2 failed", w.file("dir2").mkdir());
    w.touch("dir2/file2");
    assertTrue("mkdir dir3 failed", w.file("dir3").mkdir());
    w.touch("dir3/file3");
    w.git.add("dir1/file1");
    w.git.add("dir2/file2");
    w.git.add("dir3/file3");
    w.git.commit("commit");
    // Clone it
    WorkingArea workingArea = new WorkingArea();
    workingArea.git.clone_().url(w.repoPath()).execute();
    // Sparse checkout of dir1 only
    workingArea.git.checkout().ref("origin/master").branch("master").deleteBranchIfExist(true).sparseCheckoutPaths(Lists.newArrayList("dir1")).execute();
    assertTrue(workingArea.exists("dir1"));
    assertFalse(workingArea.exists("dir2"));
    assertFalse(workingArea.exists("dir3"));
    // Switching the sparse path removes the previously checked out dir
    workingArea.git.checkout().ref("origin/master").branch("master").deleteBranchIfExist(true).sparseCheckoutPaths(Lists.newArrayList("dir2")).execute();
    assertFalse(workingArea.exists("dir1"));
    assertTrue(workingArea.exists("dir2"));
    assertFalse(workingArea.exists("dir3"));
    // Multiple sparse paths
    workingArea.git.checkout().ref("origin/master").branch("master").deleteBranchIfExist(true).sparseCheckoutPaths(Lists.newArrayList("dir1", "dir2")).execute();
    assertTrue(workingArea.exists("dir1"));
    assertTrue(workingArea.exists("dir2"));
    assertFalse(workingArea.exists("dir3"));
    // Empty list disables sparse checkout (full workspace)
    workingArea.git.checkout().ref("origin/master").branch("master").deleteBranchIfExist(true).sparseCheckoutPaths(Collections.<String>emptyList()).execute();
    assertTrue(workingArea.exists("dir1"));
    assertTrue(workingArea.exists("dir2"));
    assertTrue(workingArea.exists("dir3"));
    int newTimeout = 3; /* Check that checkout timeout is honored */
    workingArea.git.checkout().ref("origin/master").branch("master").deleteBranchIfExist(true).sparseCheckoutPaths(null)
        .timeout(newTimeout)
        .execute();
    assertTrue(workingArea.exists("dir1"));
    assertTrue(workingArea.exists("dir2"));
    assertTrue(workingArea.exists("dir3"));
    setExpectedTimeoutWithAdjustedEnd(newTimeout);
}
/** A clone with noCheckout() must not materialize any tracked files. */
public void test_clone_no_checkout() throws Exception {
    // Build a small upstream repository to clone from.
    final WorkingArea upstream = new WorkingArea();
    upstream.init();
    upstream.commitEmpty("init");
    upstream.touch("file1");
    upstream.git.add("file1");
    upstream.git.commit("commit");
    // Clone without checkout; the working tree must stay empty.
    w.git.clone_().url(upstream.repoPath()).repositoryName("origin").noCheckout().execute();
    assertFalse(w.exists("file1"));
}
/**
 * hasGitModules() must be true on a branch carrying .gitmodules
 * (tests/getSubmodules) and false on master.
 */
public void test_hasSubmodules() throws Exception {
    w.init();
    w.launchCommand("git", "fetch", localMirror(), "tests/getSubmodules:t");
    w.git.checkout("t");
    assertTrue(w.git.hasGitModules());
    w.launchCommand("git", "fetch", localMirror(), "master:t2");
    w.git.checkout("t2");
    assertFalse(w.git.hasGitModules());
    assertFixSubmoduleUrlsThrows();
}
/**
 * @return true when running on a Java 6 JVM (java.version starts with "1.6")
 */
private boolean isJava6() {
    // Return the condition directly instead of if-return-true/return-false.
    return System.getProperty("java.version").startsWith("1.6");
}
/**
 * core.symlinks is set to false by msysgit on Windows and by JGit
 * 3.3.0 on all platforms.  It is not set on Linux.  Refer to
 * JENKINS-21168, JENKINS-22376, and JENKINS-22391 for details.
 *
 * @param area the workspace whose core.symlinks setting is asserted
 */
private void checkSymlinkSetting(WorkingArea area) throws IOException {
    final String expected = SystemUtils.IS_OS_WINDOWS || (area.git instanceof JGitAPIImpl && isJava6()) ? "false" : "";
    String symlinkValue;
    try {
        /* Bug fix: query 'area' (the repo under test), not the outer 'w'.
         * Callers such as test_init_bare pass a different workspace, and
         * the old code always inspected w's configuration instead. */
        symlinkValue = area.cmd(true, "git config core.symlinks").trim();
    } catch (Exception e) {
        // 'git config' fails when the key is unset; surface the message
        // as the observed value so the assertion reports something useful.
        symlinkValue = e.getMessage();
    }
    assertEquals(expected, symlinkValue);
}
/** init() must create a .git directory with the expected symlink setting. */
public void test_init() throws Exception {
    assertFalse(w.file(".git").exists());
    w.git.init();
    assertTrue(w.file(".git").exists());
    checkSymlinkSetting(w);
}
/** The init_() command builder must create a .git directory like init(). */
public void test_init_() throws Exception {
    assertFalse(w.file(".git").exists());
    w.git.init_().workspace(w.repoPath()).execute();
    assertTrue(w.file(".git").exists());
    checkSymlinkSetting(w);
}
/**
 * bare(false) must create a working repo (.git, no top-level refs);
 * bare(true) must create a bare repo (refs, no .git).
 */
public void test_init_bare() throws Exception {
    // Non-bare init: .git appears, top-level refs directory does not.
    assertFalse(w.file(".git").exists());
    assertFalse(w.file("refs").exists());
    w.git.init_().workspace(w.repoPath()).bare(false).execute();
    assertTrue(w.file(".git").exists());
    assertFalse(w.file("refs").exists());
    checkSymlinkSetting(w);
    // Bare init in a second workspace: refs appears, .git does not.
    final WorkingArea bareArea = new WorkingArea();
    assertFalse(bareArea.file(".git").exists());
    assertFalse(bareArea.file("refs").exists());
    bareArea.git.init_().workspace(bareArea.repoPath()).bare(true).execute();
    assertFalse(bareArea.file(".git").exists());
    assertTrue(bareArea.file("refs").exists());
    checkSymlinkSetting(bareArea);
}
/** getSubmoduleUrl must return the configured URL and reject unknown names. */
public void test_getSubmoduleUrl() throws Exception {
    w = clone(localMirror());
    w.cmd("git checkout tests/getSubmodules");
    w.git.submoduleInit();
    assertEquals("https://github.com/puppetlabs/puppetlabs-firewall.git", w.igit().getSubmoduleUrl("modules/firewall"));
    try {
        w.igit().getSubmoduleUrl("bogus");
        fail();
    } catch (GitException expected) {
        // expected: no submodule named "bogus" is configured
    }
}
/** setSubmoduleUrl must persist: a fresh client on the same repo reads it back. */
public void test_setSubmoduleUrl() throws Exception {
    w = clone(localMirror());
    w.cmd("git checkout tests/getSubmodules");
    w.git.submoduleInit();
    final String dummyUrl = "/dummy";
    w.igit().setSubmoduleUrl("modules/firewall", dummyUrl);
    // create a brand new Git object to make sure it's persisted
    final WorkingArea verifier = new WorkingArea(w.repo);
    assertEquals(dummyUrl, verifier.igit().getSubmoduleUrl("modules/firewall"));
}
/**
 * prune() must delete stale remote-tracking refs: b1 was deleted upstream,
 * b3 was created upstream after ws2's last fetch, so after pruning only b2
 * should survive in ws2.
 */
public void test_prune() throws Exception {
    // pretend that 'r' is a team repository and ws1 and ws2 are team members
    WorkingArea r = new WorkingArea();
    r.init(true);
    WorkingArea ws1 = new WorkingArea().init();
    WorkingArea ws2 = w.init();
    ws1.commitEmpty("c");
    ws1.cmd("git remote add origin " + r.repoPath());
    ws1.cmd("git push origin master:b1");
    ws1.cmd("git push origin master:b2");
    ws1.cmd("git push origin master");
    ws2.cmd("git remote add origin " + r.repoPath());
    ws2.cmd("git fetch origin");
    // at this point both ws1&ws2 have several remote tracking branches
    ws1.cmd("git push origin :b1");       // delete b1 upstream
    ws1.cmd("git push origin master:b3"); // create b3 upstream (never fetched by ws2)
    ws2.git.prune(new RemoteConfig(new Config(),"origin"));
    assertFalse(ws2.exists(".git/refs/remotes/origin/b1"));
    assertTrue( ws2.exists(".git/refs/remotes/origin/b2"));
    assertFalse(ws2.exists(".git/refs/remotes/origin/b3"));
}
/** revListAll() must match 'git rev-list --all' byte for byte. */
public void test_revListAll() throws Exception {
    w.init();
    w.launchCommand("git", "pull", localMirror());
    final StringBuilder actual = new StringBuilder();
    for (ObjectId commit : w.git.revListAll()) {
        actual.append(commit.name()).append('\n');
    }
    assertEquals(w.cmd("git rev-list --all"), actual.toString());
}
/** The revList_() command builder with all() must match 'git rev-list --all'. */
public void test_revList_() throws Exception {
    w.init();
    w.launchCommand("git", "pull", localMirror());
    final List<ObjectId> commits = new ArrayList<>();
    final RevListCommand command = w.git.revList_();
    command.all();
    command.to(commits);
    command.execute();
    final StringBuilder actual = new StringBuilder();
    for (ObjectId commit : commits) {
        actual.append(commit.name()).append('\n');
    }
    assertEquals(w.cmd("git rev-list --all"), actual.toString());
}
/** revList_().firstParent() must match 'git rev-list --first-parent' per branch. */
public void test_revListFirstParent() throws Exception {
    w.init();
    w.launchCommand("git", "pull", localMirror());
    for (Branch branch : w.git.getRemoteBranches()) {
        final List<ObjectId> commits = new ArrayList<>();
        final RevListCommand command = w.git.revList_();
        command.firstParent();
        command.to(commits);
        command.reference(branch.getName());
        command.execute();
        final StringBuilder actual = new StringBuilder();
        for (ObjectId commit : commits) {
            actual.append(commit.name()).append('\n');
        }
        assertEquals(w.cmd("git rev-list --first-parent " + branch.getName()), actual.toString());
    }
}
/** revList(branch) must match 'git rev-list <branch>' for every remote branch. */
public void test_revList() throws Exception {
    w.init();
    w.launchCommand("git", "pull", localMirror());
    for (Branch branch : w.git.getRemoteBranches()) {
        final StringBuilder actual = new StringBuilder();
        for (ObjectId commit : w.git.revList(branch.getName())) {
            actual.append(commit.name()).append('\n');
        }
        assertEquals(w.cmd("git rev-list " + branch.getName()), actual.toString());
    }
}
/** The OURS merge strategy must keep the current branch's conflicting content. */
public void test_merge_strategy() throws Exception {
    w.init();
    w.commitEmpty("init");
    // branch1: "file" contains content1
    w.git.branch("branch1");
    w.git.checkout("branch1");
    w.touch("file", "content1");
    w.git.add("file");
    w.git.commit("commit1");
    // branch2 (from master): the same "file" contains content2
    w.git.checkout("master");
    w.git.branch("branch2");
    w.git.checkout("branch2");
    final File conflictingFile = w.touch("file", "content2");
    w.git.add("file");
    w.git.commit("commit2");
    // OURS resolves the conflict in favor of branch2 (the checked-out branch).
    w.git.merge().setStrategy(MergeCommand.Strategy.OURS).setRevisionToMerge(w.git.getHeadRev(w.repoPath(), "branch1")).execute();
    assertEquals("merge didn't selected OURS content", "content2", FileUtils.readFileToString(conflictingFile));
}
/** The RESOLVE strategy must fail on a genuine content conflict. */
public void test_merge_strategy_correct_fail() throws Exception {
    w.init();
    w.commitEmpty("init");
    // branch1: "file" contains content1
    w.git.branch("branch1");
    w.git.checkout("branch1");
    w.touch("file", "content1");
    w.git.add("file");
    w.git.commit("commit1");
    // branch2 (from master): the same "file" contains content2
    w.git.checkout("master");
    w.git.branch("branch2");
    w.git.checkout("branch2");
    w.touch("file", "content2");
    w.git.add("file");
    w.git.commit("commit2");
    try {
        w.git.merge().setStrategy(MergeCommand.Strategy.RESOLVE).setRevisionToMerge(w.git.getHeadRev(w.repoPath(), "branch1")).execute();
        fail();
    } catch (GitException expected) {
        // expected: RESOLVE cannot auto-merge the conflicting file
    }
}
/**
 * FF mode: a true fast-forward moves the branch pointer; when a
 * fast-forward is impossible, FF must gracefully fall back to a merge
 * commit whose parents are verified via HEAD^1 / HEAD^2.
 */
@Bug(12402)
public void test_merge_fast_forward_mode_ff() throws Exception {
    w.init();
    w.commitEmpty("init");
    w.git.branch("branch1");
    w.git.checkout("branch1");
    w.touch("file1", "content1");
    w.git.add("file1");
    w.git.commit("commit1");
    final ObjectId branch1 = w.head();
    w.git.checkout("master");
    w.git.branch("branch2");
    w.git.checkout("branch2");
    w.touch("file2", "content2");
    w.git.add("file2");
    w.git.commit("commit2");
    final ObjectId branch2 = w.head();
    w.git.checkout("master");
    // The first merge is a fast-forward, master moves to branch1
    w.git.merge().setGitPluginFastForwardMode(MergeCommand.GitPluginFastForwardMode.FF).setRevisionToMerge(w.git.getHeadRev(w.repoPath(), "branch1")).execute();
    assertEquals("Fast-forward merge failed. master and branch1 should be the same.",w.head(),branch1);
    // The second merge calls for fast-forward (FF), but a merge commit will result
    // This tests that calling for FF gracefully falls back to a commit merge
    // master moves to a new commit ahead of branch1 and branch2
    w.git.merge().setGitPluginFastForwardMode(MergeCommand.GitPluginFastForwardMode.FF).setRevisionToMerge(w.git.getHeadRev(w.repoPath(), "branch2")).execute();
    // The merge commit (head) should have branch2 and branch1 as parents
    // revList("HEAD^N") lists commits starting at the Nth parent, so get(0) is that parent.
    List<ObjectId> revList = w.git.revList("HEAD^1");
    assertEquals("Merge commit failed. branch1 should be a parent of HEAD but it isn't.",revList.get(0).name(), branch1.name());
    revList = w.git.revList("HEAD^2");
    assertEquals("Merge commit failed. branch2 should be a parent of HEAD but it isn't.",revList.get(0).name(), branch2.name());
}
/**
 * FF_ONLY mode: a true fast-forward succeeds; a merge that would need a
 * merge commit must throw and leave HEAD unchanged.
 * Removed unused locals 'branch2' and 'master' captured by the original.
 */
public void test_merge_fast_forward_mode_ff_only() throws Exception {
    w.init();
    w.commitEmpty("init");
    w.git.branch("branch1");
    w.git.checkout("branch1");
    w.touch("file1", "content1");
    w.git.add("file1");
    w.git.commit("commit1");
    final ObjectId branch1 = w.head();
    w.git.checkout("master");
    w.git.branch("branch2");
    w.git.checkout("branch2");
    w.touch("file2", "content2");
    w.git.add("file2");
    w.git.commit("commit2");
    w.git.checkout("master");
    // The first merge is a fast-forward, master moves to branch1
    w.git.merge().setGitPluginFastForwardMode(MergeCommand.GitPluginFastForwardMode.FF_ONLY).setRevisionToMerge(w.git.getHeadRev(w.repoPath(), "branch1")).execute();
    assertEquals("Fast-forward merge failed. master and branch1 should be the same but aren't.",w.head(),branch1);
    // The second merge calls for fast-forward only (FF_ONLY), but a merge commit is required, hence it is expected to fail
    try {
        w.git.merge().setGitPluginFastForwardMode(MergeCommand.GitPluginFastForwardMode.FF_ONLY).setRevisionToMerge(w.git.getHeadRev(w.repoPath(), "branch2")).execute();
        fail("Exception not thrown: the fast-forward only mode should have failed");
    } catch (GitException e) {
        // expected; HEAD must be untouched by the aborted merge
        assertEquals("Fast-forward merge abort failed. master and branch1 should still be the same as the merge was aborted.",w.head(),branch1);
    }
}
/**
 * NO_FF mode: even a fast-forwardable merge must create a merge commit;
 * parents are verified via HEAD^1 / HEAD^2 after each merge.
 * Removed the unused local 'master' and the pointless null-initialization
 * of 'revList' from the original.
 */
public void test_merge_fast_forward_mode_no_ff() throws Exception {
    w.init();
    w.commitEmpty("init");
    final ObjectId base = w.head();
    w.git.branch("branch1");
    w.git.checkout("branch1");
    w.touch("file1", "content1");
    w.git.add("file1");
    w.git.commit("commit1");
    final ObjectId branch1 = w.head();
    w.git.checkout("master");
    w.git.branch("branch2");
    w.git.checkout("branch2");
    w.touch("file2", "content2");
    w.git.add("file2");
    w.git.commit("commit2");
    final ObjectId branch2 = w.head();
    w.git.checkout("master");
    // The first merge is normally a fast-forward, but we're calling for a merge commit which is expected to work
    w.git.merge().setGitPluginFastForwardMode(MergeCommand.GitPluginFastForwardMode.NO_FF).setRevisionToMerge(w.git.getHeadRev(w.repoPath(), "branch1")).execute();
    // The first merge will have base and branch1 as parents
    List<ObjectId> revList = w.git.revList("HEAD^1");
    assertEquals("Merge commit failed. base should be a parent of HEAD but it isn't.",revList.get(0).name(), base.name());
    revList = w.git.revList("HEAD^2");
    assertEquals("Merge commit failed. branch1 should be a parent of HEAD but it isn't.",revList.get(0).name(), branch1.name());
    final ObjectId base2 = w.head();
    // Calling for NO_FF when required is expected to work
    w.git.merge().setGitPluginFastForwardMode(MergeCommand.GitPluginFastForwardMode.NO_FF).setRevisionToMerge(w.git.getHeadRev(w.repoPath(), "branch2")).execute();
    // The second merge will have base2 and branch2 as parents
    revList = w.git.revList("HEAD^1");
    assertEquals("Merge commit failed. base2 should be a parent of HEAD but it isn't.",revList.get(0).name(), base2.name());
    revList = w.git.revList("HEAD^2");
    assertEquals("Merge commit failed. branch2 should be a parent of HEAD but it isn't.",revList.get(0).name(), branch2.name());
}
/**
 * Squash merge: two commits from branch1 must collapse into a single
 * commit on master.  Note the "before" count is intentionally taken AFTER
 * the merge — a squash merge only stages changes without committing, so
 * the explicit commit below is the single commit being counted.
 */
public void test_merge_squash() throws Exception{
    w.init();
    w.commitEmpty("init");
    w.git.branch("branch1");
    //First commit to branch1
    w.git.checkout("branch1");
    w.touch("file1", "content1");
    w.git.add("file1");
    w.git.commit("commit1");
    //Second commit to branch1
    w.touch("file2", "content2");
    w.git.add("file2");
    w.git.commit("commit2");
    //Merge branch1 with master, squashing both commits
    w.git.checkout("master");
    w.git.merge().setSquash(true).setRevisionToMerge(w.git.getHeadRev(w.repoPath(), "branch1")).execute();
    //Compare commit counts of before and after committing the merge, should be one due to the squashing of commits.
    final int commitCountBefore = w.git.revList("HEAD").size();
    w.git.commit("commitMerge");
    final int commitCountAfter = w.git.revList("HEAD").size();
    assertEquals("Squash merge failed. Should have merged only one commit.", 1, commitCountAfter - commitCountBefore);
}
/**
 * Non-squash merge: both commits from branch1 must arrive on master,
 * so the commit count grows by two (fast-forward brings both commits).
 */
public void test_merge_no_squash() throws Exception{
    w.init();
    w.commitEmpty("init");
    //First commit to branch1
    w.git.branch("branch1");
    w.git.checkout("branch1");
    w.touch("file1", "content1");
    w.git.add("file1");
    w.git.commit("commit1");
    //Second commit to branch1
    w.touch("file2", "content2");
    w.git.add("file2");
    w.git.commit("commit2");
    //Merge branch1 with master, without squashing commits.
    //Compare commit counts of before and after the merge: both branch1 commits must appear, so the difference is two.
    w.git.checkout("master");
    final int commitCountBefore = w.git.revList("HEAD").size();
    w.git.merge().setSquash(false).setRevisionToMerge(w.git.getHeadRev(w.repoPath(), "branch1")).execute();
    final int commitCountAfter = w.git.revList("HEAD").size();
    assertEquals("Squashless merge failed. Should have merged two commits.", 2, commitCountAfter - commitCountBefore);
}
/** setCommit(false): the merge must stage changes without creating a commit. */
public void test_merge_no_commit() throws Exception{
    w.init();
    w.commitEmpty("init");
    // Commit one file on branch1.
    w.git.branch("branch1");
    w.git.checkout("branch1");
    w.touch("file1", "content1");
    w.git.add("file1");
    w.git.commit("commit1");
    // Merge into master with autocommit disabled; NO_FF forces a merge
    // commit path, so suppressing the commit is actually observable.
    w.git.checkout("master");
    final int countBefore = w.git.revList("HEAD").size();
    w.git.merge().setCommit(false).setGitPluginFastForwardMode(MergeCommand.GitPluginFastForwardMode.NO_FF).setRevisionToMerge(w.git.getHeadRev(w.repoPath(), "branch1")).execute();
    final int countAfter = w.git.revList("HEAD").size();
    assertEquals("No Commit merge failed. Shouldn't have committed any changes.", countBefore, countAfter);
}
public void test_merge_commit() throws Exception{
w.init();
w.commitEmpty("init");
//Create branch1 and commit a file
w.git.branch("branch1");
w.git.checkout("branch1");
w.touch("file1", "content1");
w.git.add("file1");
w.git.commit("commit1");
//Merge branch1 with master, without committing the merge.
//Compare commit counts of before and after the merge, should be two due to the commit of the file and the commit of the merge.
w.git.checkout("master");
final int commitCountBefore = w.git.revList("HEAD").size();
w.git.merge().setCommit(true).setGitPluginFastForwardMode(MergeCommand.GitPluginFastForwardMode.NO_FF).setRevisionToMerge(w.git.getHeadRev(w.repoPath(), "branch1")).execute();
final int commitCountAfter = w.git.revList("HEAD").size();
assertEquals("Commit merge failed. Should have committed the merge.", 2, commitCountAfter - commitCountBefore);
}
    /**
     * Verifies that a custom merge message supplied via setMessage() ends up
     * as the commit message of the resulting merge commit.
     */
    public void test_merge_with_message() throws Exception {
        w.init();
        w.commitEmpty("init");
        // First commit to branch1
        w.git.branch("branch1");
        w.git.checkout("branch1");
        w.touch("file1", "content1");
        w.git.add("file1");
        w.git.commit("commit1");
        // Merge branch1 into master
        w.git.checkout("master");
        String mergeMessage = "Merge message to be tested.";
        w.git.merge().setMessage(mergeMessage).setGitPluginFastForwardMode(MergeCommand.GitPluginFastForwardMode.NO_FF).setRevisionToMerge(w.git.getHeadRev(w.repoPath(), "branch1")).execute();
        // Obtain last commit message
        // NOTE(review): index 7 assumes a fixed line position of the message in
        // showRevision output — brittle if that format ever changes; confirm.
        String resultMessage = w.git.showRevision(w.head()).get(7).trim();
        assertEquals("Custom message merge failed. Should have set custom merge message.", mergeMessage, resultMessage);
    }
    /**
     * Exercises the deprecated IGitAPI mergeBase/merge(String)/getDefaultRemote
     * entry points: builds branch1 and branch2 from a common base, checks
     * merge-base results (including behavior on a nonexistent SHA1, which
     * differs between implementations), merges branch1 into branch2, verifies
     * mergeBase returns null for an independent orphan root, and checks
     * getDefaultRemote fallback behavior.
     */
    @Deprecated
    public void test_merge_refspec() throws Exception {
        w.init();
        w.commitEmpty("init");
        w.touch("file-master", "content-master");
        w.git.add("file-master");
        w.git.commit("commit1-master");
        final ObjectId base = w.head();
        w.git.branch("branch1");
        w.git.checkout("branch1");
        w.touch("file1", "content1");
        w.git.add("file1");
        w.git.commit("commit1-branch1");
        final ObjectId branch1 = w.head();
        w.cmd("git branch branch2 master");
        w.git.checkout("branch2");
        File f = w.touch("file2", "content2");
        w.git.add("file2");
        w.git.commit("commit2-branch2");
        final ObjectId branch2 = w.head();
        assertTrue("file2 does not exist", f.exists());
        assertFalse("file1 exists before merge", w.exists("file1"));
        assertEquals("Wrong merge-base branch1 branch2", base, w.igit().mergeBase(branch1, branch2));
        // SHA1 that does not exist in this freshly created repository.
        String badSHA1 = "15c80fb1567f0e88ca855c69e3f17425d515a188";
        ObjectId badBase = ObjectId.fromString(badSHA1);
        // For an unknown SHA1, CliGit returns null while the other
        // implementation throws GitException mentioning the bad SHA1.
        try {
            assertNull("Base unexpected for bad SHA1", w.igit().mergeBase(branch1, badBase));
            assertTrue("Exception not thrown by CliGit", w.git instanceof CliGitAPIImpl);
        } catch (GitException moa) {
            assertFalse("Exception thrown by CliGit", w.git instanceof CliGitAPIImpl);
            assertTrue("Exception message didn't mention " + badBase.toString(), moa.getMessage().contains(badSHA1));
        }
        // Same check with the argument order reversed.
        try {
            assertNull("Base unexpected for bad SHA1", w.igit().mergeBase(badBase, branch1));
            assertTrue("Exception not thrown by CliGit", w.git instanceof CliGitAPIImpl);
        } catch (GitException moa) {
            assertFalse("Exception thrown by CliGit", w.git instanceof CliGitAPIImpl);
            assertTrue("Exception message didn't mention " + badBase.toString(), moa.getMessage().contains(badSHA1));
        }
        w.igit().merge("branch1");
        assertTrue("file1 does not exist after merge", w.exists("file1"));
        /* Git 1.7.1 does not understand the --orphan argument to checkout.
         * Stop the test here on older git versions
         */
        if (!w.cgit().isAtLeastVersion(1, 7, 9, 0)) {
            return;
        }
        w.cmd("git checkout --orphan newroot"); // Create an independent root
        w.commitEmpty("init-on-newroot");
        final ObjectId newRootCommit = w.head();
        // Orphan root shares no history with branch1, so there is no merge base.
        assertNull("Common root not expected", w.igit().mergeBase(newRootCommit, branch1));
        final String remoteUrl = "ssh://mwaite.example.com//var/lib/git/mwaite/jenkins/git-client-plugin.git";
        w.git.setRemoteUrl("origin", remoteUrl);
        // getDefaultRemote answers "origin" both for a known and an unknown remote name.
        assertEquals("Wrong origin default remote", "origin", w.igit().getDefaultRemote("origin"));
        assertEquals("Wrong invalid default remote", "origin", w.igit().getDefaultRemote("invalid"));
    }
    /**
     * Rebase of feature1 onto an advanced master with non-conflicting changes:
     * the rebase must succeed, the rebased branch must contain the master
     * commit, HEAD must remain on feature1, and the feature file must be
     * present in the worktree.
     */
    public void test_rebase_passes_without_conflict() throws Exception {
        w.init();
        w.commitEmpty("init");
        // First commit to master
        w.touch("master_file", "master1");
        w.git.add("master_file");
        w.git.commit("commit-master1");
        // Create a feature branch and make a commit
        w.git.branch("feature1");
        w.git.checkout("feature1");
        w.touch("feature_file", "feature1");
        w.git.add("feature_file");
        w.git.commit("commit-feature1");
        // Second commit to master (touches a different file, so no conflict)
        w.git.checkout("master");
        w.touch("master_file", "master2");
        w.git.add("master_file");
        w.git.commit("commit-master2");
        // Rebase feature commit onto master
        w.git.checkout("feature1");
        w.git.rebase().setUpstream("master").execute();
        assertThat("Should've rebased feature1 onto master", w.git.revList("feature1").contains(w.git.revParse("master")));
        assertEquals("HEAD should be on the rebased branch", w.git.revParse("HEAD").name(), w.git.revParse("feature1").name());
        assertThat("Rebased file should be present in the worktree",w.git.getWorkTree().child("feature_file").exists());
    }
    /**
     * Rebase of feature1 onto master where both branches modified the same
     * file: the rebase must throw GitException, HEAD must stay on feature1,
     * and the workspace must be left completely clean — no conflict markers,
     * no partial rebase state, no stray files.
     */
    public void test_rebase_fails_with_conflict() throws Exception {
        w.init();
        w.commitEmpty("init");
        // First commit to master
        w.touch("file", "master1");
        w.git.add("file");
        w.git.commit("commit-master1");
        // Create a feature branch and make a commit
        w.git.branch("feature1");
        w.git.checkout("feature1");
        w.touch("file", "feature1");
        w.git.add("file");
        w.git.commit("commit-feature1");
        // Second commit to master (same file as feature1, guaranteeing a conflict)
        w.git.checkout("master");
        w.touch("file", "master2");
        w.git.add("file");
        w.git.commit("commit-master2");
        // Rebase feature commit onto master
        w.git.checkout("feature1");
        try {
            w.git.rebase().setUpstream("master").execute();
            fail("Rebase did not throw expected GitException");
        } catch (GitException e) {
            // The failed rebase must have been rolled back completely.
            assertEquals("HEAD not reset to the feature branch.", w.git.revParse("HEAD").name(), w.git.revParse("feature1").name());
            Status status = new org.eclipse.jgit.api.Git(w.repo()).status().call();
            assertTrue("Workspace is not clean", status.isClean());
            assertFalse("Workspace has uncommitted changes", status.hasUncommittedChanges());
            assertTrue("Workspace has conflicting changes", status.getConflicting().isEmpty());
            assertTrue("Workspace has missing changes", status.getMissing().isEmpty());
            assertTrue("Workspace has modified files", status.getModified().isEmpty());
            assertTrue("Workspace has removed files", status.getRemoved().isEmpty());
            assertTrue("Workspace has untracked files", status.getUntracked().isEmpty());
        }
    }
/**
* Checks that the ChangelogCommand abort() API does not write
* output to the destination. Does not check that the abort() API
* releases resources.
*/
public void test_changelog_abort() throws InterruptedException, IOException
{
final String logMessage = "changelog-abort-test-commit";
w.init();
w.touch("file-changelog-abort", "changelog abort file contents " + java.util.UUID.randomUUID().toString());
w.git.add("file-changelog-abort");
w.git.commit(logMessage);
String sha1 = w.git.revParse("HEAD").name();
ChangelogCommand changelogCommand = w.git.changelog();
StringWriter writer = new StringWriter();
changelogCommand.to(writer);
/* Abort the changelog, confirm no content was written */
changelogCommand.abort();
assertEquals("aborted changelog wrote data", "", writer.toString());
/* Execute the changelog, confirm expected content was written */
changelogCommand = w.git.changelog();
changelogCommand.to(writer);
changelogCommand.execute();
assertTrue("No log message in " + writer.toString(), writer.toString().contains(logMessage));
assertTrue("No SHA1 in " + writer.toString(), writer.toString().contains(sha1));
}
@Bug(23299)
public void test_getHeadRev() throws Exception {
Map<String, ObjectId> heads = w.git.getHeadRev(remoteMirrorURL);
ObjectId master = w.git.getHeadRev(remoteMirrorURL, "refs/heads/master");
assertEquals("URL is " + remoteMirrorURL + ", heads is " + heads, master, heads.get("refs/heads/master"));
/* Test with a specific tag reference - JENKINS-23299 */
ObjectId knownTag = w.git.getHeadRev(remoteMirrorURL, "refs/tags/git-client-1.10.0");
ObjectId expectedTag = ObjectId.fromString("1fb23708d6b639c22383c8073d6e75051b2a63aa"); // commit SHA1
assertEquals("Wrong SHA1 for git-client-1.10.0 tag", expectedTag, knownTag);
}
@Bug(25444)
public void test_fetch_delete_cleans() throws Exception {
w.init();
w.touch("file1", "old");
w.git.add("file1");
w.git.commit("commit1");
w.touch("file1", "new");
w.git.checkout().branch("other").ref(Constants.HEAD).deleteBranchIfExist(true).execute();
Status status = new org.eclipse.jgit.api.Git(w.repo()).status().call();
assertTrue("Workspace must be clean", status.isClean());
}
/**
* Test getHeadRev with wildcard matching in the branch name.
* Relies on the branches in the git-client-plugin repository
* include at least branches named:
* master
* tests/getSubmodules
*
* Also relies on a specific return ordering of the values in the
* pattern matching performed by getHeadRev, and relies on not
* having new branches created which match the patterns and will
* occur earlier than the expected value.
*/
public void test_getHeadRev_wildcards() throws Exception {
Map<String, ObjectId> heads = w.git.getHeadRev(localMirror());
ObjectId master = w.git.getHeadRev(localMirror(), "refs/heads/master");
assertEquals("heads is " + heads, heads.get("refs/heads/master"), master);
ObjectId wildOrigin = w.git.getHeadRev(localMirror(), "*/master");
assertEquals("heads is " + heads, heads.get("refs/heads/master"), wildOrigin);
    /**
     * Test getHeadRev with namespaces in the branch name
     * and branch specs containing only the simple branch name.
     *
     * TODO: This does not work yet! Fix behaviour and enable test!
     */
    public void test_getHeadRev_namespaces_withSimpleBranchNames() throws Exception {
        setTimeoutVisibleInCurrentTest(false);
        File tempRemoteDir = temporaryDirectoryAllocator.allocate();
        extract(new ZipFile("src/test/resources/namespaceBranchRepo.zip"), tempRemoteDir);
        Properties commits = parseLsRemote(new File("src/test/resources/namespaceBranchRepo.ls-remote"));
        w = clone(tempRemoteDir.getAbsolutePath());
        final String remote = tempRemoteDir.getAbsolutePath();
        // Intentionally empty until the namespace lookup is fixed (see TODO above);
        // the expected entries are retained below, commented out.
        final String[][] checkBranchSpecs = {};
        //TODO: Fix and enable test
        // {"master", commits.getProperty("refs/heads/master")},
        // {"a_tests/b_namespace1/master", commits.getProperty("refs/heads/a_tests/b_namespace1/master")},
        // {"a_tests/b_namespace2/master", commits.getProperty("refs/heads/a_tests/b_namespace2/master")},
        // {"a_tests/b_namespace3/master", commits.getProperty("refs/heads/a_tests/b_namespace3/master")},
        // {"b_namespace3/master", commits.getProperty("refs/heads/b_namespace3/master")}
        for(String[] branch : checkBranchSpecs) {
            final ObjectId objectId = ObjectId.fromString(branch[1]);
            final String branchName = branch[0];
            check_getHeadRev(remote, branchName, objectId);
            check_getHeadRev(remote, "remotes/origin/" + branchName, objectId);
            check_getHeadRev(remote, "refs/heads/" + branchName, objectId);
        }
    }
/**
* Test getHeadRev with namespaces in the branch name
* and branch specs starting with "refs/heads/".
*/
public void test_getHeadRev_namespaces_withRefsHeads() throws Exception {
File tempRemoteDir = temporaryDirectoryAllocator.allocate();
extract(new ZipFile("src/test/resources/namespaceBranchRepo.zip"), tempRemoteDir);
Properties commits = parseLsRemote(new File("src/test/resources/namespaceBranchRepo.ls-remote"));
w = clone(tempRemoteDir.getAbsolutePath());
final String remote = tempRemoteDir.getAbsolutePath();
final String[][] checkBranchSpecs = {
{"refs/heads/master", commits.getProperty("refs/heads/master")},
{"refs/heads/a_tests/b_namespace1/master", commits.getProperty("refs/heads/a_tests/b_namespace1/master")},
{"refs/heads/a_tests/b_namespace2/master", commits.getProperty("refs/heads/a_tests/b_namespace2/master")},
{"refs/heads/a_tests/b_namespace3/master", commits.getProperty("refs/heads/a_tests/b_namespace3/master")},
{"refs/heads/b_namespace3/master", commits.getProperty("refs/heads/b_namespace3/master")}
};
for(String[] branch : checkBranchSpecs) {
final ObjectId objectId = ObjectId.fromString(branch[1]);
final String branchName = branch[0];
check_getHeadRev(remote, branchName, objectId);
}
}
    /**
     * Test getHeadRev with branch names which SHOULD BE reserved by Git, but ARE NOT.<br/>
     * E.g. it is possible to create the following LOCAL (!) branches:<br/>
     * <ul>
     * <li> origin/master
     * <li> remotes/origin/master
     * <li> refs/heads/master
     * <li> refs/remotes/origin/master
     * </ul>
     *
     * TODO: This does not work yet! Fix behaviour and enable test!
     */
    public void test_getHeadRev_reservedBranchNames() throws Exception {
        /* REMARK: Local branch names in this test are called exactly like follows!
         * e.g. origin/master means the branch is called "origin/master", it does NOT mean master branch in remote "origin".
         * or refs/heads/master means branch called "refs/heads/master" ("refs/heads/refs/heads/master" in the end).
         */
        setTimeoutVisibleInCurrentTest(false);
        File tempRemoteDir = temporaryDirectoryAllocator.allocate();
        extract(new ZipFile("src/test/resources/specialBranchRepo.zip"), tempRemoteDir);
        Properties commits = parseLsRemote(new File("src/test/resources/specialBranchRepo.ls-remote"));
        w = clone(tempRemoteDir.getAbsolutePath());
        /*
         * The first entry in the String[2] is the branch name (as specified in the job config).
         * The second entry is the expected commit.
         */
        // Intentionally empty until the lookup is fixed (see TODO above);
        // the expected entries are retained below, commented out.
        final String[][] checkBranchSpecs = {};
        //TODO: Fix and enable test
        // {"master", commits.getProperty("refs/heads/master")},
        // {"origin/master", commits.getProperty("refs/heads/master")},
        // {"remotes/origin/master", commits.getProperty("refs/heads/master")},
        // {"refs/remotes/origin/master", commits.getProperty("refs/heads/refs/remotes/origin/master")},
        // {"refs/heads/origin/master", commits.getProperty("refs/heads/origin/master")},
        // {"refs/heads/master", commits.getProperty("refs/heads/master")},
        // {"refs/heads/refs/heads/master", commits.getProperty("refs/heads/refs/heads/master")},
        // {"refs/heads/refs/heads/refs/heads/master", commits.getProperty("refs/heads/refs/heads/refs/heads/master")},
        // {"refs/tags/master", commits.getProperty("refs/tags/master^{}")}
        for(String[] branch : checkBranchSpecs) {
            check_getHeadRev(tempRemoteDir.getAbsolutePath(), branch[0], ObjectId.fromString(branch[1]));
        }
    }
/**
* Test getRemoteReferences with listing all references
*/
public void test_getRemoteReferences() throws Exception {
Map<String, ObjectId> references = w.git.getRemoteReferences(remoteMirrorURL, null, false, false);
assertTrue(references.containsKey("refs/heads/master"));
assertTrue(references.containsKey("refs/tags/git-client-1.0.0"));
}
/**
* Test getRemoteReferences with listing references limit to refs/heads or refs/tags
*/
public void test_getRemoteReferences_withLimitReferences() throws Exception {
Map<String, ObjectId> references = w.git.getRemoteReferences(remoteMirrorURL, null, true, false);
assertTrue(references.containsKey("refs/heads/master"));
assertTrue(!references.containsKey("refs/tags/git-client-1.0.0"));
references = w.git.getRemoteReferences(remoteMirrorURL, null, false, true);
assertTrue(!references.containsKey("refs/heads/master"));
assertTrue(references.containsKey("refs/tags/git-client-1.0.0"));
for (String key : references.keySet()) {
assertTrue(!key.endsWith("^{}"));
}
}
/**
* Test getRemoteReferences with matching pattern
*/
public void test_getRemoteReferences_withMatchingPattern() throws Exception {
Map<String, ObjectId> references = w.git.getRemoteReferences(remoteMirrorURL, "refs/heads/master", true, false);
assertTrue(references.containsKey("refs/heads/master"));
assertTrue(!references.containsKey("refs/tags/git-client-1.0.0"));
references = w.git.getRemoteReferences(remoteMirrorURL, "git-client-*", false, true);
assertTrue(!references.containsKey("refs/heads/master"));
for (String key : references.keySet()) {
assertTrue(key.startsWith("refs/tags/git-client"));
}
references = new HashMap<>();
try {
references = w.git.getRemoteReferences(remoteMirrorURL, "notexists-*", false, false);
} catch (GitException ge) {
assertTrue("Wrong exception message: " + ge, ge.getMessage().contains("unexpected ls-remote output"));
}
assertTrue(references.isEmpty());
}
private Properties parseLsRemote(File file) throws IOException
{
Properties properties = new Properties();
Pattern pattern = Pattern.compile("([a-f0-9]{40})\\s*(.*)");
for(Object lineO : FileUtils.readLines(file)) {
String line = ((String)lineO).trim();
Matcher matcher = pattern.matcher(line);
if(matcher.matches()) {
properties.setProperty(matcher.group(2), matcher.group(1));
} else {
System.err.println("ls-remote pattern does not match '" + line + "'");
}
}
return properties;
}
private void extract(ZipFile zipFile, File outputDir) throws IOException
{
Enumeration<? extends ZipEntry> entries = zipFile.entries();
while (entries.hasMoreElements()) {
ZipEntry entry = entries.nextElement();
File entryDestination = new File(outputDir, entry.getName());
entryDestination.getParentFile().mkdirs();
if (entry.isDirectory())
entryDestination.mkdirs();
else {
InputStream in = zipFile.getInputStream(entry);
OutputStream out = new FileOutputStream(entryDestination);
IOUtils.copy(in, out);
IOUtils.closeQuietly(in);
IOUtils.closeQuietly(out);
}
}
}
    /**
     * Asserts that getHeadRev(remote, branchSpec) resolves to the expected
     * ObjectId. On mismatch the failure message lists the branches pointing
     * at each SHA1 to ease diagnosis.
     *
     * @param remote repository URL or path passed to getHeadRev
     * @param branchSpec branch specifier to resolve
     * @param expectedObjectId SHA1 the spec is expected to resolve to
     * @throws Exception on error resolving the head revision
     */
    private void check_getHeadRev(String remote, String branchSpec, ObjectId expectedObjectId) throws Exception
    {
        ObjectId actualObjectId = w.git.getHeadRev(remote, branchSpec);
        assertNotNull(String.format("Expected ObjectId is null expectedObjectId '%s', remote '%s', branchSpec '%s'.",
                expectedObjectId, remote, branchSpec), expectedObjectId);
        assertNotNull(String.format("Actual ObjectId is null. expectedObjectId '%s', remote '%s', branchSpec '%s'.",
                expectedObjectId, remote, branchSpec), actualObjectId);
        assertEquals(String.format("Actual ObjectId differs from expected one for branchSpec '%s', remote '%s':\n" +
                "Actual %s,\nExpected %s\n", branchSpec, remote,
                StringUtils.join(getBranches(actualObjectId), ", "),
                StringUtils.join(getBranches(expectedObjectId), ", ")),
                expectedObjectId, actualObjectId);
    }
private List<Branch> getBranches(ObjectId objectId) throws GitException, InterruptedException
{
List<Branch> matches = new ArrayList<>();
Set<Branch> branches = w.git.getBranches();
for(Branch branch : branches) {
if(branch.getSHA1().equals(objectId)) matches.add(branch);
}
return unmodifiableList(matches);
}
private void check_headRev(String repoURL, ObjectId expectedId) throws InterruptedException, IOException {
final ObjectId originMaster = w.git.getHeadRev(repoURL, "origin/master");
assertEquals("origin/master mismatch", expectedId, originMaster);
final ObjectId simpleMaster = w.git.getHeadRev(repoURL, "master");
assertEquals("simple master mismatch", expectedId, simpleMaster);
final ObjectId wildcardSCMMaster = w.git.getHeadRev(repoURL, "*/master");
assertEquals("wildcard SCM master mismatch", expectedId, wildcardSCMMaster);
/* This assertion may fail if the localMirror has more than
* one branch matching the wildcard expression in the call to
* getHeadRev. The expression is chosen to be unlikely to
* match with typical branch names, while still matching a
* known branch name. Should be fine so long as no one creates
* branches named like master-master or new-master on the
* remote repo */
final ObjectId wildcardEndMaster = w.git.getHeadRev(repoURL, "origin/m*aste?");
assertEquals("wildcard end master mismatch", expectedId, wildcardEndMaster);
}
    /** Head revision lookups against the local mirror clone. */
    public void test_getHeadRev_localMirror() throws Exception {
        check_headRev(localMirror(), getMirrorHead());
    }
public void test_getHeadRev_remote() throws Exception {
String lsRemote = w.cmd("git ls-remote -h " + remoteMirrorURL + " refs/heads/master");
ObjectId lsRemoteId = ObjectId.fromString(lsRemote.substring(0, 40));
check_headRev(remoteMirrorURL, lsRemoteId);
}
public void test_getHeadRev_current_directory() throws Exception {
w = clone(localMirror());
w.git.checkout("master");
final ObjectId master = w.head();
w.git.branch("branch1");
w.git.checkout("branch1");
w.touch("file1", "branch1 contents " + java.util.UUID.randomUUID().toString());
w.git.add("file1");
w.git.commit("commit1-branch1");
final ObjectId branch1 = w.head();
Map<String, ObjectId> heads = w.git.getHeadRev(w.repoPath());
assertEquals(master, heads.get("refs/heads/master"));
assertEquals(branch1, heads.get("refs/heads/branch1"));
check_headRev(w.repoPath(), getMirrorHead());
}
    /**
     * Regression check that getHeadRev returns the SHA1 matching each key,
     * including a branch name containing a dot (which also exercises wildcard
     * matching with "br*.2").
     */
    public void test_getHeadRev_returns_accurate_SHA1_values() throws Exception {
        /* CliGitAPIImpl had a longstanding bug that it inserted the
         * same SHA1 in all the values, rather than inserting the SHA1
         * which matched the key.
         */
        w = clone(localMirror());
        w.git.checkout("master");
        final ObjectId master = w.head();
        w.git.branch("branch1");
        w.git.checkout("branch1");
        w.touch("file1", "content1");
        w.git.add("file1");
        w.git.commit("commit1-branch1");
        final ObjectId branch1 = w.head();
        // Branch name with a dot — the "." must be treated literally, not as a wildcard.
        w.cmd("git branch branch.2 master");
        w.git.checkout("branch.2");
        File f = w.touch("file.2", "content2");
        w.git.add("file.2");
        w.git.commit("commit2-branch.2");
        final ObjectId branchDot2 = w.head();
        assertTrue("file.2 does not exist", f.exists());
        Map<String,ObjectId> heads = w.git.getHeadRev(w.repoPath());
        assertEquals("Wrong master in " + heads, master, heads.get("refs/heads/master"));
        assertEquals("Wrong branch1 in " + heads, branch1, heads.get("refs/heads/branch1"));
        assertEquals("Wrong branch.2 in " + heads, branchDot2, heads.get("refs/heads/branch.2"));
        assertEquals("wildcard branch.2 mismatch", branchDot2, w.git.getHeadRev(w.repoPath(), "br*.2"));
        check_headRev(w.repoPath(), getMirrorHead());
    }
private void check_changelog_sha1(final String sha1, final String branchName) throws InterruptedException
{
ChangelogCommand changelogCommand = w.git.changelog();
changelogCommand.max(1);
StringWriter writer = new StringWriter();
changelogCommand.to(writer);
changelogCommand.execute();
String splitLog[] = writer.toString().split("[\\n\\r]", 3); // Extract first line of changelog
assertEquals("Wrong changelog line 1 on branch " + branchName, "commit " + sha1, splitLog[0]);
}
public void test_changelog() throws Exception {
w = clone(localMirror());
String sha1Prev = w.git.revParse("HEAD").name();
w.touch("changelog-file", "changelog-file-content-" + sha1Prev);
w.git.add("changelog-file");
w.git.commit("changelog-commit-message");
String sha1 = w.git.revParse("HEAD").name();
check_changelog_sha1(sha1, "master");
}
    /**
     * showRevision across a known merge range: the merge commit appears once
     * per parent, and the raw diff records list the files touched (including
     * some irrelevant ones brought in by the merge commit).
     */
    public void test_show_revision_for_merge() throws Exception {
        w = clone(localMirror());
        ObjectId from = ObjectId.fromString("45e76942914664ee19f31d90e6f2edbfe0d13a46");
        ObjectId to = ObjectId.fromString("b53374617e85537ec46f86911b5efe3e4e2fa54b");
        List<String> revisionDetails = w.git.showRevision(from, to);
        Collection<String> commits = Collections2.filter(revisionDetails, new Predicate<String>() {
            public boolean apply(String detail) {
                return detail.startsWith("commit ");
            }
        });
        assertEquals(3, commits.size());
        assertTrue(commits.contains("commit 4f2964e476776cf59be3e033310f9177bedbf6a8"));
        // Merge commit is duplicated as have to capture changes that may have been made as part of merge
        assertTrue(commits.contains("commit b53374617e85537ec46f86911b5efe3e4e2fa54b (from 4f2964e476776cf59be3e033310f9177bedbf6a8)"));
        assertTrue(commits.contains("commit b53374617e85537ec46f86911b5efe3e4e2fa54b (from 45e76942914664ee19f31d90e6f2edbfe0d13a46)"));
        // Raw diff lines start with ":"; map each to the path after the tab.
        Collection<String> diffs = Collections2.filter(revisionDetails, new Predicate<String>() {
            public boolean apply(String detail) {
                return detail.startsWith(":");
            }
        });
        Collection<String> paths = Collections2.transform(diffs, new Function<String, String>() {
            public String apply(String diff) {
                return diff.substring(diff.indexOf('\t')+1).trim(); // Windows diff output ^M removed by trim()
            }
        });
        assertTrue(paths.contains(".gitignore"));
        // Some irrelevant changes will be listed due to merge commit
        assertTrue(paths.contains("pom.xml"));
        assertTrue(paths.contains("src/main/java/hudson/plugins/git/GitAPI.java"));
        assertTrue(paths.contains("src/main/java/org/jenkinsci/plugins/gitclient/CliGitAPIImpl.java"));
        assertTrue(paths.contains("src/main/java/org/jenkinsci/plugins/gitclient/Git.java"));
        assertTrue(paths.contains("src/main/java/org/jenkinsci/plugins/gitclient/GitClient.java"));
        assertTrue(paths.contains("src/main/java/org/jenkinsci/plugins/gitclient/JGitAPIImpl.java"));
        assertTrue(paths.contains("src/test/java/org/jenkinsci/plugins/gitclient/GitAPITestCase.java"));
        assertTrue(paths.contains("src/test/java/org/jenkinsci/plugins/gitclient/JGitAPIImplTest.java"));
        // Previous implementation included other commits, and listed irrelevant changes
        assertFalse(paths.contains("README.md"));
    }
public void test_show_revision_for_merge_exclude_files() throws Exception {
w = clone(localMirror());
ObjectId from = ObjectId.fromString("45e76942914664ee19f31d90e6f2edbfe0d13a46");
ObjectId to = ObjectId.fromString("b53374617e85537ec46f86911b5efe3e4e2fa54b");
Boolean useRawOutput = false;
List<String> revisionDetails = w.git.showRevision(from, to, useRawOutput);
Collection<String> commits = Collections2.filter(revisionDetails, new Predicate<String>() {
public boolean apply(String detail) {
return detail.startsWith("commit ");
}
});
assertEquals(2, commits.size());
assertTrue(commits.contains("commit 4f2964e476776cf59be3e033310f9177bedbf6a8"));
assertTrue(commits.contains("commit b53374617e85537ec46f86911b5efe3e4e2fa54b"));
Collection<String> diffs = Collections2.filter(revisionDetails, new Predicate<String>() {
public boolean apply(String detail) {
return detail.startsWith(":");
}
});
assertTrue(diffs.isEmpty());
}
private void check_bounded_changelog_sha1(final String sha1Begin, final String sha1End, final String branchName) throws InterruptedException
{
StringWriter writer = new StringWriter();
w.git.changelog(sha1Begin, sha1End, writer);
String splitLog[] = writer.toString().split("[\\n\\r]", 3); // Extract first line of changelog
assertEquals("Wrong bounded changelog line 1 on branch " + branchName, "commit " + sha1End, splitLog[0]);
assertTrue("Begin sha1 " + sha1Begin + " not in changelog: " + writer.toString(), writer.toString().contains(sha1Begin));
}
public void test_changelog_bounded() throws Exception {
w = clone(localMirror());
String sha1Prev = w.git.revParse("HEAD").name();
w.touch("changelog-file", "changelog-file-content-" + sha1Prev);
w.git.add("changelog-file");
w.git.commit("changelog-commit-message");
String sha1 = w.git.revParse("HEAD").name();
check_bounded_changelog_sha1(sha1Prev, sha1, "master");
}
public void test_show_revision_for_single_commit() throws Exception {
w = clone(localMirror());
ObjectId to = ObjectId.fromString("51de9eda47ca8dcf03b2af58dfff7355585f0d0c");
List<String> revisionDetails = w.git.showRevision(null, to);
Collection<String> commits = Collections2.filter(revisionDetails, new Predicate<String>() {
public boolean apply(String detail) {
return detail.startsWith("commit ");
}
});
assertEquals(1, commits.size());
assertTrue(commits.contains("commit 51de9eda47ca8dcf03b2af58dfff7355585f0d0c"));
}
@Bug(22343)
public void test_show_revision_for_first_commit() throws Exception {
w.init();
w.touch("a");
w.git.add("a");
w.git.commit("first");
ObjectId first = w.head();
List<String> revisionDetails = w.git.showRevision(first);
Collection<String> commits = Collections2.filter(revisionDetails, new Predicate<String>() {
public boolean apply(String detail) {
return detail.startsWith("commit ");
}
});
assertTrue("Commits '" + commits + "' missing " + first.getName(), commits.contains("commit " + first.getName()));
assertEquals("Commits '" + commits + "' wrong size", 1, commits.size());
}
    /**
     * Compares GitClient.describe("HEAD") against command-line "git describe"
     * output, before and after adding a second annotated tag.
     */
    public void test_describe() throws Exception {
        w.init();
        w.commitEmpty("first");
        w.tag("-m test t1");
        w.touch("a");
        w.git.add("a");
        w.git.commit("second");
        // sharesPrefix presumably tolerates suffix differences between the two
        // outputs — confirm against the matcher helper's definition.
        assertThat(w.cmd("git describe").trim(), sharesPrefix(w.git.describe("HEAD")));
        w.tag("-m test2 t2");
        assertThat(w.cmd("git describe").trim(), sharesPrefix(w.git.describe("HEAD")));
    }
    /**
     * The CliGit and JGit implementations must produce identical
     * getAllLogEntries output for origin/master of a fresh clone.
     */
    public void test_getAllLogEntries() throws Exception {
        /* Use original clone source instead of localMirror. The
         * namespace test modifies the localMirror content by creating
         * three independent branches very rapidly. Those three
         * branches may be created within the same second, making it
         * more difficult for git to provide a time ordered log. The
         * reference to localMirror will help performance of the C git
         * implementation, since that will avoid copying content which
         * is already local. */
        String gitUrl = "https://github.com/jenkinsci/git-client-plugin.git";
        if (SystemUtils.IS_OS_WINDOWS) {
            // Does not leak an open file
            w = clone(gitUrl);
        } else {
            // Leaks an open file - unclear why
            w.git.clone_().url(gitUrl).repositoryName("origin").reference(localMirror()).execute();
        }
        assertEquals(
                w.cgit().getAllLogEntries("origin/master"),
                w.igit().getAllLogEntries("origin/master"));
    }
    /**
     * getBranchesContaining over a small hand-built topology: branches X, Y
     * and Z all start from commit c1; Z gains commits T, c2 and Z; X gains
     * one commit; Y gains c3 and then merges c2. After deleting master, each
     * commit must be reported as contained by exactly the expected branches.
     */
    public void test_branchContaining() throws Exception {
        w.init();
        w.commitEmpty("c1");
        ObjectId c1 = w.head();
        w.cmd("git branch Z "+c1.name());
        w.git.checkout("Z");
        w.commitEmpty("T");
        ObjectId t = w.head();
        w.commitEmpty("c2");
        ObjectId c2 = w.head();
        w.commitEmpty("Z");
        w.cmd("git branch X "+c1.name());
        w.git.checkout("X");
        w.commitEmpty("X");
        w.cmd("git branch Y "+c1.name());
        w.git.checkout("Y");
        w.commitEmpty("c3");
        ObjectId c3 = w.head();
        // Merging c2 into Y makes t and c2 reachable from Y as well as Z.
        w.cmd("git merge --no-ff -m Y "+c2.name());
        w.git.deleteBranch("master");
        assertEquals(3,w.git.getBranches().size()); // X, Y, and Z
        assertEquals("X,Y,Z",formatBranches(w.igit().getBranchesContaining(c1.name())));
        assertEquals("Y,Z",formatBranches(w.igit().getBranchesContaining(t.name())));
        assertEquals("Y",formatBranches(w.igit().getBranchesContaining(c3.name())));
        assertEquals("X",formatBranches(w.igit().getBranchesContaining("X")));
    }
/**
* UT for {@link GitClient#getBranchesContaining(String, boolean)}. The main
* testing case is retrieving remote branches.
* @throws Exception on exceptions occur
*/
public void test_branchContainingRemote() throws Exception {
final WorkingArea r = new WorkingArea();
r.init();
r.commitEmpty("c1");
ObjectId c1 = r.head();
w.git.clone_().url("file://" + r.repoPath()).execute();
final URIish remote = new URIish(Constants.DEFAULT_REMOTE_NAME);
final List<RefSpec> refspecs = Collections.singletonList(new RefSpec(
@NotImplementedInJGit /* JGit lock file management ignored for now */
@NotImplementedInJGit /* JGit lock file management ignored for now */
/**
     * Test case for auto local branch creation behaviour.
* This is essentially a stripped down version of {@link #test_branchContainingRemote()}
* @throws Exception on exceptions occur
*/
public void test_checkout_remote_autocreates_local() throws Exception {
final WorkingArea r = new WorkingArea();
r.init();
r.commitEmpty("c1");
w.git.clone_().url("file://" + r.repoPath()).execute();
final URIish remote = new URIish(Constants.DEFAULT_REMOTE_NAME);
final List<RefSpec> refspecs = Collections.singletonList(new RefSpec(
/* The most critical use cases of isBareRepository respond the
* same for both the JGit implementation and the CliGit
* implementation. Those are asserted first in this section of
* assertions.
*/
    /** isBareRepository(repoPath/.git) on a working repository must be false. */
    @Deprecated
    public void test_isBareRepository_working_repoPath_dot_git() throws IOException, InterruptedException {
        w.init();
        w.commitEmpty("Not-a-bare-repository-false-repoPath-dot-git");
        assertFalse("repoPath/.git is a bare repository", w.igit().isBareRepository(w.repoPath() + File.separator + ".git"));
    }
/** Passing null to isBareRepository on a working repository must throw, not answer. */
@Deprecated
public void test_isBareRepository_working_null() throws IOException, InterruptedException {
w.init();
w.commitEmpty("Not-a-bare-repository-working-null");
try {
assertFalse("null is a bare repository", w.igit().isBareRepository(null));
fail("Did not throw expected exception");
} catch (GitException ge) {
assertTrue("Wrong exception message: " + ge, ge.getMessage().contains("Not a git repository"));
}
}
/** Passing null to isBareRepository on a bare repository must also throw. */
@Deprecated
public void test_isBareRepository_bare_null() throws IOException, InterruptedException {
w.init(true);
try {
assertTrue("null is not a bare repository", w.igit().isBareRepository(null));
fail("Did not throw expected exception");
} catch (GitException ge) {
assertTrue("Wrong exception message: " + ge, ge.getMessage().contains("Not a git repository"));
}
}
/** A bare repository must be reported as bare both via its path and via an absolute ".". */
@Deprecated
public void test_isBareRepository_bare_repoPath() throws IOException, InterruptedException {
w.init(true);
assertTrue("repoPath is not a bare repository", w.igit().isBareRepository(w.repoPath()));
assertTrue("abs(.) is not a bare repository", w.igit().isBareRepository(w.file(".").getAbsolutePath()));
}
/** The no-argument overload must report a working repository as not bare. */
@Deprecated
public void test_isBareRepository_working_no_arg() throws IOException, InterruptedException {
w.init();
w.commitEmpty("Not-a-bare-repository-no-arg");
assertFalse("no arg is a bare repository", w.igit().isBareRepository());
}
/** The no-argument overload must report a bare repository as bare. */
@Deprecated
public void test_isBareRepository_bare_no_arg() throws IOException, InterruptedException {
w.init(true);
assertTrue("no arg is not a bare repository", w.igit().isBareRepository());
}
/** An empty-string argument must behave like the no-argument case for a working repository. */
@Deprecated
public void test_isBareRepository_working_empty_string() throws IOException, InterruptedException {
w.init();
w.commitEmpty("Not-a-bare-repository-empty-string");
assertFalse("empty string is a bare repository", w.igit().isBareRepository(""));
}
/** An empty-string argument must behave like the no-argument case for a bare repository. */
@Deprecated
public void test_isBareRepository_bare_empty_string() throws IOException, InterruptedException {
w.init(true);
assertTrue("empty string is not a bare repository", w.igit().isBareRepository(""));
}
/* The less critical assertions do not respond the same for the
* JGit and the CliGit implementation. They are implemented here
* so that the current behavior is described in tests and can be
* used to assure that changes to current behavior are
* detected.
*/
// Fails on both JGit and CliGit, though with different failure modes
// @Deprecated
// public void test_isBareRepository_working_repoPath() throws IOException, InterruptedException {
// w.init();
// w.commitEmpty("Not-a-bare-repository-working-repoPath-dot-git");
// assertFalse("repoPath is a bare repository", w.igit().isBareRepository(w.repoPath()));
// assertFalse("abs(.) is a bare repository", w.igit().isBareRepository(w.file(".").getAbsolutePath()));
/**
 * "." in a working repository: JGit answers false without throwing, while CliGit
 * throws GitException. This test documents (rather than mandates) that divergence.
 */
@Deprecated
public void test_isBareRepository_working_dot() throws IOException, InterruptedException {
w.init();
w.commitEmpty("Not-a-bare-repository-working-dot");
try {
assertFalse(". is a bare repository", w.igit().isBareRepository("."));
if (w.git instanceof CliGitAPIImpl) {
/* No exception from JGit */
fail("Did not throw expected exception");
}
} catch (GitException ge) {
assertTrue("Wrong exception message: " + ge, ge.getMessage().contains("Not a git repository"));
}
}
/** "." inside a bare repository must be reported as bare by both implementations. */
@Deprecated
public void test_isBareRepository_bare_dot() throws IOException, InterruptedException {
w.init(true);
assertTrue(". is not a bare repository", w.igit().isBareRepository("."));
}
/** ".git" inside a working repository must not be reported as bare. */
@Deprecated
public void test_isBareRepository_working_dot_git() throws IOException, InterruptedException {
w.init();
w.commitEmpty("Not-a-bare-repository-dot-git");
assertFalse(".git is a bare repository", w.igit().isBareRepository(".git"));
}
/**
 * ".git" inside a bare repository (which has no .git directory): JGit answers true
 * from workspace knowledge; CliGit throws. Documents the implementation divergence.
 */
@Deprecated
public void test_isBareRepository_bare_dot_git() throws IOException, InterruptedException {
w.init(true);
/* Bare repository does not have a .git directory. This is
 * another no-such-location test but is included here for
 * consistency.
 */
try {
/* JGit knows that w.igit() has a workspace, and asks the workspace
 * if it is bare. That seems more correct than relying on testing
 * a specific file that the repository is bare. JGit behaves better
 * than CliGit in this case.
 */
assertTrue("non-existent .git is in a bare repository", w.igit().isBareRepository(".git"));
/* JGit will not throw an exception - it knows the repo is bare */
/* CliGit throws an exception so should not reach the next assertion */
assertFalse("CliGitAPIImpl did not throw expected exception", w.igit() instanceof CliGitAPIImpl);
} catch (GitException ge) {
/* Only enters this path for CliGit */
assertTrue("Wrong exception message: " + ge, ge.getMessage().contains("Not a git repository"));
}
}
/** Non-existent location in a working repository: JGit answers false, CliGit throws. */
@Deprecated
public void test_isBareRepository_working_no_such_location() throws IOException, InterruptedException {
w.init();
w.commitEmpty("Not-a-bare-repository-working-no-such-location");
try {
assertFalse("non-existent location is in a bare repository", w.igit().isBareRepository("no-such-location"));
/* JGit will not throw an exception - it knows the repo is not bare */
/* CliGit throws an exception so should not reach the next assertion */
assertFalse("CliGitAPIImpl did not throw expected exception", w.igit() instanceof CliGitAPIImpl);
} catch (GitException ge) {
/* Only enters this path for CliGit */
assertTrue("Wrong exception message: " + ge, ge.getMessage().contains("Not a git repository"));
}
}
/** Non-existent location in a bare repository: JGit answers true, CliGit throws. */
@Deprecated
public void test_isBareRepository_bare_no_such_location() throws IOException, InterruptedException {
w.init(true);
try {
assertTrue("non-existent location is in a bare repository", w.igit().isBareRepository("no-such-location"));
/* JGit will not throw an exception - it knows the repo is not bare */
/* CliGit throws an exception so should not reach the next assertion */
assertFalse("CliGitAPIImpl did not throw expected exception", w.igit() instanceof CliGitAPIImpl);
} catch (GitException ge) {
/* Only enters this path for CliGit */
assertTrue("Wrong exception message: " + ge, ge.getMessage().contains("Not a git repository"));
}
}
/** Checking out while .git/index.lock is held must fail with GitLockFailedException. */
public void test_checkoutBranchFailure() throws Exception {
    w = clone(localMirror());
    final File indexLock = new File(w.repo, ".git/index.lock");
    try {
        FileUtils.touch(indexLock);
        w.git.checkoutBranch("somebranch", "master");
        fail();
    } catch (GitLockFailedException expected) {
        // A held index lock is exactly the failure this test provokes.
    } finally {
        indexLock.delete();
    }
}
/**
 * Exercises soft reset (keeps index changes staged-state untouched files) and hard
 * reset (restores committed files, drops staged additions, keeps untracked files),
 * then checks getDefaultRemote resolution for valid and invalid remote names.
 */
@Deprecated
public void test_reset() throws IOException, InterruptedException {
    w.init();
    /* No valid HEAD yet - nothing to reset, should give no error */
    w.igit().reset(false);
    w.igit().reset(true);
    w.touch("committed-file", "committed-file content " + java.util.UUID.randomUUID().toString());
    w.git.add("committed-file");
    w.git.commit("commit1");
    assertTrue("committed-file missing at commit1", w.file("committed-file").exists());
    assertFalse("added-file exists at commit1", w.file("added-file").exists());
    // Fixed copy/paste defect: this assertion previously re-checked "added-file"
    // although its message referred to "touched-file".
    assertFalse("touched-file exists at commit1", w.file("touched-file").exists());
    w.cmd("git rm committed-file");
    w.touch("added-file", "File 2 content " + java.util.UUID.randomUUID().toString());
    w.git.add("added-file");
    w.touch("touched-file", "File 3 content " + java.util.UUID.randomUUID().toString());
    assertFalse("committed-file exists", w.file("committed-file").exists());
    assertTrue("added-file missing", w.file("added-file").exists());
    assertTrue("touched-file missing", w.file("touched-file").exists());
    // Soft reset must not touch the working tree.
    w.igit().reset(false);
    assertFalse("committed-file exists", w.file("committed-file").exists());
    assertTrue("added-file missing", w.file("added-file").exists());
    assertTrue("touched-file missing", w.file("touched-file").exists());
    w.git.add("added-file"); /* Add the file which soft reset "unadded" */
    // Hard reset restores committed content, discards the staged add, keeps untracked files.
    w.igit().reset(true);
    assertTrue("committed-file missing", w.file("committed-file").exists());
    assertFalse("added-file exists at hard reset", w.file("added-file").exists());
    assertTrue("touched-file missing", w.file("touched-file").exists());
    final String remoteUrl = "git@github.com:MarkEWaite/git-client-plugin.git";
    w.git.setRemoteUrl("origin", remoteUrl);
    w.git.setRemoteUrl("ndeloof", "git@github.com:ndeloof/git-client-plugin.git");
    assertEquals("Wrong origin default remote", "origin", w.igit().getDefaultRemote("origin"));
    assertEquals("Wrong ndeloof default remote", "ndeloof", w.igit().getDefaultRemote("ndeloof"));
    /* CliGitAPIImpl and JGitAPIImpl return different ordered lists for default remote if invalid */
    assertEquals("Wrong invalid default remote", w.git instanceof CliGitAPIImpl ? "ndeloof" : "origin",
        w.igit().getDefaultRemote("invalid"));
}
// Windows MAX_PATH-style limit used to predict when msysgit < longpaths support fails.
private static final int MAX_PATH = 256;
/**
 * Creates dirName/fileName with random content and commits it.
 * When the absolute path exceeds MAX_PATH on Windows CliGit >= 1.9 without
 * core.longpaths enabled, the add/commit is expected to throw; otherwise it must succeed.
 * The file itself is always created on disk regardless of commit outcome.
 */
private void commitFile(String dirName, String fileName, boolean longpathsEnabled) throws Exception {
assertTrue("Didn't mkdir " + dirName, w.file(dirName).mkdir());
String fullName = dirName + File.separator + fileName;
w.touch(fullName, fullName + " content " + UUID.randomUUID().toString());
// Failure is only expected on Windows CliGit with long paths disabled and a too-long path.
boolean shouldThrow = !longpathsEnabled &&
SystemUtils.IS_OS_WINDOWS &&
w.git instanceof CliGitAPIImpl &&
w.cgit().isAtLeastVersion(1, 9, 0, 0) &&
(new File(fullName)).getAbsolutePath().length() > MAX_PATH;
try {
w.git.add(fullName);
w.git.commit("commit-" + fileName);
assertFalse("unexpected success " + fullName, shouldThrow);
} catch (GitException ge) {
assertEquals("Wrong message", "Cannot add " + fullName, ge.getMessage());
}
assertTrue("file " + fullName + " missing at commit", w.file(fullName).exists());
}
/** Convenience overload: commit with core.longpaths assumed disabled. */
private void commitFile(String dirName, String fileName) throws Exception {
commitFile(dirName, fileName, false);
}
/**
 * msysgit prior to 1.9 forbids file names longer than MAXPATH.
 * msysgit 1.9 and later allows longer paths if core.longpaths is
 * set to true.
 *
 * JGit does not have that limitation.
 *
 * Commits files at five nesting depths ("file1a" .. "file5e"), each level
 * adding one 64-character directory segment, so path length grows past MAX_PATH.
 */
public void check_longpaths(boolean longpathsEnabled) throws Exception {
    String shortName = "0123456789abcdef" + "ghijklmnopqrstuv";
    String longName = shortName + shortName + shortName + shortName;
    String dirName = null;
    for (int depth = 1; depth <= 5; depth++) {
        dirName = (dirName == null) ? longName : dirName + File.separator + longName;
        // Names follow the original fixed sequence: file1a, file2b, ... file5e.
        String fileName = "file" + depth + (char) ('a' + depth - 1);
        commitFile(dirName, fileName, longpathsEnabled);
    }
}
/**
 * Reads a git configuration value by running "git config --get" in workingDir.
 * Returns the trimmed output, which is empty when the key is unset.
 * NOTE(review): a non-zero exit with empty output is deliberately tolerated —
 * presumably because "git config --get" exits non-zero for an unset key; only a
 * failure that also produced output is treated as a test failure.
 */
private String getConfigValue(File workingDir, String name) throws IOException, InterruptedException {
String[] args = {"git", "config", "--get", name};
ByteArrayOutputStream out = new ByteArrayOutputStream();
int st = new Launcher.LocalLauncher(listener).launch().pwd(workingDir).cmds(args).stdout(out).join();
String result = out.toString();
if (st != 0 && result != null && !result.isEmpty()) {
fail("git config --get " + name + " failed with result: " + result);
}
return out.toString().trim();
}
/** Reads a git configuration value from the current user's home directory scope. */
private String getHomeConfigValue(String name) throws IOException, InterruptedException {
return getConfigValue(new File(System.getProperty("user.home")), name);
}
/** Asserts the user-level core.longpaths setting matches the expectation (unset reads as false). */
private void assert_longpaths(boolean expectedLongPathSetting) throws IOException, InterruptedException {
    final String rawValue = getHomeConfigValue("core.longpaths");
    assertEquals("Wrong value: '" + rawValue + "'", expectedLongPathSetting, Boolean.parseBoolean(rawValue));
}
/** Asserts the repository-level core.longpaths setting matches the expectation (unset reads as false). */
private void assert_longpaths(WorkingArea workingArea, boolean expectedLongPathSetting) throws IOException, InterruptedException {
    final String rawValue = getConfigValue(workingArea.repo, "core.longpaths");
    assertEquals("Wrong value: '" + rawValue + "'", expectedLongPathSetting, Boolean.parseBoolean(rawValue));
}
/** With no explicit setting, core.longpaths must be off and long-path commits behave accordingly. */
public void test_longpaths_default() throws Exception {
assert_longpaths(false);
w.init();
assert_longpaths(w, false);
check_longpaths(false);
assert_longpaths(w, false);
}
/** With core.longpaths explicitly enabled, long-path commits must succeed. */
@NotImplementedInJGit
/* Not implemented in JGit because it is not needed there */
public void test_longpaths_enabled() throws Exception {
assert_longpaths(false);
w.init();
assert_longpaths(w, false);
w.cmd("git config core.longpaths true");
assert_longpaths(w, true);
check_longpaths(true);
assert_longpaths(w, true);
}
/** With core.longpaths explicitly disabled, long-path commits must fail as on the default. */
@NotImplementedInJGit
/* Not implemented in JGit because it is not needed there */
public void test_longpaths_disabled() throws Exception {
assert_longpaths(false);
w.init();
assert_longpaths(w, false);
w.cmd("git config core.longpaths false");
assert_longpaths(w, false);
check_longpaths(false);
assert_longpaths(w, false);
}
/** On Windows, CliGit must be able to locate ssh.exe; no-op on other platforms. */
@NotImplementedInJGit
/* Not implemented in JGit because it is not needed there */
public void test_git_ssh_executable_found_on_windows() throws Exception {
setTimeoutVisibleInCurrentTest(false);
if (!SystemUtils.IS_OS_WINDOWS) {
return;
}
assertTrue("ssh.exe not found", w.cgit().getSSHExecutable().exists());
}
/**
 * Returns the prefix for the remote branches while querying them.
 * Implementations supply the implementation-specific prefix.
 * @return remote branch prefix, for example, "remotes/"
 */
protected abstract String getRemoteBranchPrefix();
/**
 * Test parsing of changelog with unicode characters in commit messages.
 * Clones a fixture repository and verifies each non-ASCII commit message
 * fragment survives the changelog round trip intact.
 */
public void test_unicodeCharsInChangelog() throws Exception {
    File tempRemoteDir = temporaryDirectoryAllocator.allocate();
    extract(new ZipFile("src/test/resources/unicodeCharsInChangelogRepo.zip"), tempRemoteDir);
    File pathToTempRepo = new File(tempRemoteDir, "unicodeCharsInChangelogRepo");
    w = clone(pathToTempRepo.getAbsolutePath());
    // Collect the changelog text and check the unicode fragments are all preserved.
    StringWriter changelogWriter = new StringWriter();
    w.git.changelog("v0", "vLast", changelogWriter);
    String changelog = changelogWriter.toString();
    String[] expectedFragments = {
        "hello in English: hello",
        "hello in Russian: \u043F\u0440\u0438\u0432\u0435\u0442 (priv\u00E9t)",
        "hello in Chinese: \u4F60\u597D (n\u01D0 h\u01CEo)",
        "hello in French: \u00C7a va ?",
        "goodbye in German: Tsch\u00FCss"
    };
    for (String fragment : expectedFragments) {
        assertTrue(changelog.contains(fragment));
    }
}
}
|
package org.synyx.urlaubsverwaltung;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.DynamicPropertyRegistry;
import org.springframework.test.context.DynamicPropertySource;
import org.testcontainers.containers.MariaDBContainer;
import static org.testcontainers.containers.MariaDBContainer.NAME;
@DirtiesContext
public abstract class TestContainersBase {
// Shared container instance: static so all subclasses reuse one MariaDB 10.5 container.
static MariaDBContainer<?> mariaDB = new MariaDBContainer<>(NAME + ":10.5");
/**
 * Starts the container (idempotent across test classes because the field is static)
 * and wires its JDBC coordinates into the Spring datasource properties.
 */
@DynamicPropertySource
static void mariaDBProperties(DynamicPropertyRegistry registry) {
mariaDB.start();
registry.add("spring.datasource.url", mariaDB::getJdbcUrl);
registry.add("spring.datasource.username", mariaDB::getUsername);
registry.add("spring.datasource.password", mariaDB::getPassword);
}
}
|
package com.thebluealliance.androidclient.activities;
import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.animation.AnimatorSet;
import android.animation.ArgbEvaluator;
import android.animation.ValueAnimator;
import android.os.Bundle;
import android.os.Handler;
import android.support.v4.app.Fragment;
import android.support.v4.view.ViewCompat;
import android.support.v7.widget.Toolbar;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewAnimationUtils;
import android.view.animation.AccelerateDecelerateInterpolator;
import android.view.animation.AccelerateInterpolator;
import android.view.animation.DecelerateInterpolator;
import android.widget.FrameLayout;
import android.widget.RelativeLayout;
import android.widget.Toast;
import com.melnykov.fab.FloatingActionButton;
import com.thebluealliance.androidclient.Constants;
import com.thebluealliance.androidclient.R;
import com.thebluealliance.androidclient.Utilities;
import com.thebluealliance.androidclient.accounts.AccountHelper;
import com.thebluealliance.androidclient.fragments.mytba.NotificationSettingsFragment;
import com.thebluealliance.androidclient.fragments.tasks.UpdateUserModelSettingsTaskFragment;
import com.thebluealliance.androidclient.helpers.ModelHelper;
import com.thebluealliance.androidclient.interfaces.LoadModelSettingsCallback;
import com.thebluealliance.androidclient.interfaces.ModelSettingsCallbacks;
public abstract class FABNotificationSettingsActivity extends LegacyRefreshableHostActivity implements View.OnClickListener, ModelSettingsCallbacks, LoadModelSettingsCallback {
private RelativeLayout mNotificationSettings;
private FloatingActionButton mOpenNotificationSettingsButton;
private View mOpenNotificationSettingsButtonContainer;
private FloatingActionButton mCloseNotificationSettingsButton;
private View mCloseNotificationSettingsButtonContainer;
private View mForegroundDim;
private boolean mIsMyTBAEnabled;
private Toolbar mNotificationSettingsToolbar;
private Handler mFabHandler = new Handler();
private NotificationSettingsFragment mSettingsFragment;
private UpdateUserModelSettingsTaskFragment mSaveSettingsTaskFragment;
private boolean mIsSettingsPanelOpen = false;
private boolean mSaveInProgress = false;
private boolean mFabVisible = true;
private ValueAnimator mRunningFabAnimation;
private static final String SETTINGS_PANEL_OPEN = "settings_panel_open";
private static final String SAVE_SETTINGS_TASK_FRAGMENT_TAG = "task_fragment_tag";
// In milliseconds
private static final int ANIMATION_DURATION = 500;
private static final int FAB_ANIMATE_DURATION = 250;
private static final float UNDIMMED_ALPHA = 0.0f;
private static final float DIMMED_ALPHA = 0.7f;
private Bundle mSavedPreferenceState;
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    super.setContentView(R.layout.activity_fab_notification_settings);
    mIsMyTBAEnabled = AccountHelper.isMyTBAEnabled(this);
    mNotificationSettings = (RelativeLayout) findViewById(R.id.notification_settings);
    mOpenNotificationSettingsButton = (FloatingActionButton) findViewById(R.id.open_notification_settings_button);
    mOpenNotificationSettingsButton.setOnClickListener(this);
    mOpenNotificationSettingsButtonContainer = findViewById(R.id.open_notification_settings_button_container);
    mCloseNotificationSettingsButton = (FloatingActionButton) findViewById(R.id.close_notification_settings_button);
    mCloseNotificationSettingsButton.setOnClickListener(this);
    mCloseNotificationSettingsButtonContainer = findViewById(R.id.close_notification_settings_button_container);
    // Hide the notification settings button if myTBA isn't enabled.
    // Consistency fix: reuse the value cached above instead of querying AccountHelper a second time.
    if (!mIsMyTBAEnabled) {
        mNotificationSettings.setVisibility(View.INVISIBLE);
    }
    mNotificationSettingsToolbar = (Toolbar) findViewById(R.id.notification_settings_toolbar);
    mNotificationSettingsToolbar.setNavigationIcon(R.drawable.ic_close_black_24dp);
    mNotificationSettingsToolbar.setTitle("Team Settings");
    mNotificationSettingsToolbar.setNavigationOnClickListener(v -> onNotificationSettingsCloseButtonClick());
    mNotificationSettingsToolbar.setNavigationContentDescription(R.string.close);
    ViewCompat.setElevation(mNotificationSettingsToolbar, getResources().getDimension(R.dimen.toolbar_elevation));
    mForegroundDim = findViewById(R.id.activity_foreground_dim);
    // Setup the settings menu
    Log.d(Constants.LOG_TAG, "Model: " + modelKey);
    if (savedInstanceState != null) {
        // Restore panel open/closed state and the matching button visibility after rotation.
        mIsSettingsPanelOpen = savedInstanceState.getBoolean(SETTINGS_PANEL_OPEN);
        if (mIsSettingsPanelOpen) {
            mOpenNotificationSettingsButtonContainer.setVisibility(View.INVISIBLE);
            mCloseNotificationSettingsButtonContainer.setVisibility(View.VISIBLE);
            mNotificationSettings.setVisibility(View.VISIBLE);
            if (Utilities.hasLApis()) {
                getWindow().setStatusBarColor(getResources().getColor(R.color.accent_dark));
            }
        } else {
            mOpenNotificationSettingsButtonContainer.setVisibility(View.VISIBLE);
            mCloseNotificationSettingsButtonContainer.setVisibility(View.INVISIBLE);
            mNotificationSettings.setVisibility(View.INVISIBLE);
        }
        mSavedPreferenceState = savedInstanceState.getBundle(NotificationSettingsFragment.SAVED_STATE_BUNDLE);
    }
    // Reattach any in-flight save task that survived a configuration change.
    mSaveSettingsTaskFragment = (UpdateUserModelSettingsTaskFragment) getSupportFragmentManager().findFragmentByTag(SAVE_SETTINGS_TASK_FRAGMENT_TAG);
}
@Override
protected void onSaveInstanceState(Bundle outState) {
    super.onSaveInstanceState(outState);
    outState.putBoolean(SETTINGS_PANEL_OPEN, mIsSettingsPanelOpen);
    // Persist the preference state only once it has loaded successfully, and only
    // while the panel is open — a rotation with the panel closed discards edits.
    boolean shouldPersistPreferences = mSettingsFragment != null
            && mSettingsFragment.arePreferencesLoaded()
            && mIsSettingsPanelOpen;
    if (shouldPersistPreferences) {
        Bundle preferenceState = new Bundle();
        mSettingsFragment.writeStateToBundle(preferenceState);
        outState.putBundle(NotificationSettingsFragment.SAVED_STATE_BUNDLE, preferenceState);
    }
}
/**
 * Installs the settings fragment for the given model once its key is known,
 * and disables the submit button until content loads (prevents wiping settings, see #317).
 * NOTE(review): the fragment is added via getFragmentManager() while the save task
 * uses getSupportFragmentManager() elsewhere — presumably intentional; verify.
 */
@Override
protected void setModelKey(String key, ModelHelper.MODELS modelType) {
super.setModelKey(key, modelType);
// Now that we have a model key, we can create a settings fragment for the appropriate model type
mSettingsFragment = NotificationSettingsFragment.newInstance(modelKey, modelType, mSavedPreferenceState);
getFragmentManager().beginTransaction().replace(R.id.settings_list, mSettingsFragment).commit();
// Disable the submit settings button so we can't hit it before the content is loaded
// This prevents accidently wiping settings (see #317)
mCloseNotificationSettingsButton.setEnabled(false);
}
@Override
public void setContentView(int layoutResID) {
    // Inflate the requested layout into the activity's content frame rather than
    // replacing the root view, so the FAB/settings chrome stays in place.
    FrameLayout contentRoot = (FrameLayout) findViewById(R.id.activity_content);
    getLayoutInflater().inflate(layoutResID, contentRoot);
}
/**
 * Handles both FAB clicks: the open button reveals the settings panel (unless a
 * save is running); the close button kicks off the save task, then closes the
 * panel and detaches the task fragment shortly afterwards.
 */
@Override
public void onClick(View v) {
if (v.getId() == R.id.open_notification_settings_button) {
if (!mSaveInProgress) {
openNotificationSettingsView();
}
} else if (v.getId() == R.id.close_notification_settings_button) {
// The user wants to save the preferences
if (mSaveSettingsTaskFragment == null) {
mSaveSettingsTaskFragment = new UpdateUserModelSettingsTaskFragment(mSettingsFragment.getSettings());
getSupportFragmentManager().beginTransaction().add(mSaveSettingsTaskFragment, SAVE_SETTINGS_TASK_FRAGMENT_TAG).commit();
mSaveInProgress = true;
final android.support.v4.app.FragmentManager fm = getSupportFragmentManager();
final Fragment settingsFragment = fm.findFragmentByTag(SAVE_SETTINGS_TASK_FRAGMENT_TAG);
// Delay lets the add-transaction commit before the panel closes and the task
// fragment is removed; commitAllowingStateLoss avoids crashes if state was saved.
mFabHandler.postDelayed(new Runnable() {
@Override
public void run() {
closeNotificationSettingsWindow();
if (settingsFragment != null) {
fm.beginTransaction().remove(settingsFragment).commitAllowingStateLoss();
}
mSaveSettingsTaskFragment = null;
}
}, 100);
}
} else {
Log.d(Constants.LOG_TAG, "Clicked id: " + v.getId() + " tag: " + v.getTag() + " view: " + v.toString());
}
}
/** Toolbar close (X) pressed: dismiss the panel and discard any unsaved edits. */
private void onNotificationSettingsCloseButtonClick() {
closeNotificationSettingsWindow();
// Cancel any changes made by the user
mSettingsFragment.restoreInitialState();
}
/**
 * Reveals the settings panel: circular reveal from the FAB on API 21+, a slide-up
 * translation otherwise, combined with close-button scale-in, a status-bar color
 * transition and a foreground dim — all driven by one AnimatorSet.
 */
private void openNotificationSettingsView() {
mSettingsFragment.restoreInitialState();
mCloseNotificationSettingsButton.setColorNormal(getResources().getColor(R.color.accent));
// this is the center of the button in relation to the main view. This provides the center of the clipping circle for the notification settings view.
int centerOfButtonOutsideX = (mOpenNotificationSettingsButtonContainer.getLeft() + mOpenNotificationSettingsButtonContainer.getRight()) / 2;
int centerOfButtonOutsideY = (mOpenNotificationSettingsButtonContainer.getTop() + mOpenNotificationSettingsButtonContainer.getBottom()) / 2;
// Radius large enough for the circle to cover the panel's far corner.
float finalRadius = (float) Math.sqrt(Math.pow(centerOfButtonOutsideX - mNotificationSettings.getLeft(), 2) + Math.pow(centerOfButtonOutsideY - mNotificationSettings.getTop(), 2));
Animator settingsPanelAnimator;
// Only show the circular reveal on API >= 5.0
mNotificationSettings.setVisibility(View.VISIBLE);
if (Utilities.hasLApis()) {
settingsPanelAnimator = ViewAnimationUtils.createCircularReveal(mNotificationSettings, centerOfButtonOutsideX, centerOfButtonOutsideY, 0, finalRadius);
settingsPanelAnimator.setDuration(ANIMATION_DURATION);
settingsPanelAnimator.setInterpolator(new DecelerateInterpolator());
} else {
// Pre-Lollipop fallback: slide the panel up from below (1 -> 0 of its height).
settingsPanelAnimator = ValueAnimator.ofFloat(1, 0);
final int notificationSettingsHeight = mNotificationSettings.getHeight();
((ValueAnimator) settingsPanelAnimator).addUpdateListener(animation -> mNotificationSettings.setTranslationY((float) notificationSettingsHeight * (float) animation.getAnimatedValue()));
settingsPanelAnimator.setInterpolator(new AccelerateDecelerateInterpolator());
settingsPanelAnimator.setDuration(ANIMATION_DURATION);
}
mOpenNotificationSettingsButtonContainer.setVisibility(View.INVISIBLE);
ValueAnimator closeButtonScaleUp = ValueAnimator.ofFloat(0, 1).setDuration(ANIMATION_DURATION);
closeButtonScaleUp.addListener(new AnimatorListenerAdapter() {
@Override
public void onAnimationStart(Animator animation) {
mCloseNotificationSettingsButtonContainer.setVisibility(View.VISIBLE);
}
});
closeButtonScaleUp.addUpdateListener(animation -> {
ViewCompat.setScaleX(mCloseNotificationSettingsButton, (float) animation.getAnimatedValue());
ViewCompat.setScaleY(mCloseNotificationSettingsButton, (float) animation.getAnimatedValue());
});
closeButtonScaleUp.setDuration(ANIMATION_DURATION / 2);
// Animate the status bar color change
Integer colorFrom = getResources().getColor(R.color.primary_dark);
Integer colorTo = getResources().getColor(R.color.accent_dark);
ValueAnimator colorAnimation = ValueAnimator.ofObject(new ArgbEvaluator(), colorFrom, colorTo);
colorAnimation.addUpdateListener(animator -> {
getDrawerLayout().setStatusBarBackgroundColor((Integer) animator.getAnimatedValue());
// We have to invalidate so that the view redraws the background
getDrawerLayout().invalidate();
});
colorAnimation.setDuration(ANIMATION_DURATION);
ValueAnimator dimAnimation = ValueAnimator.ofFloat(UNDIMMED_ALPHA, DIMMED_ALPHA);
dimAnimation.addUpdateListener(animation -> mForegroundDim.setAlpha((float) animation.getAnimatedValue()));
dimAnimation.setDuration(ANIMATION_DURATION);
// Panel reveal, color change and dim run together; close button scales in halfway through.
AnimatorSet animationSet = new AnimatorSet();
animationSet.play(settingsPanelAnimator);
animationSet.play(closeButtonScaleUp).after(ANIMATION_DURATION / 2);
animationSet.play(colorAnimation).with(settingsPanelAnimator);
animationSet.play(dimAnimation).with(settingsPanelAnimator);
animationSet.start();
mIsSettingsPanelOpen = true;
}
/**
 * Dismisses the settings panel: reverse circular reveal on API 21+, slide-down
 * otherwise, with close-button scale-out, open-button scale-in, status-bar color
 * restore and foreground undim, all sequenced by one AnimatorSet.
 * Fix: the pre-Lollipop branch previously called settingsPanelAnimator.start()
 * directly and the same animator was then played again by the AnimatorSet,
 * restarting it mid-flight; the premature start() is removed so the AnimatorSet
 * drives it exactly once (matching openNotificationSettingsView).
 */
private void closeNotificationSettingsWindow() {
    int centerOfButtonOutsideX = (mOpenNotificationSettingsButtonContainer.getLeft() + mOpenNotificationSettingsButtonContainer.getRight()) / 2;
    int centerOfButtonOutsideY = (mOpenNotificationSettingsButtonContainer.getTop() + mOpenNotificationSettingsButtonContainer.getBottom()) / 2;
    float finalRadius = (float) Math.sqrt(Math.pow(centerOfButtonOutsideX - mNotificationSettings.getLeft(), 2) + Math.pow(centerOfButtonOutsideY - mNotificationSettings.getTop(), 2));
    Animator settingsPanelAnimator;
    if (Utilities.hasLApis()) {
        settingsPanelAnimator = ViewAnimationUtils.createCircularReveal(mNotificationSettings, centerOfButtonOutsideX, centerOfButtonOutsideY, finalRadius, 0);
        settingsPanelAnimator.addListener(new AnimatorListenerAdapter() {
            @Override
            public void onAnimationEnd(Animator animation) {
                mNotificationSettings.setVisibility(View.INVISIBLE);
            }
        });
        settingsPanelAnimator.setDuration(ANIMATION_DURATION);
        settingsPanelAnimator.setInterpolator(new AccelerateInterpolator());
    } else {
        // Pre-Lollipop fallback: slide the panel down (0 -> 1 of its height).
        settingsPanelAnimator = ValueAnimator.ofFloat(0, 1);
        final int notificationSettingsHeight = mNotificationSettings.getHeight();
        ((ValueAnimator) settingsPanelAnimator).addUpdateListener(animation -> mNotificationSettings.setTranslationY((float) notificationSettingsHeight * (float) animation.getAnimatedValue()));
        settingsPanelAnimator.setDuration(ANIMATION_DURATION);
        settingsPanelAnimator.setInterpolator(new AccelerateInterpolator());
    }
    ValueAnimator closeButtonScaleDown = ValueAnimator.ofFloat(1, 0).setDuration(ANIMATION_DURATION);
    closeButtonScaleDown.addListener(new AnimatorListenerAdapter() {
        @Override
        public void onAnimationEnd(Animator animation) {
            mCloseNotificationSettingsButtonContainer.setVisibility(View.INVISIBLE);
        }
    });
    closeButtonScaleDown.addUpdateListener(animation -> {
        ViewCompat.setScaleX(mCloseNotificationSettingsButton, (float) animation.getAnimatedValue());
        ViewCompat.setScaleY(mCloseNotificationSettingsButton, (float) animation.getAnimatedValue());
    });
    closeButtonScaleDown.setDuration(ANIMATION_DURATION / 2);
    ValueAnimator openButtonScaleUp = ValueAnimator.ofFloat(0, 1).setDuration(ANIMATION_DURATION);
    openButtonScaleUp.addListener(new AnimatorListenerAdapter() {
        @Override
        public void onAnimationStart(Animator animation) {
            mOpenNotificationSettingsButtonContainer.setVisibility(View.VISIBLE);
        }
    });
    openButtonScaleUp.addUpdateListener(animation -> {
        ViewCompat.setScaleX(mOpenNotificationSettingsButton, (float) animation.getAnimatedValue());
        ViewCompat.setScaleY(mOpenNotificationSettingsButton, (float) animation.getAnimatedValue());
    });
    openButtonScaleUp.setDuration(ANIMATION_DURATION / 2);
    // Animate the status bar color change
    Integer colorFrom = getResources().getColor(R.color.accent_dark);
    Integer colorTo = getResources().getColor(R.color.primary_dark);
    ValueAnimator colorAnimation = ValueAnimator.ofObject(new ArgbEvaluator(), colorFrom, colorTo);
    colorAnimation.addUpdateListener(animator -> {
        getDrawerLayout().setStatusBarBackgroundColor((Integer) animator.getAnimatedValue());
        // We have to invalidate so that the view redraws the background
        getDrawerLayout().invalidate();
    });
    colorAnimation.setDuration(ANIMATION_DURATION);
    // Undim the foreground
    ValueAnimator dimAnimation = ValueAnimator.ofFloat(DIMMED_ALPHA, UNDIMMED_ALPHA);
    dimAnimation.addUpdateListener(animation -> mForegroundDim.setAlpha((float) animation.getAnimatedValue()));
    dimAnimation.setDuration(ANIMATION_DURATION);
    AnimatorSet animatorSet = new AnimatorSet();
    animatorSet.play(settingsPanelAnimator);
    animatorSet.play(closeButtonScaleDown).after(ANIMATION_DURATION / 2);
    animatorSet.play(colorAnimation).with(settingsPanelAnimator);
    animatorSet.play(dimAnimation).with(settingsPanelAnimator);
    animatorSet.play(openButtonScaleUp).after(settingsPanelAnimator);
    animatorSet.start();
    mIsSettingsPanelOpen = false;
}
/**
 * Makes the open-settings FAB visible, optionally with a scale-in animation.
 * Fix: the non-animated path previously set the container to View.GONE
 * (copy-pasted from hideFab), hiding the FAB this method is supposed to show;
 * it now sets View.VISIBLE.
 */
public void showFab(boolean animate) {
    if (mFabVisible) {
        return;
    }
    mFabVisible = true;
    if (mRunningFabAnimation != null) {
        mRunningFabAnimation.cancel();
    }
    if (!animate) {
        mOpenNotificationSettingsButtonContainer.setVisibility(View.VISIBLE);
        return;
    }
    ValueAnimator fabScaleUp = ValueAnimator.ofFloat(0, 1);
    fabScaleUp.addListener(new AnimatorListenerAdapter() {
        @Override
        public void onAnimationStart(Animator animation) {
            mOpenNotificationSettingsButtonContainer.setVisibility(View.VISIBLE);
        }
    });
    fabScaleUp.addUpdateListener(animation -> {
        ViewCompat.setScaleX(mOpenNotificationSettingsButton, (float) animation.getAnimatedValue());
        ViewCompat.setScaleY(mOpenNotificationSettingsButton, (float) animation.getAnimatedValue());
    });
    fabScaleUp.setDuration(FAB_ANIMATE_DURATION);
    fabScaleUp.setInterpolator(new DecelerateInterpolator());
    fabScaleUp.start();
    mRunningFabAnimation = fabScaleUp;
}
/** Hides the open-settings FAB, optionally with a scale-out animation. */
public void hideFab(boolean animate) {
    if (!mFabVisible) {
        return;
    }
    mFabVisible = false;
    if (mRunningFabAnimation != null) {
        mRunningFabAnimation.cancel();
    }
    if (!animate) {
        mOpenNotificationSettingsButtonContainer.setVisibility(View.GONE);
        return;
    }
    ValueAnimator shrink = ValueAnimator.ofFloat(1, 0);
    // One adapter covers both ends: visible while the animation runs, gone afterwards.
    shrink.addListener(new AnimatorListenerAdapter() {
        @Override
        public void onAnimationStart(Animator animation) {
            mOpenNotificationSettingsButtonContainer.setVisibility(View.VISIBLE);
        }
        @Override
        public void onAnimationEnd(Animator animation) {
            mOpenNotificationSettingsButtonContainer.setVisibility(View.GONE);
        }
    });
    shrink.addUpdateListener(animation -> {
        float scale = (float) animation.getAnimatedValue();
        ViewCompat.setScaleX(mOpenNotificationSettingsButton, scale);
        ViewCompat.setScaleY(mOpenNotificationSettingsButton, scale);
    });
    shrink.setDuration(FAB_ANIMATE_DURATION);
    shrink.setInterpolator(new AccelerateInterpolator());
    shrink.start();
    mRunningFabAnimation = shrink;
}
/**
 * Sets the title shown on the notification-settings toolbar.
 *
 * @param title the text to display; forwarded unchanged to the toolbar
 */
public void setSettingsToolbarTitle(String title) {
mNotificationSettingsToolbar.setTitle(title);
}
/**
 * Callback for a successfully persisted settings save. Shows a confirmation
 * toast, flashes the FAB accent -> green -> accent, tells the settings
 * fragment to reload from the database, and clears the save-in-progress flag.
 */
@Override
public void onSuccess() {
    Toast.makeText(this, "Settings Updated", Toast.LENGTH_SHORT).show();

    // Flash the FAB green and back: accent -> green over 500ms, then
    // green -> accent starting at the 2000ms mark of the animator set.
    final int accent = getResources().getColor(R.color.accent);
    final int green = getResources().getColor(R.color.green);

    ValueAnimator toGreen = ValueAnimator.ofObject(new ArgbEvaluator(), accent, green);
    toGreen.setDuration(500);
    toGreen.addUpdateListener(a -> mOpenNotificationSettingsButton.setColorNormal((Integer) a.getAnimatedValue()));

    ValueAnimator backToAccent = ValueAnimator.ofObject(new ArgbEvaluator(), green, accent);
    backToAccent.setDuration(500);
    backToAccent.addUpdateListener(a -> mOpenNotificationSettingsButton.setColorNormal((Integer) a.getAnimatedValue()));

    AnimatorSet flash = new AnimatorSet();
    flash.play(toGreen);
    flash.play(backToAccent).after(2000);
    flash.start();

    // Tell the settings fragment to reload the now-updated settings.
    mSettingsFragment.refreshSettingsFromDatabase();

    // Save finished.
    mSaveInProgress = false;
}
/**
 * Callback for a save attempt that resulted in no actual change. Informs the
 * user via toast and clears the save-in-progress flag.
 */
@Override
public void onNoOp() {
    Toast.makeText(this, "No change", Toast.LENGTH_SHORT).show();
    // NOTE(review): dead code kept from a previous fragment-cleanup approach;
    // left in place pending a decision on whether it should be revived.
    /*
    android.support.v4.app.FragmentManager fm = getSupportFragmentManager();
    Fragment settingsFragment = fm.findFragmentByTag(SAVE_SETTINGS_TASK_FRAGMENT_TAG);
    closeNotificationSettingsWindow();
    if(settingsFragment != null) {
    fm.beginTransaction().remove(settingsFragment).commitAllowingStateLoss();
    }
    saveSettingsTaskFragment = null;
    */
    // The save attempt is over either way.
    mSaveInProgress = false;
}
/**
 * Callback for a failed settings save. Shows an error toast, restores the
 * settings UI to its pre-save state, flashes the FAB accent -> red -> accent,
 * and clears the save-in-progress flag.
 */
@Override
public void onError() {
    Toast.makeText(this, "Error updating settings", Toast.LENGTH_SHORT).show();

    // Something went wrong; roll the settings fragment back to its initial state.
    mSettingsFragment.restoreInitialState();

    // Flash the FAB red and back: accent -> red over 500ms, then
    // red -> accent starting at the 2000ms mark of the animator set.
    final int accent = getResources().getColor(R.color.accent);
    final int red = getResources().getColor(R.color.red);

    ValueAnimator toRed = ValueAnimator.ofObject(new ArgbEvaluator(), accent, red);
    toRed.setDuration(500);
    toRed.addUpdateListener(a -> mOpenNotificationSettingsButton.setColorNormal((Integer) a.getAnimatedValue()));

    ValueAnimator backToAccent = ValueAnimator.ofObject(new ArgbEvaluator(), red, accent);
    backToAccent.setDuration(500);
    backToAccent.addUpdateListener(a -> mOpenNotificationSettingsButton.setColorNormal((Integer) a.getAnimatedValue()));

    AnimatorSet flash = new AnimatorSet();
    flash.play(toRed);
    flash.play(backToAccent).after(2000);
    flash.start();

    mSaveInProgress = false;
}
/**
 * Back-press handling: a visible settings panel consumes the press and is
 * closed; otherwise the default Activity back behavior applies.
 */
@Override
public void onBackPressed() {
    if (!mIsSettingsPanelOpen) {
        super.onBackPressed();
        return;
    }
    closeNotificationSettingsWindow();
}
/**
 * Called once the settings have finished loading; re-enables the submit
 * (close-settings) button that is presumably disabled while loading —
 * TODO confirm against the code that disables it.
 */
public void onSettingsLoaded() {
// Re-enable the submit button
mCloseNotificationSettingsButton.setEnabled(true);
}
}
|
package com.continuuity.internal.app.runtime.schedule;
import com.continuuity.app.runtime.ProgramRuntimeService;
import com.continuuity.app.store.StoreFactory;
import com.continuuity.common.conf.Constants;
import com.google.common.base.Supplier;
import com.google.common.base.Throwables;
import com.google.common.collect.Iterables;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.inject.Inject;
import org.apache.twill.common.Cancellable;
import org.apache.twill.discovery.DiscoveryServiceClient;
import org.apache.twill.discovery.ServiceDiscovered;
import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicBoolean;
/**
* Scheduler service to run in distributed reactor. Waits for transaction service to be available.
*/
/**
 * Scheduler service to run in distributed reactor. Defers starting the
 * underlying scheduler until the transaction service is discoverable.
 */
public final class DistributedSchedulerService extends DefaultSchedulerService {

    private static final Logger LOG = LoggerFactory.getLogger(DistributedSchedulerService.class);

    private final DiscoveryServiceClient discoveryServiceClient;
    private final ListeningExecutorService executorService;
    // Guards the one-time scheduler start; onChange may be invoked repeatedly
    // (and potentially concurrently) as discovery state changes.
    private final AtomicBoolean schedulerStarted = new AtomicBoolean(false);
    private Cancellable cancellable;

    /**
     * @param schedulerSupplier     supplies the Quartz {@link Scheduler} to wrap
     * @param storeFactory          passed through to {@link DefaultSchedulerService}
     * @param programRuntimeService passed through to {@link DefaultSchedulerService}
     * @param discoveryServiceClient used to watch for the transaction service
     */
    @Inject
    public DistributedSchedulerService(Supplier<Scheduler> schedulerSupplier, StoreFactory storeFactory,
                                       ProgramRuntimeService programRuntimeService,
                                       DiscoveryServiceClient discoveryServiceClient) {
        super(schedulerSupplier, storeFactory, programRuntimeService);
        this.discoveryServiceClient = discoveryServiceClient;
        executorService = MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(1));
    }

    @Override
    protected void startScheduler(final WrappedScheduler scheduler) {
        // Wait until TransactionService is discovered, then start the scheduler.
        ServiceDiscovered discover = discoveryServiceClient.discover(Constants.Service.TRANSACTION);
        cancellable = discover.watchChanges(
            new ServiceDiscovered.ChangeListener() {
                @Override
                public void onChange(ServiceDiscovered serviceDiscovered) {
                    // compareAndSet makes the check-then-start atomic: the previous
                    // get()/set(true) pair allowed two change notifications to both
                    // pass the check and attempt to start the scheduler twice.
                    if (!Iterables.isEmpty(serviceDiscovered) && schedulerStarted.compareAndSet(false, true)) {
                        LOG.info("Starting scheduler, Discovered {} transaction service(s)",
                                 Iterables.size(serviceDiscovered));
                        try {
                            scheduler.start();
                        } catch (SchedulerException e) {
                            // Reset the flag so a later discovery event can retry the start.
                            schedulerStarted.set(false);
                            // Pass the throwable by itself; the old form used a lone "{}"
                            // placeholder with two arguments, mis-rendering the message.
                            LOG.error("Error starting scheduler", e);
                            throw Throwables.propagate(e);
                        }
                    }
                }
            }, MoreExecutors.sameThreadExecutor());
    }

    @Override
    protected void stopScheduler(WrappedScheduler scheduler) {
        try {
            LOG.info("Stopping scheduler");
            scheduler.stop();
        } catch (SchedulerException e) {
            LOG.debug("Error stopping scheduler", e);
        } finally {
            if (cancellable != null) {
                cancellable.cancel();
            }
            // Shut down the single-thread executor created in the constructor;
            // previously it was never terminated, leaking its worker thread.
            executorService.shutdownNow();
        }
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.