answer
stringlengths 17
10.2M
|
|---|
package com.xnx3.j2ee.module.pingxx;
import com.pingplusplus.Pingpp;
import com.pingplusplus.exception.PingppException;
import com.pingplusplus.model.Charge;
import com.pingplusplus.model.Event;
import com.pingplusplus.model.Webhooks;
import com.xnx3.ConfigManagerUtil;
import com.xnx3.DateUtil;
import com.xnx3.j2ee.module.pingxx.bean.SmallCharge;
import net.sf.json.JSONObject;
import org.apache.commons.codec.binary.Base64;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.security.spec.X509EncodedKeySpec;
import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
import java.security.PublicKey;
import java.security.Signature;
import java.security.SignatureException;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
import java.security.KeyFactory;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
 * Ping++ payment helper.
 * <br/>Reads its configuration (api key, app id, key pair) from xnx3Config.xml.
 * <br/>Required jars:
 * <br/><i>commons-codec-1.10.jar</i>
 * <br/><i>gson-2.6.2.jar</i>
 * <br/><i>pingpp-java-2.1.7.jar</i>
 * <br/><i>json-lib-2.4-jdk15.jar</i>
 */
public class PingxxUtil {

	/**
	 * Ping++ application id, loaded from xnx3Config.xml ("pingxx.appId").
	 */
	public static String appId = "";
	/**
	 * Ping++ RSA public key (Base64, X.509), loaded from xnx3Config.xml
	 * ("pingxx.publicKey"). Used to verify webhook signatures.
	 */
	public static String publicKey = "";
	// When true, log() prints diagnostics to stdout ("pingxx.debug").
	private static boolean debug = true;

	static {
		// Initialize the Ping++ SDK and local settings from xnx3Config.xml.
		Pingpp.apiKey = ConfigManagerUtil.getSingleton("xnx3Config.xml").getValue("pingxx.apiKey");
		appId = ConfigManagerUtil.getSingleton("xnx3Config.xml").getValue("pingxx.appId");
		Pingpp.privateKey = ConfigManagerUtil.getSingleton("xnx3Config.xml").getValue("pingxx.privateKey");
		publicKey = ConfigManagerUtil.getSingleton("xnx3Config.xml").getValue("pingxx.publicKey");
		debug = ConfigManagerUtil.getSingleton("xnx3Config.xml").getValue("pingxx.debug").equals("true");
	}

	/**
	 * Manual smoke test: parses a sample webhook payload and prints the order number.
	 */
	public static void main(String[] args) {
		System.out.println(generateOrderNo());
		String sign = "l4EUlkWgJD0cUNRshtGhrV/qQ6tXhnCANTDR5D3iHyo0F2WqhubbTUJEzp8Ym00TTIM37lv1mfVozpHF811Vy2ZXwnqPLrLl9SyWMnRML2OOYDpD0XrStHMyE774yf6HyaFD8fcmwlOOFeY26NyfCx3cmbLWHT+me/ZnKtA1N5eZmKkteUIMSgk8jhBHA4RT2nSapKiVbMqueBXrKmtekvuUHwRmuqHmi0ee/7uesIqWfhtGXRbwQvygIo+Mx2OV7qAvl0IyENipZUa+TTJ05GZDM/s6goPkH9pcp/hQ/lUTbIMU42jZJO30W6zCoDsZKUjTo6+Quz1XVW1lK/KuSw==";
		// Sample Ping++ webhook event JSON (normally obtained via webhooks(request, response)).
		String dataString = "{\"id\":\"evt_ugB6x3K43D16wXCcqbplWAJo\",\"created\":1440407501,\"livemode\":false,\"type\":\"charge.succeeded\",\"data\":{\"object\":{\"id\":\"ch_Xsr7u35O3m1Gw4ed2ODmi4Lw\",\"object\":\"charge\",\"created\":1440407501,\"livemode\":true,\"paid\":true,\"refunded\":false,\"app\":\"app_urj1WLzvzfTK0OuL\",\"channel\":\"upacp\",\"order_no\":\"123456789\",\"client_ip\":\"127.0.0.1\",\"amount\":100,\"amount_settle\":0,\"currency\":\"cny\",\"subject\":\"Your Subject\",\"body\":\"Your Body\",\"extra\":{},\"time_paid\":1440407501,\"time_expire\":1440407501,\"time_settle\":null,\"transaction_no\":\"1224524301201505066067849274\",\"refunds\":{\"object\":\"list\",\"url\":\"/v1/charges/ch_Xsr7u35O3m1Gw4ed2ODmi4Lw/refunds\",\"has_more\":false,\"data\":[]},\"amount_refunded\":0,\"failure_code\":null,\"failure_msg\":null,\"metadata\":{},\"credential\":{},\"description\":null}},\"object\":\"event\",\"pending_webhooks\":0,\"request\":\"iar_qH4y1KbTy5eLGm1uHSTS00s\"}";
		com.xnx3.j2ee.module.pingxx.bean.Event event = getEventByContent(dataString, sign);
		if (event != null) {
			if (event.getType().equals(com.xnx3.j2ee.module.pingxx.bean.Event.TYPE_CHARGE_SUCCEEDED)) {
				System.out.println("" + event.getSmallCharge().getOrderNo());
			} else if (event.getType().equals(com.xnx3.j2ee.module.pingxx.bean.Event.TYPE_REFUND_SUCCEEDED)) {
				System.out.println("" + event.getSmallCharge().getOrderNo());
			}
		}
	}

	/**
	 * Generates a 12-character order number: two random digits followed by the
	 * 10-digit unix timestamp.
	 * @return generated order number
	 */
	public static String generateOrderNo() {
		Random random = new Random();
		return random.nextInt(10) + "" + random.nextInt(10) + DateUtil.timeForUnix10();
	}

	/**
	 * Verifies a webhook payload's signature and converts it to an
	 * {@link com.xnx3.j2ee.module.pingxx.bean.Event}.
	 * @param dataString raw webhook JSON body
	 * @param sign value of the X-Pingplusplus-Signature request header
	 * @return the parsed {@link com.xnx3.j2ee.module.pingxx.bean.Event},
	 *         or null if verification or parsing fails
	 */
	private static com.xnx3.j2ee.module.pingxx.bean.Event getEventByContent(String dataString, String sign) {
		try {
			if (verifyData(dataString, sign, getPubKey())) {
				Event ev = Webhooks.eventParse(dataString);
				com.xnx3.j2ee.module.pingxx.bean.Event event = new com.xnx3.j2ee.module.pingxx.bean.Event();
				event.setCreated(ev.getCreated());
				event.setData(ev.getData());
				event.setId(ev.getId());
				event.setLivemode(ev.getLivemode());
				// BUGFIX: was event.getPendingWebhooks() — copied the bean's own
				// (unset) value instead of the parsed event's.
				event.setPendingWebhooks(ev.getPendingWebhooks());
				event.setType(ev.getType());
				event.setSmallCharge(getSmallChargeByEvent(ev));
				return event;
			} else {
				log("webhook sign verify failure require data :" + dataString);
			}
		} catch (InvalidKeyException e) {
			e.printStackTrace();
		} catch (NoSuchAlgorithmException e) {
			e.printStackTrace();
		} catch (SignatureException e) {
			e.printStackTrace();
		} catch (Exception e) {
			e.printStackTrace();
		}
		return null;
	}

	/**
	 * Extracts the charge data of a webhook {@link Event} into a {@link SmallCharge}.
	 * @param event parsed webhook {@link Event}
	 * @return populated {@link SmallCharge}
	 */
	public static SmallCharge getSmallChargeByEvent(Event event) {
		JSONObject j = JSONObject.fromObject(event.getData().getObject());
		SmallCharge sc = new SmallCharge();
		// BUGFIX: the Ping++ charge object uses snake_case keys
		// ("order_no", "client_ip") — see the sample payload in main().
		sc.setOrderNo(j.getString("order_no"));
		sc.setChannel(j.getString("channel"));
		sc.setClientIp(j.getString("client_ip"));
		sc.setAmount(j.getInt("amount"));
		return sc;
	}

	/**
	 * Creates a Ping++ charge.
	 * @param amount amount in the smallest currency unit (cny)
	 * @param subject charge subject
	 * @param body charge body/description
	 * @param orderNo merchant order number
	 * @param channel payment channel, e.g. {@link SmallCharge#CHANNEL_ALIPAY}
	 * @param clientIp client IP address
	 * @return the created {@link Charge}, or null on failure
	 */
	public static Charge createCharge(Integer amount, String subject, String body, String orderNo, String channel, String clientIp) {
		Charge charge = null;
		Map<String, Object> chargeMap = new HashMap<String, Object>();
		chargeMap.put("amount", amount);
		chargeMap.put("currency", "cny");
		chargeMap.put("subject", subject);
		chargeMap.put("body", body);
		chargeMap.put("order_no", orderNo);
		chargeMap.put("channel", channel);
		chargeMap.put("client_ip", clientIp);
		Map<String, String> app = new HashMap<String, String>();
		app.put("id", appId);
		chargeMap.put("app", app);
		try {
			charge = Charge.create(chargeMap);
			return charge;
		} catch (PingppException e) {
			e.printStackTrace();
			log("PingplusUtil charge Exception !!!" + e.getMessage());
			return null;
		}
	}

	/**
	 * Handles an incoming Ping++ webhook request: reads the body, verifies the
	 * signature and parses the event.
	 * @param request {@link HttpServletRequest}
	 * @param response {@link HttpServletResponse}
	 * @return the parsed {@link com.xnx3.j2ee.module.pingxx.bean.Event}, or null
	 * @throws IOException if reading the request body fails
	 */
	public static com.xnx3.j2ee.module.pingxx.bean.Event webhooks(HttpServletRequest request, HttpServletResponse response) throws IOException {
		request.setCharacterEncoding("UTF8");
		// Read the raw HTTP body.
		BufferedReader reader = request.getReader();
		StringBuffer buffer = new StringBuffer();
		String string;
		while ((string = reader.readLine()) != null) {
			buffer.append(string);
		}
		reader.close();
		String sign = request.getHeader("X-Pingplusplus-Signature");
		String dataString = buffer.toString();
		com.xnx3.j2ee.module.pingxx.bean.Event event = getEventByContent(dataString, sign);
		return event;
	}

	/**
	 * Builds the RSA public key from the Base64-encoded {@link #publicKey}.
	 * @return the RSA {@link PublicKey}
	 * @throws Exception if the key material is invalid
	 */
	public static PublicKey getPubKey() throws Exception {
		byte[] keyBytes = Base64.decodeBase64(publicKey);
		// generate public key
		X509EncodedKeySpec spec = new X509EncodedKeySpec(keyBytes);
		KeyFactory keyFactory = KeyFactory.getInstance("RSA");
		PublicKey pk = keyFactory.generatePublic(spec);
		return pk;
	}

	/**
	 * Verifies an SHA256withRSA signature over the given data.
	 * @param dataString signed data (UTF-8)
	 * @param signatureString Base64-encoded signature; may be null
	 * @param publicKey RSA public key to verify against
	 * @return true if the signature is valid, false otherwise (including null signature)
	 * @throws NoSuchAlgorithmException if SHA256withRSA is unavailable
	 * @throws InvalidKeyException if the key is invalid
	 * @throws SignatureException if verification fails internally
	 */
	public static boolean verifyData(String dataString, String signatureString, PublicKey publicKey)
			throws NoSuchAlgorithmException, InvalidKeyException, SignatureException, UnsupportedEncodingException {
		if (signatureString != null) {
			byte[] signatureBytes = Base64.decodeBase64(signatureString);
			Signature signature = Signature.getInstance("SHA256withRSA");
			signature.initVerify(publicKey);
			signature.update(dataString.getBytes("UTF-8"));
			return signature.verify(signatureBytes);
		} else {
			return false;
		}
	}

	/**
	 * Prints a diagnostic message to stdout when debug mode is on.
	 * @param content message to log
	 */
	private static void log(String content) {
		if (debug) {
			System.out.println("pingxx:" + content);
		}
	}
}
|
package org.rabix.backend.local;
import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.rabix.bindings.BindingException;
import org.rabix.bindings.ProtocolType;
import org.rabix.bindings.helper.URIHelper;
import org.rabix.bindings.model.Job;
import org.rabix.bindings.model.Job.JobStatus;
import org.rabix.bindings.protocol.draft2.Draft2CommandLineBuilder;
import org.rabix.bindings.protocol.draft2.bean.Draft2CommandLineTool;
import org.rabix.bindings.protocol.draft2.bean.Draft2Job;
import org.rabix.bindings.protocol.draft2.bean.Draft2JobApp;
import org.rabix.bindings.protocol.draft2.bean.Draft2Resources;
import org.rabix.bindings.protocol.draft2.bean.resource.requirement.Draft2CreateFileRequirement;
import org.rabix.bindings.protocol.draft2.bean.resource.requirement.Draft2CreateFileRequirement.Draft2FileRequirement;
import org.rabix.bindings.protocol.draft2.expression.Draft2ExpressionException;
import org.rabix.bindings.protocol.draft2.resolver.Draft2DocumentResolver;
import org.rabix.bindings.protocol.draft3.Draft3CommandLineBuilder;
import org.rabix.bindings.protocol.draft3.bean.Draft3CommandLineTool;
import org.rabix.bindings.protocol.draft3.bean.Draft3Job;
import org.rabix.bindings.protocol.draft3.bean.Draft3JobApp;
import org.rabix.bindings.protocol.draft3.bean.Draft3Resources;
import org.rabix.bindings.protocol.draft3.bean.resource.requirement.Draft3CreateFileRequirement;
import org.rabix.bindings.protocol.draft3.bean.resource.requirement.Draft3CreateFileRequirement.Draft3FileRequirement;
import org.rabix.bindings.protocol.draft3.expression.Draft3ExpressionException;
import org.rabix.bindings.protocol.draft3.resolver.Draft3DocumentResolver;
import org.rabix.common.config.ConfigModule;
import org.rabix.common.helper.JSONHelper;
import org.rabix.common.json.BeanSerializer;
import org.rabix.engine.EngineModule;
import org.rabix.engine.rest.api.BackendHTTPService;
import org.rabix.engine.rest.api.JobHTTPService;
import org.rabix.engine.rest.api.impl.BackendHTTPServiceImpl;
import org.rabix.engine.rest.api.impl.JobHTTPServiceImpl;
import org.rabix.engine.rest.backend.BackendDispatcher;
import org.rabix.engine.rest.db.BackendDB;
import org.rabix.engine.rest.db.JobDB;
import org.rabix.engine.rest.service.BackendService;
import org.rabix.engine.rest.service.JobService;
import org.rabix.engine.rest.service.impl.BackendServiceImpl;
import org.rabix.engine.rest.service.impl.JobServiceImpl;
import org.rabix.executor.ExecutorModule;
import org.rabix.executor.service.ExecutorService;
import org.rabix.ftp.SimpleFTPModule;
import org.rabix.transport.backend.impl.BackendLocal;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Scopes;
/**
 * Local command line executor for CWL (draft-2/draft-3) applications.
 * Parses command line options, wires the engine/executor with Guice, submits
 * the root job to a local backend and waits for it to finish, printing the
 * outputs on success.
 */
public class BackendCommandLine {

  private static final Logger logger = LoggerFactory.getLogger(BackendCommandLine.class);
  // Default config location, resolved relative to the user's home directory
  // as a last resort in getConfigDir().
  private static String configDir = "/.bunny/config";

  public static void main(String[] commandLineArguments) {
    final CommandLineParser commandLineParser = new DefaultParser();
    final Options posixOptions = createOptions();

    CommandLine commandLine;
    try {
      commandLine = commandLineParser.parse(posixOptions, commandLineArguments);
      // -h/--help: print usage and stop.
      if (commandLine.hasOption("h")) {
        printUsageAndExit(posixOptions);
      }
      if (!checkCommandLine(commandLine)) {
        printUsageAndExit(posixOptions);
      }

      // First positional argument: the application (tool) description file.
      String appPath = commandLine.getArgList().get(0);
      File appFile = new File(appPath);
      if (!appFile.exists()) {
        logger.info("Application file {} does not exist.", appFile.getCanonicalPath());
        printUsageAndExit(posixOptions);
      }

      // Second positional argument: the job inputs file.
      String inputsPath = commandLine.getArgList().get(1);
      File inputsFile = new File(inputsPath);
      if (!inputsFile.exists()) {
        logger.info("Inputs file {} does not exist.", inputsFile.getCanonicalPath());
        printUsageAndExit(posixOptions);
      }

      // NOTE(review): this local File shadows the static String field of the same name.
      File configDir = getConfigDir(commandLine, posixOptions);
      if (!configDir.exists() || !configDir.isDirectory()) {
        logger.info("Config directory {} doesn't exist or is not a directory", configDir.getCanonicalPath());
        printUsageAndExit(posixOptions);
      }

      // Configuration overrides assembled from the command line.
      Map<String, Object> configOverrides = new HashMap<>();
      String executionDirPath = commandLine.getOptionValue("basedir");
      if (executionDirPath != null) {
        File executionDir = new File(executionDirPath);
        if (!executionDir.exists() || !executionDir.isDirectory()) {
          logger.info("Execution directory {} doesn't exist or is not a directory", executionDirPath);
          System.exit(10);
        } else {
          configOverrides.put("backend.execution.directory", executionDir.getCanonicalPath());
        }
      } else {
        // No --basedir: default to the inputs file's parent directory, falling
        // back to the current working directory.
        String workingDir = null;
        try {
          workingDir = inputsFile.getParentFile().getCanonicalPath();
        } catch (Exception e) {
          workingDir = new File(".").getCanonicalPath();
        }
        configOverrides.put("backend.execution.directory", workingDir);
      }
      if(commandLine.hasOption("no-container")) {
        configOverrides.put("backend.docker.enabled", false);
      }

      // Wire the engine, executor and in-memory services together.
      ConfigModule configModule = new ConfigModule(configDir, configOverrides);
      Injector injector = Guice.createInjector(
          new SimpleFTPModule(),
          new EngineModule(),
          new ExecutorModule(configModule),
          new AbstractModule() {
            @Override
            protected void configure() {
              bind(JobDB.class).in(Scopes.SINGLETON);
              bind(BackendDB.class).in(Scopes.SINGLETON);
              bind(JobService.class).to(JobServiceImpl.class).in(Scopes.SINGLETON);
              bind(BackendService.class).to(BackendServiceImpl.class).in(Scopes.SINGLETON);
              bind(BackendDispatcher.class).in(Scopes.SINGLETON);
              bind(JobHTTPService.class).to(JobHTTPServiceImpl.class);
              bind(BackendHTTPService.class).to(BackendHTTPServiceImpl.class).in(Scopes.SINGLETON);
            }
          });

      // Read the inputs file and normalize it to a JSON-backed map.
      String appURI = URIHelper.createURI(URIHelper.FILE_URI_SCHEME, appPath);
      String inputsText = readFile(inputsFile.getAbsolutePath(), Charset.defaultCharset());
      Map<String, Object> inputs = JSONHelper.readMap(JSONHelper.transformToJSON(inputsText));

      // -t/--conformance-test: print the dry-run command line description and exit.
      if (commandLine.hasOption("t")) {
        System.out.println(JSONHelper.writeObject(createConformanceTestResults(appURI, inputs, ProtocolType.DRAFT2)));
        System.exit(0);
      }

      final JobService jobService = injector.getInstance(JobService.class);
      final BackendService backendService = injector.getInstance(BackendService.class);
      final ExecutorService executorService = injector.getInstance(ExecutorService.class);

      // Register a local backend and submit the root job to it.
      BackendLocal backendLocal = new BackendLocal();
      backendLocal = backendService.create(backendLocal);
      executorService.initialize(backendLocal);

      final Job job = jobService.create(new Job(appURI, inputs));

      // Poll once a second until the root job finishes; on COMPLETED print the
      // outputs and exit 0, otherwise exit 10.
      Thread checker = new Thread(new Runnable() {
        @Override
        public void run() {
          Job rootJob = jobService.get(job.getId());
          while(!Job.isFinished(rootJob)) {
            try {
              Thread.sleep(1000);
              rootJob = jobService.get(job.getId());
            } catch (InterruptedException e) {
              logger.error("Failed to wait for root Job to finish", e);
              throw new RuntimeException(e);
            }
          }
          if (rootJob.getStatus().equals(JobStatus.COMPLETED)) {
            try {
              logger.info(JSONHelper.mapper.writerWithDefaultPrettyPrinter().writeValueAsString(rootJob.getOutputs()));
              System.exit(0);
            } catch (JsonProcessingException e) {
              logger.error("Failed to write outputs to standard out", e);
              System.exit(10);
            }
          } else {
            System.exit(10);
          }
        }
      });
      checker.start();
      checker.join();
    } catch (ParseException e) {
      logger.error("Encountered exception while parsing using PosixParser.", e);
    } catch (Exception e) {
      logger.error("Encountered exception while reading a input file.", e);
    }
  }

  /**
   * Builds the conformance-test result map (args, stdin, stdout, createfiles)
   * for the given application and inputs without actually running the tool.
   *
   * @param appURI       URI of the resolved application document
   * @param inputs       job inputs (may carry an "allocatedResources" map with
   *                     "cpu" and "mem" entries)
   * @param protocolType CWL protocol draft to interpret the app with
   * @return result map, or null for an unsupported protocol
   * @throws BindingException if expression evaluation fails
   */
  @SuppressWarnings("unchecked")
  private static Map<String, Object> createConformanceTestResults(String appURI, Map<String, Object> inputs, ProtocolType protocolType) throws BindingException {
    switch (protocolType) {
    case DRAFT2:
      Draft2DocumentResolver draft2DocumentResolver = new Draft2DocumentResolver();
      String draft2ResolvedApp = draft2DocumentResolver.resolve(appURI);
      Draft2JobApp draft2App = BeanSerializer.deserialize(draft2ResolvedApp, Draft2JobApp.class);
      if (!draft2App.isCommandLineTool()) {
        logger.error("The application is not a valid command line tool.");
        System.exit(10);
      }
      Draft2CommandLineTool draft2CommandLineTool = BeanSerializer.deserialize(draft2ResolvedApp, Draft2CommandLineTool.class);
      Draft2Job draft2Job = new Draft2Job(draft2CommandLineTool, (Map<String, Object>) inputs);
      // Optional resource hints from the inputs document.
      Map<String, Object> draft2AllocatedResources = (Map<String, Object>) inputs.get("allocatedResources");
      Integer draft2Cpu = draft2AllocatedResources != null ? (Integer) draft2AllocatedResources.get("cpu") : null;
      Integer draft2Mem = draft2AllocatedResources != null ? (Integer) draft2AllocatedResources.get("mem") : null;
      draft2Job.setResources(new Draft2Resources(false, draft2Cpu, draft2Mem));

      Draft2CommandLineBuilder draft2CommandLineBuilder = new Draft2CommandLineBuilder();
      List<Object> draft2CommandLineParts = draft2CommandLineBuilder.buildCommandLineParts(draft2Job);
      String draft2Stdin;
      try {
        draft2Stdin = draft2CommandLineTool.getStdin(draft2Job);
        String draft2Stdout = draft2CommandLineTool.getStdout(draft2Job);

        // Collect files requested via CreateFileRequirement (filename -> content).
        Draft2CreateFileRequirement draft2CreateFileRequirement = draft2CommandLineTool.getCreateFileRequirement();
        Map<Object, Object> draft2CreatedFiles = new HashMap<>();
        if (draft2CreateFileRequirement != null) {
          for (Draft2FileRequirement draft2FileRequirement : draft2CreateFileRequirement.getFileRequirements()) {
            draft2CreatedFiles.put(draft2FileRequirement.getFilename(draft2Job), draft2FileRequirement.getContent(draft2Job));
          }
        }

        Map<String, Object> draft2Result = new HashMap<>();
        draft2Result.put("args", draft2CommandLineParts);
        draft2Result.put("stdin", draft2Stdin);
        draft2Result.put("stdout", draft2Stdout);
        draft2Result.put("createfiles", draft2CreatedFiles);
        return draft2Result;
      } catch (Draft2ExpressionException e) {
        throw new BindingException(e);
      }
    case DRAFT3:
      // Mirrors the DRAFT2 branch using the draft-3 bindings.
      Draft3DocumentResolver draft3DocumentResolver = new Draft3DocumentResolver();
      String draft3ResolvedApp = draft3DocumentResolver.resolve(appURI);
      Draft3JobApp draft3App = BeanSerializer.deserialize(draft3ResolvedApp, Draft3JobApp.class);
      if (!draft3App.isCommandLineTool()) {
        logger.error("The application is not a valid command line tool.");
        System.exit(10);
      }
      Draft3CommandLineTool draft3CommandLineTool = BeanSerializer.deserialize(draft3ResolvedApp, Draft3CommandLineTool.class);
      Draft3Job draft3Job = new Draft3Job(draft3CommandLineTool, (Map<String, Object>) inputs);
      Map<String, Object> draft3AllocatedResources = (Map<String, Object>) inputs.get("allocatedResources");
      Integer draft3Cpu = draft3AllocatedResources != null ? (Integer) draft3AllocatedResources.get("cpu") : null;
      Integer draft3Mem = draft3AllocatedResources != null ? (Integer) draft3AllocatedResources.get("mem") : null;
      draft3Job.setResources(new Draft3Resources(false, draft3Cpu, draft3Mem));

      Draft3CommandLineBuilder draft3CommandLineBuilder = new Draft3CommandLineBuilder();
      List<Object> draft3CommandLineParts = draft3CommandLineBuilder.buildCommandLineParts(draft3Job);
      String draft3Stdin;
      try {
        draft3Stdin = draft3CommandLineTool.getStdin(draft3Job);
        String draft3Stdout = draft3CommandLineTool.getStdout(draft3Job);

        Draft3CreateFileRequirement draft3CreateFileRequirement = draft3CommandLineTool.getCreateFileRequirement();
        Map<Object, Object> draft3CreatedFiles = new HashMap<>();
        if (draft3CreateFileRequirement != null) {
          for (Draft3FileRequirement draft3FileRequirement : draft3CreateFileRequirement.getFileRequirements()) {
            draft3CreatedFiles.put(draft3FileRequirement.getFilename(draft3Job), draft3FileRequirement.getContent(draft3Job));
          }
        }

        Map<String, Object> result = new HashMap<>();
        result.put("args", draft3CommandLineParts);
        result.put("stdin", draft3Stdin);
        result.put("stdout", draft3Stdout);
        result.put("createfiles", draft3CreatedFiles);
        return result;
      } catch (Draft3ExpressionException e) {
        throw new BindingException(e);
      }
    default:
      break;
    }
    return null;
  }

  /**
   * Reads content from a file
   */
  static String readFile(String path, Charset encoding) throws IOException {
    byte[] encoded = Files.readAllBytes(Paths.get(path));
    return new String(encoded, encoding);
  }

  /**
   * Create command line options
   */
  private static Options createOptions() {
    Options options = new Options();
    options.addOption("v", "verbose", false, "verbose");
    options.addOption("b", "basedir", true, "execution directory");
    options.addOption("l", "log-iterations-dir", true, "log engine tables directory");
    options.addOption("c", "configuration-dir", true, "configuration directory");
    options.addOption("t", "conformance-test", false, "conformance test");
    options.addOption("", "no-container", false, "don't use containers");
    options.addOption("", "tmpdir-prefix", true, "doesn't do anything");
    options.addOption("", "tmp-outdir-prefix", true, "doesn't do anything");
    options.addOption("", "quiet", false, "quiet");
    options.addOption("h", "help", false, "help");
    return options;
  }

  /**
   * Check for missing options (exactly two positional arguments: tool and job)
   */
  private static boolean checkCommandLine(CommandLine commandLine) {
    if (commandLine.getArgList().size() != 2) {
      logger.info("Invalid number of arguments");
      return false;
    }
    return true;
  }

  /**
   * Prints command line usage and exits with status 0
   */
  private static void printUsageAndExit(Options options) {
    new HelpFormatter().printHelp("rabix [OPTION]... <tool> <job>", options);
    System.exit(0);
  }

  /**
   * Resolves the configuration directory, trying in order:
   * 1. the --configuration-dir option,
   * 2. a "config" directory next to the application's code location,
   * 3. ~/.bunny/config (printing usage and exiting if absent).
   */
  private static File getConfigDir(CommandLine commandLine, Options options) throws IOException {
    String configPath = commandLine.getOptionValue("configuration-dir");
    if (configPath != null) {
      File config = new File(configPath);
      if (config.exists() && config.isDirectory()) {
        return config;
      } else {
        logger.debug("Configuration directory {} doesn't exist or is not a directory.", configPath);
      }
    }
    File config = new File(new File(BackendCommandLine.class.getProtectionDomain().getCodeSource().getLocation().getPath()).getParentFile().getParentFile() + "/config");
    logger.debug("Config path: " + config.getCanonicalPath());
    if (config.exists() && config.isDirectory()) {
      logger.debug("Configuration directory found localy.");
      return config;
    }
    String homeDir = System.getProperty("user.home");
    config = new File(homeDir, configDir);
    if (!config.exists() || !config.isDirectory()) {
      logger.info("Config directory doesn't exist or is not a directory");
      printUsageAndExit(options);
    }
    return config;
  }
}
|
package org.requirementsascode.builder;
import static org.requirementsascode.builder.FlowPositionPart.flowPositionPart;
import static org.requirementsascode.builder.StepPart.interruptableFlowStepPart;
import java.util.Objects;
import org.requirementsascode.Condition;
import org.requirementsascode.Flow;
import org.requirementsascode.FlowStep;
import org.requirementsascode.Model;
import org.requirementsascode.UseCase;
import org.requirementsascode.exception.ElementAlreadyInModel;
import org.requirementsascode.exception.NoSuchElementInModel;
import org.requirementsascode.flowposition.After;
import org.requirementsascode.flowposition.Anytime;
import org.requirementsascode.flowposition.InsteadOf;
/**
 * Part used by the {@link ModelBuilder} to build a {@link Model}.
 *
 * @see Flow
 * @author b_muth
 */
public class FlowPart {
	private final Flow flow;
	private final UseCase useCase;
	private final UseCasePart useCasePart;
	private FlowPositionPart optionalFlowPositionPart;

	private FlowPart(Flow flow, UseCasePart useCasePart) {
		this.flow = Objects.requireNonNull(flow);
		this.useCasePart = Objects.requireNonNull(useCasePart);
		this.useCase = useCasePart.getUseCase();
	}

	static FlowPart buildBasicFlowPart(UseCasePart useCasePart) {
		return new FlowPart(useCasePart.getUseCase().getBasicFlow(), useCasePart);
	}

	static FlowPart buildFlowPart(String flowName, UseCasePart useCasePart) {
		return new FlowPart(useCasePart.getUseCase().newFlow(flowName), useCasePart);
	}

	/**
	 * Starts the flow after the specified step has been run, in this flow's use
	 * case.
	 *
	 * Note: You should use after to handle exceptions that occurred in the
	 * specified step.
	 *
	 * @param stepName the name of the step to start the flow after
	 * @return the flow position part, to ease creation of the condition and the
	 *         first step of the flow
	 * @throws NoSuchElementInModel if the specified step is not found in a flow of
	 *                              this use case
	 */
	public FlowPositionPart after(String stepName) {
		FlowStep referencedStep = (FlowStep) useCase.findStep(stepName);
		optionalFlowPositionPart = flowPositionPart(new After(referencedStep), this);
		return optionalFlowPositionPart;
	}

	/**
	 * Creates the first step of this flow, without specifying position or
	 * condition. It can be interrupted by any other flow that has an explicit
	 * position and/or condition. It can be run when no other step has been run
	 * before.
	 *
	 * @param stepName the name of the step to be created
	 * @return the newly created step part, to ease creation of further steps
	 * @throws ElementAlreadyInModel if a step with the specified name already
	 *                               exists in the use case
	 */
	public StepPart step(String stepName) {
		return interruptableFlowStepPart(stepName, this);
	}

	/**
	 * Starts the flow as an alternative to the specified step, in this flow's use
	 * case.
	 *
	 * @param stepName the name of the specified step
	 * @return the flow position part, to ease creation of the condition and the
	 *         first step of the flow
	 * @throws NoSuchElementInModel if the specified step is not found in this
	 *                              flow's use case
	 */
	public FlowPositionPart insteadOf(String stepName) {
		FlowStep referencedStep = (FlowStep) useCase.findStep(stepName);
		optionalFlowPositionPart = flowPositionPart(new InsteadOf(referencedStep), this);
		return optionalFlowPositionPart;
	}

	/**
	 * Starts the flow after any step that has been run, or at the beginning.
	 *
	 * @return the flow position part, to ease creation of the condition and the
	 *         first step of the flow
	 */
	public FlowPositionPart anytime() {
		optionalFlowPositionPart = flowPositionPart(new Anytime(), this);
		return optionalFlowPositionPart;
	}

	/**
	 * Constrains the flow's condition: only if the specified condition is true, the
	 * flow is started.
	 *
	 * @param condition the condition that constrains when the flow is started
	 * @return the condition part, to ease creation of the first step of the flow
	 */
	public FlowConditionPart condition(Condition condition) {
		Objects.requireNonNull(condition);
		return anytime().condition(condition);
	}

	Flow getFlow() {
		return flow;
	}

	UseCasePart getUseCasePart() {
		return useCasePart;
	}

	ModelBuilder getModelBuilder() {
		return useCasePart.getModelBuilder();
	}
}
|
package filter;
import model.TurboIssue;
import model.TurboLabel;
public class Predicate implements FilterExpression {
private String name;
private String content;
public Predicate(String name, String content) {
this.name = name;
this.content = content;
}
public Predicate() {
this.name = null;
this.content = null;
}
@Override
public String toString() {
return name + "(" + content + ")";
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
Predicate other = (Predicate) obj;
if (content == null) {
if (other.content != null)
return false;
} else if (!content.equals(other.content))
return false;
if (name == null) {
if (other.name != null)
return false;
} else if (!name.equals(other.name))
return false;
return true;
}
public boolean isSatisfiedBy(TurboIssue issue) {
if (name == null && content == null) return true;
switch (name) {
case "title":
return issue.getTitle().toLowerCase().contains(content.toLowerCase());
case "milestone":
if (issue.getMilestone() == null) return false;
return issue.getMilestone().getTitle().toLowerCase().contains(content.toLowerCase());
case "parent":
content = content.toLowerCase();
if (content.startsWith("
return issue.getParentNumbers().contains(Integer.parseInt(content.substring(1)));
} else if (Character.isDigit(content.charAt(0))) {
return issue.getParentNumbers().contains(Integer.parseInt(content));
} else {
// search parent name instead
return false;
}
// case "child":
case "label":
for (TurboLabel l : issue.getLabels()) {
if (l.getName().toLowerCase().contains(content.toLowerCase())) {
return true;
}
}
return false;
case "assignee":
if (issue.getAssignee() == null) return false;
return issue.getAssignee().getGithubName().toLowerCase().contains(content.toLowerCase())
|| (issue.getAssignee().getRealName() != null && issue.getAssignee().getRealName().toLowerCase().contains(content.toLowerCase()));
case "state":
if (content.toLowerCase().contains("open")) {
return issue.getOpen();
} else if (content.toLowerCase().contains("closed")) {
return !issue.getOpen();
} else {
return false;
}
default:
return false;
}
}
}
|
package eu.cloudopting.web.rest;
import java.util.Date;
import java.util.HashMap;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.hibernate.validator.internal.util.privilegedactions.NewJaxbContext;
import org.json.JSONException;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.RestController;
import eu.cloudopting.domain.Applications;
import eu.cloudopting.domain.Customizations;
import eu.cloudopting.domain.User;
import eu.cloudopting.service.ApplicationService;
import eu.cloudopting.service.CustomizationService;
import eu.cloudopting.service.UserService;
import eu.cloudopting.tosca.ToscaService;
@RestController
@RequestMapping("/api")
public class CustomizationController {
private final Logger log = LoggerFactory.getLogger(CustomizationController.class);
@Autowired
private UserService userService;
@Autowired
private ToscaService toscaService;
@Autowired
private ApplicationService applicationService;
@Autowired
CustomizationService customizationService;
@RequestMapping(value = "/application/{idApp}/getSizings",
method = RequestMethod.GET)
public void getSizing(@PathVariable("appId") final Long id){
}
@RequestMapping(value = "/application/{idApp}/getCustomizationForm",
method = RequestMethod.GET)
@ResponseBody
public String getCustomizationForm(@PathVariable("idApp") final Long idApp){
log.debug("in getCustomizationForm");
log.debug(idApp.toString());
// JSONObject jret = new JSONObject("{\"type\": \"object\",\"title\": \"Compute\",\"properties\": {\"node_id\": {\"title\": \"Node ID\",\"type\": \"string\"},\"node_label\": {\"title\": \"Node Label\",\"type\": \"string\",\"description\": \"Email will be used for evil.\"},\"memory\": {\"title\": \"Memory\",\"type\": \"string\",\"enum\": [\"512\",\"1024\",\"2048\"]},\"cpu\": {\"title\": \"CPU\",\"type\": \"integer\",\"maxLength\": 20,\"validationMessage\": \"Dont be greedy!\"}},\"required\": [\"node_id\",\"node_label\",\"memory\", \"cpu\"]}");
Applications application = applicationService.findOne(idApp);
String csarPath = application.getApplicationToscaTemplate();
JSONObject jret = toscaService.getCustomizationFormData(idApp, csarPath);
return jret.toString();
}
@RequestMapping(value = "/application/{idApp}/sendCustomizationForm",
method = RequestMethod.POST)
public String postCustomizationForm(@PathVariable("idApp") final Long idApp, @RequestParam(value = "formData") String formData,HttpServletRequest request, HttpServletResponse response){
log.debug("in postCustomizationForm");
log.debug(idApp.toString());
log.debug(formData);
JSONObject jsonData = null;;
try {
jsonData = new JSONObject(formData);
} catch (JSONException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
User user = userService.loadUserByLogin(request.getUserPrincipal().getName());
// Long orgId = user.getOrganizationId().getId()
Applications application = applicationService.findOne(idApp);
String csarPath = application.getApplicationToscaTemplate();
String theTosca = toscaService.generateCustomizedTosca(idApp, csarPath, jsonData);
Customizations newC = new Customizations();
newC.setApplicationId(idApp);
newC.setCustomizationToscaFile(theTosca);
newC.setCustomerOrganizationId(user.getOrganizationId());
newC.setCustomizationActivation(new Date());
newC.setCustomizationCreation(new Date());
newC.setCustomizationDecommission(new Date());
newC.setStatusId(new Long(100));
//TODO Check this is correct
log.debug(formData);
log.debug(jsonData.toString());
newC.setCustomizationFormValue(formData);
customizationService.create(newC);
// here we need to create the mail and send it to the SP and sub.
// Here I need mail of the sub
String mailSub = user.getOrganizationId().getEmail();
// than the mail of the organization
String mailSp = application.getOrganizationId().getEmail());
HashMap<String, Object> mailData = new HashMap<String, Object>();
mailData.put("serviceName", application.getApplicationName());
mailData.put("serviceOrganization", application.getOrganizationId().getOrganizationName());
mailData.put("serviceProviderMail", mailSp);
mailData.put("serviceOrganizationContact", application.getOrganizationId().getContactRepresentative());
mailData.put("serviceOrganizationContactPhone", application.getOrganizationId().getContactPhone());
mailData.put("subscriberFirstName", user.getFirstName());
mailData.put("subscriberLastName", user.getLastName());
mailData.put("subscriberMail", mailSub);
mailData.put("subscriberOrganization", user.getOrganizationId().getOrganizationName());
// than getting the template
// compile the template
// send the template as mail
response.setStatus(HttpServletResponse.SC_OK);
return "Customization successfully saved";
}
}
|
package org.itevents.utils.mailBuilder;
import org.itevents.model.City;
import org.itevents.model.Currency;
import org.itevents.model.Event;
import org.itevents.model.Location;
import org.junit.Test;
import javax.xml.bind.JAXBException;
import javax.xml.transform.TransformerException;
import java.io.IOException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.List;
import static org.junit.Assert.assertEquals;
/**
 * Verifies that MailBuilderUtil renders a list of events into the expected
 * HTML mail body.
 *
 * NOTE(review): several string literals in this file appear truncated by a
 * text-extraction step (unterminated "http: fragments on the Event URLs and in
 * the expected anchors) — recover the original URLs from version control
 * before relying on this source.
 */
public class MailBuilderUtilTest {
    @Test
    public void MailBuilderUtil() throws JAXBException, ParseException, IOException, TransformerException {
        SimpleDateFormat eventsDateFormat = new SimpleDateFormat("dd.MM.yyyy");
        // Two fixture events: "Java" at Beresteyska and "Ruby" at Shulyavska.
        List<Event> events = new ArrayList<>();
        // NOTE(review): the URL literal below is truncated ("http:) — restore it
        events.add(new Event(1, "Java", eventsDateFormat.parse("10.07.2015"), null, "http:
        "Beresteyska", new Location(50.458585, 30.742017), "java@gmail.com", true, 0, new Currency("USD"), new City()));
        // NOTE(review): truncated here too; the station argument seems lost as well
        events.add(new Event(2, "Ruby", eventsDateFormat.parse("20.07.2015"), null, "http:
        new Location(50.454605, 30.445495), "ruby@gmail.com", true, 0, new Currency("USD"), new City()));
        // Full expected HTML output, byte-for-byte (including trailing newline).
        String expectedHTML = "<html>\n" +
                "<head>\n" +
                "<META http-equiv=\"Content-Type\" content=\"text/html; charset=UTF-8\">\n" +
                "<title> !</title>\n" +
                "<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\">\n" +
                "<meta name=\"robots\" content=\"noindex, nofollow\">\n" +
                "</head>\n" +
                "<body>\n" +
                "<table cellpadding=\"0\" cellspacing=\"0\" width=\"100%\" border=\"0\" bgcolor=\"#f2f2f2\">\n" +
                "<tr>\n" +
                "<td valign=\"top\" align=\"center\">\n" +
                "<table cellspacing=\"0\" cellpadding=\"0\" width=\"560\" border=\"0\" style=\"border-width:0px;border-color:#cccccc;border-style:solid;\" bgcolor=\"#ffffff\">\n" +
                "<tr>\n" +
                "<td align=\"center\" width=\"560\">\n" +
                "<table border=\"0\" cellspacing=\"0\" width=\"560\" cellpadding=\"0\">\n" +
                "<tr>\n" +
                "<td align=\"center\" width=\"560\"><img height=\"100\" src=\"\" width=\"530\" border=\"0\" alt=\"Our logo\"></td>\n" +
                "</tr>\n" +
                "</table>\n" +
                "<table width=\"560\" border=\"0\" cellspacing=\"0\" cellpadding=\"0\">\n" +
                "<tr>\n" +
                "<td width=\"560\" height=\"20\"></td>\n" +
                "</tr>\n" +
                "</table>\n" +
                "<table width=\"520\" border=\"0\" cellspacing=\"0\" cellpadding=\"0\">\n" +
                "<tr>\n" +
                "<td align=\"left\" valign=\"top\" style=\"color:#000000;font-family:Arial, Helvetica, sans-serif;line-height:17px;font-size:11px;\">\n" +
                "<p style=\"margin:0px 0px 10px 0px; margin-bottom: 10px;\">\n" +
                "<h3 style=\"margin:0px;\">Java</h3>\n" +
                "<span style=\"font-style: italic;\">10.07.2015</span>\n" +
                "<br>\n" +
                "<span>Beresteyska - java@gmail.com</span>\n" +
                "<br>\n" +
                // NOTE(review): truncated anchor literal ("http:) — restore
                "<a href=\"http:
                "</p>\n" +
                "</td>\n" +
                "</tr>\n" +
                "</table>\n" +
                "<table width=\"520\" border=\"0\" cellspacing=\"0\" cellpadding=\"0\">\n" +
                "<tr>\n" +
                "<td width=\"520\" height=\"10\"></td>\n" +
                "</tr>\n" +
                "</table>\n" +
                "<table width=\"520\" border=\"0\" cellspacing=\"0\" cellpadding=\"0\">\n" +
                "<tr>\n" +
                "<td align=\"left\" valign=\"top\" style=\"color:#000000;font-family:Arial, Helvetica, sans-serif;line-height:17px;font-size:11px;\">\n" +
                "<p style=\"margin:0px 0px 10px 0px; margin-bottom: 10px;\">\n" +
                "<h3 style=\"margin:0px;\">Ruby</h3>\n" +
                "<span style=\"font-style: italic;\">20.07.2015</span>\n" +
                "<br>\n" +
                "<span>Shulyavska - ruby@gmail.com</span>\n" +
                "<br>\n" +
                // NOTE(review): truncated anchor literal ("http:) — restore
                "<a href=\"http:
                "</p>\n" +
                "</td>\n" +
                "</tr>\n" +
                "</table>\n" +
                "<table width=\"520\" border=\"0\" cellspacing=\"0\" cellpadding=\"0\">\n" +
                "<tr>\n" +
                "<td width=\"520\" height=\"10\"></td>\n" +
                "</tr>\n" +
                "</table>\n" +
                "</td>\n" +
                "</tr>\n" +
                "</table>\n" +
                "</td>\n" +
                "</tr>\n" +
                "</table>\n" +
                "</body>\n" +
                "</html>\n" +
                "\n";
        String mailHtml = new MailBuilderUtil().buildHtmlFromEventsList(events);
        assertEquals(expectedHTML, mailHtml);
    }
}
|
package org.lilycms.rowlog.impl;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.WatchedEvent;
import org.apache.zookeeper.Watcher;
import org.apache.zookeeper.ZooKeeper;
import org.apache.zookeeper.KeeperException.NoNodeException;
import org.apache.zookeeper.KeeperException.SessionExpiredException;
import org.apache.zookeeper.ZooKeeper.States;
import org.apache.zookeeper.data.Stat;
import org.lilycms.rowlog.api.RowLog;
import org.lilycms.rowlog.api.RowLogException;
import org.lilycms.rowlog.api.RowLogProcessor;
import org.lilycms.rowlog.api.SubscriptionContext;
import org.lilycms.rowlog.api.SubscriptionContext.Type;
import org.lilycms.util.zookeeper.ZkPathCreationException;
import org.lilycms.util.zookeeper.ZkUtil;
/**
 * Manages rowlog configuration state (subscriptions, listeners and processor
 * hosts) stored in ZooKeeper under {@code /lily/rowlog}.
 *
 * NOTE(review): not documented as thread-safe; watcher callbacks re-register
 * themselves by calling back into the getAndMonitor* methods.
 */
public class RowLogConfigurationManager {
    private Log log = LogFactory.getLog(getClass());

    private String lilyPath = "/lily";
    private String rowLogPath = lilyPath + "/rowlog";

    private ZooKeeper zooKeeper;

    @Override
    protected void finalize() throws Throwable {
        // Safety net only; callers should invoke stop() explicitly since
        // finalization is not guaranteed to run promptly (or at all).
        stop();
        super.finalize();
    }

    /**
     * Closes the ZooKeeper connection if one was established.
     *
     * @throws InterruptedException if interrupted while closing
     */
    public void stop() throws InterruptedException {
        if (zooKeeper != null) {
            long sessionId = zooKeeper.getSessionId();
            zooKeeper.close();
            log.info("Closed zookeeper connection with sessionId 0x" + Long.toHexString(sessionId));
        }
    }

    /**
     * Connects to ZooKeeper and waits up to 5 seconds for the connection to be
     * established.
     *
     * @param connectString ZooKeeper connect string (host:port[,host:port...])
     * @throws RowLogException if the connection cannot be established in time
     */
    public RowLogConfigurationManager(String connectString) throws RowLogException {
        try {
            zooKeeper = new ZooKeeper(connectString, 5000, new ZkWatcher());
        } catch (Exception e) {
            throw new RowLogException("Failed to connect with Zookeeper @ <"+connectString+">", e);
        }
        long waitUntil = System.currentTimeMillis() + 5000;
        boolean connected = false;
        while (!connected && waitUntil > System.currentTimeMillis()) {
            connected = (States.CONNECTED).equals(zooKeeper.getState());
            if (!connected) {
                try {
                    // Fix: the original loop busy-spun a full CPU core while
                    // waiting; poll the connection state at a short interval.
                    Thread.sleep(50);
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt(); // preserve interrupt status
                    throw new RowLogException("Interrupted while connecting with Zookeeper @ <"+connectString+">", e);
                }
            }
        }
        if (!connected)
            throw new RowLogException("Failed to connect with Zookeeper @ <"+connectString+">");
        log.info("Connected to zookeeper with sessionId 0x"+Long.toHexString(zooKeeper.getSessionId()));
    }

    // Subscriptions

    /**
     * Reads the subscriptions of the given rowlog and installs a watcher that
     * notifies the processor when the subscription set changes.
     *
     * @return the current subscriptions; empty if the path does not exist or
     *         the session expired
     */
    public List<SubscriptionContext> getAndMonitorSubscriptions(RowLogProcessor processor, RowLog rowLog) throws KeeperException, InterruptedException {
        List<SubscriptionContext> subscriptions = new ArrayList<SubscriptionContext>();
        try {
            String rowLogId = rowLog.getId();
            List<String> subscriptionIds = zooKeeper.getChildren(subscriptionsPath(rowLogId), new SubscriptionsWatcher(processor, rowLog));
            for (String subscriptionId : subscriptionIds) {
                // Node data layout: "<Type>,<workerCount>"
                byte[] data = zooKeeper.getData(subscriptionPath(rowLogId, Integer.valueOf(subscriptionId)), false, new Stat());
                String dataString = Bytes.toString(data);
                String[] splitData = dataString.split(",");
                Type type = Type.valueOf(splitData[0]);
                int workerCount = Integer.valueOf(splitData[1]);
                subscriptions.add(new SubscriptionContext(Integer.valueOf(subscriptionId), type, workerCount));
            }
        } catch (NoNodeException exception) {
            // TODO Do we need to put another watcher here? How to cope with non-existing paths? Make them mandatory?
        } catch (SessionExpiredException exception) {
            // TODO ok to ignore this? Should I rather throw an exception
        }
        return subscriptions;
    }

    /**
     * Registers a subscription for a rowlog. Currently a no-op if the node
     * already exists (no update support).
     */
    public void addSubscription(String rowLogId, int subscriptionId, SubscriptionContext.Type type, int workerCount) throws KeeperException, InterruptedException {
        String path = subscriptionPath(rowLogId, subscriptionId);
        String dataString = type.name() + "," + workerCount;
        byte[] data = Bytes.toBytes(dataString);
        if (zooKeeper.exists(path, false) == null) { // TODO currently not possible to update a subscription or add it twice
            try {
                ZkUtil.createPath(zooKeeper, path, data, CreateMode.PERSISTENT);
            } catch (ZkPathCreationException e) {
                // Fix: use the class logger instead of printStackTrace()
                log.error("Failed to create subscription path <" + path + ">", e);
            }
        }
    }

    /**
     * Removes a subscription node; absence of the node is ignored.
     */
    public void removeSubscription(String rowLogId, int subscriptionId) throws InterruptedException, KeeperException {
        String path = subscriptionPath(rowLogId, subscriptionId);
        try {
            zooKeeper.delete(path, -1);
        } catch (KeeperException.NoNodeException ignore) {
        } catch (KeeperException.NotEmptyException e) {
            // TODO Think what should happen here
            // Remove listeners first?
        }
    }

    // Listeners

    /**
     * Reads the listener ids of a subscription and installs a watcher that
     * notifies the callback when the listener set changes.
     *
     * @return the current listener ids; empty if the path does not exist or
     *         the session expired
     */
    public List<String> getAndMonitorListeners(ListenersWatcherCallBack callBack, String rowLogId, int subscriptionId) throws KeeperException, InterruptedException {
        List<String> listeners = new ArrayList<String>();
        try {
            return zooKeeper.getChildren(subscriptionPath(rowLogId, subscriptionId), new ListenersWatcher(callBack, rowLogId, subscriptionId));
        } catch (NoNodeException exception) {
            // TODO Do we need to put another watcher here? How to cope with non-existing paths? Make them mandatory?
        } catch (SessionExpiredException exception) {
            // TODO ok to ignore this? Should I rather throw an exception
        }
        return listeners;
    }

    /**
     * Registers a listener as an ephemeral node so it disappears automatically
     * when this session ends.
     */
    public void addListener(String rowLogId, int subscriptionId, String listenerId) throws RowLogException {
        String path = listenerPath(rowLogId, subscriptionId, listenerId);
        try {
            if (zooKeeper.exists(path, false) == null) {
                ZkUtil.createPath(zooKeeper, path, null, CreateMode.EPHEMERAL);
            }
        } catch (Exception e) {
            throw new RowLogException("Failed to add listener to rowlog configuration", e);
        }
    }

    /**
     * Removes a listener node; absence of the node is ignored.
     */
    public void removeListener(String rowLogId, int subscriptionId, String listenerId) throws RowLogException {
        String path = listenerPath(rowLogId, subscriptionId, listenerId);
        try {
            zooKeeper.delete(path, -1);
        } catch (KeeperException.NoNodeException ignore) {
        } catch (Exception e) {
            throw new RowLogException("Failed to remove listener from rowlog configuration", e);
        }
    }

    // Processor Host

    /**
     * Publishes "host:port" of the processor for a shard. Failures are
     * currently logged, not rethrown.
     */
    public void publishProcessorHost(String hostName, int port, String rowLogId, String shardId) {
        String path = processorPath(rowLogId, shardId);
        try {
            if (zooKeeper.exists(path, false) == null) {
                ZkUtil.createPath(zooKeeper, path);
            }
            zooKeeper.setData(path, Bytes.toBytes(hostName + ":" + port), -1);
        } catch (Exception e) {
            // TODO throw instead? For now keep best-effort semantics but log.
            log.error("Failed to publish processor host for rowlog <" + rowLogId + "> shard <" + shardId + ">", e);
        }
    }

    /**
     * Removes the published processor host for a shard (best effort).
     */
    public void unPublishProcessorHost(String rowLogId, String shardId) {
        try {
            zooKeeper.delete(processorPath(rowLogId, shardId), -1);
        } catch (Exception e) {
            // TODO throw instead? For now keep best-effort semantics but log.
            log.error("Failed to unpublish processor host for rowlog <" + rowLogId + "> shard <" + shardId + ">", e);
        }
    }

    /**
     * @return the published "host:port" for a shard, or null if unavailable
     */
    public String getProcessorHost(String rowLogId, String shardId) {
        try {
            return Bytes.toString(zooKeeper.getData(processorPath(rowLogId, shardId), false, new Stat()));
        } catch (Exception e) {
            return null;
        }
    }

    // Paths

    private String subscriptionPath(String rowLogId, int subscriptionId) {
        return subscriptionsPath(rowLogId) + "/" + subscriptionId;
    }

    private String subscriptionsPath(String rowLogId) {
        return rowLogPath + "/" + rowLogId + "/subscriptions";
    }

    private String shardPath(String rowLogId, String shardId) {
        return rowLogPath + "/" + rowLogId + "/shards" + "/" + shardId;
    }

    private String processorPath(String rowLogId, String shardId) {
        return shardPath(rowLogId, shardId) + "/" + "processorHost";
    }

    private String listenerPath(String rowLogId, int subscriptionId, String listenerId) {
        return subscriptionPath(rowLogId, subscriptionId) + "/" + listenerId;
    }

    // Watchers

    /** Re-reads subscriptions on change and pushes them to the processor. */
    private class SubscriptionsWatcher implements Watcher {
        private final RowLogProcessor processor;
        private final RowLog rowLog;

        public SubscriptionsWatcher(RowLogProcessor processor, RowLog rowLog) {
            this.processor = processor;
            this.rowLog = rowLog;
        }

        public void process(WatchedEvent event) {
            try {
                // Connection-state events carry no child changes; ignore them.
                if (event.getState() == Event.KeeperState.Disconnected)
                    return;
                if (event.getState() == Event.KeeperState.Expired)
                    return;
                processor.subscriptionsChanged(getAndMonitorSubscriptions(processor, rowLog));
            } catch (KeeperException e) {
                log.error("Failed to handle subscriptions change", e);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt(); // preserve interrupt status
                log.error("Interrupted while handling subscriptions change", e);
            }
        }
    }

    /** Re-reads listeners on change and pushes them to the callback. */
    private class ListenersWatcher implements Watcher {
        private final String rowLogId;
        private final int subscriptionId;
        private final ListenersWatcherCallBack callBack;

        public ListenersWatcher(ListenersWatcherCallBack callBack, String rowLogId, int subscriptionId) {
            this.callBack = callBack;
            this.rowLogId = rowLogId;
            this.subscriptionId = subscriptionId;
        }

        public void process(WatchedEvent event) {
            try {
                // Connection-state events carry no child changes; ignore them.
                if (event.getState() == Event.KeeperState.Disconnected)
                    return;
                if (event.getState() == Event.KeeperState.Expired)
                    return;
                callBack.listenersChanged(getAndMonitorListeners(callBack, rowLogId, subscriptionId));
            } catch (KeeperException e) {
                log.error("Failed to handle listeners change", e);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt(); // preserve interrupt status
                log.error("Interrupted while handling listeners change", e);
            }
        }
    }

    /** Default connection watcher; events are intentionally ignored. */
    private class ZkWatcher implements Watcher {
        public void process(WatchedEvent event) {
        }
    }
}
|
package net.runelite.client.plugins.cannon;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import lombok.Getter;
import net.runelite.api.coords.WorldPoint;
/**
 * Known good dwarf multicannon placement spots, grouped by the monster they
 * target. The flattened list of every spot is exposed via the static
 * {@code getCannonSpots()} accessor generated by Lombok.
 */
public enum CannonSpots
{
	BLOODVELDS(new WorldPoint(2439, 9821, 0), new WorldPoint(2448, 9821, 0), new WorldPoint(2472, 9833, 0), new WorldPoint(2453, 9817, 0)),
	FIRE_GIANTS(new WorldPoint(2393, 9782, 0), new WorldPoint(2412, 9776, 0), new WorldPoint(2401, 9780, 0)),
	ABBERANT_SPECTRES(new WorldPoint(2456, 9791, 0)),
	HELLHOUNDS(new WorldPoint(2431, 9776, 0), new WorldPoint(2413, 9786, 0), new WorldPoint(2783, 9686, 0)),
	BLACK_DEMONS(new WorldPoint(2859, 9778, 0), new WorldPoint(2841, 9791, 0)),
	ELVES(new WorldPoint(2044, 4635, 0)),
	SUQAHS(new WorldPoint(2114, 3943, 0)),
	TROLLS(new WorldPoint(2405, 3857, 0)),
	GREATER_DEMONS(new WorldPoint(1435, 10086, 2)),
	BRINE_RAT(new WorldPoint(2707, 10132, 0)),
	DAGGANOTH(new WorldPoint(2524, 10020, 0)),
	DARK_BEAST(new WorldPoint(1992, 4655, 0)),
	DUST_DEVIL(new WorldPoint(3218, 9366, 0)),
	KALPHITE(new WorldPoint(3307, 9528, 0)),
	LESSER_DEMON(new WorldPoint(2838, 9559, 0)),
	LIZARDMEN(new WorldPoint(1500, 3703, 0)),
	MINIONS_OF_SCARABAS(new WorldPoint(3297, 9252, 0)),
	SMOKE_DEVIL(new WorldPoint(2398, 9444, 0)),
	CAVE_HORROR(new WorldPoint(3785, 9460, 0));

	// Per-constant spot coordinates, as passed to the constructor
	private final WorldPoint[] spots;

	// Flattened view over every constant's spots, populated once below
	@Getter
	private static final List<WorldPoint> cannonSpots = new ArrayList<>();

	static
	{
		for (CannonSpots group : values())
		{
			for (WorldPoint spot : group.spots)
			{
				cannonSpots.add(spot);
			}
		}
	}

	CannonSpots(WorldPoint... spots)
	{
		this.spots = spots;
	}
}
|
package edu.samplu.krad.travelview;
import edu.samplu.common.KradMenuLegacyITBase;
import org.junit.Test;
/**
* @author Kuali Rice Team (rice.collab@kuali.org)
*/
public class MaintenanceRouteLogLegacyIT extends KradMenuLegacyITBase {

    /**
     * @return the menu link text used to reach the Travel Account Maintenance page
     */
    @Override
    protected String getLinkLocator() {
        return "Travel Account Maintenance (New)";
    }

    /**
     * Verify the Route Log section exists and contains an IFrame.
     *
     * Fix: the Javadoc was previously placed between {@code @Test} and the
     * method, where javadoc tooling does not attach it; it now precedes the
     * annotation.
     */
    @Test
    public void testVerifyRouteLog() throws Exception {
        gotoMenuLinkLocator();
        waitAndClickByLinkText("Route Log");
        waitForElementPresent("//iframe[contains(@src,'RouteLog.do')]");
    }
}
|
package dyvil.tools.repl.context;
import dyvil.collection.List;
import dyvil.collection.Map;
import dyvil.collection.mutable.ArrayList;
import dyvil.collection.mutable.HashMap;
import dyvil.collection.mutable.IdentityHashMap;
import dyvil.reflect.Modifiers;
import dyvil.tools.compiler.ast.access.FieldAccess;
import dyvil.tools.compiler.ast.classes.IClass;
import dyvil.tools.compiler.ast.constructor.IConstructor;
import dyvil.tools.compiler.ast.constructor.IInitializer;
import dyvil.tools.compiler.ast.consumer.IMemberConsumer;
import dyvil.tools.compiler.ast.consumer.IValueConsumer;
import dyvil.tools.compiler.ast.context.IContext;
import dyvil.tools.compiler.ast.expression.IValue;
import dyvil.tools.compiler.ast.field.IDataMember;
import dyvil.tools.compiler.ast.field.IField;
import dyvil.tools.compiler.ast.field.IProperty;
import dyvil.tools.compiler.ast.header.ImportDeclaration;
import dyvil.tools.compiler.ast.header.IncludeDeclaration;
import dyvil.tools.compiler.ast.member.IClassMember;
import dyvil.tools.compiler.ast.method.IMethod;
import dyvil.tools.compiler.ast.method.MethodMatchList;
import dyvil.tools.compiler.ast.modifiers.BaseModifiers;
import dyvil.tools.compiler.ast.modifiers.ModifierList;
import dyvil.tools.compiler.ast.modifiers.ModifierSet;
import dyvil.tools.compiler.ast.operator.Operator;
import dyvil.tools.compiler.ast.parameter.IArguments;
import dyvil.tools.compiler.ast.structure.DyvilHeader;
import dyvil.tools.compiler.ast.structure.IClassCompilableList;
import dyvil.tools.compiler.ast.type.IType;
import dyvil.tools.compiler.ast.type.alias.ITypeAlias;
import dyvil.tools.compiler.ast.type.builtin.Types;
import dyvil.tools.compiler.backend.ClassWriter;
import dyvil.tools.compiler.backend.IClassCompilable;
import dyvil.tools.compiler.util.Util;
import dyvil.tools.parsing.Name;
import dyvil.tools.parsing.marker.Marker;
import dyvil.tools.parsing.marker.MarkerList;
import dyvil.tools.parsing.position.ICodePosition;
import dyvil.tools.repl.DyvilREPL;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * The compilation context of a single Dyvil REPL session. It accumulates the
 * fields, properties, methods and classes defined so far, makes them
 * resolvable from subsequent inputs, and drives each input through the
 * compiler phases (resolveTypes -> resolve -> checkTypes -> check ->
 * foldConstants -> cleanup) before compiling and loading the result.
 *
 * NOTE(review): not thread-safe — assumes a single REPL thread.
 */
public class REPLContext extends DyvilHeader implements IValueConsumer, IMemberConsumer, IClassCompilableList
{
	// Internal-name prefix under which generated result classes are defined
	private static final String REPL$CLASSES = "repl$classes/";

	// Mask of the three explicit access modifiers
	public static final int ACCESS_MODIFIERS = Modifiers.PUBLIC | Modifiers.PRIVATE | Modifiers.PROTECTED;

	protected final DyvilREPL repl;

	// Persistent members — survive across evaluations
	private final Map<Name, IField> fields = new IdentityHashMap<>();
	private final Map<Name, IProperty> properties = new IdentityHashMap<>();
	private final List<IMethod> methods = new ArrayList<>();
	private final Map<Name, IClass> classes = new IdentityHashMap<>();
	// Per-type-name counters used to generate unique result variable names
	private Map<String, AtomicInteger> resultIndexes = new HashMap<>();

	// Cleared / Updated for every evaluation
	protected String currentCode;
	private int classIndex;
	private String className;
	protected final MarkerList markers = new MarkerList();
	protected final List<IClassCompilable> compilableList = new ArrayList<>();
	protected final List<IClassCompilable> innerClassList = new ArrayList<>();
	private IClass memberClass;

	public REPLContext(DyvilREPL repl)
	{
		super(repl.getCompiler(), Name.getQualified("REPL"));
		this.repl = repl;
	}

	// Getters

	public Map<Name, IField> getFields()
	{
		return this.fields;
	}

	public Map<Name, IProperty> getProperties()
	{
		return this.properties;
	}

	public List<IMethod> getMethods()
	{
		return this.methods;
	}

	public Map<Name, IClass> getClasses()
	{
		return this.classes;
	}

	// Evaluation

	/**
	 * Prepares a new evaluation: remembers the input, allocates a fresh
	 * generated class name and clears per-evaluation state.
	 */
	public void startEvaluation(String code)
	{
		this.currentCode = code;
		this.className = REPL$CLASSES + "REPL$Result$" + this.classIndex++;
		this.cleanup();
	}

	public MarkerList getMarkers()
	{
		return this.markers;
	}

	// True if the current evaluation produced at least one error marker
	protected boolean hasErrors()
	{
		return this.markers.getErrors() > 0;
	}

	/**
	 * Prints all collected diagnostics (sorted) to the compiler's error output.
	 */
	public void reportErrors()
	{
		if (this.markers.isEmpty())
		{
			return;
		}
		StringBuilder buf = new StringBuilder();
		this.markers.sort();
		for (Marker m : this.markers)
		{
			m.log(this.currentCode, buf);
		}
		this.compiler.getErrorOutput().println(buf.toString());
	}

	// Compiles and loads every pending inner class; failures are reported but
	// do not abort the other inner classes.
	private void compileInnerClasses()
	{
		for (IClassCompilable icc : this.innerClassList)
		{
			try
			{
				String fileName = icc.getFileName();
				byte[] bytes = ClassWriter.compile(icc);
				REPLMemberClass.loadClass(this.repl, REPL$CLASSES.concat(fileName), bytes);
			}
			catch (Throwable t)
			{
				t.printStackTrace(this.compiler.getErrorOutput());
			}
		}
	}

	// Runs the full phase sequence on a variable; returns false if errors occurred
	private boolean computeVariable(REPLVariable field)
	{
		field.resolveTypes(this.markers, this);
		field.resolve(this.markers, this);
		field.checkTypes(this.markers, this);
		field.check(this.markers, this);
		if (this.hasErrors())
		{
			return false;
		}
		final int folding = this.compiler.config.getConstantFolding();
		for (int i = 0; i < folding; i++)
		{
			field.foldConstants();
		}
		field.cleanup(this, this);
		this.compileVariable(field);
		return true;
	}

	// Builds a readable base name for a type, e.g. int[] -> "intArray"
	private static void getClassName(StringBuilder builder, IType type)
	{
		if (type.isArrayType())
		{
			getClassName(builder, type.getElementType());
			builder.append("Array");
			return;
		}
		if (type.typeTag() == IType.REFERENCE)
		{
			getClassName(builder, type.getElementType());
			builder.append("Ref");
			return;
		}
		builder.append(type.getName().unqualified);
	}

	/**
	 * Derives a unique result variable name from a type: lower-cases the type
	 * name, strips trailing digits and appends a per-name running index.
	 */
	private Name getFieldName(IType type)
	{
		StringBuilder sb = new StringBuilder();
		getClassName(sb, type);
		// Make the first character lower case
		sb.setCharAt(0, Character.toLowerCase(sb.charAt(0)));
		// Strip trailing digits
		for (int i = 0, len = sb.length(); i < len; i++)
		{
			if (Character.isDigit(sb.charAt(i)))
			{
				sb.delete(i, len);
				break;
			}
		}
		// The final variable name, without the index
		String shortName = sb.toString();
		AtomicInteger ai = this.resultIndexes.get(shortName);
		if (ai == null)
		{
			this.resultIndexes.put(shortName, ai = new AtomicInteger(0));
		}
		int index = ai.incrementAndGet();
		return Name.get(sb.append(index).toString());
	}

	private void compileVariable(REPLVariable field)
	{
		this.compileInnerClasses();
		field.compute(this.repl, this.compilableList);
	}

	// Wraps a member in a fresh generated class and forces it static
	private REPLMemberClass getREPLClass(IClassMember member)
	{
		REPLMemberClass iclass = new REPLMemberClass(Name.getQualified(this.className), member, this);
		member.setEnclosingClass(iclass);
		member.getModifiers().addIntModifier(Modifiers.STATIC);
		return iclass;
	}

	private void compileClass(IClass iclass)
	{
		this.compileInnerClasses();
		REPLMemberClass.compile(this.repl, iclass);
	}

	@Override
	public void cleanup()
	{
		this.compilableList.clear();
		this.innerClassList.clear();
		this.markers.clear();
	}

	/**
	 * Evaluates a bare expression: wraps it in a final result variable, runs
	 * all compiler phases, compiles it and prints the result. A plain access
	 * to an existing REPL variable is special-cased to just print its value.
	 */
	@Override
	public void setValue(IValue value)
	{
		ModifierList modifierList = new ModifierList();
		modifierList.addModifier(BaseModifiers.FINAL);
		REPLVariable field = new REPLVariable(this, ICodePosition.ORIGIN, null, Types.UNKNOWN, value, this.className,
		                                      modifierList);
		this.memberClass = this.getREPLClass(field);
		value.resolveTypes(this.markers, this);
		value = value.resolve(this.markers, this);
		if (value.valueTag() == IValue.FIELD_ACCESS)
		{
			IDataMember f = ((FieldAccess) value).getField();
			if (f instanceof REPLVariable)
			{
				((REPLVariable) f).updateValue(this.repl);
				this.compiler.getOutput().println(f);
				return;
			}
		}
		IType type = value.getType();
		IValue typedValue = value.withType(type, type, this.markers, this);
		if (typedValue != null)
		{
			value = typedValue;
		}
		type = value.getType();
		value.checkTypes(this.markers, this);
		value.check(this.markers, this);
		if (this.hasErrors())
		{
			return;
		}
		final int folding = this.compiler.config.getConstantFolding();
		for (int i = 0; i < folding; i++)
		{
			value = value.foldConstants();
		}
		value = value.cleanup(this, this);
		field.setValue(value);
		field.setType(type);
		field.setName(this.getFieldName(type));
		this.compileVariable(field);
		if (type != Types.VOID)
		{
			// void results are computed but not stored or echoed
			this.fields.put(field.getName(), field);
			this.compiler.getOutput().println(field.toString());
		}
	}

	@Override
	public void addOperator(Operator operator)
	{
		super.addOperator(operator);
		this.compiler.getOutput().println("Defined " + operator);
	}

	@Override
	public void addImport(ImportDeclaration declaration)
	{
		declaration.resolveTypes(this.markers, this, false);
		if (this.hasErrors())
		{
			return;
		}
		super.addImport(declaration);
		this.compiler.getOutput().println(declaration);
	}

	@Override
	public void addUsing(ImportDeclaration usingDeclaration)
	{
		usingDeclaration.resolveTypes(this.markers, this, true);
		if (this.hasErrors())
		{
			return;
		}
		super.addUsing(usingDeclaration);
		this.compiler.getOutput().println(usingDeclaration);
	}

	@Override
	public void addInclude(IncludeDeclaration includeDeclaration)
	{
		includeDeclaration.resolve(this.markers, this);
		if (this.hasErrors())
		{
			return;
		}
		super.addInclude(includeDeclaration);
		this.compiler.getOutput().println(includeDeclaration);
	}

	@Override
	public void addTypeAlias(ITypeAlias typeAlias)
	{
		typeAlias.resolveTypes(this.markers, this);
		typeAlias.resolve(this.markers, this);
		typeAlias.checkTypes(this.markers, this);
		typeAlias.check(this.markers, this);
		if (this.hasErrors())
		{
			return;
		}
		typeAlias.foldConstants();
		typeAlias.cleanup(this, this);
		super.addTypeAlias(typeAlias);
		this.compiler.getOutput().println(typeAlias);
	}

	/**
	 * Defines a class in the REPL. Re-definition of an already-loaded class is
	 * rejected because the class loader cannot replace it.
	 */
	@Override
	public void addClass(IClass iclass)
	{
		iclass.setHeader(this);
		// Check if the class is already defined
		try
		{
			Class.forName(iclass.getFullName(), false, REPLMemberClass.CLASS_LOADER);
			this.compiler.getErrorOutput().println("The class '" + iclass.getName() + "' cannot be re-defined");
			return;
		}
		catch (ClassNotFoundException ignored)
		{
		}
		// Run the usual phases
		iclass.resolveTypes(this.markers, this);
		iclass.resolve(this.markers, this);
		iclass.checkTypes(this.markers, this);
		iclass.check(this.markers, this);
		if (this.hasErrors())
		{
			return;
		}
		final int folding = this.compiler.config.getConstantFolding();
		for (int i = 0; i < folding; i++)
		{
			iclass.foldConstants();
		}
		iclass.cleanup(this, this);
		// Compile and load the class
		this.compileClass(iclass);
		this.classes.put(iclass.getName(), iclass);
		StringBuilder buf = new StringBuilder("Defined ");
		Util.classSignatureToString(iclass, buf);
		this.compiler.getOutput().println(buf.toString());
	}

	@Override
	public void addInnerClass(IClassCompilable iclass)
	{
		// Separate-file compilables get their own class file; the rest are
		// merged into the current result class.
		if (iclass.hasSeparateFile())
		{
			iclass.setInnerIndex(this.className, this.innerClassList.size());
			this.innerClassList.add(iclass);
		}
		else
		{
			this.addCompilable(iclass);
		}
	}

	@Override
	public int compilableCount()
	{
		return this.compilableList.size();
	}

	@Override
	public void addCompilable(IClassCompilable compilable)
	{
		compilable.setInnerIndex(this.className, this.compilableList.size());
		this.compilableList.add(compilable);
	}

	@Override
	public IClassCompilable getCompilable(int index)
	{
		return this.compilableList.get(index);
	}

	/**
	 * Defines an explicit field in the REPL; on success it is stored and echoed.
	 */
	@Override
	public void addField(IField field)
	{
		REPLVariable var = new REPLVariable(this, field.getPosition(), field.getName(), field.getType(),
		                                    field.getValue(), this.className, field.getModifiers());
		var.setAnnotations(field.getAnnotations());
		this.memberClass = this.getREPLClass(var);
		if (this.computeVariable(var))
		{
			this.fields.put(var.getName(), var);
			this.compiler.getOutput().println(var.toString());
		}
	}

	/**
	 * Defines a property in the REPL: runs all phases, compiles its wrapper
	 * class and registers it.
	 */
	@Override
	public void addProperty(IProperty property)
	{
		REPLMemberClass iclass = this.getREPLClass(property);
		property.setEnclosingClass(iclass);
		property.resolveTypes(this.markers, this);
		property.resolve(this.markers, this);
		property.checkTypes(this.markers, this);
		property.check(this.markers, this);
		if (this.hasErrors())
		{
			return;
		}
		final int folding = this.compiler.config.getConstantFolding();
		for (int i = 0; i < folding; i++)
		{
			property.foldConstants();
		}
		property.cleanup(this, this);
		this.compileClass(iclass);
		this.properties.put(property.getName(), property);
		StringBuilder buf = new StringBuilder("Defined Property '");
		Util.memberSignatureToString(property, buf);
		this.compiler.getOutput().println(buf.append('\'').toString());
		this.cleanup();
	}

	/**
	 * Defines a method in the REPL: forces public/static access, runs all
	 * phases, compiles its wrapper class and registers (or replaces) it.
	 */
	@Override
	public void addMethod(IMethod method)
	{
		updateModifiers(method.getModifiers());
		REPLMemberClass iclass = this.getREPLClass(method);
		method.resolveTypes(this.markers, this);
		if (this.hasErrors())
		{
			return;
		}
		method.resolve(this.markers, this);
		method.checkTypes(this.markers, this);
		method.check(this.markers, this);
		if (this.hasErrors())
		{
			return;
		}
		final int folding = this.compiler.config.getConstantFolding();
		for (int i = 0; i < folding; i++)
		{
			method.foldConstants();
		}
		method.cleanup(this, this);
		this.compileClass(iclass);
		this.registerMethod(method, iclass);
		this.cleanup();
	}

	// Ensures a member is public (if no explicit access modifier) and static
	public static void updateModifiers(ModifierSet modifiers)
	{
		if ((modifiers.toFlags() & ACCESS_MODIFIERS) == 0)
		{
			modifiers.addIntModifier(Modifiers.PUBLIC);
		}
		modifiers.addIntModifier(Modifiers.STATIC);
	}

	// Replaces a method with a matching signature, otherwise appends; echoes
	// whether it was defined or re-defined.
	private void registerMethod(IMethod method, REPLMemberClass iclass)
	{
		boolean replaced = false;
		int methods = this.methods.size();
		for (int i = 0; i < methods; i++)
		{
			if (this.methods.get(i).checkOverride(this.markers, iclass, method, null))
			{
				this.methods.set(i, method);
				replaced = true;
				break;
			}
		}
		StringBuilder buf = new StringBuilder();
		if (!replaced)
		{
			this.methods.add(method);
			buf.append("Defined method ");
		}
		else
		{
			buf.append("Re-defined method ");
		}
		buf.append('\'');
		Util.methodSignatureToString(method, buf);
		this.compiler.getOutput().println(buf.append('\'').toString());
	}

	// Initializers and constructors are not supported at the REPL top level
	@Override
	public void addInitializer(IInitializer initializer)
	{
	}

	@Override
	public void addConstructor(IConstructor constructor)
	{
	}

	// REPL-defined members take precedence over the header's own resolution

	@Override
	public IClass resolveClass(Name name)
	{
		IClass c = this.classes.get(name);
		if (c != null)
		{
			return c;
		}
		return super.resolveClass(name);
	}

	@Override
	public IDataMember resolveField(Name name)
	{
		IField f = this.fields.get(name);
		if (f != null)
		{
			return f;
		}
		return super.resolveField(name);
	}

	@Override
	public void getMethodMatches(MethodMatchList list, IValue receiver, Name name, IArguments arguments)
	{
		for (IMethod method : this.methods)
		{
			IContext.getMethodMatch(list, receiver, name, arguments, method);
		}
		final IProperty property = this.properties.get(Util.removeEq(name));
		if (property != null)
		{
			property.getMethodMatches(list, receiver, name, arguments);
		}
		// Only fall back to the header if no REPL member matched
		if (!list.isEmpty())
		{
			return;
		}
		super.getMethodMatches(list, receiver, name, arguments);
	}

	@Override
	public IType getReturnType()
	{
		return null;
	}

	// All exception types are considered handled at the REPL top level
	@Override
	public byte checkException(IType type)
	{
		return TRUE;
	}

	@Override
	public IClass getThisClass()
	{
		return this.memberClass;
	}

	@Override
	public Name getName()
	{
		return Name.getQualified(this.className);
	}

	@Override
	public String getFullName()
	{
		return this.className;
	}

	@Override
	public String getFullName(Name name)
	{
		return "repl$classes." + name.qualified;
	}

	@Override
	public String getInternalName()
	{
		return this.className;
	}

	@Override
	public String getInternalName(Name name)
	{
		return REPL$CLASSES + name.qualified;
	}
}
|
package core.userDefinedTask;
import java.io.File;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.JOptionPane;
import utilities.ExceptableFunction;
import utilities.FileUtility;
import utilities.Function;
import utilities.ILoggable;
import utilities.JSONUtility;
import argo.jdom.JsonNode;
import argo.jdom.JsonNodeFactories;
import argo.jdom.JsonRootNode;
import core.config.IJsonable;
import core.controller.Core;
import core.keyChain.KeyChain;
import core.languageHandler.Language;
import core.languageHandler.compiler.AbstractNativeCompiler;
import core.languageHandler.compiler.DynamicCompilerManager;
/**
 * Base class for a task defined by the user. A task has a name, a set of
 * activating hotkeys, the path of the source file it was compiled from, the
 * language it was written in, an enabled flag and usage statistics.
 * Subclasses implement {@link #action(Core)} with the actual behavior.
 */
public abstract class UserDefinedAction implements IJsonable, ILoggable {

    private static final Logger LOGGER = Logger.getLogger(UserDefinedAction.class.getName());

    protected String name;
    protected Set<KeyChain> hotkeys;
    // Path of the source file this action was compiled from.
    protected String sourcePath;
    // Language of the source; the field is named "compiler" for historical reasons.
    protected Language compiler;
    protected boolean enabled;
    // Callback allowing this task to execute other tasks in its group.
    protected ExceptableFunction<Integer, Void, InterruptedException> executeTaskInGroup;
    // The key chain that invoked this action; mutated in place, never replaced.
    protected KeyChain invokingKeyChain;
    protected UsageStatistics statistics;

    public UserDefinedAction() {
        invokingKeyChain = new KeyChain();
        statistics = new UsageStatistics();
        enabled = true;
    }

    /**
     * Custom action defined by user.
     * @param controller see {@link core.controller.Core} class
     * @throws InterruptedException if interrupted while performing the action
     */
    public abstract void action(Core controller) throws InterruptedException;

    /**
     * Perform the action and track the statistics related to this action
     * (last-used time and average execution time).
     * @param controller controller used to perform the action
     * @throws InterruptedException if interrupted while performing the action
     */
    public final void trackedAction(Core controller) throws InterruptedException {
        long time = System.currentTimeMillis();
        statistics.useNow();
        action(controller);
        time = System.currentTimeMillis() - time;
        statistics.updateAverageExecutionTime(time);
    }

    public final void setName(String name) {
        this.name = name;
    }

    public final void setHotKeys(Set<KeyChain> hotkeys) {
        this.hotkeys = hotkeys;
    }

    /**
     * Returns the hotkeys for this action, lazily creating an empty set so the
     * result is never null.
     */
    public final Set<KeyChain> getHotkeys() {
        if (hotkeys == null) {
            hotkeys = new HashSet<KeyChain>();
        }
        return hotkeys;
    }

    /**
     * Retrieve an arbitrary key chain from the set of key chains (whichever the
     * set's iterator yields first — not actually random). If there is no
     * keychain for the task, return an empty key chain.
     * @return an arbitrary key chain from the set of key chains.
     */
    public final KeyChain getRepresentativeHotkey() {
        Set<KeyChain> hotkeys = getHotkeys();
        // The null check is defensive; getHotkeys() never returns null.
        if (hotkeys == null || hotkeys.isEmpty()) {
            return new KeyChain();
        } else {
            return hotkeys.iterator().next();
        }
    }

    public final String getName() {
        return name;
    }

    public final String getSourcePath() {
        return sourcePath;
    }

    /**
     * Reads and returns the source code of this action from its source path,
     * or null if the file cannot be read.
     */
    public String getSource() {
        StringBuffer source = FileUtility.readFromFile(sourcePath);
        if (source == null) {
            return null;
        }
        return source.toString();
    }

    public final void setSourcePath(String sourcePath) {
        this.sourcePath = sourcePath;
    }

    public final Language getCompiler() {
        return compiler;
    }

    public final void setCompiler(Language compiler) {
        this.compiler = compiler;
    }

    public boolean isEnabled() {
        return enabled;
    }

    public void setEnabled(boolean enabled) {
        this.enabled = enabled;
    }

    public final UsageStatistics getStatistics() {
        return statistics;
    }

    /**
     * Copy name, hotkeys and statistics from another action onto this one.
     */
    public final void override(UserDefinedAction other) {
        setName(other.getName());
        setHotKeys(other.getHotkeys());
        this.statistics = other.statistics;
    }

    /**
     * This method is called to dynamically allow the current task to execute other tasks in group
     * @param executeTaskInGroup
     */
    public final void setExecuteTaskInGroup(ExceptableFunction<Integer, Void, InterruptedException> executeTaskInGroup) {
        this.executeTaskInGroup = executeTaskInGroup;
    }

    /**
     * This method is called to dynamically allow the current task to determine which key chain activated it among
     * its hotkeys. This will only change the key chain definition of the current key chain, not substituting the real object
     * @param invokingKeyChain
     */
    public final void setInvokingKeyChain(KeyChain invokingKeyChain) {
        this.invokingKeyChain.getKeys().clear();
        this.invokingKeyChain.getKeys().addAll(invokingKeyChain.getKeys());
    }

    /**
     * Recompile this task. Currently only the no-op case (clean == false) is
     * supported; a clean recompile logs a warning and returns null.
     */
    public UserDefinedAction recompile(AbstractNativeCompiler compiler, boolean clean) {
        if (!clean) {
            return this;
        } else {
            //TODO recompile the current task
            getLogger().warning("Not supported");
            return null;
        }
    }

    /**
     * Copy all persisted content (source path, compiler, name, hotkeys,
     * enabled flag) from another action onto this one.
     */
    public final void syncContent(UserDefinedAction other) {
        this.sourcePath = other.sourcePath;
        this.compiler = other.compiler;
        this.name = other.name;
        this.hotkeys = other.hotkeys;
        this.enabled = other.enabled;
    }

    @Override
    public final JsonRootNode jsonize() {
        return JsonNodeFactories.object(
                JsonNodeFactories.field("source_path", JsonNodeFactories.string(sourcePath)),
                JsonNodeFactories.field("compiler", JsonNodeFactories.string(compiler.toString())),
                JsonNodeFactories.field("name", JsonNodeFactories.string(name)),
                JsonNodeFactories.field("hotkey", JsonNodeFactories.array(JSONUtility.listToJson(getHotkeys()))),
                JsonNodeFactories.field("enabled", JsonNodeFactories.booleanNode(enabled)),
                JsonNodeFactories.field("statistics", statistics.jsonize())
                );
    }

    /**
     * Reconstruct an action from its JSON representation: resolve the compiler,
     * read the source file, recompile it, then restore hotkeys, statistics and
     * the enabled flag. Returns null (after informing the user) on any failure.
     */
    public static final UserDefinedAction parseJSON(DynamicCompilerManager factory, JsonNode node) {
        try {
            String sourcePath = node.getStringValue("source_path");
            AbstractNativeCompiler compiler = factory.getCompiler(node.getStringValue("compiler"));
            if (compiler == null) {
                JOptionPane.showMessageDialog(null, "Unknown compiler " + node.getStringValue("compiler"));
                return null;
            }
            String name = node.getStringValue("name");
            List<JsonNode> hotkeyJSONs = node.getArrayNode("hotkey");
            Set<KeyChain> hotkeys = new HashSet<>();
            JSONUtility.addAllJson(hotkeyJSONs, new Function<JsonNode, KeyChain>(){
                @Override
                public KeyChain apply(JsonNode d) {
                    KeyChain value = KeyChain.parseJSON(d.getArrayNode());
                    return value;
                }}, hotkeys);
            File sourceFile = new File(sourcePath);
            StringBuffer sourceBuffer = FileUtility.readFromFile(sourceFile);
            String source = null;
            if (sourceBuffer == null) {
                JOptionPane.showMessageDialog(null, "Cannot get source at path " + sourcePath);
                return null;
            } else {
                source = sourceBuffer.toString();
            }
            // The compiled object file lives under "core", named after the source file.
            File objectFile = new File(FileUtility.joinPath("core", FileUtility.removeExtension(sourceFile).getName()));
            objectFile = FileUtility.addExtension(objectFile, compiler.getObjectExtension());
            UserDefinedAction output = compiler.compile(source, objectFile).getB();
            if (output == null) {
                JOptionPane.showMessageDialog(null, "Compilation failed for task " + name + " with source at path " + sourcePath);
                return null;
            }
            UsageStatistics statistics = UsageStatistics.parseJSON(node.getNode("statistics"));
            if (statistics != null) {
                output.statistics = statistics;
            } else {
                output.statistics.createNow();
                LOGGER.warning("Unable to retrieve statistics for task " + name);
            }
            boolean enabled = node.getBooleanValue("enabled");
            output.sourcePath = sourcePath;
            output.compiler = compiler.getName();
            output.name = name;
            output.hotkeys = hotkeys;
            output.enabled = enabled;
            return output;
        } catch (Exception e) {
            // Use the shared LOGGER constant instead of re-resolving the logger.
            LOGGER.log(Level.WARNING, "Exception parsing task from JSON", e);
            return null;
        }
    }

    @Override
    public final Logger getLogger() {
        // Logger.getLogger caches by name, so returning the constant is
        // behaviorally identical to the previous repeated lookup.
        return LOGGER;
    }
}
|
package de.lmu.ifi.dbs.distance;
import de.lmu.ifi.dbs.utilities.Util;
/**
 * Provides a Distance for a double-valued distance.
 *
 * @author Elke Achtert (<a href="mailto:achtert@dbs.ifi.lmu.de">achtert@dbs.ifi.lmu.de</a>)
 */
class DoubleDistance extends AbstractDistance {
    /**
     * The double value of this distance.
     */
    private double value;

    /**
     * Constructs a new DoubleDistance object that represents the double argument.
     * @param value the value to be represented by the DoubleDistance.
     */
    public DoubleDistance(double value) {
        this.value = value;
    }

    /**
     * Returns a hash code derived from the bit representation of the value,
     * consistent with {@link #equals(Object)}.
     * @see java.lang.Object#hashCode()
     */
    public int hashCode() {
        long bits = Double.doubleToLongBits(value);
        return (int) (bits ^ (bits >>> 32));
    }

    /**
     * Indicates whether the given object is a DoubleDistance representing the
     * same value. Added because the class overrides hashCode(); the comparison
     * uses doubleToLongBits so it is consistent with hashCode (NaN equals NaN,
     * and 0.0 differs from -0.0, matching Double.equals semantics).
     * @see java.lang.Object#equals(Object)
     */
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        DoubleDistance other = (DoubleDistance) o;
        return Double.doubleToLongBits(this.value) == Double.doubleToLongBits(other.value);
    }

    /**
     * Returns a new DoubleDistance holding the sum of both values.
     * @see de.lmu.ifi.dbs.distance.Distance
     */
    public Distance plus(Distance distance) {
        DoubleDistance other = (DoubleDistance) distance;
        return new DoubleDistance(this.value + other.value);
    }

    /**
     * Returns a new DoubleDistance holding the difference of both values.
     * @see de.lmu.ifi.dbs.distance.Distance
     */
    public Distance minus(Distance distance) {
        DoubleDistance other = (DoubleDistance) distance;
        return new DoubleDistance(this.value - other.value);
    }

    /**
     * @see de.lmu.ifi.dbs.distance.Distance
     */
    public String description() {
        return "distance";
    }

    /**
     * Compares by numeric value, consistent with {@link #equals(Object)}.
     * @see java.lang.Comparable#compareTo(Object)
     */
    public int compareTo(Distance o) {
        DoubleDistance other = (DoubleDistance) o;
        return Double.compare(this.value, other.value);
    }

    /**
     * Returns a string representation of this distance.
     *
     * @return a string representation of this distance.
     */
    public String toString() {
        return Util.format(value);
    }
}
|
package de.podfetcher.service;
import de.podfetcher.storage.DownloadRequester;
import de.podfetcher.feed.*;
import de.podfetcher.R;
import android.content.Context;
import android.app.DownloadManager;
import android.util.Log;
import android.database.Cursor;
import java.util.concurrent.Callable;
import android.os.AsyncTask;
/** Observes the status of a specific Download */
public class DownloadObserver extends AsyncTask<FeedFile, Integer, Boolean> {
    private static final String TAG = "DownloadObserver";

    /** Types of downloads to observe. */
    public static final int TYPE_FEED = 0;
    public static final int TYPE_IMAGE = 1;
    public static final int TYPE_MEDIA = 2;

    /** Error codes */
    public static final int ALREADY_DOWNLOADED = 1;
    public static final int NO_DOWNLOAD_FOUND = 2;

    /** Milliseconds to wait between polls of the DownloadManager. */
    private final long DEFAULT_WAITING_INTERVALL = 1000L;

    private int progressPercent;
    private int statusMsg;
    private int reason;
    // NOTE(review): never assigned anywhere in this class; using it previously
    // caused a guaranteed NullPointerException on STATUS_FAILED. Confirm where
    // this should be initialized (e.g. from DownloadRequester).
    private DownloadRequester requester;
    private FeedFile feedfile;
    private Context context;

    public DownloadObserver(Context context) {
        super();
        this.context = context;
    }

    /**
     * Polls the DownloadManager for the state of the download of files[0]
     * until it succeeds or fails, publishing progress along the way.
     * Returns true on successful download, false otherwise (see reason /
     * statusMsg fields).
     */
    protected Boolean doInBackground(FeedFile... files) {
        Log.d(TAG, "Background Task started.");
        feedfile = files[0];
        if (feedfile.getFile_url() == null) {
            reason = NO_DOWNLOAD_FOUND;
            return Boolean.valueOf(false);
        }
        if (feedfile.isDownloaded()) {
            reason = ALREADY_DOWNLOADED;
            return Boolean.valueOf(false);
        }
        while (true) {
            Cursor cursor = getDownloadCursor();
            int status;
            try {
                status = getDownloadStatus(cursor, DownloadManager.COLUMN_STATUS);
                // Bug fix: this was previously a local variable shadowing the
                // field, so getProgressPercent() always returned 0.
                progressPercent = getDownloadProgress(cursor);
            } finally {
                // Bug fix: the cursor was never closed, leaking one cursor per poll.
                cursor.close();
            }
            switch (status) {
            case DownloadManager.STATUS_SUCCESSFUL:
                statusMsg = R.string.download_successful;
                return Boolean.valueOf(true);
            case DownloadManager.STATUS_RUNNING:
                statusMsg = R.string.download_running;
                break;
            case DownloadManager.STATUS_FAILED:
                statusMsg = R.string.download_failed;
                // Guard against the uninitialized requester field (see note on
                // the field) instead of crashing the background task.
                if (requester != null) {
                    requester.notifyDownloadService(context);
                } else {
                    Log.w(TAG, "requester is null; cannot notify download service.");
                }
                return Boolean.valueOf(false);
            case DownloadManager.STATUS_PENDING:
                statusMsg = R.string.download_pending;
                break;
            }
            publishProgress(progressPercent);
            try {
                Thread.sleep(DEFAULT_WAITING_INTERVALL);
            } catch (InterruptedException e) {
                // NOTE(review): the interrupt is swallowed and polling continues;
                // consider restoring the interrupt flag and aborting instead.
                Log.w(TAG, "Thread was interrupted while waiting.");
            }
        }
    }

    /** Queries the system DownloadManager for the observed file's download row. */
    public Cursor getDownloadCursor() {
        DownloadManager.Query query = buildQuery(feedfile.getDownloadId());
        DownloadManager manager = (DownloadManager) context.getSystemService(Context.DOWNLOAD_SERVICE);
        Cursor result = manager.query(query);
        return result;
    }

    /** Reads an int column from the first row of the cursor, or -1 if empty. */
    public int getDownloadStatus(Cursor c, String column) {
        if (c.moveToFirst()) {
            int status = c.getInt(c.getColumnIndex(column));
            return status;
        } else {
            return -1;
        }
    }

    /** Computes the completion percentage from the first row, or -1 if empty. */
    private int getDownloadProgress(Cursor c) {
        if (c.moveToFirst()) {
            long size = c.getLong(c.getColumnIndex(DownloadManager.COLUMN_TOTAL_SIZE_BYTES));
            long soFar = c.getLong(c.getColumnIndex(DownloadManager.COLUMN_BYTES_DOWNLOADED_SO_FAR));
            int progress = (int) (((double) soFar / (double) size) * 100);
            Log.d(TAG, "Setting progress to " + progress);
            return progress;
        } else {
            return -1;
        }
    }

    private DownloadManager.Query buildQuery(long id) {
        DownloadManager.Query query = new DownloadManager.Query();
        query.setFilterById(id);
        return query;
    }

    public int getProgressPercent() {
        return progressPercent;
    }

    public int getStatusMsg() {
        return statusMsg;
    }

    public Context getContext() {
        return context;
    }
}
|
package de.tub.citydb.db.exporter;
import java.sql.Connection;
import java.sql.Date;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Calendar;
import java.util.GregorianCalendar;
import java.util.HashSet;
import javax.xml.datatype.DatatypeConfigurationException;
import javax.xml.datatype.DatatypeConstants;
import javax.xml.datatype.DatatypeFactory;
import oracle.spatial.geometry.JGeometry;
import oracle.sql.STRUCT;
import org.citygml4j.factory.CityGMLFactory;
import org.citygml4j.geometry.BoundingVolume;
import org.citygml4j.geometry.Point;
import org.citygml4j.impl.jaxb.gml._3_1_1.BoundingShapeImpl;
import org.citygml4j.impl.jaxb.gml._3_1_1.EnvelopeImpl;
import org.citygml4j.model.citygml.CityGMLClass;
import org.citygml4j.model.citygml.CityGMLModuleType;
import org.citygml4j.model.citygml.core.CityObject;
import org.citygml4j.model.citygml.core.CoreModule;
import org.citygml4j.model.citygml.core.ExternalObject;
import org.citygml4j.model.citygml.core.ExternalReference;
import org.citygml4j.model.citygml.generics.GenericAttribute;
import org.citygml4j.model.citygml.generics.GenericDateAttribute;
import org.citygml4j.model.citygml.generics.GenericDoubleAttribute;
import org.citygml4j.model.citygml.generics.GenericIntAttribute;
import org.citygml4j.model.citygml.generics.GenericStringAttribute;
import org.citygml4j.model.citygml.generics.GenericUriAttribute;
import org.citygml4j.model.citygml.generics.GenericsModule;
import org.citygml4j.model.gml.BoundingShape;
import org.citygml4j.model.gml.Envelope;
import org.citygml4j.util.CityGMLModules;
import de.tub.citydb.config.Config;
import de.tub.citydb.config.project.filter.Tiling;
import de.tub.citydb.config.project.filter.TilingMode;
import de.tub.citydb.filter.ExportFilter;
import de.tub.citydb.filter.feature.BoundingBoxFilter;
import de.tub.citydb.log.Logger;
import de.tub.citydb.util.Util;
/**
 * Exports the attributes shared by all city objects — gml:id, envelope,
 * creation/termination dates, external references, generic attributes,
 * generalization relations, appearance and optional tile information — from
 * the CITYOBJECT table and its left-joined side tables.
 */
public class DBCityObject implements DBExporter {
    private final Logger LOG = Logger.getInstance();

    private final DBExporterManager dbExporterManager;
    private final CityGMLFactory cityGMLFactory;
    private final Config config;
    private final Connection connection;

    private PreparedStatement psCityObject;
    private DBAppearance appearanceExporter;
    private DBGeneralization generalizesToExporter;
    private String gmlSrsName;
    private boolean exportAppearance;
    private boolean useInternalBBoxFilter;
    private boolean useTiling;
    private boolean setTileInfoAsGenericAttribute;
    private boolean transformCoords;
    private BoundingBoxFilter boundingBoxFilter;
    private BoundingVolume activeTile;
    private Tiling tiling;
    private DatatypeFactory datatypeFactory;

    // Ids already emitted for the current city object: the left joins in the
    // query can repeat the same generalization / external reference / generic
    // attribute across several result rows, so these sets deduplicate them.
    private HashSet<Long> generalizesToSet;
    private HashSet<Long> externalReferenceSet;
    private HashSet<Long> genericAttributeSet;

    public DBCityObject(Connection connection, CityGMLFactory cityGMLFactory, ExportFilter exportFilter, Config config, DBExporterManager dbExporterManager) throws SQLException {
        this.dbExporterManager = dbExporterManager;
        this.cityGMLFactory = cityGMLFactory;
        this.config = config;
        this.connection = connection;
        this.boundingBoxFilter = exportFilter.getBoundingBoxFilter();
        init();
    }

    /**
     * Reads filter/tiling settings from the configuration and prepares the
     * city-object query — either plain, or with the envelope transformed to
     * the export target SRS when coordinate transformation is requested.
     */
    private void init() throws SQLException {
        exportAppearance = config.getProject().getExporter().getAppearances().isSetExportAppearance();
        useInternalBBoxFilter = config.getInternal().isUseInternalBBoxFilter();
        tiling = config.getProject().getExporter().getFilter().getComplexFilter().getTiledBoundingBox().getTiling();
        useTiling = boundingBoxFilter.isActive() && tiling.getMode() != TilingMode.NO_TILING;
        setTileInfoAsGenericAttribute = useTiling && tiling.isIncludeTileAsGenericAttribute();
        if (setTileInfoAsGenericAttribute)
            activeTile = boundingBoxFilter.getFilterState();
        generalizesToSet = new HashSet<Long>();
        externalReferenceSet = new HashSet<Long>();
        genericAttributeSet = new HashSet<Long>();
        transformCoords = config.getInternal().isTransformCoordinates();
        if (!transformCoords) {
            gmlSrsName = config.getInternal().getOpenConnection().getMetaData().getSrsName();
            psCityObject = connection.prepareStatement("select co.GMLID, co.ENVELOPE, co.CREATION_DATE, co.TERMINATION_DATE, ex.ID as EXID, ex.INFOSYS, ex.NAME, ex.URI, " +
                    "ga.ID as GAID, ga.ATTRNAME, ga.DATATYPE, ga.STRVAL, ga.INTVAL, ga.REALVAL, ga.URIVAL, ga.DATEVAL, ge.GENERALIZES_TO_ID " +
                    "from CITYOBJECT co left join EXTERNAL_REFERENCE ex on co.ID = ex.CITYOBJECT_ID " +
                    "left join CITYOBJECT_GENERICATTRIB ga on co.ID = ga.CITYOBJECT_ID " +
                    "left join GENERALIZATION ge on ge.CITYOBJECT_ID=co.ID where co.ID = ?");
        } else {
            // Transform the envelope on the database side into the target SRS.
            int srid = config.getInternal().getExportTargetSRS().getSrid();
            gmlSrsName = config.getInternal().getExportTargetSRS().getSrsName();
            psCityObject = connection.prepareStatement("select co.GMLID, " +
                    "geodb_util.transform_or_null(co.ENVELOPE, " + srid + ") AS ENVELOPE, " +
                    "co.CREATION_DATE, co.TERMINATION_DATE, ex.ID as EXID, ex.INFOSYS, ex.NAME, ex.URI, " +
                    "ga.ID as GAID, ga.ATTRNAME, ga.DATATYPE, ga.STRVAL, ga.INTVAL, ga.REALVAL, ga.URIVAL, ga.DATEVAL, ge.GENERALIZES_TO_ID " +
                    "from CITYOBJECT co left join EXTERNAL_REFERENCE ex on co.ID = ex.CITYOBJECT_ID " +
                    "left join CITYOBJECT_GENERICATTRIB ga on co.ID = ga.CITYOBJECT_ID " +
                    "left join GENERALIZATION ge on ge.CITYOBJECT_ID=co.ID where co.ID = ?");
        }
        appearanceExporter = (DBAppearance)dbExporterManager.getDBExporter(DBExporterEnum.APPEARANCE);
        generalizesToExporter = (DBGeneralization)dbExporterManager.getDBExporter(DBExporterEnum.GENERALIZATION);
        try {
            datatypeFactory = DatatypeFactory.newInstance();
        } catch (DatatypeConfigurationException e) {
            // NOTE(review): exception deliberately not rethrown — read() logs an
            // error whenever datatypeFactory is null. Logging the root cause here
            // would make that failure easier to diagnose; confirm intent.
        }
    }

    public boolean read(CityObject cityObject, long parentId) throws SQLException {
        return read(cityObject, parentId, false);
    }

    /**
     * Populates the given CityObject with the shared city-object attributes of
     * the database row identified by parentId. Returns false when a top-level
     * object is rejected by the bounding-box/tiling filter, true otherwise.
     */
    public boolean read(CityObject cityObject, long parentId, boolean isTopLevelObject) throws SQLException {
        ResultSet rs = null;
        try {
            psCityObject.setLong(1, parentId);
            rs = psCityObject.executeQuery();
            CoreModule core = (CoreModule)cityObject.getCityGMLModule().getModuleDependencies().getModule(CityGMLModuleType.CORE);
            GenericsModule gen = (GenericsModule)CityGMLModules.getModuleByTypeAndVersion(CityGMLModuleType.GENERICS, core.getModuleVersion());
            if (rs.next()) {
                // Reset per-object deduplication state (the sets are reused
                // across calls; this exporter is not thread-safe).
                generalizesToSet.clear();
                externalReferenceSet.clear();
                genericAttributeSet.clear();
                // boundedBy
                STRUCT struct = (STRUCT)rs.getObject("ENVELOPE");
                if (!rs.wasNull() && struct != null) {
                    JGeometry jGeom = JGeometry.load(struct);
                    int dim = jGeom.getDimensions();
                    if (dim == 2 || dim == 3) {
                        // The envelope is stored as two corner points; a 2D
                        // envelope is lifted to 3D with z = 0.
                        double[] points = jGeom.getOrdinatesArray();
                        Envelope env = new EnvelopeImpl();
                        Point lower = null;
                        Point upper = null;
                        if (dim == 2) {
                            lower = new Point(points[0], points[1], 0);
                            upper = new Point(points[2], points[3], 0);
                        } else {
                            lower = new Point(points[0], points[1], points[2]);
                            upper = new Point(points[3], points[4], points[5]);
                        }
                        env.setLowerCorner(lower);
                        env.setUpperCorner(upper);
                        env.setSrsDimension(3);
                        env.setSrsName(gmlSrsName);
                        BoundingShape boundedBy = new BoundingShapeImpl();
                        boundedBy.setEnvelope(env);
                        cityObject.setBoundedBy(boundedBy);
                    }
                }
                // check bounding volume filter: top-level objects without an
                // envelope, or outside the active bounding box/tile, are skipped.
                if (isTopLevelObject && (useInternalBBoxFilter || useTiling)) {
                    if (!cityObject.isSetBoundedBy() ||
                            !cityObject.getBoundedBy().isSetEnvelope() ||
                            boundingBoxFilter.filter(cityObject.getBoundedBy().getEnvelope()))
                        return false;
                }
                String gmlId = rs.getString("GMLID");
                if (gmlId != null)
                    cityObject.setId(gmlId);
                // creationDate
                Date creationDate = rs.getDate("CREATION_DATE");
                if (creationDate != null) {
                    GregorianCalendar gregDate = new GregorianCalendar();
                    gregDate.setTime(creationDate);
                    if (datatypeFactory != null)
                        cityObject.setCreationDate(datatypeFactory.newXMLGregorianCalendarDate(
                                gregDate.get(Calendar.YEAR),
                                gregDate.get(Calendar.MONTH) + 1,
                                gregDate.get(Calendar.DAY_OF_MONTH),
                                DatatypeConstants.FIELD_UNDEFINED));
                    else
                        LOG.error(Util.getFeatureSignature(cityObject.getCityGMLClass(), cityObject.getId()) +
                                ": Failed to write attribute 'creationDate' due to an internal error.");
                }
                // terminationDate
                Date terminationDate = rs.getDate("TERMINATION_DATE");
                if (terminationDate != null) {
                    GregorianCalendar gregDate = new GregorianCalendar();
                    gregDate.setTime(terminationDate);
                    if (datatypeFactory != null)
                        cityObject.setTerminationDate(datatypeFactory.newXMLGregorianCalendarDate(
                                gregDate.get(Calendar.YEAR),
                                gregDate.get(Calendar.MONTH) + 1,
                                gregDate.get(Calendar.DAY_OF_MONTH),
                                DatatypeConstants.FIELD_UNDEFINED));
                    else
                        LOG.error(Util.getFeatureSignature(cityObject.getCityGMLClass(), cityObject.getId()) +
                                ": Failed to write attribute 'terminationDate' due to an internal error.");
                }
                // Iterate all joined rows for this city object; the HashSets
                // above filter out repeated side-table ids.
                do {
                    // generalizesTo
                    long generalizesTo = rs.getLong("GENERALIZES_TO_ID");
                    if (!rs.wasNull())
                        generalizesToSet.add(generalizesTo);
                    // externalReference
                    long externalReferenceId = rs.getLong("EXID");
                    if (!rs.wasNull() && !externalReferenceSet.contains(externalReferenceId)) {
                        externalReferenceSet.add(externalReferenceId);
                        ExternalReference externalReference = cityGMLFactory.createExternalReference(core);
                        ExternalObject externalObject = cityGMLFactory.createExternalObject(core);
                        String infoSys = rs.getString("INFOSYS");
                        if (infoSys != null)
                            externalReference.setInformationSystem(infoSys);
                        String name = rs.getString("NAME");
                        String uri = rs.getString("URI");
                        if (name != null || uri != null) {
                            if (name != null)
                                externalObject.setName(name);
                            if (uri != null)
                                externalObject.setUri(uri);
                        } else if (name == null && uri == null) {
                            // Schema requires either a name or a uri; emit an
                            // empty uri when both are missing.
                            externalObject.setUri("");
                        }
                        externalReference.setExternalObject(externalObject);
                        cityObject.addExternalReference(externalReference);
                    }
                    // generic attributes: DATATYPE selects which value column
                    // holds the payload (1=string, 2=int, 3=double, 4=uri, 5=date).
                    long genericAttribId = rs.getLong("GAID");
                    if (!rs.wasNull() && !genericAttributeSet.contains(genericAttribId)) {
                        genericAttributeSet.add(genericAttribId);
                        GenericAttribute genericAttrib = null;
                        String attrName = rs.getString("ATTRNAME");
                        int dataType = rs.getInt("DATATYPE");
                        switch (dataType) {
                        case 1:
                            String strVal = rs.getString("STRVAL");
                            if (strVal != null) {
                                genericAttrib = cityGMLFactory.createGenericStringAttribute(gen);
                                ((GenericStringAttribute)genericAttrib).setValue(strVal);
                            }
                            break;
                        case 2:
                            // NOTE(review): rs.getInt returns a primitive, so
                            // intVal can never be null — the wasNull() check is
                            // the effective guard here.
                            Integer intVal = rs.getInt("INTVAL");
                            if (!rs.wasNull() && intVal != null) {
                                genericAttrib = cityGMLFactory.createGenericIntAttribute(gen);
                                ((GenericIntAttribute)genericAttrib).setValue(intVal);
                            }
                            break;
                        case 3:
                            // NOTE(review): same as above — realVal cannot be null.
                            Double realVal = rs.getDouble("REALVAL");
                            if (!rs.wasNull() && realVal != null) {
                                genericAttrib = cityGMLFactory.createGenericDoubleAttribute(gen);
                                ((GenericDoubleAttribute)genericAttrib).setValue(realVal);
                            }
                            break;
                        case 4:
                            String uriVal = rs.getString("URIVAL");
                            if (uriVal != null) {
                                genericAttrib = cityGMLFactory.createGenericUriAttribute(gen);
                                ((GenericUriAttribute)genericAttrib).setValue(uriVal);
                            }
                            break;
                        case 5:
                            Date dateVal = rs.getDate("DATEVAL");
                            if (dateVal != null) {
                                genericAttrib = cityGMLFactory.createGenericDateAttribute(gen);
                                GregorianCalendar gregDate = new GregorianCalendar();
                                gregDate.setTime(dateVal);
                                if (datatypeFactory != null)
                                    ((GenericDateAttribute)genericAttrib).setValue(datatypeFactory.newXMLGregorianCalendarDate(
                                            gregDate.get(Calendar.YEAR),
                                            gregDate.get(Calendar.MONTH) + 1,
                                            gregDate.get(Calendar.DAY_OF_MONTH),
                                            DatatypeConstants.FIELD_UNDEFINED));
                                else
                                    LOG.error(Util.getFeatureSignature(cityObject.getCityGMLClass(), cityObject.getId()) +
                                            ": Failed to write generic dateAttribute '" + genericAttrib.getName() + "' due to an internal error.");
                            }
                            break;
                        }
                        if (genericAttrib != null) {
                            genericAttrib.setName(attrName);
                            cityObject.addGenericAttribute(genericAttrib);
                        }
                    }
                } while (rs.next());
                // Optionally attach the active tile's extent (or row/column, as
                // the default) as a generic "TILE" attribute.
                if (isTopLevelObject && setTileInfoAsGenericAttribute) {
                    String value;
                    double minX = activeTile.getLowerCorner().getX();
                    double minY = activeTile.getLowerCorner().getY();
                    double maxX = activeTile.getUpperCorner().getX();
                    double maxY = activeTile.getUpperCorner().getY();
                    switch (tiling.getGenericAttributeValue()) {
                    case XMIN_YMIN:
                        value = String.valueOf(minX) + ' ' + String.valueOf(minY);
                        break;
                    case XMAX_YMIN:
                        value = String.valueOf(maxX) + ' ' + String.valueOf(minY);
                        break;
                    case XMIN_YMAX:
                        value = String.valueOf(minX) + ' ' + String.valueOf(maxY);
                        break;
                    case XMAX_YMAX:
                        value = String.valueOf(maxX) + ' ' + String.valueOf(maxY);
                        break;
                    case XMIN_YMIN_XMAX_YMAX:
                        value = String.valueOf(minX) + ' ' + String.valueOf(minY) + ' ' + String.valueOf(maxX) + ' ' + String.valueOf(maxY);
                        break;
                    default:
                        value = String.valueOf(boundingBoxFilter.getTileRow()) + ' ' + String.valueOf(boundingBoxFilter.getTileColumn());
                    }
                    GenericStringAttribute genericStringAttrib = cityGMLFactory.createGenericStringAttribute(gen);
                    genericStringAttrib.setName("TILE");
                    genericStringAttrib.setValue(value);
                    cityObject.addGenericAttribute(genericStringAttrib);
                }
                // generalizesTo relation
                if (!generalizesToSet.isEmpty())
                    generalizesToExporter.read(cityObject, parentId, core, generalizesToSet);
                // get appearance information associated with the cityobject
                if (exportAppearance)
                    appearanceExporter.read(cityObject, parentId);
                // City object groups are counted elsewhere.
                if (cityObject.getCityGMLClass() != CityGMLClass.CITYOBJECTGROUP)
                    dbExporterManager.updateFeatureCounter(cityObject.getCityGMLClass());
            }
            return true;
        } finally {
            if (rs != null)
                rs.close();
        }
    }

    @Override
    public void close() throws SQLException {
        psCityObject.close();
    }

    @Override
    public DBExporterEnum getDBExporterType() {
        return DBExporterEnum.CITYOBJECT;
    }
}
|
package dr.app.beauti.generator;
import dr.app.beast.BeastVersion;
import dr.app.beauti.BeautiFrame;
import dr.app.beauti.components.ComponentFactory;
import dr.app.beauti.components.marginalLikelihoodEstimation.MarginalLikelihoodEstimationOptions;
import dr.app.beauti.options.*;
import dr.app.beauti.types.*;
import dr.app.beauti.util.XMLWriter;
import dr.app.util.Arguments;
import dr.evolution.alignment.Alignment;
import dr.evolution.alignment.Patterns;
import dr.evolution.datatype.DataType;
import dr.evolution.datatype.Microsatellite;
import dr.evolution.util.Taxa;
import dr.evolution.util.Taxon;
import dr.evolution.util.TaxonList;
import dr.evolution.util.Units;
import dr.evomodelxml.speciation.MultiSpeciesCoalescentParser;
import dr.evomodelxml.speciation.SpeciationLikelihoodParser;
import dr.evoxml.AlignmentParser;
import dr.evoxml.DateParser;
import dr.evoxml.TaxaParser;
import dr.evoxml.TaxonParser;
import dr.inferencexml.distribution.MixedDistributionLikelihoodParser;
import dr.inferencexml.model.CompoundLikelihoodParser;
import dr.inferencexml.model.CompoundParameterParser;
import dr.inferencexml.operators.SimpleOperatorScheduleParser;
import dr.util.Attribute;
import dr.util.Version;
import dr.xml.AttributeParser;
import dr.xml.XMLParser;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* This class holds all the data for the current BEAUti Document
*
* @author Andrew Rambaut
* @author Alexei Drummond
* @author Walter Xie
* @version $Id: BeastGenerator.java,v 1.4 2006/09/05 13:29:34 rambaut Exp $
*/
public class BeastGenerator extends Generator {
    private final static Version version = new BeastVersion();

    // Shared validation messages used by checkOptions().
    private static final String MESSAGE_CAL_YULE = "Calibrated Yule requires 1 calibrated internal node \n" +
            "with a proper prior and monophyly enforced for each tree.";
    private final String MESSAGE_CAL = "\nas another element (taxon, sequence, taxon set, species, etc.):\nAll ids should be unique.";

    // One sub-generator per XML section; all are constructed with the same
    // options/components pair and used when writing the BEAST XML document.
    private final AlignmentGenerator alignmentGenerator;
    private final PatternListGenerator patternListGenerator;
    private final TreePriorGenerator treePriorGenerator;
    private final TreeLikelihoodGenerator treeLikelihoodGenerator;
    private final SubstitutionModelGenerator substitutionModelGenerator;
    private final InitialTreeGenerator initialTreeGenerator;
    private final TreeModelGenerator treeModelGenerator;
    private final BranchRatesModelGenerator branchRatesModelGenerator;
    private final OperatorsGenerator operatorsGenerator;
    private final ParameterPriorGenerator parameterPriorGenerator;
    private final LogGenerator logGenerator;
    // private final DiscreteTraitGenerator discreteTraitGenerator;
    private final STARBEASTGenerator starBeastGenerator;
    private final TMRCAStatisticsGenerator tmrcaStatisticsGenerator;

    /**
     * Builds all section generators from the given BEAUti options and the
     * active component factories.
     */
    public BeastGenerator(BeautiOptions options, ComponentFactory[] components) {
        super(options, components);
        alignmentGenerator = new AlignmentGenerator(options, components);
        patternListGenerator = new PatternListGenerator(options, components);
        tmrcaStatisticsGenerator = new TMRCAStatisticsGenerator(options, components);
        substitutionModelGenerator = new SubstitutionModelGenerator(options, components);
        treePriorGenerator = new TreePriorGenerator(options, components);
        treeLikelihoodGenerator = new TreeLikelihoodGenerator(options, components);
        initialTreeGenerator = new InitialTreeGenerator(options, components);
        treeModelGenerator = new TreeModelGenerator(options, components);
        branchRatesModelGenerator = new BranchRatesModelGenerator(options, components);
        operatorsGenerator = new OperatorsGenerator(options, components);
        parameterPriorGenerator = new ParameterPriorGenerator(options, components);
        logGenerator = new LogGenerator(options, components);
        // this has moved into the component system...
        // discreteTraitGenerator = new DiscreteTraitGenerator(options, components);
        starBeastGenerator = new STARBEASTGenerator(options, components);
    }
public void checkOptions() throws GeneratorException {
//++++++++++++++ Microsatellite +++++++++++++++
// this has to execute before all checking below
// mask all ? from microsatellite data for whose tree only has 1 data partition
try{
if (options.contains(Microsatellite.INSTANCE)) {
// clear all masks
for (PartitionPattern partitionPattern : options.getPartitionPattern()) {
partitionPattern.getPatterns().clearMask();
}
// set mask
for (PartitionTreeModel model : options.getPartitionTreeModels()) {
// if a tree only has 1 data partition, which mostly mean unlinked trees
if (options.getDataPartitions(model).size() == 1) {
PartitionPattern partition = (PartitionPattern) options.getDataPartitions(model).get(0);
Patterns patterns = partition.getPatterns();
for (int i = 0; i < patterns.getTaxonCount(); i++) {
int state = patterns.getPatternState(i, 0);
// mask ? from data
if (state < 0) {
patterns.addMask(i);
}
}
// System.out.println("mask set = " + patterns.getMaskSet() + " in partition " + partition.getName());
}
}
}
} catch (Exception e) {
throw new GeneratorException(e.getMessage());
}
//++++++++++++++++ Taxon List ++++++++++++++++++
TaxonList taxonList = options.taxonList;
Set<String> ids = new HashSet<String>();
ids.add(TaxaParser.TAXA);
ids.add(AlignmentParser.ALIGNMENT);
ids.add(TraitData.TRAIT_SPECIES);
if (taxonList != null) {
if (taxonList.getTaxonCount() < 2) {
throw new GeneratorException("BEAST requires at least two taxa to run.");
}
for (int i = 0; i < taxonList.getTaxonCount(); i++) {
Taxon taxon = taxonList.getTaxon(i);
if (ids.contains(taxon.getId())) {
throw new GeneratorException("A taxon has the same id," + taxon.getId() + MESSAGE_CAL);
}
ids.add(taxon.getId());
}
}
//++++++++++++++++ Taxon Sets ++++++++++++++++++
for (PartitionTreeModel model : options.getPartitionTreeModels()) {
// should be only 1 calibrated internal node with a proper prior and monophyletic for each tree at moment
if (model.getPartitionTreePrior().getNodeHeightPrior() == TreePriorType.YULE_CALIBRATION) {
if (options.treeModelOptions.isNodeCalibrated(model) < 0) // invalid node calibration
throw new GeneratorException(MESSAGE_CAL_YULE);
if (options.treeModelOptions.isNodeCalibrated(model) > 0) { // internal node calibration
List taxonSetsList = options.getKeysFromValue(options.taxonSetsTreeModel, model);
if (taxonSetsList.size() != 1 || !options.taxonSetsMono.get(taxonSetsList.get(0))) { // 1 tmrca per tree && monophyletic
throw new GeneratorException(MESSAGE_CAL_YULE, BeautiFrame.TAXON_SETS);
}
}
}
}
for (Taxa taxa : options.taxonSets) {
// AR - we should allow single taxon taxon sets...
if (taxa.getTaxonCount() < 1 // && !options.taxonSetsIncludeStem.get(taxa)
) {
throw new GeneratorException(
"Taxon set, " + taxa.getId() + ", should contain \n" +
"at least one taxa. Please go back to Taxon Sets \n" +
"panel to correct this.", BeautiFrame.TAXON_SETS);
}
if (ids.contains(taxa.getId())) {
throw new GeneratorException("A taxon set has the same id," + taxa.getId() +
MESSAGE_CAL, BeautiFrame.TAXON_SETS);
}
ids.add(taxa.getId());
}
//++++++++++++++++ *BEAST ++++++++++++++++++
if (options.useStarBEAST) {
if (!options.traitExists(TraitData.TRAIT_SPECIES))
throw new GeneratorException("A trait labelled \"species\" is required for *BEAST species designations." +
"\nPlease create or import the species designations in the Traits table.", BeautiFrame.TRAITS);
//++++++++++++++++ Species Sets ++++++++++++++++++
// should be only 1 calibrated internal node with monophyletic at moment
if (options.getPartitionTreePriors().get(0).getNodeHeightPrior() == TreePriorType.SPECIES_YULE_CALIBRATION) {
if (options.speciesSets.size() != 1 || !options.speciesSetsMono.get(options.speciesSets.get(0))) {
throw new GeneratorException(MESSAGE_CAL_YULE, BeautiFrame.TAXON_SETS);
}
}
for (Taxa species : options.speciesSets) {
if (species.getTaxonCount() < 2) {
throw new GeneratorException("Species set, " + species.getId() + ",\n should contain" +
"at least two species. \nPlease go back to Species Sets panel to select included species.", BeautiFrame.TAXON_SETS);
}
if (ids.contains(species.getId())) {
throw new GeneratorException("A species set has the same id," + species.getId() +
MESSAGE_CAL, BeautiFrame.TAXON_SETS);
}
ids.add(species.getId());
}
int tId = options.starBEASTOptions.getEmptySpeciesIndex();
if (tId >= 0) {
throw new GeneratorException("The taxon " + options.taxonList.getTaxonId(tId) +
" has NULL value for \"species\" trait", BeautiFrame.TRAITS);
}
}
//++++++++++++++++ Traits ++++++++++++++++++
// missing data is not necessarily an issue...
// for (TraitData trait : options.traits) {
// for (int i = 0; i < trait.getTaxaCount(); i++) {
//// System.out.println("Taxon " + trait.getTaxon(i).getId() + " : [" + trait.getTaxon(i).getAttribute(trait.getName()) + "]");
// if (!trait.hasValue(i))
// " has no value for Trait " + trait.getName());
//++++++++++++++++ Tree Prior ++++++++++++++++++
// if (options.isShareSameTreePrior()) {
if (options.getPartitionTreeModels().size() > 1) { //TODO not allowed multi-prior yet
for (PartitionTreePrior prior : options.getPartitionTreePriors()) {
if (prior.getNodeHeightPrior() == TreePriorType.GMRF_SKYRIDE) {
throw new GeneratorException("For the Skyride, tree model/tree prior combination not implemented by BEAST." +
"\nThe Skyride is only available for a single tree model partition in this release.", BeautiFrame.TREES);
}
}
}
//+++++++++++++++ Starting tree ++++++++++++++++
for (PartitionTreeModel model : options.getPartitionTreeModels()) {
if (model.getStartingTreeType() == StartingTreeType.USER) {
if (model.getUserStartingTree() == null) {
throw new GeneratorException("Please select a starting tree in " + BeautiFrame.TREES + " panel, " +
"\nwhen choosing user specified starting tree option.", BeautiFrame.TREES);
}
}
}
//++++++++++++++++ Random local clock model validation ++++++++++++++++++
for (PartitionClockModel model : options.getPartitionClockModels()) {
// 1 random local clock CANNOT have different tree models
if (model.getClockType() == ClockType.RANDOM_LOCAL_CLOCK) { // || AUTOCORRELATED_LOGNORMAL
PartitionTreeModel treeModel = null;
for (AbstractPartitionData pd : options.getDataPartitions(model)) { // only the PDs linked to this tree model
if (treeModel != null && treeModel != pd.getPartitionTreeModel()) {
throw new GeneratorException("A single random local clock cannot be applied to multiple trees.", BeautiFrame.CLOCK_MODELS);
}
treeModel = pd.getPartitionTreeModel();
}
}
}
//++++++++++++++++ Tree Model ++++++++++++++++++
for (PartitionTreeModel model : options.getPartitionTreeModels()) {
int numOfTaxa = -1;
for (AbstractPartitionData pd : options.getDataPartitions(model)) {
if (pd.getTaxonCount() > 0) {
if (numOfTaxa > 0) {
if (numOfTaxa != pd.getTaxonCount()) {
throw new GeneratorException("Partitions with different taxa cannot share the same tree.", BeautiFrame.DATA_PARTITIONS);
}
} else {
numOfTaxa = pd.getTaxonCount();
}
}
}
}
//++++++++++++++++ Prior Bounds ++++++++++++++++++
for (Parameter param : options.selectParameters()) {
if (param.initial != Double.NaN) {
if (param.isTruncated && (param.initial < param.truncationLower || param.initial > param.truncationUpper)) {
throw new GeneratorException("Parameter \"" + param.getName() + "\":" +
"\ninitial value " + param.initial + " is NOT in the range [" + param.truncationLower + ", " + param.truncationUpper + "]," +
"\nor this range is wrong. Please check the Prior panel.", BeautiFrame.PRIORS);
} else if (param.priorType == PriorType.UNIFORM_PRIOR && (param.initial < param.uniformLower || param.initial > param.uniformUpper)) {
throw new GeneratorException("Parameter \"" + param.getName() + "\":" +
"\ninitial value " + param.initial + " is NOT in the range [" + param.uniformLower + ", " + param.uniformUpper + "]," +
"\nor this range is wrong. Please check the Prior panel.", BeautiFrame.PRIORS);
}
if (param.isNonNegative && param.initial < 0.0) {
throw new GeneratorException("Parameter \"" + param.getName() + "\":" +
"\ninitial value " + param.initial + " should be non-negative. Please check the Prior panel.", BeautiFrame.PRIORS);
}
if (param.isZeroOne && (param.initial < 0.0 || param.initial > 1.0)) {
throw new GeneratorException("Parameter \"" + param.getName() + "\":" +
"\ninitial value " + param.initial + " should lie in the interval [0, 1]. Please check the Prior panel.", BeautiFrame.PRIORS);
}
}
}
MarginalLikelihoodEstimationOptions mleOptions = (MarginalLikelihoodEstimationOptions)options.getComponentOptions(MarginalLikelihoodEstimationOptions.class);
if (mleOptions.performMLE) {
for (Parameter param : options.selectParameters()) {
if (param.isPriorImproper() || param.priorType == PriorType.ONE_OVER_X_PRIOR) {
throw new GeneratorException("Parameter \"" + param.getName() + "\":" +
"\nhas an improper prior and will not sample correctly when estimating " +
"the marginal likelihood. " +
"\nPlease check the Prior panel.", BeautiFrame.PRIORS);
}
}
}
// add other tests and warnings here
// Speciation model with dated tips
// Sampling rates without dated tips or priors on rate or nodes
}
/**
 * Generate a BEAST XML file from these beast options.
 *
 * @param file the destination file the XML document is written to
 * @throws GeneratorException if any section of the XML cannot be generated
 * @throws java.io.IOException if the file cannot be created or written
 * @throws dr.app.util.Arguments.ArgumentException
 *          ArgumentException
 */
public void generateXML(File file) throws GeneratorException, IOException, Arguments.ArgumentException {

    // NOTE(review): if any section below throws, the writer is never closed and the
    // partially written file handle leaks - consider try/finally or try-with-resources.
    XMLWriter writer = new XMLWriter(new BufferedWriter(new FileWriter(file)));

    writer.writeText("<?xml version=\"1.0\" standalone=\"yes\"?>");
    writer.writeComment("Generated by BEAUTi " + version.getVersionString(),
            " by Alexei J. Drummond, Andrew Rambaut and Marc A. Suchard",
            " Department of Computer Science, University of Auckland and",
            " Institute of Evolutionary Biology, University of Edinburgh",
            " David Geffen School of Medicine, University of California, Los Angeles",
            " http://beast.bio.ed.ac.uk/");
    writer.writeOpenTag("beast");
    writer.writeText("");

    // this gives any added implementations of the 'Component' interface a
    // chance to generate XML at this point in the BEAST file.
    generateInsertionPoint(ComponentGenerator.InsertionPoint.BEFORE_TAXA, writer);

    if (options.originDate != null) {
        // Create a dummy taxon whose job is to specify the origin date
        Taxon originTaxon = new Taxon("originTaxon");
        options.originDate.setUnits(options.units);
        originTaxon.setDate(options.originDate);
        writeTaxon(originTaxon, true, false, writer);
    }

    //++++++++++++++++ Taxon List ++++++++++++++++++
    try {
        // write complete taxon list
        writeTaxa(options.taxonList, writer);

        writer.writeText("");

        if (!options.hasIdenticalTaxa()) {
            // write all taxa in each gene tree regarding each data partition,
            for (AbstractPartitionData partition : options.dataPartitions) {
                if (partition.getTaxonList() != null) {
                    writeDifferentTaxa(partition, writer);
                }
            }
        } else {
            // microsat
            for (PartitionPattern partitionPattern : options.getPartitionPattern()) {
                if (partitionPattern.getTaxonList() != null && partitionPattern.getPatterns().hasMask()) {
                    writeDifferentTaxa(partitionPattern, writer);
                }
            }
        }
    } catch (Exception e) {
        // fixed for consistency: was System.err.println(e); every other section prints the stack trace
        e.printStackTrace();
        throw new GeneratorException("Taxon list generation has failed:\n" + e.getMessage());
    }

    //++++++++++++++++ Taxon Sets ++++++++++++++++++
    List<Taxa> taxonSets = options.taxonSets;
    try {
        if (taxonSets != null && taxonSets.size() > 0 && !options.useStarBEAST) {
            tmrcaStatisticsGenerator.writeTaxonSets(writer, taxonSets);
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Taxon sets generation has failed:\n" + e.getMessage());
    }

    generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TAXA, writer);

    //++++++++++++++++ Alignments ++++++++++++++++++
    List<Alignment> alignments = new ArrayList<Alignment>();
    try {
        for (AbstractPartitionData partition : options.dataPartitions) {
            Alignment alignment = null;
            if (partition instanceof PartitionData) { // microsat has no alignment
                alignment = ((PartitionData) partition).getAlignment();
            }
            if (alignment != null && !alignments.contains(alignment)) {
                alignments.add(alignment);
            }
        }
        if (alignments.size() > 0) {
            alignmentGenerator.writeAlignments(alignments, writer);
            generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_SEQUENCES, writer);
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Alignments generation has failed:\n" + e.getMessage());
    }

    //++++++++++++++++ Pattern Lists ++++++++++++++++++
    try {
        if (!options.samplePriorOnly) {
            List<Microsatellite> microsatList = new ArrayList<Microsatellite>();
            for (AbstractPartitionData partition : options.dataPartitions) { // Each PD has one TreeLikelihood
                if (partition.getTaxonList() != null) {
                    switch (partition.getDataType().getType()) {
                        case DataType.NUCLEOTIDES:
                        case DataType.AMINO_ACIDS:
                        case DataType.CODONS:
                        case DataType.COVARION:
                        case DataType.TWO_STATES:
                            patternListGenerator.writePatternList((PartitionData) partition, writer);
                            break;

                        case DataType.GENERAL:
                        case DataType.CONTINUOUS:
                            // no patternlist for trait data - discrete (general) data type uses an
                            // attribute patterns which is generated next bit of this method.
                            break;

                        case DataType.MICRO_SAT:
                            // microsat does not have alignment
                            patternListGenerator.writePatternList((PartitionPattern) partition, microsatList, writer);
                            break;

                        default:
                            throw new IllegalArgumentException("Unsupported data type");
                    }
                    writer.writeText("");
                }
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Pattern lists generation has failed:\n" + e.getMessage());
    }

    generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_PATTERNS, writer);

    //++++++++++++++++ Tree Prior Model ++++++++++++++++++
    try {
        for (PartitionTreePrior prior : options.getPartitionTreePriors()) {
            treePriorGenerator.writeTreePriorModel(prior, writer);
            writer.writeText("");
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Tree prior model generation has failed:\n" + e.getMessage());
    }

    //++++++++++++++++ Starting Tree ++++++++++++++++++
    try {
        for (PartitionTreeModel model : options.getPartitionTreeModels()) {
            initialTreeGenerator.writeStartingTree(model, writer);
            writer.writeText("");
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Starting tree generation has failed:\n" + e.getMessage());
    }

    //++++++++++++++++ Tree Model +++++++++++++++++++
    try {
        for (PartitionTreeModel model : options.getPartitionTreeModels()) {
            treeModelGenerator.writeTreeModel(model, writer);
            writer.writeText("");
        }
        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TREE_MODEL, writer);
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Tree model generation has failed:\n" + e.getMessage());
    }

    //++++++++++++++++ Statistics ++++++++++++++++++
    try {
        if (taxonSets != null && taxonSets.size() > 0 && !options.useStarBEAST) {
            tmrcaStatisticsGenerator.writeTMRCAStatistics(writer);
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("TMRCA statistics generation has failed:\n" + e.getMessage());
    }

    //++++++++++++++++ Tree Prior Likelihood ++++++++++++++++++
    try {
        for (PartitionTreeModel model : options.getPartitionTreeModels()) {
            treePriorGenerator.writePriorLikelihood(model, writer);
            writer.writeText("");
        }
        for (PartitionTreePrior prior : options.getPartitionTreePriors()) {
            treePriorGenerator.writeMultiLociTreePriors(prior, writer);
        }
        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TREE_PRIOR, writer);
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Tree prior likelihood generation has failed:\n" + e.getMessage());
    }

    //++++++++++++++++ Branch Rates Model ++++++++++++++++++
    try {
        for (PartitionClockModel model : options.getPartitionClockModels()) {
            branchRatesModelGenerator.writeBranchRatesModel(model, writer);
            writer.writeText("");
        }
        // write allClockRate for fix mean option in clock model panel
        for (ClockModelGroup clockModelGroup : options.clockModelOptions.getClockModelGroups()) {
            if (clockModelGroup.getRateTypeOption() == FixRateType.FIX_MEAN) {
                writer.writeOpenTag(CompoundParameterParser.COMPOUND_PARAMETER,
                        new Attribute[]{new Attribute.Default<String>(XMLParser.ID, clockModelGroup.getName())});
                for (PartitionClockModel model : options.getPartitionClockModels(clockModelGroup)) {
                    branchRatesModelGenerator.writeAllClockRateRefs(model, writer);
                }
                writer.writeCloseTag(CompoundParameterParser.COMPOUND_PARAMETER);
                writer.writeText("");
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
        // message fixed: was "generation is failed" - now consistent with the other sections
        throw new GeneratorException("Branch rates model generation has failed:\n" + e.getMessage());
    }

    //++++++++++++++++ Substitution Model & Site Model ++++++++++++++++++
    try {
        for (PartitionSubstitutionModel model : options.getPartitionSubstitutionModels()) {
            substitutionModelGenerator.writeSubstitutionSiteModel(model, writer);
            substitutionModelGenerator.writeAllMus(model, writer); // allMus
            writer.writeText("");
        }
        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_SUBSTITUTION_MODEL, writer);
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Substitution model or site model generation has failed:\n" + e.getMessage());
    }

    //++++++++++++++++ Site Model ++++++++++++++++++
    // (site models are written together with the substitution models above)
    generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_SITE_MODEL, writer);

    //++++++++++++++++ Tree Likelihood ++++++++++++++++++
    try {
        for (AbstractPartitionData partition : options.dataPartitions) {
            // generate tree likelihoods for alignment data partitions
            if (partition.getTaxonList() != null) {
                if (partition instanceof PartitionData) {
                    if (partition.getDataType().getType() != DataType.GENERAL &&
                            partition.getDataType().getType() != DataType.CONTINUOUS) {
                        treeLikelihoodGenerator.writeTreeLikelihood((PartitionData) partition, writer);
                        writer.writeText("");
                    }
                } else if (partition instanceof PartitionPattern) { // microsat
                    treeLikelihoodGenerator.writeTreeLikelihood((PartitionPattern) partition, writer);
                    writer.writeText("");
                } else {
                    // message fixed: was "Find unrecognized partition"
                    throw new GeneratorException("Found unrecognized partition:\n" + partition.getName());
                }
            }
        }
        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TREE_LIKELIHOOD, writer);
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Tree likelihood generation has failed:\n" + e.getMessage());
    }

    //++++++++++++++++ *BEAST ++++++++++++++++++
    if (options.useStarBEAST) {
        //++++++++++++++++ species ++++++++++++++++++
        try {
            starBeastGenerator.writeSpecies(writer);
        } catch (Exception e) {
            e.printStackTrace();
            throw new GeneratorException("*BEAST species section generation has failed:\n" + e.getMessage());
        }

        //++++++++++++++++ Species Sets ++++++++++++++++++
        List<Taxa> speciesSets = options.speciesSets;
        try {
            if (speciesSets != null && speciesSets.size() > 0) {
                tmrcaStatisticsGenerator.writeTaxonSets(writer, speciesSets);
            }
        } catch (Exception e) {
            e.printStackTrace();
            throw new GeneratorException("Species sets generation has failed:\n" + e.getMessage());
        }

        //++++++++++++++++ trees ++++++++++++++++++
        try {
            if (speciesSets != null && speciesSets.size() > 0) {
                starBeastGenerator.writeStartingTreeForCalibration(writer);
            }
            starBeastGenerator.writeSpeciesTree(writer, speciesSets != null && speciesSets.size() > 0);
        } catch (Exception e) {
            e.printStackTrace();
            throw new GeneratorException("*BEAST trees generation has failed:\n" + e.getMessage());
        }

        //++++++++++++++++ Statistics ++++++++++++++++++
        try {
            if (speciesSets != null && speciesSets.size() > 0) {
                tmrcaStatisticsGenerator.writeTMRCAStatistics(writer);
            }
        } catch (Exception e) {
            e.printStackTrace();
            throw new GeneratorException("*BEAST TMRCA statistics generation has failed:\n" + e.getMessage());
        }

        //++++++++++++++++ prior and likelihood ++++++++++++++++++
        try {
            starBeastGenerator.writeSTARBEAST(writer);
        } catch (Exception e) {
            e.printStackTrace();
            throw new GeneratorException("*BEAST trees section generation has failed:\n" + e.getMessage());
        }
    }

    generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TRAITS, writer);

    //++++++++++++++++ Operators ++++++++++++++++++
    try {
        List<Operator> operators = options.selectOperators();
        operatorsGenerator.writeOperatorSchedule(operators, writer);
        writer.writeText("");

        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_OPERATORS, writer);
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("Operators generation has failed:\n" + e.getMessage());
    }

    //++++++++++++++++ MCMC ++++++++++++++++++
    try {
        // XMLWriter writer, List<PartitionSubstitutionModel> models,
        writeMCMC(writer);
        writer.writeText("");

        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_MCMC, writer);
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("MCMC or log generation has failed:\n" + e.getMessage());
    }

    //++++++++++++++++ final reports ++++++++++++++++++
    try {
        writeTimerReport(writer);
        writer.writeText("");
        if (options.performTraceAnalysis) {
            writeTraceAnalysis(writer);
        }
        if (options.generateCSV) {
            for (PartitionTreePrior prior : options.getPartitionTreePriors()) {
                treePriorGenerator.writeEBSPAnalysisToCSVfile(prior, writer);
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new GeneratorException("The last part of XML generation has failed:\n" + e.getMessage());
    }

    writer.writeCloseTag("beast");
    writer.flush();
    writer.close();
}
/**
* Generate a taxa block from these beast options
*
* @param writer the writer
* @param taxonList the taxon list to write
* @throws dr.app.util.Arguments.ArgumentException
* ArgumentException
*/
private void writeTaxa(TaxonList taxonList, XMLWriter writer) throws Arguments.ArgumentException {
    writer.writeComment("The list of taxa to be analysed (can also include dates/ages).",
            "ntax=" + taxonList.getTaxonCount());
    writer.writeOpenTag(TaxaParser.TAXA, new Attribute[]{new Attribute.Default<String>(XMLParser.ID, TaxaParser.TAXA)});

    // Trait attributes are written for every taxon whenever any traits exist.
    final boolean hasTraits = options.traits.size() > 0;
    boolean unitsFixed = false;

    for (int i = 0; i < taxonList.getTaxonCount(); i++) {
        Taxon taxon = taxonList.getTaxon(i);

        // A date element is only emitted when tip calibration is in use
        // and this particular taxon carries a date attribute.
        boolean hasDate = options.clockModelOptions.isTipCalibrated()
                && TaxonList.Utils.hasAttribute(taxonList, i, dr.evolution.util.Date.DATE);

        if (hasDate) {
            dr.evolution.util.Date date = (dr.evolution.util.Date) taxon.getAttribute(dr.evolution.util.Date.DATE);
            if (!unitsFixed) {
                // The first dated taxon fixes the time units for the whole analysis.
                options.units = date.getUnits();
                unitsFixed = true;
            } else if (options.units != date.getUnits()) {
                System.err.println("Error: Units in dates do not match.");
            }
        }

        writeTaxon(taxon, hasDate, hasTraits, writer);
    }

    writer.writeCloseTag(TaxaParser.TAXA);
}
/**
 * Write a single taxon element, optionally with its sampling date and trait attributes.
 *
 * @param taxon   the taxon to write
 * @param hasDate whether a date element should be written for this taxon
 * @param hasAttr whether trait attribute elements should be written for this taxon
 * @param writer  the writer
 * @throws dr.app.util.Arguments.ArgumentException
 *          ArgumentException
 */
private void writeTaxon(Taxon taxon, boolean hasDate, boolean hasAttr, XMLWriter writer) throws Arguments.ArgumentException {
    // The third argument makes the tag self-closing when the taxon has no children.
    writer.writeTag(TaxonParser.TAXON, new Attribute[]{
            new Attribute.Default<String>(XMLParser.ID, taxon.getId())},
            !(hasDate || hasAttr)); // false if any of hasDate or hasAttr is true
    if (hasDate) {
        dr.evolution.util.Date date = (dr.evolution.util.Date) taxon.getAttribute(dr.evolution.util.Date.DATE);
        Attribute[] attributes;
        if (date.getPrecision() > 0.0) {
            // Dates with a positive precision get an extra "precision" attribute.
            attributes = new Attribute[] {
                    new Attribute.Default<Double>(DateParser.VALUE, date.getTimeValue()),
                    new Attribute.Default<String>(DateParser.DIRECTION, date.isBackwards() ? DateParser.BACKWARDS : DateParser.FORWARDS),
                    new Attribute.Default<String>(DateParser.UNITS, Units.Utils.getDefaultUnitName(options.units)),
                    new Attribute.Default<Double>(DateParser.PRECISION, date.getPrecision())
            };
        } else {
            attributes = new Attribute[] {
                    new Attribute.Default<Double>(DateParser.VALUE, date.getTimeValue()),
                    new Attribute.Default<String>(DateParser.DIRECTION, date.isBackwards() ? DateParser.BACKWARDS : DateParser.FORWARDS),
                    new Attribute.Default<String>(DateParser.UNITS, Units.Utils.getDefaultUnitName(options.units))
                    //new Attribute.Default("origin", date.getOrigin()+"")
            };
        }
        // Date element is always self-closing.
        writer.writeTag(dr.evolution.util.Date.DATE, attributes, true);
    }
    // Write one attribute element per trait, for every trait in the analysis.
    for (TraitData trait : options.traits) {
        // there is no harm in allowing the species trait to be listed in the taxa
        // if (!trait.getName().equalsIgnoreCase(TraitData.TRAIT_SPECIES)) {
        writer.writeOpenTag(AttributeParser.ATTRIBUTE, new Attribute[]{
                new Attribute.Default<String>(Attribute.NAME, trait.getName())});
        // denotes missing data using '?'
        writer.writeText(taxon.containsAttribute(trait.getName()) ? taxon.getAttribute(trait.getName()).toString() : "?");
        writer.writeCloseTag(AttributeParser.ATTRIBUTE);
    }
    // Let registered components insert their own XML inside the taxon element.
    generateInsertionPoint(ComponentGenerator.InsertionPoint.IN_TAXON, taxon, writer);
    // Close the element only if it was opened as a non-self-closing tag above.
    if (hasDate || hasAttr) writer.writeCloseTag(TaxonParser.TAXON);
}
/**
 * Writes a partition-specific taxa block containing idrefs back to the
 * taxa already declared in the main taxon list.
 */
public void writeDifferentTaxa(AbstractPartitionData dataPartition, XMLWriter writer) {
    final TaxonList taxa = dataPartition.getTaxonList();
    final String partitionName = dataPartition.getName();

    writer.writeComment("gene name = " + partitionName + ", ntax= " + taxa.getTaxonCount());
    writer.writeOpenTag(TaxaParser.TAXA,
            new Attribute[]{new Attribute.Default<String>(XMLParser.ID, partitionName + "." + TaxaParser.TAXA)});

    for (int i = 0; i < taxa.getTaxonCount(); i++) {
        // Microsatellite partitions may mask out individual taxa; those are skipped.
        boolean masked = dataPartition instanceof PartitionPattern
                && ((PartitionPattern) dataPartition).getPatterns().isMasked(i);
        if (!masked) {
            writer.writeIDref(TaxonParser.TAXON, taxa.getTaxon(i).getId());
        }
    }

    writer.writeCloseTag(TaxaParser.TAXA);
}
/**
* Write the timer report block.
*
* @param writer the writer
*/
public void writeTimerReport(XMLWriter writer) {
    // Emits: <report><property name="timer"><mcmc idref="mcmc"/></property></report>
    final String reportTag = "report";
    final String propertyTag = "property";

    writer.writeOpenTag(reportTag);
    writer.writeOpenTag(propertyTag, new Attribute.Default<String>("name", "timer"));
    writer.writeIDref("mcmc", "mcmc");
    writer.writeCloseTag(propertyTag);
    writer.writeCloseTag(reportTag);
}
/**
* Write the trace analysis block.
*
* @param writer the writer
*/
public void writeTraceAnalysis(XMLWriter writer) {
    // Self-closing <traceAnalysis fileName="..."/> element pointing at the MCMC log.
    Attribute fileNameAttr = new Attribute.Default<String>("fileName", options.logFileName);
    writer.writeTag("traceAnalysis", new Attribute[]{fileNameAttr}, true);
}
/**
* Write the MCMC block.
*
* @param writer XMLWriter
*/
public void writeMCMC(XMLWriter writer) {
    writer.writeComment("Define MCMC");
    // Attributes of the <mcmc> element itself.
    List<Attribute> attributes = new ArrayList<Attribute>();
    attributes.add(new Attribute.Default<String>(XMLParser.ID, "mcmc"));
    attributes.add(new Attribute.Default<Integer>("chainLength", options.chainLength));
    attributes.add(new Attribute.Default<String>("autoOptimize", options.autoOptimize ? "true" : "false"));
    if (options.operatorAnalysis) {
        attributes.add(new Attribute.Default<String>("operatorAnalysis", options.operatorAnalysisFileName));
    }
    writer.writeOpenTag("mcmc", attributes);
    // With data, the prior and likelihood are wrapped in a compound "posterior".
    if (options.hasData()) {
        writer.writeOpenTag(CompoundLikelihoodParser.POSTERIOR, new Attribute.Default<String>(XMLParser.ID, "posterior"));
    }
    // write prior block
    writer.writeOpenTag(CompoundLikelihoodParser.PRIOR, new Attribute.Default<String>(XMLParser.ID, "prior"));
    if (options.useStarBEAST) { // species
        // coalescent prior
        writer.writeIDref(MultiSpeciesCoalescentParser.SPECIES_COALESCENT, TraitData.TRAIT_SPECIES + "." + COALESCENT);
        // prior on population sizes
        // if (options.speciesTreePrior == TreePriorType.SPECIES_YULE) {
        writer.writeIDref(MixedDistributionLikelihoodParser.DISTRIBUTION_LIKELIHOOD, SPOPS);
        // } else {
        // writer.writeIDref(SpeciesTreeBMPrior.STPRIOR, STP);
        // prior on species tree
        writer.writeIDref(SpeciationLikelihoodParser.SPECIATION_LIKELIHOOD, SPECIATION_LIKE);
    }
    parameterPriorGenerator.writeParameterPriors(writer, options.useStarBEAST);
    // Per-tree-model references to the tree prior likelihoods written earlier.
    for (PartitionTreeModel model : options.getPartitionTreeModels()) {
        PartitionTreePrior prior = model.getPartitionTreePrior();
        treePriorGenerator.writePriorLikelihoodReference(prior, model, writer);
        writer.writeText("");
    }
    for (PartitionTreePrior prior : options.getPartitionTreePriors()) {
        treePriorGenerator.writeMultiLociLikelihoodReference(prior, writer);
        writer.writeText("");
    }
    // Component hook: extra prior terms from plugged-in components.
    generateInsertionPoint(ComponentGenerator.InsertionPoint.IN_MCMC_PRIOR, writer);
    writer.writeCloseTag(CompoundLikelihoodParser.PRIOR);
    if (options.hasData()) {
        // write likelihood block
        writer.writeOpenTag(CompoundLikelihoodParser.LIKELIHOOD, new Attribute.Default<String>(XMLParser.ID, "likelihood"));
        treeLikelihoodGenerator.writeTreeLikelihoodReferences(writer);
        branchRatesModelGenerator.writeClockLikelihoodReferences(writer);
        generateInsertionPoint(ComponentGenerator.InsertionPoint.IN_MCMC_LIKELIHOOD, writer);
        writer.writeCloseTag(CompoundLikelihoodParser.LIKELIHOOD);
        // Closes the "posterior" element opened above (only present when hasData()).
        writer.writeCloseTag(CompoundLikelihoodParser.POSTERIOR);
    }
    // Reference the operator schedule defined elsewhere in the document.
    writer.writeIDref(SimpleOperatorScheduleParser.OPERATOR_SCHEDULE, "operators");
    // write log to screen
    logGenerator.writeLogToScreen(writer, branchRatesModelGenerator, substitutionModelGenerator);
    // write log to file
    logGenerator.writeLogToFile(writer, treePriorGenerator, branchRatesModelGenerator,
            substitutionModelGenerator, treeLikelihoodGenerator);
    // write tree log to file
    logGenerator.writeTreeLogToFile(writer);
    writer.writeCloseTag("mcmc");
}
}
|
// $ANTLR 3.4 C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g 2017-11-21 09:15:55
package se.raddo.raddose3D.parser;
import se.raddo.raddose3D.*;
import java.util.Vector;
import java.util.HashMap;
import java.util.Map;
import org.antlr.runtime.*;
import java.util.Stack;
import java.util.List;
import java.util.ArrayList;
/** "Here's an initializer, here's an input file. Good luck and God's Speed." */
@SuppressWarnings({"all", "warnings", "unchecked"})
public class InputfileParser extends Parser {
// Token display names, indexed by token type. Generated by ANTLR from
// Inputfile.g; the values below must stay in sync with the lexer - do not edit by hand.
public static final String[] tokenNames = new String[] {
"<invalid>", "<EOR>", "<DOWN>", "<UP>", "ABSCOEFCALC", "ANGLEL", "ANGLEP", "ANGULARRESOLUTION", "AVERAGE", "BEAM", "CALCULATEFLESCAPE", "CALCULATEPEESCAPE", "CIRCULAR", "COLLIMATION", "COMMENT", "CONTAINERDENSITY", "CONTAINERMATERIALELEMENTS", "CONTAINERMATERIALMIXTURE", "CONTAINERMATERIALTYPE", "CONTAINERTHICKNESS", "CRYSTAL", "DDM", "DECAYPARAM", "DEFAULT", "DIFFRACTIONDECAYMODEL", "DIMENSION", "DUMMY", "ELEMENT", "ELEMENTAL", "ENERGY", "EXPONENT", "EXPOSURETIME", "FILE", "FLOAT", "FLRESOLUTION", "FLUX", "FWHM", "HORIZONTAL", "KEV", "LEAL", "LINEAR", "MATERIALELEMENTS", "MATERIALMIXTURE", "MATERIALTYPE", "MIXTURE", "MODELFILE", "NONE", "NUMDNA", "NUMMONOMERS", "NUMRESIDUES", "NUMRNA", "PDB", "PDBNAME", "PERESOLUTION", "PIXELSIZE", "PIXELSPERMICRON", "PROTEINCONC", "PROTEINCONCENTRATION", "PROTEINHEAVYATOMS", "RDFORTAN", "RDJAVA", "RECTANGULAR", "ROTAXBEAMOFFSET", "SAXS", "SAXSSEQ", "SEQFILE", "SEQUENCE", "SEQUENCEFILE", "SIMPLE", "SOLVENTFRACTION", "SOLVENTHEAVYCONC", "STARTOFFSET", "STRING", "TRANSLATEPERDEGREE", "TYPE", "UNITCELL", "VERTICAL", "WEDGE", "WIREFRAMETYPE", "WS"
};
// Token type constants generated by ANTLR; each value is an index into
// tokenNames. Do not edit by hand - regenerate from the grammar instead.
public static final int EOF=-1;
public static final int ABSCOEFCALC=4;
public static final int ANGLEL=5;
public static final int ANGLEP=6;
public static final int ANGULARRESOLUTION=7;
public static final int AVERAGE=8;
public static final int BEAM=9;
public static final int CALCULATEFLESCAPE=10;
public static final int CALCULATEPEESCAPE=11;
public static final int CIRCULAR=12;
public static final int COLLIMATION=13;
public static final int COMMENT=14;
public static final int CONTAINERDENSITY=15;
public static final int CONTAINERMATERIALELEMENTS=16;
public static final int CONTAINERMATERIALMIXTURE=17;
public static final int CONTAINERMATERIALTYPE=18;
public static final int CONTAINERTHICKNESS=19;
public static final int CRYSTAL=20;
public static final int DDM=21;
public static final int DECAYPARAM=22;
public static final int DEFAULT=23;
public static final int DIFFRACTIONDECAYMODEL=24;
public static final int DIMENSION=25;
public static final int DUMMY=26;
public static final int ELEMENT=27;
public static final int ELEMENTAL=28;
public static final int ENERGY=29;
public static final int EXPONENT=30;
public static final int EXPOSURETIME=31;
public static final int FILE=32;
public static final int FLOAT=33;
public static final int FLRESOLUTION=34;
public static final int FLUX=35;
public static final int FWHM=36;
public static final int HORIZONTAL=37;
public static final int KEV=38;
public static final int LEAL=39;
public static final int LINEAR=40;
public static final int MATERIALELEMENTS=41;
public static final int MATERIALMIXTURE=42;
public static final int MATERIALTYPE=43;
public static final int MIXTURE=44;
public static final int MODELFILE=45;
public static final int NONE=46;
public static final int NUMDNA=47;
public static final int NUMMONOMERS=48;
public static final int NUMRESIDUES=49;
public static final int NUMRNA=50;
public static final int PDB=51;
public static final int PDBNAME=52;
public static final int PERESOLUTION=53;
public static final int PIXELSIZE=54;
public static final int PIXELSPERMICRON=55;
public static final int PROTEINCONC=56;
public static final int PROTEINCONCENTRATION=57;
public static final int PROTEINHEAVYATOMS=58;
public static final int RDFORTAN=59;
public static final int RDJAVA=60;
public static final int RECTANGULAR=61;
public static final int ROTAXBEAMOFFSET=62;
public static final int SAXS=63;
public static final int SAXSSEQ=64;
public static final int SEQFILE=65;
public static final int SEQUENCE=66;
public static final int SEQUENCEFILE=67;
public static final int SIMPLE=68;
public static final int SOLVENTFRACTION=69;
public static final int SOLVENTHEAVYCONC=70;
public static final int STARTOFFSET=71;
public static final int STRING=72;
public static final int TRANSLATEPERDEGREE=73;
public static final int TYPE=74;
public static final int UNITCELL=75;
public static final int VERTICAL=76;
public static final int WEDGE=77;
public static final int WIREFRAMETYPE=78;
public static final int WS=79;
// delegates
/** This parser has no delegate grammars, so the delegate list is empty. */
public Parser[] getDelegates() {
    return new Parser[0];
}
// delegators
// Convenience constructor: creates the parser with a fresh recognizer state.
public InputfileParser(TokenStream input) {
this(input, new RecognizerSharedState());
}
// Full constructor: allows sharing recognizer state between parsers.
public InputfileParser(TokenStream input, RecognizerSharedState state) {
super(input, state);
}
/** Returns the token display names used in error messages. */
public String[] getTokenNames() { return InputfileParser.tokenNames; }
// NOTE(review): absolute path baked in by the ANTLR code generator on the
// original author's machine; only used in diagnostics.
public String getGrammarFileName() { return "C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g"; }
// Collaborators injected by the caller (via the setters below) before parsing begins.
private BeamFactory beamFactory = null;
private CrystalFactory crystalFactory = null;
private Initializer raddoseInitializer = null;
// Accumulates parse error messages instead of printing them; see emitErrorMessage/getErrors.
private Vector<String> parsingErrors = new Vector<String>();
/** Injects the initializer that receives each parsed crystal, beam and wedge. */
public void setInitializer(Initializer initializer) {
    this.raddoseInitializer = initializer;
}
/** Injects the factory used to construct Beam objects from parsed properties. */
public void setBeamFactory(BeamFactory factory) {
    this.beamFactory = factory;
}
/** Injects the factory used to construct Crystal objects from parsed properties. */
public void setCrystalFactory(CrystalFactory factory) {
    this.crystalFactory = factory;
}
/**
 * Returns every error message collected since the previous call,
 * clearing the internal buffer as a side effect.
 */
public Vector<String> getErrors() {
    Vector<String> collected = parsingErrors;
    parsingErrors = new Vector<String>();
    return collected;
}
/**
 * Collects the message instead of printing it to stderr (the ANTLR default),
 * so callers can retrieve all errors afterwards via {@link #getErrors()}.
 */
@Override // overrides org.antlr.runtime.BaseRecognizer.emitErrorMessage
public void emitErrorMessage(String msg) {
    parsingErrors.add(msg);
}
// $ANTLR start "configfile"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:41:1: configfile : (a= crystal |b= wedge |c= beam )* EOF ;
public final void configfile() throws RecognitionException {
Crystal a =null;
Wedge b =null;
Beam c =null;
try {
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:41:11: ( (a= crystal |b= wedge |c= beam )* EOF )
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:42:11: (a= crystal |b= wedge |c= beam )* EOF
{
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:42:11: (a= crystal |b= wedge |c= beam )*
loop1:
do {
int alt1=4;
switch ( input.LA(1) ) {
case CRYSTAL:
{
alt1=1;
}
break;
case WEDGE:
{
alt1=2;
}
break;
case BEAM:
{
alt1=3;
}
break;
}
switch (alt1) {
case 1 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:42:13: a= crystal
{
pushFollow(FOLLOW_crystal_in_configfile47);
a=crystal();
state._fsp
raddoseInitializer.setCrystal(a);
}
break;
case 2 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:43:13: b= wedge
{
pushFollow(FOLLOW_wedge_in_configfile65);
b=wedge();
state._fsp
raddoseInitializer.exposeWedge(b);
}
break;
case 3 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:44:13: c= beam
{
pushFollow(FOLLOW_beam_in_configfile85);
c=beam();
state._fsp
raddoseInitializer.setBeam(c);
}
break;
default :
break loop1;
}
} while (true);
match(input,EOF,FOLLOW_EOF_in_configfile105);
}
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
// do for sure before leaving
}
return ;
}
// $ANTLR end "configfile"
// Dynamic scope for the "crystal" rule: crystalLine alternatives write into
// the fields below while crystal() is on the stack; crystal() then assembles
// the CoefCalc / DDM / Container objects and the final property map from them.
protected static class crystal_scope {
String crystalType;
// CoefCalc selection code (see crystalCoefcalcKeyword) and the object built from it.
int crystalCoefCalc;
CoefCalc crystalCoefCalcClass;
// DDM selection code (1 = Simple, 2 = Linear, 3 = Leal; see crystalDDMKeyword).
int crystalDdm;
DDM crystalDdmClass;
// Container material selection code and the object built from it.
int crystalContainerMaterial;
Container crystalContainerMaterialClass;
// Leal decay-model parameters (DECAYPARAM line).
Double gammaParam;
Double b0Param;
Double betaParam;
// Container description (mixture string, or elemental composition lists).
String containerMixture;
Double containerThickness;
Double containerDensity;
List<String> containerElementNames;
List<Double> containerElementNums;
// Composition inputs for the various CoefCalc flavours.
String pdb;
String seqFile;
Double proteinConc;
// Unit cell dimensions (A, B, C) and angles (alpha, beta, gamma).
Double cellA;
Double cellB;
Double cellC;
Double cellAl;
Double cellBe;
Double cellGa;
int numMon;
int numRes;
int numRNA;
int numDNA;
// Heavy-atom content: per-protein atoms and solvent concentrations.
List<String> heavyProteinAtomNames;
List<Double> heavyProteinAtomNums;
List<String> heavySolutionConcNames;
List<Double> heavySolutionConcNums;
Double solFrac;
// Accumulated key/value properties passed to CrystalFactory.createCrystal().
HashMap<Object, Object> crystalProperties;
}
// Scope stack for the "crystal" rule: crystal() pushes a fresh crystal_scope
// on entry and pops it in its finally block.
protected Stack<InputfileParser.crystal_scope> crystal_stack = new Stack<>();
// $ANTLR start "crystal"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:49:1: crystal returns [Crystal cObj] : CRYSTAL ( crystalLine )+ ;
public final Crystal crystal() throws RecognitionException {
crystal_stack.push(new crystal_scope());
Crystal cObj = null;
(crystal_stack.peek()).crystalCoefCalc = 2; // 0 = error, 1 = Simple, 2 = DEFAULT, 3 = RDV2, 4 = PDB, 5 = SAXS
(crystal_stack.peek()).crystalProperties = new HashMap<Object, Object>();
try {
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:186:2: ( CRYSTAL ( crystalLine )+ )
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:186:4: CRYSTAL ( crystalLine )+
{
match(input,CRYSTAL,FOLLOW_CRYSTAL_in_crystal134);
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:186:12: ( crystalLine )+
int cnt2=0;
loop2:
do {
int alt2=2;
int LA2_0 = input.LA(1);
if ( ((LA2_0 >= ABSCOEFCALC && LA2_0 <= ANGLEP)||(LA2_0 >= CALCULATEFLESCAPE && LA2_0 <= CALCULATEPEESCAPE)||(LA2_0 >= CONTAINERDENSITY && LA2_0 <= CONTAINERTHICKNESS)||(LA2_0 >= DDM && LA2_0 <= DECAYPARAM)||(LA2_0 >= DIFFRACTIONDECAYMODEL && LA2_0 <= DIMENSION)||LA2_0==FLRESOLUTION||(LA2_0 >= MATERIALELEMENTS && LA2_0 <= MATERIALTYPE)||LA2_0==MODELFILE||(LA2_0 >= NUMDNA && LA2_0 <= NUMRNA)||(LA2_0 >= PDBNAME && LA2_0 <= PERESOLUTION)||(LA2_0 >= PIXELSPERMICRON && LA2_0 <= PROTEINHEAVYATOMS)||LA2_0==SEQFILE||LA2_0==SEQUENCEFILE||(LA2_0 >= SOLVENTFRACTION && LA2_0 <= SOLVENTHEAVYCONC)||(LA2_0 >= TYPE && LA2_0 <= UNITCELL)||LA2_0==WIREFRAMETYPE) ) {
alt2=1;
}
switch (alt2) {
case 1 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:186:12: crystalLine
{
pushFollow(FOLLOW_crystalLine_in_crystal136);
crystalLine();
state._fsp
}
break;
default :
if ( cnt2 >= 1 ) break loop2;
EarlyExitException eee =
new EarlyExitException(2, input);
throw eee;
}
cnt2++;
} while (true);
}
if ((crystal_stack.peek()).crystalCoefCalc == 1) {
(crystal_stack.peek()).crystalCoefCalcClass = new CoefCalcAverage();
}
if ((crystal_stack.peek()).crystalCoefCalc == 2)
{
(crystal_stack.peek()).crystalCoefCalcClass = new CoefCalcFromParams((crystal_stack.peek()).cellA, (crystal_stack.peek()).cellB, (crystal_stack.peek()).cellC, (crystal_stack.peek()).cellAl, (crystal_stack.peek()).cellBe, (crystal_stack.peek()).cellGa,
(crystal_stack.peek()).numMon, (crystal_stack.peek()).numRes, (crystal_stack.peek()).numRNA, (crystal_stack.peek()).numDNA,
(crystal_stack.peek()).heavyProteinAtomNames, (crystal_stack.peek()).heavyProteinAtomNums,
(crystal_stack.peek()).heavySolutionConcNames, (crystal_stack.peek()).heavySolutionConcNums,
(crystal_stack.peek()).solFrac);
}
if ((crystal_stack.peek()).crystalCoefCalc == 3) {
(crystal_stack.peek()).crystalCoefCalcClass = new CoefCalcRaddose((crystal_stack.peek()).cellA, (crystal_stack.peek()).cellB, (crystal_stack.peek()).cellC, (crystal_stack.peek()).cellAl, (crystal_stack.peek()).cellBe, (crystal_stack.peek()).cellGa,
(crystal_stack.peek()).numMon, (crystal_stack.peek()).numRes, (crystal_stack.peek()).numRNA, (crystal_stack.peek()).numDNA,
(crystal_stack.peek()).heavyProteinAtomNames, (crystal_stack.peek()).heavyProteinAtomNums,
(crystal_stack.peek()).heavySolutionConcNames, (crystal_stack.peek()).heavySolutionConcNums,
(crystal_stack.peek()).solFrac);
}
if ((crystal_stack.peek()).crystalCoefCalc == 4)
{
if ((crystal_stack.peek()).heavySolutionConcNames != null)
(crystal_stack.peek()).crystalCoefCalcClass = new CoefCalcFromPDB((crystal_stack.peek()).pdb, (crystal_stack.peek()).heavySolutionConcNames, (crystal_stack.peek()).heavySolutionConcNums);
else
(crystal_stack.peek()).crystalCoefCalcClass = new CoefCalcFromPDB((crystal_stack.peek()).pdb);
}
if ((crystal_stack.peek()).crystalCoefCalc == 5)
{
(crystal_stack.peek()).crystalCoefCalcClass = new CoefCalcSAXS((crystal_stack.peek()).cellA, (crystal_stack.peek()).cellB, (crystal_stack.peek()).cellC, (crystal_stack.peek()).cellAl, (crystal_stack.peek()).cellBe, (crystal_stack.peek()).cellGa,
(crystal_stack.peek()).numRes, (crystal_stack.peek()).numRNA, (crystal_stack.peek()).numDNA,
(crystal_stack.peek()).heavyProteinAtomNames, (crystal_stack.peek()).heavyProteinAtomNums,
(crystal_stack.peek()).heavySolutionConcNames, (crystal_stack.peek()).heavySolutionConcNums,
(crystal_stack.peek()).solFrac, (crystal_stack.peek()).proteinConc);
}
if ((crystal_stack.peek()).crystalCoefCalc == 6)
{
(crystal_stack.peek()).crystalCoefCalcClass = new CoefCalcFromSequence((crystal_stack.peek()).cellA, (crystal_stack.peek()).cellB, (crystal_stack.peek()).cellC, (crystal_stack.peek()).cellAl, (crystal_stack.peek()).cellBe, (crystal_stack.peek()).cellGa,
(crystal_stack.peek()).numMon,
(crystal_stack.peek()).heavyProteinAtomNames, (crystal_stack.peek()).heavyProteinAtomNums,
(crystal_stack.peek()).heavySolutionConcNames, (crystal_stack.peek()).heavySolutionConcNums,
(crystal_stack.peek()).solFrac, (crystal_stack.peek()).seqFile);
}
if ((crystal_stack.peek()).crystalCoefCalc == 7)
{
(crystal_stack.peek()).crystalCoefCalcClass = new CoefCalcFromSequenceSAXS((crystal_stack.peek()).cellA, (crystal_stack.peek()).cellB, (crystal_stack.peek()).cellC, (crystal_stack.peek()).cellAl, (crystal_stack.peek()).cellBe, (crystal_stack.peek()).cellGa,
(crystal_stack.peek()).heavyProteinAtomNames, (crystal_stack.peek()).heavyProteinAtomNums,
(crystal_stack.peek()).heavySolutionConcNames, (crystal_stack.peek()).heavySolutionConcNums,
(crystal_stack.peek()).solFrac, (crystal_stack.peek()).proteinConc, (crystal_stack.peek()).seqFile);
}
(crystal_stack.peek()).crystalProperties.put(Crystal.CRYSTAL_COEFCALC, (crystal_stack.peek()).crystalCoefCalcClass);
if ((crystal_stack.peek()).crystalDdm == 1)
{
(crystal_stack.peek()).crystalDdmClass = new DDMSimple();
}
if ((crystal_stack.peek()).crystalDdm == 2)
{
(crystal_stack.peek()).crystalDdmClass = new DDMLinear();
}
if ((crystal_stack.peek()).crystalDdm == 3)
{
(crystal_stack.peek()).crystalDdmClass = new DDMLeal((crystal_stack.peek()).gammaParam, (crystal_stack.peek()).b0Param, (crystal_stack.peek()).betaParam);
}
(crystal_stack.peek()).crystalProperties.put(Crystal.CRYSTAL_DDM, (crystal_stack.peek()).crystalDdmClass);
if ((crystal_stack.peek()).crystalContainerMaterial == 1)
{
(crystal_stack.peek()).crystalContainerMaterialClass = new ContainerTransparent();
}
if ((crystal_stack.peek()).crystalContainerMaterial == 2)
{
(crystal_stack.peek()).crystalContainerMaterialClass = new ContainerMixture((crystal_stack.peek()).containerThickness, (crystal_stack.peek()).containerDensity, (crystal_stack.peek()).containerMixture);
}
if ((crystal_stack.peek()).crystalContainerMaterial == 3)
{
(crystal_stack.peek()).crystalContainerMaterialClass = new ContainerElemental((crystal_stack.peek()).containerThickness, (crystal_stack.peek()).containerDensity, (crystal_stack.peek()).containerElementNames,
(crystal_stack.peek()).containerElementNums);
}
(crystal_stack.peek()).crystalProperties.put(Crystal.CRYSTAL_CONTAINER, (crystal_stack.peek()).crystalContainerMaterialClass);
cObj = crystalFactory.createCrystal((crystal_stack.peek()).crystalType, (crystal_stack.peek()).crystalProperties);
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
// do for sure before leaving
crystal_stack.pop();
}
return cObj;
}
// $ANTLR end "crystal"
// $ANTLR start "crystalLine"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:189:1: crystalLine : (a= crystalType |b= crystalDDM |c= crystalCoefcalc |d= crystalDim |e= crystalPPM |f= crystalAngP |g= crystalAngL |h= crystalDecayParam |i= containerThickness |j= containerDensity |k= crystalContainerMaterial |l= containerMaterialMixture |m= unitcell |n= nummonomers |o= numresidues |p= numRNA |q= numDNA |r= heavyProteinAtoms |s= heavySolutionConc |t= solventFraction |u= pdb |v= wireframeType |w= modelFile |x= calculatePEEscape |y= proteinConcentration |z= containerMaterialElements |aa= sequenceFile |bb= calculateFLEscape |cc= flResolution |dd= peResolution );
public final void crystalLine() throws RecognitionException {
String a =null;
int b =0;
int c =0;
Map<Object, Object> d =null;
double e =0.0;
double f =0.0;
double g =0.0;
InputfileParser.crystalDecayParam_return h =null;
double i =0.0;
double j =0.0;
int k =0;
String l =null;
InputfileParser.unitcell_return m =null;
int n =0;
int o =0;
int p =0;
int q =0;
InputfileParser.heavyProteinAtoms_return r =null;
InputfileParser.heavySolutionConc_return s =null;
double t =0.0;
String u =null;
String v =null;
String w =null;
String x =null;
Double y =null;
InputfileParser.containerMaterialElements_return z =null;
String aa =null;
String bb =null;
int cc =0;
int dd =0;
try {
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:190:2: (a= crystalType |b= crystalDDM |c= crystalCoefcalc |d= crystalDim |e= crystalPPM |f= crystalAngP |g= crystalAngL |h= crystalDecayParam |i= containerThickness |j= containerDensity |k= crystalContainerMaterial |l= containerMaterialMixture |m= unitcell |n= nummonomers |o= numresidues |p= numRNA |q= numDNA |r= heavyProteinAtoms |s= heavySolutionConc |t= solventFraction |u= pdb |v= wireframeType |w= modelFile |x= calculatePEEscape |y= proteinConcentration |z= containerMaterialElements |aa= sequenceFile |bb= calculateFLEscape |cc= flResolution |dd= peResolution )
int alt3=30;
switch ( input.LA(1) ) {
case TYPE:
{
alt3=1;
}
break;
case DDM:
case DIFFRACTIONDECAYMODEL:
{
alt3=2;
}
break;
case ABSCOEFCALC:
{
alt3=3;
}
break;
case DIMENSION:
{
alt3=4;
}
break;
case PIXELSPERMICRON:
{
alt3=5;
}
break;
case ANGLEP:
{
alt3=6;
}
break;
case ANGLEL:
{
alt3=7;
}
break;
case DECAYPARAM:
{
alt3=8;
}
break;
case CONTAINERTHICKNESS:
{
alt3=9;
}
break;
case CONTAINERDENSITY:
{
alt3=10;
}
break;
case CONTAINERMATERIALTYPE:
case MATERIALTYPE:
{
alt3=11;
}
break;
case CONTAINERMATERIALMIXTURE:
case MATERIALMIXTURE:
{
alt3=12;
}
break;
case UNITCELL:
{
alt3=13;
}
break;
case NUMMONOMERS:
{
alt3=14;
}
break;
case NUMRESIDUES:
{
alt3=15;
}
break;
case NUMRNA:
{
alt3=16;
}
break;
case NUMDNA:
{
alt3=17;
}
break;
case PROTEINHEAVYATOMS:
{
alt3=18;
}
break;
case SOLVENTHEAVYCONC:
{
alt3=19;
}
break;
case SOLVENTFRACTION:
{
alt3=20;
}
break;
case PDBNAME:
{
alt3=21;
}
break;
case WIREFRAMETYPE:
{
alt3=22;
}
break;
case MODELFILE:
{
alt3=23;
}
break;
case CALCULATEPEESCAPE:
{
alt3=24;
}
break;
case PROTEINCONC:
case PROTEINCONCENTRATION:
{
alt3=25;
}
break;
case CONTAINERMATERIALELEMENTS:
case MATERIALELEMENTS:
{
alt3=26;
}
break;
case SEQFILE:
case SEQUENCEFILE:
{
alt3=27;
}
break;
case CALCULATEFLESCAPE:
{
alt3=28;
}
break;
case FLRESOLUTION:
{
alt3=29;
}
break;
case PERESOLUTION:
{
alt3=30;
}
break;
default:
NoViableAltException nvae =
new NoViableAltException("", 3, 0, input);
throw nvae;
}
switch (alt3) {
case 1 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:190:4: a= crystalType
{
pushFollow(FOLLOW_crystalType_in_crystalLine192);
a=crystalType();
state._fsp
(crystal_stack.peek()).crystalType = a;
}
break;
case 2 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:191:4: b= crystalDDM
{
pushFollow(FOLLOW_crystalDDM_in_crystalLine203);
b=crystalDDM();
state._fsp
(crystal_stack.peek()).crystalDdm = b;
}
break;
case 3 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:192:4: c= crystalCoefcalc
{
pushFollow(FOLLOW_crystalCoefcalc_in_crystalLine215);
c=crystalCoefcalc();
state._fsp
(crystal_stack.peek()).crystalCoefCalc = c;
}
break;
case 4 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:193:4: d= crystalDim
{
pushFollow(FOLLOW_crystalDim_in_crystalLine225);
d=crystalDim();
state._fsp
if (d != null) {
(crystal_stack.peek()).crystalProperties.putAll(d);
};
}
break;
case 5 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:196:4: e= crystalPPM
{
pushFollow(FOLLOW_crystalPPM_in_crystalLine236);
e=crystalPPM();
state._fsp
(crystal_stack.peek()).crystalProperties.put(Crystal.CRYSTAL_RESOLUTION, e);
}
break;
case 6 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:197:4: f= crystalAngP
{
pushFollow(FOLLOW_crystalAngP_in_crystalLine247);
f=crystalAngP();
state._fsp
(crystal_stack.peek()).crystalProperties.put(Crystal.CRYSTAL_ANGLE_P, f);
}
break;
case 7 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:198:4: g= crystalAngL
{
pushFollow(FOLLOW_crystalAngL_in_crystalLine258);
g=crystalAngL();
state._fsp
(crystal_stack.peek()).crystalProperties.put(Crystal.CRYSTAL_ANGLE_L, g);
}
break;
case 8 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:199:4: h= crystalDecayParam
{
pushFollow(FOLLOW_crystalDecayParam_in_crystalLine269);
h=crystalDecayParam();
state._fsp
(crystal_stack.peek()).gammaParam = (h!=null?h.gammaParam:null);
(crystal_stack.peek()).b0Param = (h!=null?h.b0Param:null);
(crystal_stack.peek()).betaParam = (h!=null?h.betaParam:null);
}
break;
case 9 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:202:4: i= containerThickness
{
pushFollow(FOLLOW_containerThickness_in_crystalLine279);
i=containerThickness();
state._fsp
(crystal_stack.peek()).containerThickness = i;
}
break;
case 10 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:203:4: j= containerDensity
{
pushFollow(FOLLOW_containerDensity_in_crystalLine289);
j=containerDensity();
state._fsp
(crystal_stack.peek()).containerDensity = j;
}
break;
case 11 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:204:4: k= crystalContainerMaterial
{
pushFollow(FOLLOW_crystalContainerMaterial_in_crystalLine299);
k=crystalContainerMaterial();
state._fsp
(crystal_stack.peek()).crystalContainerMaterial = k;
}
break;
case 12 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:205:4: l= containerMaterialMixture
{
pushFollow(FOLLOW_containerMaterialMixture_in_crystalLine308);
l=containerMaterialMixture();
state._fsp
(crystal_stack.peek()).containerMixture = l;
}
break;
case 13 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:206:4: m= unitcell
{
pushFollow(FOLLOW_unitcell_in_crystalLine317);
m=unitcell();
state._fsp
(crystal_stack.peek()).cellA = (m!=null?m.dimA:null);
(crystal_stack.peek()).cellB = (m!=null?m.dimB:null);
(crystal_stack.peek()).cellC = (m!=null?m.dimC:null);
(crystal_stack.peek()).cellAl = (m!=null?m.angA:null);
(crystal_stack.peek()).cellBe = (m!=null?m.angB:null);
(crystal_stack.peek()).cellGa = (m!=null?m.angC:null);
}
break;
case 14 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:212:4: n= nummonomers
{
pushFollow(FOLLOW_nummonomers_in_crystalLine328);
n=nummonomers();
state._fsp
(crystal_stack.peek()).numMon = n;
}
break;
case 15 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:213:4: o= numresidues
{
pushFollow(FOLLOW_numresidues_in_crystalLine339);
o=numresidues();
state._fsp
(crystal_stack.peek()).numRes = o;
}
break;
case 16 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:214:4: p= numRNA
{
pushFollow(FOLLOW_numRNA_in_crystalLine350);
p=numRNA();
state._fsp
(crystal_stack.peek()).numRNA = p;
}
break;
case 17 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:215:4: q= numDNA
{
pushFollow(FOLLOW_numDNA_in_crystalLine363);
q=numDNA();
state._fsp
(crystal_stack.peek()).numDNA = q;
}
break;
case 18 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:216:4: r= heavyProteinAtoms
{
pushFollow(FOLLOW_heavyProteinAtoms_in_crystalLine376);
r=heavyProteinAtoms();
state._fsp
(crystal_stack.peek()).heavyProteinAtomNames = (r!=null?r.names:null);
(crystal_stack.peek()).heavyProteinAtomNums = (r!=null?r.num:null);
}
break;
case 19 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:218:4: s= heavySolutionConc
{
pushFollow(FOLLOW_heavySolutionConc_in_crystalLine385);
s=heavySolutionConc();
state._fsp
(crystal_stack.peek()).heavySolutionConcNames = (s!=null?s.names:null);
(crystal_stack.peek()).heavySolutionConcNums = (s!=null?s.num:null);
}
break;
case 20 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:220:4: t= solventFraction
{
pushFollow(FOLLOW_solventFraction_in_crystalLine394);
t=solventFraction();
state._fsp
(crystal_stack.peek()).solFrac = t;
}
break;
case 21 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:221:4: u= pdb
{
pushFollow(FOLLOW_pdb_in_crystalLine404);
u=pdb();
state._fsp
(crystal_stack.peek()).pdb = u;
}
break;
case 22 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:222:4: v= wireframeType
{
pushFollow(FOLLOW_wireframeType_in_crystalLine417);
v=wireframeType();
state._fsp
(crystal_stack.peek()).crystalProperties.put(Crystal.CRYSTAL_WIREFRAME_TYPE, v);
}
break;
case 23 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:223:4: w= modelFile
{
pushFollow(FOLLOW_modelFile_in_crystalLine428);
w=modelFile();
state._fsp
(crystal_stack.peek()).crystalProperties.put(Crystal.CRYSTAL_WIREFRAME_FILE, w);
}
break;
case 24 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:224:4: x= calculatePEEscape
{
pushFollow(FOLLOW_calculatePEEscape_in_crystalLine440);
x=calculatePEEscape();
state._fsp
(crystal_stack.peek()).crystalProperties.put(Crystal.CRYSTAL_ELECTRON_ESCAPE, x);
}
break;
case 25 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:225:4: y= proteinConcentration
{
pushFollow(FOLLOW_proteinConcentration_in_crystalLine450);
y=proteinConcentration();
state._fsp
(crystal_stack.peek()).proteinConc = y;
}
break;
case 26 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:226:4: z= containerMaterialElements
{
pushFollow(FOLLOW_containerMaterialElements_in_crystalLine459);
z=containerMaterialElements();
state._fsp
(crystal_stack.peek()).containerElementNames = (z!=null?z.names:null);
(crystal_stack.peek()).containerElementNums = (z!=null?z.num:null);
}
break;
case 27 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:228:4: aa= sequenceFile
{
pushFollow(FOLLOW_sequenceFile_in_crystalLine468);
aa=sequenceFile();
state._fsp
(crystal_stack.peek()).seqFile = aa;
}
break;
case 28 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:230:4: bb= calculateFLEscape
{
pushFollow(FOLLOW_calculateFLEscape_in_crystalLine481);
bb=calculateFLEscape();
state._fsp
(crystal_stack.peek()).crystalProperties.put(Crystal.CRYSTAL_FLUORESCENT_ESCAPE, bb);
}
break;
case 29 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:231:4: cc= flResolution
{
pushFollow(FOLLOW_flResolution_in_crystalLine491);
cc=flResolution();
state._fsp
(crystal_stack.peek()).crystalProperties.put(Crystal.CRYSTAL_FLUORESCENT_RESOLUTION, cc);
}
break;
case 30 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:232:4: dd= peResolution
{
pushFollow(FOLLOW_peResolution_in_crystalLine502);
dd=peResolution();
state._fsp
(crystal_stack.peek()).crystalProperties.put(Crystal.CRYSTAL_PHOTOELECTRON_RESOLUTION, dd);
}
break;
}
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
// do for sure before leaving
}
return ;
}
// $ANTLR end "crystalLine"
// $ANTLR start "crystalType"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:236:1: crystalType returns [String crystalType] : TYPE e= STRING ;
// Rule: crystalType : TYPE e= STRING.
// Returns the crystal-type name text, or null if recognition failed
// (error recovery leaves the default return value in place).
public final String crystalType() throws RecognitionException {
String crystalType = null;
Token e=null;
try {
// ( TYPE e= STRING )
// TYPE e= STRING
{
match(input,TYPE,FOLLOW_TYPE_in_crystalType523);
e=(Token)match(input,STRING,FOLLOW_STRING_in_crystalType527);
// Null-guard: match() can return null during backtracking/recovery.
crystalType = (e!=null?e.getText():null);
}
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
// do for sure before leaving
}
return crystalType;
}
// $ANTLR end "crystalType"
// $ANTLR start "crystalDDM"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:240:1: crystalDDM returns [int value] : ( DIFFRACTIONDECAYMODEL | DDM ) e= crystalDDMKeyword ;
/**
 * Rule: crystalDDM : (DIFFRACTIONDECAYMODEL | DDM) crystalDDMKeyword.
 * Accepts either keyword spelling, then returns the model code chosen by
 * crystalDDMKeyword (1 = Simple, 2 = Linear, 3 = Leal).
 *
 * FIX: restored the truncated "state._fsp--;" after the
 * crystalDDMKeyword() sub-rule call (a bare "state._fsp" does not compile).
 *
 * @return the DDM selection code, or 0 if recognition failed.
 */
public final int crystalDDM() throws RecognitionException {
    int value = 0;
    int e = 0;
    try {
        // ( DIFFRACTIONDECAYMODEL | DDM ) e= crystalDDMKeyword
        {
            if ( input.LA(1) == DDM || input.LA(1) == DIFFRACTIONDECAYMODEL ) {
                input.consume();
                state.errorRecovery = false;
            }
            else {
                MismatchedSetException mse = new MismatchedSetException(null, input);
                throw mse;
            }
            pushFollow(FOLLOW_crystalDDMKeyword_in_crystalDDM581);
            e = crystalDDMKeyword();
            state._fsp--;   // pop follow-set stack (restored)
            value = e;
        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input, re);
    }
    finally {
        // do for sure before leaving
    }
    return value;
}
// $ANTLR end "crystalDDM"
// $ANTLR start "crystalDDMKeyword"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:244:1: crystalDDMKeyword returns [int value] : ( SIMPLE | LINEAR | LEAL );
// Rule: crystalDDMKeyword : SIMPLE | LINEAR | LEAL.
// Maps the decay-model keyword to its selection code:
// SIMPLE -> 1, LINEAR -> 2, LEAL -> 3; returns 0 on recognition failure.
public final int crystalDDMKeyword() throws RecognitionException {
int value = 0;
try {
// ( SIMPLE | LINEAR | LEAL )
int alt4=3;
switch ( input.LA(1) ) {
case SIMPLE:
{
alt4=1;
}
break;
case LINEAR:
{
alt4=2;
}
break;
case LEAL:
{
alt4=3;
}
break;
default:
NoViableAltException nvae =
new NoViableAltException("", 4, 0, input);
throw nvae;
}
switch (alt4) {
case 1 :
// SIMPLE
{
match(input,SIMPLE,FOLLOW_SIMPLE_in_crystalDDMKeyword729);
value = 1;
}
break;
case 2 :
// LINEAR
{
match(input,LINEAR,FOLLOW_LINEAR_in_crystalDDMKeyword736);
value = 2;
}
break;
case 3 :
// LEAL
{
match(input,LEAL,FOLLOW_LEAL_in_crystalDDMKeyword743);
value = 3;
}
break;
}
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
// do for sure before leaving
}
return value;
}
// $ANTLR end "crystalDDMKeyword"
// Return scope for the crystalDecayParam rule: the three parameters of the
// Leal diffraction-decay model (consumed by DDMLeal in crystal()).
public static class crystalDecayParam_return extends ParserRuleReturnScope {
public Double gammaParam;
public Double b0Param;
public Double betaParam;
};
// $ANTLR start "crystalDecayParam"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:253:1: crystalDecayParam returns [Double gammaParam, Double b0Param, Double betaParam] : DECAYPARAM a= FLOAT b= FLOAT c= FLOAT ;
// Rule: crystalDecayParam : DECAYPARAM a= FLOAT b= FLOAT c= FLOAT.
// Parses the three Leal decay-model parameters (gamma, b0, beta) into the
// return scope. NOTE(review): Double.parseDouble is fed the ternary's null
// branch if a match token is null — that would NPE; presumably match() never
// returns null on this path, but confirm against the ANTLR runtime.
public final InputfileParser.crystalDecayParam_return crystalDecayParam() throws RecognitionException {
InputfileParser.crystalDecayParam_return retval = new InputfileParser.crystalDecayParam_return();
retval.start = input.LT(1);
Token a=null;
Token b=null;
Token c=null;
try {
// ( DECAYPARAM a= FLOAT b= FLOAT c= FLOAT )
// DECAYPARAM a= FLOAT b= FLOAT c= FLOAT
{
match(input,DECAYPARAM,FOLLOW_DECAYPARAM_in_crystalDecayParam863);
a=(Token)match(input,FLOAT,FOLLOW_FLOAT_in_crystalDecayParam867);
b=(Token)match(input,FLOAT,FOLLOW_FLOAT_in_crystalDecayParam871);
c=(Token)match(input,FLOAT,FOLLOW_FLOAT_in_crystalDecayParam875);
retval.gammaParam = Double.parseDouble((a!=null?a.getText():null)); retval.b0Param = Double.parseDouble((b!=null?b.getText():null)); retval.betaParam = Double.parseDouble((c!=null?c.getText():null));
}
retval.stop = input.LT(-1);
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
// do for sure before leaving
}
return retval;
}
// $ANTLR end "crystalDecayParam"
// $ANTLR start "crystalCoefcalc"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:257:1: crystalCoefcalc returns [int value] : ABSCOEFCALC a= crystalCoefcalcKeyword ;
/**
 * Rule: crystalCoefcalc : ABSCOEFCALC crystalCoefcalcKeyword.
 * Returns the absorption-coefficient calculator code chosen by
 * crystalCoefcalcKeyword (see that rule for the code mapping).
 *
 * FIX: restored the truncated "state._fsp--;" after the
 * crystalCoefcalcKeyword() sub-rule call (a bare "state._fsp" does not compile).
 *
 * @return the CoefCalc selection code, or 0 if recognition failed.
 */
public final int crystalCoefcalc() throws RecognitionException {
    int value = 0;
    int a = 0;
    try {
        // ABSCOEFCALC a= crystalCoefcalcKeyword
        {
            match(input, ABSCOEFCALC, FOLLOW_ABSCOEFCALC_in_crystalCoefcalc947);
            pushFollow(FOLLOW_crystalCoefcalcKeyword_in_crystalCoefcalc951);
            a = crystalCoefcalcKeyword();
            state._fsp--;   // pop follow-set stack (restored)
            value = a;
        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input, re);
    }
    finally {
        // do for sure before leaving
    }
    return value;
}
// $ANTLR end "crystalCoefcalc"
// $ANTLR start "crystalCoefcalcKeyword"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:260:1: crystalCoefcalcKeyword returns [int value] : ( DUMMY | AVERAGE | DEFAULT | RDJAVA | RDFORTAN | PDB | SAXS | SEQUENCE | SAXSSEQ );
// Rule: crystalCoefcalcKeyword : DUMMY | AVERAGE | DEFAULT | RDJAVA | RDFORTAN | PDB | SAXS | SEQUENCE | SAXSSEQ.
// Maps the keyword to the CoefCalc selection code consumed by crystal():
//   DUMMY/AVERAGE -> 1, DEFAULT/RDJAVA -> 2, RDFORTAN -> 3, PDB -> 4,
//   SAXS -> 5, SEQUENCE -> 6, SAXSSEQ -> 7.
// The keyword aliases deliberately share codes; returns 0 on failure.
public final int crystalCoefcalcKeyword() throws RecognitionException {
int value = 0;
try {
// ( DUMMY | AVERAGE | DEFAULT | RDJAVA | RDFORTAN | PDB | SAXS | SEQUENCE | SAXSSEQ )
int alt5=9;
switch ( input.LA(1) ) {
case DUMMY:
{
alt5=1;
}
break;
case AVERAGE:
{
alt5=2;
}
break;
case DEFAULT:
{
alt5=3;
}
break;
case RDJAVA:
{
alt5=4;
}
break;
case RDFORTAN:
{
alt5=5;
}
break;
case PDB:
{
alt5=6;
}
break;
case SAXS:
{
alt5=7;
}
break;
case SEQUENCE:
{
alt5=8;
}
break;
case SAXSSEQ:
{
alt5=9;
}
break;
default:
NoViableAltException nvae =
new NoViableAltException("", 5, 0, input);
throw nvae;
}
switch (alt5) {
case 1 :
// DUMMY
{
match(input,DUMMY,FOLLOW_DUMMY_in_crystalCoefcalcKeyword1030);
value = 1;
}
break;
case 2 :
// AVERAGE — same code as DUMMY (both select the Average calculator).
{
match(input,AVERAGE,FOLLOW_AVERAGE_in_crystalCoefcalcKeyword1040);
value = 1;
}
break;
case 3 :
// DEFAULT
{
match(input,DEFAULT,FOLLOW_DEFAULT_in_crystalCoefcalcKeyword1048);
value = 2;
}
break;
case 4 :
// RDJAVA — same code as DEFAULT (both select the from-params calculator).
{
match(input,RDJAVA,FOLLOW_RDJAVA_in_crystalCoefcalcKeyword1056);
value = 2;
}
break;
case 5 :
// RDFORTAN
{
match(input,RDFORTAN,FOLLOW_RDFORTAN_in_crystalCoefcalcKeyword1063);
value = 3;
}
break;
case 6 :
// PDB
{
match(input,PDB,FOLLOW_PDB_in_crystalCoefcalcKeyword1070);
value = 4;
}
break;
case 7 :
// SAXS
{
match(input,SAXS,FOLLOW_SAXS_in_crystalCoefcalcKeyword1080);
value = 5;
}
break;
case 8 :
// SEQUENCE
{
match(input,SEQUENCE,FOLLOW_SEQUENCE_in_crystalCoefcalcKeyword1088);
value = 6;
}
break;
case 9 :
// SAXSSEQ
{
match(input,SAXSSEQ,FOLLOW_SAXSSEQ_in_crystalCoefcalcKeyword1095);
value = 7;
}
break;
}
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
// do for sure before leaving
}
return value;
}
// $ANTLR end "crystalCoefcalcKeyword"
// $ANTLR start "crystalDim"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:281:1: crystalDim returns [Map<Object, Object> properties] : DIMENSION (a= FLOAT b= FLOAT c= FLOAT |e= FLOAT f= FLOAT |d= FLOAT ) ;
    // Rule "crystalDim": DIMENSION (a=FLOAT b=FLOAT c=FLOAT | e=FLOAT f=FLOAT | d=FLOAT)
    // Parses the DIMENSION keyword followed by one, two, or three numbers and
    // returns a property map holding CRYSTAL_DIM_X (always), plus CRYSTAL_DIM_Y
    // and CRYSTAL_DIM_Z when two / three numbers were supplied.
    // NOTE: generated code — the long lookahead conditions below enumerate every
    // token that may legally FOLLOW this rule; they are kept verbatim.
    public final Map<Object, Object> crystalDim() throws RecognitionException {
        Map<Object, Object> properties = null;
        Token a=null;
        Token b=null;
        Token c=null;
        Token e=null;
        Token f=null;
        Token d=null;
        properties = new HashMap<Object, Object>();
        try {
            match(input,DIMENSION,FOLLOW_DIMENSION_in_crystalDim1419);
            // Predict which of the three alternatives applies by counting how
            // many consecutive FLOAT tokens follow (up to 3 tokens of lookahead).
            int alt6=3;
            int LA6_0 = input.LA(1);
            if ( (LA6_0==FLOAT) ) {
                int LA6_1 = input.LA(2);
                if ( (LA6_1==FLOAT) ) {
                    int LA6_2 = input.LA(3);
                    if ( (LA6_2==FLOAT) ) {
                        // Three FLOATs in a row: full x/y/z dimensions.
                        alt6=1;
                    }
                    else if ( (LA6_2==EOF||(LA6_2 >= ABSCOEFCALC && LA6_2 <= ANGLEP)||(LA6_2 >= BEAM && LA6_2 <= CALCULATEPEESCAPE)||(LA6_2 >= CONTAINERDENSITY && LA6_2 <= DECAYPARAM)||(LA6_2 >= DIFFRACTIONDECAYMODEL && LA6_2 <= DIMENSION)||LA6_2==FLRESOLUTION||(LA6_2 >= MATERIALELEMENTS && LA6_2 <= MATERIALTYPE)||LA6_2==MODELFILE||(LA6_2 >= NUMDNA && LA6_2 <= NUMRNA)||(LA6_2 >= PDBNAME && LA6_2 <= PERESOLUTION)||(LA6_2 >= PIXELSPERMICRON && LA6_2 <= PROTEINHEAVYATOMS)||LA6_2==SEQFILE||LA6_2==SEQUENCEFILE||(LA6_2 >= SOLVENTFRACTION && LA6_2 <= SOLVENTHEAVYCONC)||(LA6_2 >= TYPE && LA6_2 <= UNITCELL)||(LA6_2 >= WEDGE && LA6_2 <= WIREFRAMETYPE)) ) {
                        // Exactly two FLOATs (third token starts another keyword): x/y dimensions.
                        alt6=2;
                    }
                    else {
                        NoViableAltException nvae =
                            new NoViableAltException("", 6, 2, input);
                        throw nvae;
                    }
                }
                else if ( (LA6_1==EOF||(LA6_1 >= ABSCOEFCALC && LA6_1 <= ANGLEP)||(LA6_1 >= BEAM && LA6_1 <= CALCULATEPEESCAPE)||(LA6_1 >= CONTAINERDENSITY && LA6_1 <= DECAYPARAM)||(LA6_1 >= DIFFRACTIONDECAYMODEL && LA6_1 <= DIMENSION)||LA6_1==FLRESOLUTION||(LA6_1 >= MATERIALELEMENTS && LA6_1 <= MATERIALTYPE)||LA6_1==MODELFILE||(LA6_1 >= NUMDNA && LA6_1 <= NUMRNA)||(LA6_1 >= PDBNAME && LA6_1 <= PERESOLUTION)||(LA6_1 >= PIXELSPERMICRON && LA6_1 <= PROTEINHEAVYATOMS)||LA6_1==SEQFILE||LA6_1==SEQUENCEFILE||(LA6_1 >= SOLVENTFRACTION && LA6_1 <= SOLVENTHEAVYCONC)||(LA6_1 >= TYPE && LA6_1 <= UNITCELL)||(LA6_1 >= WEDGE && LA6_1 <= WIREFRAMETYPE)) ) {
                    // Exactly one FLOAT: a single (x) dimension.
                    alt6=3;
                }
                else {
                    NoViableAltException nvae =
                        new NoViableAltException("", 6, 1, input);
                    throw nvae;
                }
            }
            else {
                // DIMENSION must be followed by at least one number.
                NoViableAltException nvae =
                    new NoViableAltException("", 6, 0, input);
                throw nvae;
            }
            switch (alt6) {
                case 1 :
                    // a= FLOAT b= FLOAT c= FLOAT  (x, y, z)
                    {
                    a=(Token)match(input,FLOAT,FOLLOW_FLOAT_in_crystalDim1432);
                    b=(Token)match(input,FLOAT,FOLLOW_FLOAT_in_crystalDim1436);
                    c=(Token)match(input,FLOAT,FOLLOW_FLOAT_in_crystalDim1440);
                    properties.put(Crystal.CRYSTAL_DIM_X, Double.parseDouble((a!=null?a.getText():null)));
                    properties.put(Crystal.CRYSTAL_DIM_Y, Double.parseDouble((b!=null?b.getText():null)));
                    properties.put(Crystal.CRYSTAL_DIM_Z, Double.parseDouble((c!=null?c.getText():null)));
                    }
                    break;
                case 2 :
                    // e= FLOAT f= FLOAT  (x, y only)
                    {
                    e=(Token)match(input,FLOAT,FOLLOW_FLOAT_in_crystalDim1452);
                    f=(Token)match(input,FLOAT,FOLLOW_FLOAT_in_crystalDim1456);
                    properties.put(Crystal.CRYSTAL_DIM_X, Double.parseDouble((e!=null?e.getText():null)));
                    properties.put(Crystal.CRYSTAL_DIM_Y, Double.parseDouble((f!=null?f.getText():null)));
                    }
                    break;
                case 3 :
                    // d= FLOAT  (x only)
                    {
                    d=(Token)match(input,FLOAT,FOLLOW_FLOAT_in_crystalDim1468);
                    properties.put(Crystal.CRYSTAL_DIM_X, Double.parseDouble((d!=null?d.getText():null)));
                    }
                    break;
            }
        }
        catch (RecognitionException re) {
            // Standard ANTLR recovery: report, resynchronize, return what we have.
            reportError(re);
            recover(input,re);
        }
        finally {
            // do for sure before leaving
        }
        return properties;
    }
// $ANTLR end "crystalDim"
// $ANTLR start "crystalAngP"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:295:1: crystalAngP returns [double value] : ANGLEP a= FLOAT ;
public final double crystalAngP() throws RecognitionException {
double value = 0.0;
Token a=null;
try {
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:296:2: ( ANGLEP a= FLOAT )
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:296:4: ANGLEP a= FLOAT
{
match(input,ANGLEP,FOLLOW_ANGLEP_in_crystalAngP1545);
a=(Token)match(input,FLOAT,FOLLOW_FLOAT_in_crystalAngP1549);
value = Double.parseDouble((a!=null?a.getText():null));
}
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
// do for sure before leaving
}
return value;
}
// $ANTLR end "crystalAngP"
// $ANTLR start "crystalAngL"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:300:1: crystalAngL returns [double value] : ANGLEL a= FLOAT ;
public final double crystalAngL() throws RecognitionException {
double value = 0.0;
Token a=null;
try {
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:301:2: ( ANGLEL a= FLOAT )
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:301:4: ANGLEL a= FLOAT
{
match(input,ANGLEL,FOLLOW_ANGLEL_in_crystalAngL1604);
a=(Token)match(input,FLOAT,FOLLOW_FLOAT_in_crystalAngL1608);
value = Double.parseDouble((a!=null?a.getText():null));
}
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
// do for sure before leaving
}
return value;
}
// $ANTLR end "crystalAngL"
// $ANTLR start "crystalPPM"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:305:1: crystalPPM returns [double ppm] : PIXELSPERMICRON FLOAT ;
public final double crystalPPM() throws RecognitionException {
double ppm = 0.0;
Token FLOAT1=null;
try {
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:306:2: ( PIXELSPERMICRON FLOAT )
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:306:4: PIXELSPERMICRON FLOAT
{
match(input,PIXELSPERMICRON,FOLLOW_PIXELSPERMICRON_in_crystalPPM1662);
FLOAT1=(Token)match(input,FLOAT,FOLLOW_FLOAT_in_crystalPPM1664);
ppm = Double.parseDouble((FLOAT1!=null?FLOAT1.getText():null));
}
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
// do for sure before leaving
}
return ppm;
}
// $ANTLR end "crystalPPM"
    /**
     * Return scope for the "unitcell" rule: the three cell dimensions
     * (dimA..dimC) and the three cell angles (angA..angC). The angle fields
     * are only assigned when the optional angle triplet is present in the
     * input, and otherwise remain null.
     */
    public static class unitcell_return extends ParserRuleReturnScope {
        public Double dimA;
        public Double dimB;
        public Double dimC;
        public Double angA;
        public Double angB;
        public Double angC;
    };
// $ANTLR start "unitcell"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:309:1: unitcell returns [Double dimA, Double dimB, Double dimC, Double angA, Double angB, Double angC] : UNITCELL a= FLOAT b= FLOAT c= FLOAT (al= FLOAT be= FLOAT ga= FLOAT )? ;
public final InputfileParser.unitcell_return unitcell() throws RecognitionException {
InputfileParser.unitcell_return retval = new InputfileParser.unitcell_return();
retval.start = input.LT(1);
Token a=null;
Token b=null;
Token c=null;
Token al=null;
Token be=null;
Token ga=null;
try {
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:310:2: ( UNITCELL a= FLOAT b= FLOAT c= FLOAT (al= FLOAT be= FLOAT ga= FLOAT )? )
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:310:4: UNITCELL a= FLOAT b= FLOAT c= FLOAT (al= FLOAT be= FLOAT ga= FLOAT )?
{
match(input,UNITCELL,FOLLOW_UNITCELL_in_unitcell1762);
a=(Token)match(input,FLOAT,FOLLOW_FLOAT_in_unitcell1766);
b=(Token)match(input,FLOAT,FOLLOW_FLOAT_in_unitcell1770);
c=(Token)match(input,FLOAT,FOLLOW_FLOAT_in_unitcell1774);
retval.dimA = Double.parseDouble((a!=null?a.getText():null));
retval.dimB = Double.parseDouble((b!=null?b.getText():null));
retval.dimC = Double.parseDouble((c!=null?c.getText():null));
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:314:7: (al= FLOAT be= FLOAT ga= FLOAT )?
int alt7=2;
int LA7_0 = input.LA(1);
if ( (LA7_0==FLOAT) ) {
alt7=1;
}
switch (alt7) {
case 1 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:314:8: al= FLOAT be= FLOAT ga= FLOAT
{
al=(Token)match(input,FLOAT,FOLLOW_FLOAT_in_unitcell1789);
be=(Token)match(input,FLOAT,FOLLOW_FLOAT_in_unitcell1793);
ga=(Token)match(input,FLOAT,FOLLOW_FLOAT_in_unitcell1797);
retval.angA = Double.parseDouble((al!=null?al.getText():null));
retval.angB = Double.parseDouble((be!=null?be.getText():null));
retval.angC = Double.parseDouble((ga!=null?ga.getText():null));
}
break;
}
}
retval.stop = input.LT(-1);
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
// do for sure before leaving
}
return retval;
}
// $ANTLR end "unitcell"
// $ANTLR start "proteinConcentration"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:322:1: proteinConcentration returns [Double proteinConc] : ( PROTEINCONCENTRATION | PROTEINCONC ) a= FLOAT ;
public final Double proteinConcentration() throws RecognitionException {
Double proteinConc = null;
Token a=null;
try {
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:323:2: ( ( PROTEINCONCENTRATION | PROTEINCONC ) a= FLOAT )
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:323:4: ( PROTEINCONCENTRATION | PROTEINCONC ) a= FLOAT
{
if ( (input.LA(1) >= PROTEINCONC && input.LA(1) <= PROTEINCONCENTRATION) ) {
input.consume();
state.errorRecovery=false;
}
else {
MismatchedSetException mse = new MismatchedSetException(null,input);
throw mse;
}
a=(Token)match(input,FLOAT,FOLLOW_FLOAT_in_proteinConcentration1885);
proteinConc = Double.parseDouble((a!=null?a.getText():null));
}
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
// do for sure before leaving
}
return proteinConc;
}
// $ANTLR end "proteinConcentration"
// $ANTLR start "nummonomers"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:327:1: nummonomers returns [int value] : NUMMONOMERS a= FLOAT ;
public final int nummonomers() throws RecognitionException {
int value = 0;
Token a=null;
try {
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:328:2: ( NUMMONOMERS a= FLOAT )
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:328:4: NUMMONOMERS a= FLOAT
{
match(input,NUMMONOMERS,FOLLOW_NUMMONOMERS_in_nummonomers2067);
a=(Token)match(input,FLOAT,FOLLOW_FLOAT_in_nummonomers2071);
value = Integer.parseInt((a!=null?a.getText():null));
}
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
// do for sure before leaving
}
return value;
}
// $ANTLR end "nummonomers"
// $ANTLR start "numresidues"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:331:1: numresidues returns [int value] : NUMRESIDUES a= FLOAT ;
public final int numresidues() throws RecognitionException {
int value = 0;
Token a=null;
try {
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:332:2: ( NUMRESIDUES a= FLOAT )
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:332:4: NUMRESIDUES a= FLOAT
{
match(input,NUMRESIDUES,FOLLOW_NUMRESIDUES_in_numresidues2148);
a=(Token)match(input,FLOAT,FOLLOW_FLOAT_in_numresidues2152);
value = Integer.parseInt((a!=null?a.getText():null));
}
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
// do for sure before leaving
}
return value;
}
// $ANTLR end "numresidues"
// $ANTLR start "numRNA"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:335:1: numRNA returns [int value] : NUMRNA a= FLOAT ;
public final int numRNA() throws RecognitionException {
int value = 0;
Token a=null;
try {
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:336:2: ( NUMRNA a= FLOAT )
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:336:4: NUMRNA a= FLOAT
{
match(input,NUMRNA,FOLLOW_NUMRNA_in_numRNA2230);
a=(Token)match(input,FLOAT,FOLLOW_FLOAT_in_numRNA2234);
value = Integer.parseInt((a!=null?a.getText():null));
}
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
// do for sure before leaving
}
return value;
}
// $ANTLR end "numRNA"
// $ANTLR start "numDNA"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:339:1: numDNA returns [int value] : NUMDNA a= FLOAT ;
public final int numDNA() throws RecognitionException {
int value = 0;
Token a=null;
try {
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:340:2: ( NUMDNA a= FLOAT )
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:340:4: NUMDNA a= FLOAT
{
match(input,NUMDNA,FOLLOW_NUMDNA_in_numDNA2287);
a=(Token)match(input,FLOAT,FOLLOW_FLOAT_in_numDNA2291);
value = Integer.parseInt((a!=null?a.getText():null));
}
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
// do for sure before leaving
}
return value;
}
// $ANTLR end "numDNA"
public static class heavyProteinAtoms_return extends ParserRuleReturnScope {
public List<String> names;
public List<Double> num;;
};
// $ANTLR start "heavyProteinAtoms"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:343:1: heavyProteinAtoms returns [List<String> names, List<Double> num;] : PROTEINHEAVYATOMS (a= ELEMENT b= FLOAT )+ ;
public final InputfileParser.heavyProteinAtoms_return heavyProteinAtoms() throws RecognitionException {
InputfileParser.heavyProteinAtoms_return retval = new InputfileParser.heavyProteinAtoms_return();
retval.start = input.LT(1);
Token a=null;
Token b=null;
retval.names = new ArrayList<String>();
retval.num = new ArrayList<Double>();
try {
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:348:2: ( PROTEINHEAVYATOMS (a= ELEMENT b= FLOAT )+ )
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:348:4: PROTEINHEAVYATOMS (a= ELEMENT b= FLOAT )+
{
match(input,PROTEINHEAVYATOMS,FOLLOW_PROTEINHEAVYATOMS_in_heavyProteinAtoms2347);
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:348:22: (a= ELEMENT b= FLOAT )+
int cnt8=0;
loop8:
do {
int alt8=2;
int LA8_0 = input.LA(1);
if ( (LA8_0==ELEMENT) ) {
alt8=1;
}
switch (alt8) {
case 1 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:348:23: a= ELEMENT b= FLOAT
{
a=(Token)match(input,ELEMENT,FOLLOW_ELEMENT_in_heavyProteinAtoms2352);
b=(Token)match(input,FLOAT,FOLLOW_FLOAT_in_heavyProteinAtoms2356);
retval.names.add((a!=null?a.getText():null)); retval.num.add(Double.parseDouble((b!=null?b.getText():null)));
}
break;
default :
if ( cnt8 >= 1 ) break loop8;
EarlyExitException eee =
new EarlyExitException(8, input);
throw eee;
}
cnt8++;
} while (true);
}
retval.stop = input.LT(-1);
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
// do for sure before leaving
}
return retval;
}
// $ANTLR end "heavyProteinAtoms"
public static class heavySolutionConc_return extends ParserRuleReturnScope {
public List<String> names;
public List<Double> num;;
};
// $ANTLR start "heavySolutionConc"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:352:1: heavySolutionConc returns [List<String> names, List<Double> num;] : SOLVENTHEAVYCONC (a= ELEMENT b= FLOAT )+ ;
public final InputfileParser.heavySolutionConc_return heavySolutionConc() throws RecognitionException {
InputfileParser.heavySolutionConc_return retval = new InputfileParser.heavySolutionConc_return();
retval.start = input.LT(1);
Token a=null;
Token b=null;
retval.names = new ArrayList<String>();
retval.num = new ArrayList<Double>();
try {
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:357:2: ( SOLVENTHEAVYCONC (a= ELEMENT b= FLOAT )+ )
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:357:4: SOLVENTHEAVYCONC (a= ELEMENT b= FLOAT )+
{
match(input,SOLVENTHEAVYCONC,FOLLOW_SOLVENTHEAVYCONC_in_heavySolutionConc2503);
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:357:21: (a= ELEMENT b= FLOAT )+
int cnt9=0;
loop9:
do {
int alt9=2;
int LA9_0 = input.LA(1);
if ( (LA9_0==ELEMENT) ) {
alt9=1;
}
switch (alt9) {
case 1 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:357:22: a= ELEMENT b= FLOAT
{
a=(Token)match(input,ELEMENT,FOLLOW_ELEMENT_in_heavySolutionConc2508);
b=(Token)match(input,FLOAT,FOLLOW_FLOAT_in_heavySolutionConc2512);
retval.names.add((a!=null?a.getText():null)); retval.num.add(Double.parseDouble((b!=null?b.getText():null)));
}
break;
default :
if ( cnt9 >= 1 ) break loop9;
EarlyExitException eee =
new EarlyExitException(9, input);
throw eee;
}
cnt9++;
} while (true);
}
retval.stop = input.LT(-1);
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
// do for sure before leaving
}
return retval;
}
// $ANTLR end "heavySolutionConc"
// $ANTLR start "solventFraction"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:360:1: solventFraction returns [double solFrac] : SOLVENTFRACTION a= FLOAT ;
public final double solventFraction() throws RecognitionException {
double solFrac = 0.0;
Token a=null;
try {
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:361:2: ( SOLVENTFRACTION a= FLOAT )
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:361:4: SOLVENTFRACTION a= FLOAT
{
match(input,SOLVENTFRACTION,FOLLOW_SOLVENTFRACTION_in_solventFraction2618);
a=(Token)match(input,FLOAT,FOLLOW_FLOAT_in_solventFraction2622);
solFrac = Double.parseDouble((a!=null?a.getText():null));
}
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
// do for sure before leaving
}
return solFrac;
}
// $ANTLR end "solventFraction"
// $ANTLR start "pdb"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:364:1: pdb returns [String pdb] : PDBNAME a= STRING ;
public final String pdb() throws RecognitionException {
String pdb = null;
Token a=null;
try {
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:365:2: ( PDBNAME a= STRING )
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:365:4: PDBNAME a= STRING
{
match(input,PDBNAME,FOLLOW_PDBNAME_in_pdb2719);
a=(Token)match(input,STRING,FOLLOW_STRING_in_pdb2723);
pdb = (a!=null?a.getText():null);
}
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
// do for sure before leaving
}
return pdb;
}
// $ANTLR end "pdb"
// $ANTLR start "wireframeType"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:368:1: wireframeType returns [String value] : WIREFRAMETYPE a= STRING ;
public final String wireframeType() throws RecognitionException {
String value = null;
Token a=null;
try {
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:369:2: ( WIREFRAMETYPE a= STRING )
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:369:4: WIREFRAMETYPE a= STRING
{
match(input,WIREFRAMETYPE,FOLLOW_WIREFRAMETYPE_in_wireframeType2760);
a=(Token)match(input,STRING,FOLLOW_STRING_in_wireframeType2764);
value = (a!=null?a.getText():null);
}
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
// do for sure before leaving
}
return value;
}
// $ANTLR end "wireframeType"
// $ANTLR start "modelFile"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:372:1: modelFile returns [String value] : MODELFILE a= STRING ;
public final String modelFile() throws RecognitionException {
String value = null;
Token a=null;
try {
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:373:2: ( MODELFILE a= STRING )
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:373:4: MODELFILE a= STRING
{
match(input,MODELFILE,FOLLOW_MODELFILE_in_modelFile2852);
a=(Token)match(input,STRING,FOLLOW_STRING_in_modelFile2856);
value = (a!=null?a.getText():null);
}
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
// do for sure before leaving
}
return value;
}
// $ANTLR end "modelFile"
// $ANTLR start "calculatePEEscape"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:376:1: calculatePEEscape returns [String value] : CALCULATEPEESCAPE a= STRING ;
public final String calculatePEEscape() throws RecognitionException {
String value = null;
Token a=null;
try {
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:377:2: ( CALCULATEPEESCAPE a= STRING )
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:377:4: CALCULATEPEESCAPE a= STRING
{
match(input,CALCULATEPEESCAPE,FOLLOW_CALCULATEPEESCAPE_in_calculatePEEscape2923);
a=(Token)match(input,STRING,FOLLOW_STRING_in_calculatePEEscape2927);
value = (a!=null?a.getText():null);
}
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
// do for sure before leaving
}
return value;
}
// $ANTLR end "calculatePEEscape"
// $ANTLR start "crystalContainerMaterial"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:381:1: crystalContainerMaterial returns [int value] : ( CONTAINERMATERIALTYPE | MATERIALTYPE ) e= crystalContainerKeyword ;
public final int crystalContainerMaterial() throws RecognitionException {
int value = 0;
int e =0;
try {
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:382:2: ( ( CONTAINERMATERIALTYPE | MATERIALTYPE ) e= crystalContainerKeyword )
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:382:4: ( CONTAINERMATERIALTYPE | MATERIALTYPE ) e= crystalContainerKeyword
{
if ( input.LA(1)==CONTAINERMATERIALTYPE||input.LA(1)==MATERIALTYPE ) {
input.consume();
state.errorRecovery=false;
}
else {
MismatchedSetException mse = new MismatchedSetException(null,input);
throw mse;
}
pushFollow(FOLLOW_crystalContainerKeyword_in_crystalContainerMaterial3051);
e=crystalContainerKeyword();
state._fsp
value = e;
}
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
// do for sure before leaving
}
return value;
}
// $ANTLR end "crystalContainerMaterial"
// $ANTLR start "crystalContainerKeyword"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:385:1: crystalContainerKeyword returns [int value] : ( NONE | MIXTURE | ELEMENTAL );
public final int crystalContainerKeyword() throws RecognitionException {
int value = 0;
try {
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:386:2: ( NONE | MIXTURE | ELEMENTAL )
int alt10=3;
switch ( input.LA(1) ) {
case NONE:
{
alt10=1;
}
break;
case MIXTURE:
{
alt10=2;
}
break;
case ELEMENTAL:
{
alt10=3;
}
break;
default:
NoViableAltException nvae =
new NoViableAltException("", 10, 0, input);
throw nvae;
}
switch (alt10) {
case 1 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:386:4: NONE
{
match(input,NONE,FOLLOW_NONE_in_crystalContainerKeyword3244);
value = 1;
}
break;
case 2 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:387:4: MIXTURE
{
match(input,MIXTURE,FOLLOW_MIXTURE_in_crystalContainerKeyword3253);
value = 2;
}
break;
case 3 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:388:4: ELEMENTAL
{
match(input,ELEMENTAL,FOLLOW_ELEMENTAL_in_crystalContainerKeyword3261);
value = 3;
}
break;
}
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
// do for sure before leaving
}
return value;
}
// $ANTLR end "crystalContainerKeyword"
// $ANTLR start "containerThickness"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:394:1: containerThickness returns [double value] : CONTAINERTHICKNESS a= FLOAT ;
public final double containerThickness() throws RecognitionException {
double value = 0.0;
Token a=null;
try {
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:395:2: ( CONTAINERTHICKNESS a= FLOAT )
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:395:4: CONTAINERTHICKNESS a= FLOAT
{
match(input,CONTAINERTHICKNESS,FOLLOW_CONTAINERTHICKNESS_in_containerThickness3401);
a=(Token)match(input,FLOAT,FOLLOW_FLOAT_in_containerThickness3405);
value = Double.parseDouble((a!=null?a.getText():null));
}
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
// do for sure before leaving
}
return value;
}
// $ANTLR end "containerThickness"
// $ANTLR start "containerMaterialMixture"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:398:1: containerMaterialMixture returns [String value] : ( CONTAINERMATERIALMIXTURE | MATERIALMIXTURE ) a= STRING ;
public final String containerMaterialMixture() throws RecognitionException {
String value = null;
Token a=null;
try {
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:399:2: ( ( CONTAINERMATERIALMIXTURE | MATERIALMIXTURE ) a= STRING )
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:399:4: ( CONTAINERMATERIALMIXTURE | MATERIALMIXTURE ) a= STRING
{
if ( input.LA(1)==CONTAINERMATERIALMIXTURE||input.LA(1)==MATERIALMIXTURE ) {
input.consume();
state.errorRecovery=false;
}
else {
MismatchedSetException mse = new MismatchedSetException(null,input);
throw mse;
}
a=(Token)match(input,STRING,FOLLOW_STRING_in_containerMaterialMixture3526);
value = (a!=null?a.getText():null);
}
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
// do for sure before leaving
}
return value;
}
// $ANTLR end "containerMaterialMixture"
public static class containerMaterialElements_return extends ParserRuleReturnScope {
public List<String> names;
public List<Double> num;;
};
// $ANTLR start "containerMaterialElements"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:403:1: containerMaterialElements returns [List<String> names, List<Double> num;] : ( CONTAINERMATERIALELEMENTS | MATERIALELEMENTS ) (a= ELEMENT b= FLOAT )+ ;
public final InputfileParser.containerMaterialElements_return containerMaterialElements() throws RecognitionException {
InputfileParser.containerMaterialElements_return retval = new InputfileParser.containerMaterialElements_return();
retval.start = input.LT(1);
Token a=null;
Token b=null;
retval.names = new ArrayList<String>();
retval.num = new ArrayList<Double>();
try {
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:408:2: ( ( CONTAINERMATERIALELEMENTS | MATERIALELEMENTS ) (a= ELEMENT b= FLOAT )+ )
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:408:4: ( CONTAINERMATERIALELEMENTS | MATERIALELEMENTS ) (a= ELEMENT b= FLOAT )+
{
if ( input.LA(1)==CONTAINERMATERIALELEMENTS||input.LA(1)==MATERIALELEMENTS ) {
input.consume();
state.errorRecovery=false;
}
else {
MismatchedSetException mse = new MismatchedSetException(null,input);
throw mse;
}
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:408:51: (a= ELEMENT b= FLOAT )+
int cnt11=0;
loop11:
do {
int alt11=2;
int LA11_0 = input.LA(1);
if ( (LA11_0==ELEMENT) ) {
alt11=1;
}
switch (alt11) {
case 1 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:408:52: a= ELEMENT b= FLOAT
{
a=(Token)match(input,ELEMENT,FOLLOW_ELEMENT_in_containerMaterialElements3762);
b=(Token)match(input,FLOAT,FOLLOW_FLOAT_in_containerMaterialElements3766);
retval.names.add((a!=null?a.getText():null)); retval.num.add(Double.parseDouble((b!=null?b.getText():null)));
}
break;
default :
if ( cnt11 >= 1 ) break loop11;
EarlyExitException eee =
new EarlyExitException(11, input);
throw eee;
}
cnt11++;
} while (true);
}
retval.stop = input.LT(-1);
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
// do for sure before leaving
}
return retval;
}
// $ANTLR end "containerMaterialElements"
// $ANTLR start "containerDensity"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:412:1: containerDensity returns [double value] : CONTAINERDENSITY a= FLOAT ;
public final double containerDensity() throws RecognitionException {
double value = 0.0;
Token a=null;
try {
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:413:2: ( CONTAINERDENSITY a= FLOAT )
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:413:4: CONTAINERDENSITY a= FLOAT
{
match(input,CONTAINERDENSITY,FOLLOW_CONTAINERDENSITY_in_containerDensity4001);
a=(Token)match(input,FLOAT,FOLLOW_FLOAT_in_containerDensity4005);
value = Double.parseDouble((a!=null?a.getText():null));
}
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
// do for sure before leaving
}
return value;
}
// $ANTLR end "containerDensity"
// $ANTLR start "sequenceFile"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:416:1: sequenceFile returns [String value] : ( SEQUENCEFILE | SEQFILE ) a= STRING ;
public final String sequenceFile() throws RecognitionException {
String value = null;
Token a=null;
try {
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:417:2: ( ( SEQUENCEFILE | SEQFILE ) a= STRING )
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:417:4: ( SEQUENCEFILE | SEQFILE ) a= STRING
{
if ( input.LA(1)==SEQFILE||input.LA(1)==SEQUENCEFILE ) {
input.consume();
state.errorRecovery=false;
}
else {
MismatchedSetException mse = new MismatchedSetException(null,input);
throw mse;
}
a=(Token)match(input,STRING,FOLLOW_STRING_in_sequenceFile4116);
value = (a!=null?a.getText():null);
}
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
// do for sure before leaving
}
return value;
}
// $ANTLR end "sequenceFile"
// $ANTLR start "calculateFLEscape"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:421:1: calculateFLEscape returns [String value] : CALCULATEFLESCAPE a= STRING ;
public final String calculateFLEscape() throws RecognitionException {
String value = null;
Token a=null;
try {
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:422:2: ( CALCULATEFLESCAPE a= STRING )
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:422:4: CALCULATEFLESCAPE a= STRING
{
match(input,CALCULATEFLESCAPE,FOLLOW_CALCULATEFLESCAPE_in_calculateFLEscape4239);
a=(Token)match(input,STRING,FOLLOW_STRING_in_calculateFLEscape4243);
value = (a!=null?a.getText():null);
}
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
// do for sure before leaving
}
return value;
}
// $ANTLR end "calculateFLEscape"
// $ANTLR start "flResolution"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:426:1: flResolution returns [int value] : FLRESOLUTION a= FLOAT ;
public final int flResolution() throws RecognitionException {
int value = 0;
Token a=null;
try {
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:427:2: ( FLRESOLUTION a= FLOAT )
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:427:4: FLRESOLUTION a= FLOAT
{
match(input,FLRESOLUTION,FOLLOW_FLRESOLUTION_in_flResolution4355);
a=(Token)match(input,FLOAT,FOLLOW_FLOAT_in_flResolution4359);
value = Integer.parseInt((a!=null?a.getText():null));
}
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
// do for sure before leaving
}
return value;
}
// $ANTLR end "flResolution"
// $ANTLR start "peResolution"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:430:1: peResolution returns [int value] : PERESOLUTION a= FLOAT ;
/**
 * Parses "PERESOLUTION <number>" and returns the number as an int.
 *
 * NOTE(review): like flResolution, Integer.parseInt on the FLOAT token text
 * will throw NumberFormatException for a value with a decimal point — same
 * behavior as the generated action.
 *
 * @return the parsed resolution, or 0 if a recognition error occurred
 * @throws RecognitionException propagated by the ANTLR runtime
 */
public final int peResolution() throws RecognitionException {
    int resolution = 0;
    try {
        match(input, PERESOLUTION, FOLLOW_PERESOLUTION_in_peResolution4441);
        Token tok = (Token) match(input, FLOAT, FOLLOW_FLOAT_in_peResolution4445);
        resolution = Integer.parseInt(tok == null ? null : tok.getText());
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input, re);
    }
    return resolution;
}
// $ANTLR end "peResolution"
// Dynamic scope for the "beam" rule: holds the values collected by beamLine
// while a single BEAM block is being parsed.
protected static class beam_scope {
// Beam type name taken from a "TYPE <STRING>" line (null if none was seen).
String beamType;
// Property map filled by beamLine entries; keys are Beam.* constants
// (flux, FWHM, energy, collimation, file, pixel size).
HashMap<Object, Object> beamProperties;
}
// One scope entry per active invocation of beam(); pushed on entry to beam()
// and popped in its finally block.
protected Stack<InputfileParser.beam_scope> beam_stack = new Stack<>();
// $ANTLR start "beam"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:434:1: beam returns [Beam bObj] : BEAM ( beamLine )+ ;
public final Beam beam() throws RecognitionException {
beam_stack.push(new beam_scope());
Beam bObj = null;
(beam_stack.peek()).beamProperties = new HashMap<Object, Object>();
try {
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:445:2: ( BEAM ( beamLine )+ )
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:445:4: BEAM ( beamLine )+
{
match(input,BEAM,FOLLOW_BEAM_in_beam4541);
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:445:9: ( beamLine )+
int cnt12=0;
loop12:
do {
int alt12=2;
int LA12_0 = input.LA(1);
if ( ((LA12_0 >= CIRCULAR && LA12_0 <= COLLIMATION)||LA12_0==ENERGY||LA12_0==FILE||(LA12_0 >= FLUX && LA12_0 <= HORIZONTAL)||LA12_0==PIXELSIZE||LA12_0==RECTANGULAR||LA12_0==TYPE||LA12_0==VERTICAL) ) {
alt12=1;
}
switch (alt12) {
case 1 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:445:9: beamLine
{
pushFollow(FOLLOW_beamLine_in_beam4543);
beamLine();
state._fsp
}
break;
default :
if ( cnt12 >= 1 ) break loop12;
EarlyExitException eee =
new EarlyExitException(12, input);
throw eee;
}
cnt12++;
} while (true);
}
bObj = beamFactory.createBeam((beam_stack.peek()).beamType, (beam_stack.peek()).beamProperties);
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
// do for sure before leaving
beam_stack.pop();
}
return bObj;
}
// $ANTLR end "beam"
// $ANTLR start "beamLine"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:448:1: beamLine : ( TYPE a= STRING |b= beamFlux |c= beamFWHM |d= beamEnergy |e= beamCollimation |f= beamFile |g= beamPixelSize );
public final void beamLine() throws RecognitionException {
Token a=null;
Double b =null;
InputfileParser.beamFWHM_return c =null;
Double d =null;
Map<Object, Object> e =null;
String f =null;
Map<Object, Object> g =null;
try {
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:449:2: ( TYPE a= STRING |b= beamFlux |c= beamFWHM |d= beamEnergy |e= beamCollimation |f= beamFile |g= beamPixelSize )
int alt13=7;
switch ( input.LA(1) ) {
case TYPE:
{
alt13=1;
}
break;
case FLUX:
{
alt13=2;
}
break;
case FWHM:
{
alt13=3;
}
break;
case ENERGY:
{
alt13=4;
}
break;
case CIRCULAR:
case COLLIMATION:
case HORIZONTAL:
case RECTANGULAR:
case VERTICAL:
{
alt13=5;
}
break;
case FILE:
{
alt13=6;
}
break;
case PIXELSIZE:
{
alt13=7;
}
break;
default:
NoViableAltException nvae =
new NoViableAltException("", 13, 0, input);
throw nvae;
}
switch (alt13) {
case 1 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:449:4: TYPE a= STRING
{
match(input,TYPE,FOLLOW_TYPE_in_beamLine4582);
a=(Token)match(input,STRING,FOLLOW_STRING_in_beamLine4586);
(beam_stack.peek()).beamType = (a!=null?a.getText():null);
}
break;
case 2 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:450:4: b= beamFlux
{
pushFollow(FOLLOW_beamFlux_in_beamLine4604);
b=beamFlux();
state._fsp
(beam_stack.peek()).beamProperties.put(Beam.BEAM_FLUX, b);
}
break;
case 3 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:451:4: c= beamFWHM
{
pushFollow(FOLLOW_beamFWHM_in_beamLine4616);
c=beamFWHM();
state._fsp
(beam_stack.peek()).beamProperties.put(Beam.BEAM_FWHM_X, (c!=null?c.x:null));
(beam_stack.peek()).beamProperties.put(Beam.BEAM_FWHM_Y, (c!=null?c.y:null));
}
break;
case 4 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:453:4: d= beamEnergy
{
pushFollow(FOLLOW_beamEnergy_in_beamLine4628);
d=beamEnergy();
state._fsp
(beam_stack.peek()).beamProperties.put(Beam.BEAM_ENERGY, d);
}
break;
case 5 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:454:4: e= beamCollimation
{
pushFollow(FOLLOW_beamCollimation_in_beamLine4640);
e=beamCollimation();
state._fsp
if (e != null) {
(beam_stack.peek()).beamProperties.putAll(e);
}
}
break;
case 6 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:457:4: f= beamFile
{
pushFollow(FOLLOW_beamFile_in_beamLine4651);
f=beamFile();
state._fsp
(beam_stack.peek()).beamProperties.put(Beam.BEAM_EXTFILE, f);
}
break;
case 7 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:458:4: g= beamPixelSize
{
pushFollow(FOLLOW_beamPixelSize_in_beamLine4672);
g=beamPixelSize();
state._fsp
(beam_stack.peek()).beamProperties.putAll(g);
}
break;
}
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
// do for sure before leaving
}
return ;
}
// $ANTLR end "beamLine"
// $ANTLR start "beamFlux"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:461:1: beamFlux returns [Double flux] : FLUX a= FLOAT ;
/**
 * Parses "FLUX <number>" and returns the number as a Double.
 *
 * @return the flux value, or null if a recognition error occurred
 * @throws RecognitionException propagated by the ANTLR runtime
 */
public final Double beamFlux() throws RecognitionException {
    Double flux = null;
    try {
        match(input, FLUX, FOLLOW_FLUX_in_beamFlux4696);
        Token tok = (Token) match(input, FLOAT, FOLLOW_FLOAT_in_beamFlux4700);
        flux = Double.parseDouble(tok == null ? null : tok.getText());
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input, re);
    }
    return flux;
}
// $ANTLR end "beamFlux"
// Return scope for the beamFWHM rule: the two FWHM values parsed after the
// FWHM keyword (x first, then y).
public static class beamFWHM_return extends ParserRuleReturnScope {
public Double x;
public Double y;
};
// $ANTLR start "beamFWHM"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:465:1: beamFWHM returns [Double x, Double y] : FWHM a= FLOAT b= FLOAT ;
/**
 * Parses "FWHM <x> <y>" and returns both values in a beamFWHM_return scope.
 *
 * @return the return scope; x/y remain null if a recognition error occurred
 *         before the assignments ran
 * @throws RecognitionException propagated by the ANTLR runtime
 */
public final InputfileParser.beamFWHM_return beamFWHM() throws RecognitionException {
    InputfileParser.beamFWHM_return retval = new InputfileParser.beamFWHM_return();
    retval.start = input.LT(1);
    try {
        match(input, FWHM, FOLLOW_FWHM_in_beamFWHM4742);
        Token xTok = (Token) match(input, FLOAT, FOLLOW_FLOAT_in_beamFWHM4746);
        Token yTok = (Token) match(input, FLOAT, FOLLOW_FLOAT_in_beamFWHM4750);
        retval.x = Double.parseDouble(xTok == null ? null : xTok.getText());
        retval.y = Double.parseDouble(yTok == null ? null : yTok.getText());
        retval.stop = input.LT(-1);
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input, re);
    }
    return retval;
}
// $ANTLR end "beamFWHM"
// $ANTLR start "beamEnergy"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:469:1: beamEnergy returns [Double energy] : ENERGY a= FLOAT ( KEV )? ;
/**
 * Parses "ENERGY <number>" with an optional trailing KEV unit keyword and
 * returns the number as a Double. The KEV token, if present, is consumed but
 * not used.
 *
 * @return the energy value, or null if a recognition error occurred
 * @throws RecognitionException propagated by the ANTLR runtime
 */
public final Double beamEnergy() throws RecognitionException {
    Double energy = null;
    try {
        match(input, ENERGY, FOLLOW_ENERGY_in_beamEnergy4792);
        Token tok = (Token) match(input, FLOAT, FOLLOW_FLOAT_in_beamEnergy4796);
        energy = Double.parseDouble(tok == null ? null : tok.getText());
        // ( KEV )? — consume the unit keyword when the lookahead shows it
        if (input.LA(1) == KEV) {
            match(input, KEV, FOLLOW_KEV_in_beamEnergy4803);
        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input, re);
    }
    return energy;
}
// $ANTLR end "beamEnergy"
// $ANTLR start "beamFile"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:476:1: beamFile returns [String filename] : FILE a= STRING ;
/**
 * Parses "FILE <STRING>" and returns the string token's text.
 *
 * @return the file name text, or null if a recognition error occurred
 * @throws RecognitionException propagated by the ANTLR runtime
 */
public final String beamFile() throws RecognitionException {
    String filename = null;
    try {
        match(input, FILE, FOLLOW_FILE_in_beamFile4881);
        Token tok = (Token) match(input, STRING, FOLLOW_STRING_in_beamFile4885);
        filename = (tok == null) ? null : tok.getText();
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input, re);
    }
    return filename;
}
// $ANTLR end "beamFile"
// $ANTLR start "beamPixelSize"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:481:1: beamPixelSize returns [Map<Object, Object> properties] : PIXELSIZE a= FLOAT b= FLOAT ;
/**
 * Parses "PIXELSIZE <x> <y>" and returns a map with the two values under
 * Beam.BEAM_PIXSIZE_X / Beam.BEAM_PIXSIZE_Y.
 *
 * @return the property map, or null if a recognition error occurred before
 *         the map was created
 * @throws RecognitionException propagated by the ANTLR runtime
 */
public final Map<Object, Object> beamPixelSize() throws RecognitionException {
    Map<Object, Object> properties = null;
    try {
        match(input, PIXELSIZE, FOLLOW_PIXELSIZE_in_beamPixelSize4932);
        Token xTok = (Token) match(input, FLOAT, FOLLOW_FLOAT_in_beamPixelSize4936);
        Token yTok = (Token) match(input, FLOAT, FOLLOW_FLOAT_in_beamPixelSize4940);
        properties = new HashMap<Object, Object>();
        properties.put(Beam.BEAM_PIXSIZE_X, Double.parseDouble(xTok == null ? null : xTok.getText()));
        properties.put(Beam.BEAM_PIXSIZE_Y, Double.parseDouble(yTok == null ? null : yTok.getText()));
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input, re);
    }
    return properties;
}
// $ANTLR end "beamPixelSize"
// $ANTLR start "beamCollimation"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:489:1: beamCollimation returns [Map<Object, Object> properties] : ( COLLIMATION | RECTANGULAR a= FLOAT b= FLOAT | CIRCULAR FLOAT | HORIZONTAL d= FLOAT | VERTICAL e= FLOAT );
/**
 * Parses one collimation declaration and returns the resulting property map:
 * a bare COLLIMATION keyword (empty map), RECTANGULAR h v (both values),
 * CIRCULAR value (consumed, not stored — as in the generated action),
 * HORIZONTAL value (BEAM_COLL_H) or VERTICAL value (BEAM_COLL_V).
 *
 * @return the property map (never null; empty when no values were recorded
 *         or a recognition error occurred)
 * @throws RecognitionException propagated by the ANTLR runtime
 */
public final Map<Object, Object> beamCollimation() throws RecognitionException {
    Map<Object, Object> properties = new HashMap<Object, Object>();
    try {
        switch (input.LA(1)) {
        case COLLIMATION:
            // bare keyword: nothing to record
            match(input, COLLIMATION, FOLLOW_COLLIMATION_in_beamCollimation5019);
            break;
        case RECTANGULAR: {
            match(input, RECTANGULAR, FOLLOW_RECTANGULAR_in_beamCollimation5025);
            Token hTok = (Token) match(input, FLOAT, FOLLOW_FLOAT_in_beamCollimation5029);
            Token vTok = (Token) match(input, FLOAT, FOLLOW_FLOAT_in_beamCollimation5033);
            properties.put(Beam.BEAM_COLL_H, Double.parseDouble(hTok == null ? null : hTok.getText()));
            properties.put(Beam.BEAM_COLL_V, Double.parseDouble(vTok == null ? null : vTok.getText()));
            break;
        }
        case CIRCULAR:
            // the FLOAT after CIRCULAR is matched but its value is discarded,
            // exactly as in the generated action
            match(input, CIRCULAR, FOLLOW_CIRCULAR_in_beamCollimation5040);
            match(input, FLOAT, FOLLOW_FLOAT_in_beamCollimation5042);
            break;
        case HORIZONTAL: {
            match(input, HORIZONTAL, FOLLOW_HORIZONTAL_in_beamCollimation5048);
            Token hTok = (Token) match(input, FLOAT, FOLLOW_FLOAT_in_beamCollimation5052);
            properties.put(Beam.BEAM_COLL_H, Double.parseDouble(hTok == null ? null : hTok.getText()));
            break;
        }
        case VERTICAL: {
            match(input, VERTICAL, FOLLOW_VERTICAL_in_beamCollimation5059);
            Token vTok = (Token) match(input, FLOAT, FOLLOW_FLOAT_in_beamCollimation5063);
            properties.put(Beam.BEAM_COLL_V, Double.parseDouble(vTok == null ? null : vTok.getText()));
            break;
        }
        default:
            throw new NoViableAltException("", 15, 0, input);
        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input, re);
    }
    return properties;
}
// $ANTLR end "beamCollimation"
// Dynamic scope for the "wedge" rule: values collected by wedgeLine while a
// WEDGE block is being parsed; all of them are handed to the Wedge
// constructor at the end of wedge().
protected static class wedge_scope {
Double angRes; // from wedgeLine's ANGULARRESOLUTION line (wedgeAngRes)
Double startAng; // first FLOAT after the WEDGE keyword
Double endAng; // second FLOAT after the WEDGE keyword
Double expTime; // from the EXPOSURETIME line (wedgeExposure)
Double offsetX; // x from the STARTOFFSET line (wedgeStartOffset)
Double offsetY; // y from the STARTOFFSET line
Double offsetZ; // optional z from the STARTOFFSET line; null if absent
Double translateX; // x from the TRANSLATEPERDEGREE line (wedgeTranslate)
Double translateY; // y from the TRANSLATEPERDEGREE line
Double translateZ; // optional z from the TRANSLATEPERDEGREE line; null if absent
Double rotationOffset; // from the ROTAXBEAMOFFSET line (wedgeRotAxBeamOffset)
}
// One scope entry per active invocation of wedge(); pushed on entry and
// popped in its finally block.
protected Stack<InputfileParser.wedge_scope> wedge_stack = new Stack<>();
// $ANTLR start "wedge"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:508:1: wedge returns [Wedge wObj] : WEDGE a= FLOAT b= FLOAT ( wedgeLine )+ ;
public final Wedge wedge() throws RecognitionException {
wedge_stack.push(new wedge_scope());
Wedge wObj = null;
Token a=null;
Token b=null;
try {
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:527:2: ( WEDGE a= FLOAT b= FLOAT ( wedgeLine )+ )
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:527:4: WEDGE a= FLOAT b= FLOAT ( wedgeLine )+
{
match(input,WEDGE,FOLLOW_WEDGE_in_wedge5376);
a=(Token)match(input,FLOAT,FOLLOW_FLOAT_in_wedge5380);
b=(Token)match(input,FLOAT,FOLLOW_FLOAT_in_wedge5384);
(wedge_stack.peek()).startAng = Double.parseDouble((a!=null?a.getText():null));
(wedge_stack.peek()).endAng = Double.parseDouble((b!=null?b.getText():null));
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:529:4: ( wedgeLine )+
int cnt16=0;
loop16:
do {
int alt16=2;
int LA16_0 = input.LA(1);
if ( (LA16_0==ANGULARRESOLUTION||LA16_0==EXPOSURETIME||LA16_0==ROTAXBEAMOFFSET||LA16_0==STARTOFFSET||LA16_0==TRANSLATEPERDEGREE) ) {
alt16=1;
}
switch (alt16) {
case 1 :
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:529:4: wedgeLine
{
pushFollow(FOLLOW_wedgeLine_in_wedge5391);
wedgeLine();
state._fsp
}
break;
default :
if ( cnt16 >= 1 ) break loop16;
EarlyExitException eee =
new EarlyExitException(16, input);
throw eee;
}
cnt16++;
} while (true);
}
wObj = new Wedge((wedge_stack.peek()).angRes, (wedge_stack.peek()).startAng, (wedge_stack.peek()).endAng, (wedge_stack.peek()).expTime, (wedge_stack.peek()).offsetX, (wedge_stack.peek()).offsetY, (wedge_stack.peek()).offsetZ, (wedge_stack.peek()).translateX, (wedge_stack.peek()).translateY, (wedge_stack.peek()).translateZ, (wedge_stack.peek()).rotationOffset);
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
// do for sure before leaving
wedge_stack.pop();
}
return wObj;
}
// $ANTLR end "wedge"
// $ANTLR start "wedgeLine"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:532:1: wedgeLine : (a= wedgeExposure |b= wedgeAngRes |c= wedgeStartOffset |d= wedgeTranslate |e= wedgeRotAxBeamOffset );
/**
 * Parses one line of a WEDGE block and records the value in the current
 * wedge scope (wedge_stack.peek()): exposure time, angular resolution,
 * start offset x/y/z, translation-per-degree x/y/z, or the rotation-axis
 * to beam offset.
 *
 * Fix: every sub-rule call was followed by a bare "state._fsp", which is not
 * a valid Java statement; each is now "state._fsp--;" so the follow-set
 * frame pushed by pushFollow(...) is actually popped.
 *
 * @throws RecognitionException propagated by the ANTLR runtime
 */
public final void wedgeLine() throws RecognitionException {
    double a =0.0;
    double b =0.0;
    InputfileParser.wedgeStartOffset_return c =null;
    InputfileParser.wedgeTranslate_return d =null;
    double e =0.0;
    try {
        // Select the alternative from a single token of lookahead.
        int alt17;
        switch ( input.LA(1) ) {
        case EXPOSURETIME:
            alt17=1;
            break;
        case ANGULARRESOLUTION:
            alt17=2;
            break;
        case STARTOFFSET:
            alt17=3;
            break;
        case TRANSLATEPERDEGREE:
            alt17=4;
            break;
        case ROTAXBEAMOFFSET:
            alt17=5;
            break;
        default:
            throw new NoViableAltException("", 17, 0, input);
        }
        switch (alt17) {
        case 1 :
            // a= wedgeExposure
            pushFollow(FOLLOW_wedgeExposure_in_wedgeLine5435);
            a=wedgeExposure();
            state._fsp--;
            (wedge_stack.peek()).expTime =a;
            break;
        case 2 :
            // b= wedgeAngRes
            pushFollow(FOLLOW_wedgeAngRes_in_wedgeLine5445);
            b=wedgeAngRes();
            state._fsp--;
            (wedge_stack.peek()).angRes =b;
            break;
        case 3 :
            // c= wedgeStartOffset
            pushFollow(FOLLOW_wedgeStartOffset_in_wedgeLine5456);
            c=wedgeStartOffset();
            state._fsp--;
            (wedge_stack.peek()).offsetX =(c!=null?c.x:null);
            (wedge_stack.peek()).offsetY =(c!=null?c.y:null);
            (wedge_stack.peek()).offsetZ =(c!=null?c.z:null);
            break;
        case 4 :
            // d= wedgeTranslate
            pushFollow(FOLLOW_wedgeTranslate_in_wedgeLine5466);
            d=wedgeTranslate();
            state._fsp--;
            (wedge_stack.peek()).translateX =(d!=null?d.x:null);
            (wedge_stack.peek()).translateY =(d!=null?d.y:null);
            (wedge_stack.peek()).translateZ =(d!=null?d.z:null);
            break;
        case 5 :
            // e= wedgeRotAxBeamOffset
            pushFollow(FOLLOW_wedgeRotAxBeamOffset_in_wedgeLine5476);
            e=wedgeRotAxBeamOffset();
            state._fsp--;
            (wedge_stack.peek()).rotationOffset =e;
            break;
        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    return ;
}
// $ANTLR end "wedgeLine"
// $ANTLR start "wedgeExposure"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:544:1: wedgeExposure returns [double value] : EXPOSURETIME a= FLOAT ;
/**
 * Parses "EXPOSURETIME <number>" and returns the number as a double.
 *
 * @return the exposure time, or 0.0 if a recognition error occurred
 * @throws RecognitionException propagated by the ANTLR runtime
 */
public final double wedgeExposure() throws RecognitionException {
    double exposure = 0.0;
    try {
        match(input, EXPOSURETIME, FOLLOW_EXPOSURETIME_in_wedgeExposure5493);
        Token tok = (Token) match(input, FLOAT, FOLLOW_FLOAT_in_wedgeExposure5497);
        exposure = Double.parseDouble(tok == null ? null : tok.getText());
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input, re);
    }
    return exposure;
}
// $ANTLR end "wedgeExposure"
// $ANTLR start "wedgeAngRes"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:548:1: wedgeAngRes returns [double res] : ANGULARRESOLUTION a= FLOAT ;
/**
 * Parses "ANGULARRESOLUTION <number>" and returns the number as a double.
 *
 * @return the angular resolution, or 0.0 if a recognition error occurred
 * @throws RecognitionException propagated by the ANTLR runtime
 */
public final double wedgeAngRes() throws RecognitionException {
    double resolution = 0.0;
    try {
        match(input, ANGULARRESOLUTION, FOLLOW_ANGULARRESOLUTION_in_wedgeAngRes5579);
        Token tok = (Token) match(input, FLOAT, FOLLOW_FLOAT_in_wedgeAngRes5583);
        resolution = Double.parseDouble(tok == null ? null : tok.getText());
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input, re);
    }
    return resolution;
}
// $ANTLR end "wedgeAngRes"
// Return scope for the wedgeStartOffset rule: x, y and the optional z
// component (z stays null when the third FLOAT is absent).
public static class wedgeStartOffset_return extends ParserRuleReturnScope {
public Double x;
public Double y;
public Double z;
};
// $ANTLR start "wedgeStartOffset"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:552:1: wedgeStartOffset returns [Double x, Double y, Double z] : STARTOFFSET a= FLOAT b= FLOAT (c= FLOAT )? ;
/**
 * Parses "STARTOFFSET <x> <y> [<z>]" and returns the components in a
 * wedgeStartOffset_return scope; z is null when the third value is absent.
 *
 * @return the return scope (fields remain null if recognition failed before
 *         the corresponding assignment)
 * @throws RecognitionException propagated by the ANTLR runtime
 */
public final InputfileParser.wedgeStartOffset_return wedgeStartOffset() throws RecognitionException {
    InputfileParser.wedgeStartOffset_return retval = new InputfileParser.wedgeStartOffset_return();
    retval.start = input.LT(1);
    try {
        match(input, STARTOFFSET, FOLLOW_STARTOFFSET_in_wedgeStartOffset5690);
        Token xTok = (Token) match(input, FLOAT, FOLLOW_FLOAT_in_wedgeStartOffset5694);
        Token yTok = (Token) match(input, FLOAT, FOLLOW_FLOAT_in_wedgeStartOffset5698);
        retval.x = Double.parseDouble(xTok == null ? null : xTok.getText());
        retval.y = Double.parseDouble(yTok == null ? null : yTok.getText());
        // (c= FLOAT)? — optional third component
        Token zTok = null;
        if (input.LA(1) == FLOAT) {
            zTok = (Token) match(input, FLOAT, FOLLOW_FLOAT_in_wedgeStartOffset5719);
        }
        String zText = (zTok == null) ? null : zTok.getText();
        retval.z = (zText == null) ? null : Double.parseDouble(zText);
        retval.stop = input.LT(-1);
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input, re);
    }
    return retval;
}
// $ANTLR end "wedgeStartOffset"
// Return scope for the wedgeTranslate rule: x, y and the optional z
// component (z stays null when the third FLOAT is absent).
public static class wedgeTranslate_return extends ParserRuleReturnScope {
public Double x;
public Double y;
public Double z;
};
// $ANTLR start "wedgeTranslate"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:558:1: wedgeTranslate returns [Double x, Double y, Double z] : TRANSLATEPERDEGREE a= FLOAT b= FLOAT (c= FLOAT )? ;
/**
 * Parses "TRANSLATEPERDEGREE <x> <y> [<z>]" and returns the components in a
 * wedgeTranslate_return scope; z is null when the third value is absent.
 *
 * @return the return scope (fields remain null if recognition failed before
 *         the corresponding assignment)
 * @throws RecognitionException propagated by the ANTLR runtime
 */
public final InputfileParser.wedgeTranslate_return wedgeTranslate() throws RecognitionException {
    InputfileParser.wedgeTranslate_return retval = new InputfileParser.wedgeTranslate_return();
    retval.start = input.LT(1);
    try {
        match(input, TRANSLATEPERDEGREE, FOLLOW_TRANSLATEPERDEGREE_in_wedgeTranslate5813);
        Token xTok = (Token) match(input, FLOAT, FOLLOW_FLOAT_in_wedgeTranslate5817);
        Token yTok = (Token) match(input, FLOAT, FOLLOW_FLOAT_in_wedgeTranslate5821);
        retval.x = Double.parseDouble(xTok == null ? null : xTok.getText());
        retval.y = Double.parseDouble(yTok == null ? null : yTok.getText());
        // (c= FLOAT)? — optional third component
        Token zTok = null;
        if (input.LA(1) == FLOAT) {
            zTok = (Token) match(input, FLOAT, FOLLOW_FLOAT_in_wedgeTranslate5849);
        }
        String zText = (zTok == null) ? null : zTok.getText();
        retval.z = (zText == null) ? null : Double.parseDouble(zText);
        retval.stop = input.LT(-1);
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input, re);
    }
    return retval;
}
// $ANTLR end "wedgeTranslate"
// $ANTLR start "wedgeRotAxBeamOffset"
// C:\\Users\\Josh\\git\\RADDOSE-3D\\lib\\antlrworks-parsergenerator\\Inputfile.g:564:1: wedgeRotAxBeamOffset returns [double delta] : ROTAXBEAMOFFSET a= FLOAT ;
/**
 * Parses "ROTAXBEAMOFFSET <number>" and returns the number as a double.
 *
 * @return the offset value, or 0.0 if a recognition error occurred
 * @throws RecognitionException propagated by the ANTLR runtime
 */
public final double wedgeRotAxBeamOffset() throws RecognitionException {
    double offset = 0.0;
    try {
        match(input, ROTAXBEAMOFFSET, FOLLOW_ROTAXBEAMOFFSET_in_wedgeRotAxBeamOffset5985);
        Token tok = (Token) match(input, FLOAT, FOLLOW_FLOAT_in_wedgeRotAxBeamOffset5989);
        offset = Double.parseDouble(tok == null ? null : tok.getText());
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input, re);
    }
    return offset;
}
// $ANTLR end "wedgeRotAxBeamOffset"
// Delegated rules
public static final BitSet FOLLOW_crystal_in_configfile47 = new BitSet(new long[]{0x0000000000100200L,0x0000000000002000L});
public static final BitSet FOLLOW_wedge_in_configfile65 = new BitSet(new long[]{0x0000000000100200L,0x0000000000002000L});
public static final BitSet FOLLOW_beam_in_configfile85 = new BitSet(new long[]{0x0000000000100200L,0x0000000000002000L});
public static final BitSet FOLLOW_EOF_in_configfile105 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_CRYSTAL_in_crystal134 = new BitSet(new long[]{0x07B7AE04036F8C70L,0x0000000000004C6AL});
public static final BitSet FOLLOW_crystalLine_in_crystal136 = new BitSet(new long[]{0x07B7AE04036F8C72L,0x0000000000004C6AL});
public static final BitSet FOLLOW_crystalType_in_crystalLine192 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_crystalDDM_in_crystalLine203 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_crystalCoefcalc_in_crystalLine215 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_crystalDim_in_crystalLine225 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_crystalPPM_in_crystalLine236 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_crystalAngP_in_crystalLine247 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_crystalAngL_in_crystalLine258 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_crystalDecayParam_in_crystalLine269 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_containerThickness_in_crystalLine279 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_containerDensity_in_crystalLine289 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_crystalContainerMaterial_in_crystalLine299 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_containerMaterialMixture_in_crystalLine308 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_unitcell_in_crystalLine317 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_nummonomers_in_crystalLine328 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_numresidues_in_crystalLine339 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_numRNA_in_crystalLine350 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_numDNA_in_crystalLine363 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_heavyProteinAtoms_in_crystalLine376 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_heavySolutionConc_in_crystalLine385 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_solventFraction_in_crystalLine394 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_pdb_in_crystalLine404 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_wireframeType_in_crystalLine417 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_modelFile_in_crystalLine428 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_calculatePEEscape_in_crystalLine440 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_proteinConcentration_in_crystalLine450 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_containerMaterialElements_in_crystalLine459 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_sequenceFile_in_crystalLine468 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_calculateFLEscape_in_crystalLine481 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_flResolution_in_crystalLine491 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_peResolution_in_crystalLine502 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_TYPE_in_crystalType523 = new BitSet(new long[]{0x0000000000000000L,0x0000000000000100L});
public static final BitSet FOLLOW_STRING_in_crystalType527 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_set_in_crystalDDM569 = new BitSet(new long[]{0x0000018000000000L,0x0000000000000010L});
public static final BitSet FOLLOW_crystalDDMKeyword_in_crystalDDM581 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_SIMPLE_in_crystalDDMKeyword729 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_LINEAR_in_crystalDDMKeyword736 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_LEAL_in_crystalDDMKeyword743 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_DECAYPARAM_in_crystalDecayParam863 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_crystalDecayParam867 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_crystalDecayParam871 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_crystalDecayParam875 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_ABSCOEFCALC_in_crystalCoefcalc947 = new BitSet(new long[]{0x9808000004800100L,0x0000000000000005L});
public static final BitSet FOLLOW_crystalCoefcalcKeyword_in_crystalCoefcalc951 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_DUMMY_in_crystalCoefcalcKeyword1030 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_AVERAGE_in_crystalCoefcalcKeyword1040 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_DEFAULT_in_crystalCoefcalcKeyword1048 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_RDJAVA_in_crystalCoefcalcKeyword1056 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_RDFORTAN_in_crystalCoefcalcKeyword1063 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_PDB_in_crystalCoefcalcKeyword1070 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_SAXS_in_crystalCoefcalcKeyword1080 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_SEQUENCE_in_crystalCoefcalcKeyword1088 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_SAXSSEQ_in_crystalCoefcalcKeyword1095 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_DIMENSION_in_crystalDim1419 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_crystalDim1432 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_crystalDim1436 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_crystalDim1440 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_FLOAT_in_crystalDim1452 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_crystalDim1456 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_FLOAT_in_crystalDim1468 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_ANGLEP_in_crystalAngP1545 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_crystalAngP1549 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_ANGLEL_in_crystalAngL1604 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_crystalAngL1608 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_PIXELSPERMICRON_in_crystalPPM1662 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_crystalPPM1664 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_UNITCELL_in_unitcell1762 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_unitcell1766 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_unitcell1770 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_unitcell1774 = new BitSet(new long[]{0x0000000200000002L});
public static final BitSet FOLLOW_FLOAT_in_unitcell1789 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_unitcell1793 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_unitcell1797 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_set_in_proteinConcentration1875 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_proteinConcentration1885 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_NUMMONOMERS_in_nummonomers2067 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_nummonomers2071 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_NUMRESIDUES_in_numresidues2148 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_numresidues2152 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_NUMRNA_in_numRNA2230 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_numRNA2234 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_NUMDNA_in_numDNA2287 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_numDNA2291 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_PROTEINHEAVYATOMS_in_heavyProteinAtoms2347 = new BitSet(new long[]{0x0000000008000000L});
public static final BitSet FOLLOW_ELEMENT_in_heavyProteinAtoms2352 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_heavyProteinAtoms2356 = new BitSet(new long[]{0x0000000008000002L});
public static final BitSet FOLLOW_SOLVENTHEAVYCONC_in_heavySolutionConc2503 = new BitSet(new long[]{0x0000000008000000L});
public static final BitSet FOLLOW_ELEMENT_in_heavySolutionConc2508 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_heavySolutionConc2512 = new BitSet(new long[]{0x0000000008000002L});
public static final BitSet FOLLOW_SOLVENTFRACTION_in_solventFraction2618 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_solventFraction2622 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_PDBNAME_in_pdb2719 = new BitSet(new long[]{0x0000000000000000L,0x0000000000000100L});
public static final BitSet FOLLOW_STRING_in_pdb2723 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_WIREFRAMETYPE_in_wireframeType2760 = new BitSet(new long[]{0x0000000000000000L,0x0000000000000100L});
public static final BitSet FOLLOW_STRING_in_wireframeType2764 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_MODELFILE_in_modelFile2852 = new BitSet(new long[]{0x0000000000000000L,0x0000000000000100L});
public static final BitSet FOLLOW_STRING_in_modelFile2856 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_CALCULATEPEESCAPE_in_calculatePEEscape2923 = new BitSet(new long[]{0x0000000000000000L,0x0000000000000100L});
public static final BitSet FOLLOW_STRING_in_calculatePEEscape2927 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_set_in_crystalContainerMaterial3039 = new BitSet(new long[]{0x0000500010000000L});
public static final BitSet FOLLOW_crystalContainerKeyword_in_crystalContainerMaterial3051 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_NONE_in_crystalContainerKeyword3244 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_MIXTURE_in_crystalContainerKeyword3253 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_ELEMENTAL_in_crystalContainerKeyword3261 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_CONTAINERTHICKNESS_in_containerThickness3401 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_containerThickness3405 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_set_in_containerMaterialMixture3516 = new BitSet(new long[]{0x0000000000000000L,0x0000000000000100L});
public static final BitSet FOLLOW_STRING_in_containerMaterialMixture3526 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_set_in_containerMaterialElements3751 = new BitSet(new long[]{0x0000000008000000L});
public static final BitSet FOLLOW_ELEMENT_in_containerMaterialElements3762 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_containerMaterialElements3766 = new BitSet(new long[]{0x0000000008000002L});
public static final BitSet FOLLOW_CONTAINERDENSITY_in_containerDensity4001 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_containerDensity4005 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_set_in_sequenceFile4106 = new BitSet(new long[]{0x0000000000000000L,0x0000000000000100L});
public static final BitSet FOLLOW_STRING_in_sequenceFile4116 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_CALCULATEFLESCAPE_in_calculateFLEscape4239 = new BitSet(new long[]{0x0000000000000000L,0x0000000000000100L});
public static final BitSet FOLLOW_STRING_in_calculateFLEscape4243 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_FLRESOLUTION_in_flResolution4355 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_flResolution4359 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_PERESOLUTION_in_peResolution4441 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_peResolution4445 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_BEAM_in_beam4541 = new BitSet(new long[]{0x2040003920003000L,0x0000000000001400L});
public static final BitSet FOLLOW_beamLine_in_beam4543 = new BitSet(new long[]{0x2040003920003002L,0x0000000000001400L});
public static final BitSet FOLLOW_TYPE_in_beamLine4582 = new BitSet(new long[]{0x0000000000000000L,0x0000000000000100L});
public static final BitSet FOLLOW_STRING_in_beamLine4586 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_beamFlux_in_beamLine4604 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_beamFWHM_in_beamLine4616 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_beamEnergy_in_beamLine4628 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_beamCollimation_in_beamLine4640 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_beamFile_in_beamLine4651 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_beamPixelSize_in_beamLine4672 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_FLUX_in_beamFlux4696 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_beamFlux4700 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_FWHM_in_beamFWHM4742 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_beamFWHM4746 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_beamFWHM4750 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_ENERGY_in_beamEnergy4792 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_beamEnergy4796 = new BitSet(new long[]{0x0000004000000002L});
public static final BitSet FOLLOW_KEV_in_beamEnergy4803 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_FILE_in_beamFile4881 = new BitSet(new long[]{0x0000000000000000L,0x0000000000000100L});
public static final BitSet FOLLOW_STRING_in_beamFile4885 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_PIXELSIZE_in_beamPixelSize4932 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_beamPixelSize4936 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_beamPixelSize4940 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_COLLIMATION_in_beamCollimation5019 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_RECTANGULAR_in_beamCollimation5025 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_beamCollimation5029 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_beamCollimation5033 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_CIRCULAR_in_beamCollimation5040 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_beamCollimation5042 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_HORIZONTAL_in_beamCollimation5048 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_beamCollimation5052 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_VERTICAL_in_beamCollimation5059 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_beamCollimation5063 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_WEDGE_in_wedge5376 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_wedge5380 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_wedge5384 = new BitSet(new long[]{0x4000000080000080L,0x0000000000000280L});
public static final BitSet FOLLOW_wedgeLine_in_wedge5391 = new BitSet(new long[]{0x4000000080000082L,0x0000000000000280L});
public static final BitSet FOLLOW_wedgeExposure_in_wedgeLine5435 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_wedgeAngRes_in_wedgeLine5445 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_wedgeStartOffset_in_wedgeLine5456 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_wedgeTranslate_in_wedgeLine5466 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_wedgeRotAxBeamOffset_in_wedgeLine5476 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_EXPOSURETIME_in_wedgeExposure5493 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_wedgeExposure5497 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_ANGULARRESOLUTION_in_wedgeAngRes5579 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_wedgeAngRes5583 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_STARTOFFSET_in_wedgeStartOffset5690 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_wedgeStartOffset5694 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_wedgeStartOffset5698 = new BitSet(new long[]{0x0000000200000002L});
public static final BitSet FOLLOW_FLOAT_in_wedgeStartOffset5719 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_TRANSLATEPERDEGREE_in_wedgeTranslate5813 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_wedgeTranslate5817 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_wedgeTranslate5821 = new BitSet(new long[]{0x0000000200000002L});
public static final BitSet FOLLOW_FLOAT_in_wedgeTranslate5849 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_ROTAXBEAMOFFSET_in_wedgeRotAxBeamOffset5985 = new BitSet(new long[]{0x0000000200000000L});
public static final BitSet FOLLOW_FLOAT_in_wedgeRotAxBeamOffset5989 = new BitSet(new long[]{0x0000000000000002L});
}
|
package game;
import java.util.ArrayList;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import javafx.scene.control.Button;
import javafx.scene.layout.GridPane;
import models.TileColor;
public class GridDiamond implements Grid {
// Nombre de lignes de la grille
private int nbOfLines;
// Grille du jeu
private ArrayList<ArrayList<Tile>> grid = new ArrayList<ArrayList<Tile>>();
public String[][] cornersCoordinates = {
{"min", "min"}, // En haut - Joueur 1
{"max", "min"}, // En bas - Joueur 2
{"min", "max"}, // A droite - Joueur 3
{"max", "min"} // A gauche - Joueur 4
};
public GridDiamond() {
this(13);
}
public GridDiamond(int s) {
nbOfLines = ((s & 1) == 0) ? (s + 1) : s;
}
public void initRandom() {
for(int i = 0; i < nbOfLines; i++) {
ArrayList<Tile> line = new ArrayList<Tile>();
int numberOfTilesForLine;
if (i < nbOfLines / 2) {
numberOfTilesForLine = i + 1;
} else {
numberOfTilesForLine = -i + nbOfLines;
}
for(int j = 0; j < numberOfTilesForLine; j++) {
TileColor randomColor = TileColor.getRandomColor();
line.add(j, new Tile(randomColor));
}
grid.add(i, line);
}
}
public void initWithSave(JSONArray colorGrid, JSONArray playerGrid) {
for(int i = 0; i < nbOfLines; i++) {
JSONArray colorLine = (JSONArray) colorGrid.get(i);
JSONArray playerLine = (JSONArray) playerGrid.get(i);
for(int j = 0; j < nbOfLines; j++) {
String colorCode = (String) colorLine.get(j);
TileColor randomColor = TileColor.getColorFromCode(colorCode);
int pID = ((Long) playerLine.get(j)).intValue();
grid.get(i).get(j).setColor(randomColor);
grid.get(i).get(j).setPlayerID(pID);
}
}
}
public Tile getTile(int x, int y) {
return grid.get(x).get(y);
}
public void assignTiles(int pID, TileColor c) {
int newAssignedTiles = -1;
while (newAssignedTiles != 0) {
newAssignedTiles = 0;
for (int i = 0; i < nbOfLines; i++) {
ArrayList<Tile> line = grid.get(i);
for (int j = 0; j < line.size(); j++) {
Tile tile = line.get(j);
if (tile.getPlayerID() == pID) {
// La case appartient au joueur
// On la met de la nouvelle couleur choisie par le joueur
tile.setColor(c);
// Case au dessus
if (i > 0 && ((i > nbOfLines/2 + 1) || (j < i))) {
if (
// La case est de la couleur voulue
grid.get(i - 1).get(j).getColor() == c
&& grid.get(i - 1).get(j).getPlayerID() != pID
) {
grid.get(i - 1).get(j).setPlayerID(pID);
newAssignedTiles++;
}
}
// Case en dessous
if (i < nbOfLines - 1 && ((i < nbOfLines/2 - 1) || (j > i))) {
if (
// La case est de la couleur voulue
grid.get(i + 1).get(j).getColor() == c
&& grid.get(i + 1).get(j).getPlayerID() != pID
) {
grid.get(i + 1).get(j).setPlayerID(pID);
newAssignedTiles++;
}
}
if (j > 0) {
if (
// La case est de la couleur voulue
grid.get(i).get(j - 1).getColor() == c
&& grid.get(i).get(j - 1).getPlayerID() != pID
) {
grid.get(i).get(j - 1).setPlayerID(pID);
newAssignedTiles++;
}
}
if (j < line.size() - 1) {
if (
// La case est de la couleur voulue
grid.get(i).get(j + 1).getColor() == c
&& grid.get(i).get(j + 1).getPlayerID() != pID
) {
grid.get(i).get(j + 1).setPlayerID(pID);
newAssignedTiles++;
}
}
}
}
}
}
}
public void assignTile(int x, int y, int pID) {
Tile tile = grid.get(x).get(y);
tile.setPlayerID(pID);
}
/**
* Permet d'obtenir la taille de la grille
*
* @return La taille de la grille
*/
public int getSize() {
return nbOfLines;
}
public String[] getCornerCoordinate(int player) {
return cornersCoordinates[player];
}
public int countTilesOwnedBy(int pID) {
int count = 0;
for (int i = 0; i < nbOfLines; i++) {
ArrayList<Tile> line = grid.get(i);
for (int j = 0; j < line.size(); j++) {
Tile tile = line.get(j);
if (tile.getPlayerID() == pID) {
// La case est libre
count++;
}
}
}
return count;
}
/**
* Permet d'afficher la grille en mode 2D
*/
public GridPane show2D(Game game) {
GridPane gameGrid = new GridPane();
for (int i = 0; i < nbOfLines; i++) {
ArrayList<Tile> line = grid.get(i);
for (int j = 0; j < line.size(); j++) {
Tile tile = line.get(j);
int pID = tile.getPlayerID();
Button button = new Button();
button.getStyleClass().add("tile");
button.getStyleClass().add(TileColor.getColorClassName(tile.getColor()));
if (pID != 0) {
button.setText(Integer.toString(pID));
}
button.setOnAction(event -> {
game.chooseTile(tile);
});
gameGrid.add(button, j, i);
}
}
return gameGrid;
}
public JSONObject exportToJSON() {
JSONObject jsonObject = new JSONObject();
JSONArray colorGrid = new JSONArray();
for (int i = 0; i < nbOfLines; i++) {
JSONArray lineArray = new JSONArray();
for (int j = 0; j < nbOfLines; j++) {
TileColor color = grid.get(i).get(j).getColor();
lineArray.add(TileColor.getColorCode(color));
}
colorGrid.add(lineArray);
}
jsonObject.put("colorGrid", colorGrid);
JSONArray playerGrid = new JSONArray();
for (int i = 0; i < nbOfLines; i++) {
JSONArray lineArray = new JSONArray();
for (int j = 0; j < nbOfLines; j++) {
int pID = grid.get(i).get(j).getPlayerID();
lineArray.add(pID);
}
playerGrid.add(lineArray);
}
jsonObject.put("playerGrid", playerGrid);
return jsonObject;
}
}
|
package VASSAL.build.module.map.boardPicker;
import java.awt.AlphaComposite;
import java.awt.Color;
import java.awt.Component;
import java.awt.Composite;
import java.awt.Dimension;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Image;
import java.awt.Point;
import java.awt.Rectangle;
import java.awt.event.ActionEvent;
import java.io.File;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import javax.swing.AbstractAction;
import javax.swing.ImageIcon;
import javax.swing.Timer;
import org.jdesktop.animation.timing.Animator;
import org.jdesktop.animation.timing.TimingTargetAdapter;
import VASSAL.build.AbstractConfigurable;
import VASSAL.build.BadDataReport;
import VASSAL.build.Buildable;
import VASSAL.build.Builder;
import VASSAL.build.GameModule;
import VASSAL.build.module.GameComponent;
import VASSAL.build.module.Map;
import VASSAL.build.module.documentation.HelpFile;
import VASSAL.build.module.map.boardPicker.board.HexGrid;
import VASSAL.build.module.map.boardPicker.board.MapGrid;
import VASSAL.build.module.map.boardPicker.board.RegionGrid;
import VASSAL.build.module.map.boardPicker.board.SquareGrid;
import VASSAL.build.module.map.boardPicker.board.ZonedGrid;
import VASSAL.build.module.map.boardPicker.board.mapgrid.GridContainer;
import VASSAL.command.Command;
import VASSAL.configure.ColorConfigurer;
import VASSAL.configure.SingleChildInstance;
import VASSAL.configure.VisibilityCondition;
import VASSAL.tools.ErrorDialog;
import VASSAL.tools.ErrorUtils;
import VASSAL.tools.imageop.ImageOp;
import VASSAL.tools.imageop.MissingImageException;
import VASSAL.tools.imageop.Op;
import VASSAL.tools.imageop.Repainter;
import VASSAL.tools.imageop.ScaleOp;
import VASSAL.tools.imageop.SourceOp;
public class Board extends AbstractConfigurable implements GridContainer {
  /**
   * A Board is a piece of a Map.
   * A Map can contain a set of boards laid out in a rectangular grid.
   */
  // Attribute keys used by the configurer/build-file machinery.
  public static final String NAME = "name";
  public static final String IMAGE = "image";
  public static final String WIDTH = "width";
  public static final String HEIGHT = "height";
  public static final String COLOR = "color";
  public static final String REVERSIBLE = "reversible";
  // Position of this board relative to the other boards; see relativePosition().
  protected Point pos = new Point(0, 0);
  // Pixel boundaries within the Map; the default 500x500 size is replaced by
  // the image size (times magnification) the first time bounds() runs.
  protected Rectangle boundaries = new Rectangle(0, 0, 500, 500);
  // Name of the board image file; null means a plain-colored board.
  protected String imageFile;
  protected boolean reversible = false;
  protected boolean reversed = false;
  // True once bounds() has synced boundaries with the image size.
  protected boolean fixedBoundaries = false;
  // Background color used when there is no image; null clears instead.
  protected Color color = null;
  protected MapGrid grid = null;
  protected Map map;
  protected double magnification = 1.0;
  @Deprecated protected String boardName = "Board 1";
  @Deprecated protected Image boardImage;
  // Lazily evaluated image operations for the board image and its scaled form.
  protected SourceOp boardImageOp;
  protected ScaleOp scaledImageOp;
  // No construction-time initialization; configuration happens via setAttribute().
  public Board() {
  }
  /**
   * @return this <code>Board</code>'s {@link Map}.
   * Until a game is started that is using this board, the map will be null.
   */
  public Map getMap() {
    return map;
  }
  /** Attaches this board to the given {@link Map}. */
  public void setMap(Map map) {
    this.map = map;
  }
public String getLocalizedName() {
final String s = getLocalizedConfigureName();
return s != null ? s : "";
}
public String getName() {
final String s = getConfigureName();
return s != null ? s : "";
}
  // Called when this board is added to its parent; enforces that at most one
  // MapGrid child may be configured.
  public void addTo(Buildable b) {
    validator = new SingleChildInstance(this, MapGrid.class);
  }
  // No cleanup required when removed from the parent.
  public void removeFrom(Buildable b) {
  }
  // Attribute keys, in the order they appear in the editor.
  public String[] getAttributeNames() {
    return new String[] {
      NAME,
      IMAGE,
      REVERSIBLE,
      WIDTH,
      HEIGHT,
      COLOR
    };
  }
  // Editor labels; must stay parallel to getAttributeNames().
  public String[] getAttributeDescriptions() {
    return new String[] {
      "Board name: ",
      "Board image: ",
      "Reversible: ",
      "Board width: ",
      "Board height: ",
      "Background color: "
    };
  }
  // Display name of this component type in the module editor.
  public static String getConfigureTypeName() {
    return "Board";
  }
  // Value types; must stay parallel to getAttributeNames().
  public Class<?>[] getAttributeTypes() {
    return new Class<?>[] {
      String.class,
      Image.class,
      Boolean.class,
      Integer.class,
      Integer.class,
      Color.class
    };
  }
public VisibilityCondition getAttributeVisibility(String name) {
if (REVERSIBLE.equals(name)) {
return new VisibilityCondition() {
public boolean shouldBeVisible() {
return imageFile != null;
}
};
}
else if (WIDTH.equals(name) || HEIGHT.equals(name) || COLOR.equals(name)) {
return new VisibilityCondition() {
public boolean shouldBeVisible() {
return imageFile == null;
}
};
}
else {
return null;
}
}
public String getAttributeValueString(String key) {
if (NAME.equals(key)) {
return getConfigureName();
}
else if (IMAGE.equals(key)) {
return imageFile;
}
else if (WIDTH.equals(key)) {
return imageFile == null ? String.valueOf(boundaries.width) : null;
}
else if (HEIGHT.equals(key)) {
return imageFile == null ? String.valueOf(boundaries.height) : null;
}
else if (COLOR.equals(key)) {
return imageFile == null ? ColorConfigurer.colorToString(color) : null;
}
else if (REVERSIBLE.equals(key)) {
return String.valueOf(reversible);
}
return null;
}
public void setAttribute(String key, Object val) {
if (NAME.equals(key)) {
setConfigureName((String) val);
}
else if (IMAGE.equals(key)) {
if (val instanceof File) {
val = ((File) val).getName();
}
imageFile = (String) val;
boardImageOp = imageFile == null || imageFile.trim().length() == 0
? null : Op.loadLarge(imageFile);
}
else if (WIDTH.equals(key)) {
if (val instanceof String) {
val = new Integer((String) val);
}
if (val != null) {
boundaries.setSize(((Integer) val).intValue(), boundaries.height);
}
}
else if (HEIGHT.equals(key)) {
if (val instanceof String) {
val = new Integer((String) val);
}
if (val != null) {
boundaries.setSize(boundaries.width, ((Integer) val).intValue());
}
}
else if (COLOR.equals(key)) {
if (val instanceof String) {
val = ColorConfigurer.stringToColor((String) val);
}
color = (Color) val;
}
else if (REVERSIBLE.equals(key)) {
if (val instanceof String) {
val = Boolean.valueOf((String) val);
}
reversible = ((Boolean) val).booleanValue();
}
}
  // Grid types that may be added to a Board in the module editor.
  public Class[] getAllowableConfigureComponents() {
    return new Class[] {
      HexGrid.class,
      SquareGrid.class,
      RegionGrid.class,
      ZonedGrid.class
    };
  }
  // Convenience wrapper: draws the entire board at (x, y) by delegating to
  // drawRegion() with a visible rectangle covering the whole scaled board.
  public void draw(Graphics g, int x, int y, double zoom, Component obs) {
    drawRegion(g,
      new Point(x,y),
      new Rectangle(x, y,
        Math.round((float) zoom*boundaries.width),
        Math.round((float) zoom*boundaries.height)),
      zoom, obs);
  }
  // Tiles whose images have been requested but not yet painted; used by
  // drawRegion() to trigger the fade-in animation once a tile arrives.
  private ConcurrentMap<Point,Future<Image>> requested =
    new ConcurrentHashMap<Point,Future<Image>>();
  // Per-tile fade-in alpha while a freshly loaded tile is animated in.
  private java.util.Map<Point,Float> alpha =
    new ConcurrentHashMap<Point,Float>();
  // Orders tile indices row-major (by y, then x); used with
  // Arrays.binarySearch in drawRegion(). NOTE(review): t1.x - t2.x could
  // overflow for extreme values; harmless for tile indices.
  private static Comparator<Point> tileOrdering = new Comparator<Point>() {
    public int compare(Point t1, Point t2) {
      if (t1.y < t2.y) return -1;
      if (t1.y > t2.y) return 1;
      return t1.x - t2.x;
    }
  };
  // Blocks on the tile future and paints the image at (tx, ty). Failures are
  // reported rather than propagated so one bad tile does not abort painting.
  protected void drawTile(Graphics g, Future<Image> fim,
                          int tx, int ty, Component obs) {
    try {
      g.drawImage(fim.get(), tx, ty, obs);
    }
    catch (CancellationException e) {
      // FIXME: bug until we permit cancellation
      ErrorDialog.bug(e);
    }
    catch (InterruptedException e) {
      // NOTE(review): the interrupt status is not restored here.
      ErrorDialog.bug(e);
    }
    catch (ExecutionException e) {
      // A missing image file is a module-data error; anything else is a bug.
      final MissingImageException mie =
        ErrorUtils.getAncestorOfClass(MissingImageException.class, e);
      if (mie != null) {
        ErrorDialog.dataError(new BadDataReport(
          "Image not found", mie.getFile().getAbsolutePath(), mie));
      }
      else {
        ErrorDialog.bug(e);
      }
    }
  }
  // Paints the part of this board that intersects visibleRect, at the given
  // zoom, by tiling the (possibly scaled/reversed) board image. When obs is
  // non-null, tiles are fetched asynchronously and faded in as they arrive;
  // when obs is null the draw is synchronous.
  public void drawRegion(final Graphics g,
                         final Point location,
                         Rectangle visibleRect,
                         double zoom,
                         final Component obs) {
    zoom *= magnification;
    final Rectangle bounds =
      new Rectangle(location.x, location.y,
                    Math.round(boundaries.width * (float) zoom),
                    Math.round(boundaries.height * (float) zoom));
    if (visibleRect.intersects(bounds)) {
      visibleRect = visibleRect.intersection(bounds);
      if (boardImageOp != null) {
        // Pick the image op: the raw image at 1:1, otherwise a cached scaled
        // (and 180-degree rotated, when reversed) version.
        final ImageOp op;
        if (zoom == 1.0 && !reversed) {
          op = boardImageOp;
        }
        else {
          if (scaledImageOp == null || scaledImageOp.getScale() != zoom) {
            scaledImageOp = Op.scale(boardImageOp, zoom);
          }
          op = reversed ? Op.rotate(scaledImageOp, 180) : scaledImageOp;
        }
        // Visible region translated into board-image coordinates.
        final Rectangle r = new Rectangle(visibleRect.x - location.x,
                                          visibleRect.y - location.y,
                                          visibleRect.width,
                                          visibleRect.height);
        final int ow = op.getTileWidth();
        final int oh = op.getTileHeight();
        final Point[] tiles = op.getTileIndices(r);
        for (Point tile : tiles) {
          // find tile position
          final int tx = location.x + tile.x*ow;
          final int ty = location.y + tile.y*oh;
          // find actual tile size (edge tiles may be clipped)
          final int tw = Math.min(ow, location.x+bounds.width-tx);
          final int th = Math.min(oh, location.y+bounds.height-ty);
          final Repainter rep = obs == null ? null :
            new Repainter(obs, tx, ty, tw, th);
          try {
            final Future<Image> fim = op.getFutureTile(tile.x, tile.y, rep);
            if (obs == null) {
              drawTile(g, fim, tx, ty, obs);
            }
            else {
              if (fim.isDone()) {
                // Tile just became ready: the first time we see it done,
                // kick off a 100 ms fade-in animation.
                if (requested.containsKey(tile)) {
                  requested.remove(tile);
                  final Point t = tile;
                  final Animator a = new Animator(100,
                    new TimingTargetAdapter() {
                      @Override
                      public void timingEvent(float fraction) {
                        alpha.put(t, fraction);
                        obs.repaint(tx, ty, tw, th);
                      }
                    }
                  );
                  a.setResolution(20);
                  a.start();
                }
                else {
                  // Mid-animation: composite with the current alpha. Once the
                  // fade completes, drop the entry and draw opaquely.
                  Float a = alpha.get(tile);
                  if (a != null && a < 1.0f) {
                    final Graphics2D g2d = (Graphics2D) g;
                    final Composite oldComp = g2d.getComposite();
                    g2d.setComposite(
                      AlphaComposite.getInstance(AlphaComposite.SRC_OVER, a));
                    drawTile(g2d, fim, tx, ty, obs);
                    g2d.setComposite(oldComp);
                  }
                  else {
                    alpha.remove(tile);
                    drawTile(g, fim, tx, ty, obs);
                  }
                }
              }
              else {
                // Still loading: remember it so we animate when it arrives.
                requested.putIfAbsent(tile, fim);
              }
            }
          }
          // FIXME: should getTileFuture() throw these? Yes, probably, because it's
          // synchronous when obs is null.
          catch (CancellationException e) {
            // FIXME: bug until we permit cancellation
            ErrorDialog.bug(e);
          }
          catch (ExecutionException e) {
            // FIXME: bug until we figure out why getTileFuture() throws this
            ErrorDialog.bug(e);
          }
        }
        // Forget pending requests for tiles that scrolled out of view.
        // NOTE(review): Arrays.binarySearch assumes tiles is sorted by
        // tileOrdering -- presumably guaranteed by getTileIndices(); confirm.
        for (Point tile : requested.keySet().toArray(new Point[0])) {
          if (Arrays.binarySearch(tiles, tile, tileOrdering) < 0) {
            requested.remove(tile);
          }
        }
        /*
        final StringBuilder sb = new StringBuilder();
        for (Point tile : requested.keySet().toArray(new Point[0])) {
          if (Arrays.binarySearch(tiles, tile, tileOrdering) < 0) {
            final Future<Image> fim = requested.remove(tile);
            if (!fim.isDone()) {
              sb.append("(")
                .append(tile.x)
                .append(",")
                .append(tile.y)
                .append(") ");
            }
          }
        }
        if (sb.length() > 0) {
          sb.insert(0, "cancelling: ").append("\n");
          System.out.print(sb.toString());
        }
        */
      }
      else {
        // No image: fill with the background color, or clear if none is set.
        if (color != null) {
          g.setColor(color);
          g.fillRect(visibleRect.x, visibleRect.y,
                     visibleRect.width, visibleRect.height);
        }
        else {
          g.clearRect(visibleRect.x, visibleRect.y,
                      visibleRect.width, visibleRect.height);
        }
      }
      // Draw the map grid (hexes/squares/zones) over the board image.
      if (grid != null) {
        grid.draw(g, bounds, visibleRect, zoom, reversed);
      }
    }
  }
  // Deprecated synchronous render of the whole scaled (and possibly reversed)
  // board image; returns null if rendering fails or is interrupted.
  @Deprecated
  public synchronized Image getScaledImage(double zoom, Component obs) {
    try {
      final ImageOp sop = Op.scale(boardImageOp, zoom);
      return (reversed ? Op.rotate(sop, 180) : sop).getImage(null);
    }
    catch (CancellationException e) {
      ErrorDialog.bug(e);
    }
    catch (InterruptedException e) {
      // NOTE(review): the interrupt status is not restored here.
      ErrorDialog.bug(e);
    }
    catch (ExecutionException e) {
      ErrorDialog.bug(e);
    }
    return null;
  }
public void setReversed(boolean val) {
if (reversible) {
if (reversed != val) {
reversed = val;
scaledImageOp = null; // get a new rendered version on next paint
}
}
}
  /** @return whether the board is currently flipped 180 degrees. */
  public boolean isReversed() {
    return reversed;
  }
  /**
   * If this board is reversed, return the location in un-reversed coordinates
   */
  public Point localCoordinates(Point p) {
    // Mutates and returns the given point. Note the order: un-reverse first,
    // then undo magnification (the exact inverse of globalCoordinates()).
    if (reversed) {
      p.x = bounds().width - p.x;
      p.y = bounds().height - p.y;
    }
    if (magnification != 1.0) {
      p.x = (int) Math.round(p.x/magnification);
      p.y = (int) Math.round(p.y/magnification);
    }
    return p;
  }
  /**
   * If this board is reversed, return the location in reversed coordinates
   */
  public Point globalCoordinates(Point p) {
    // Mutates and returns the given point. Magnify first, then reverse -- the
    // exact inverse order of localCoordinates().
    if (magnification != 1.0) {
      p.x = (int) Math.round(p.x*magnification);
      p.y = (int) Math.round(p.y*magnification);
    }
    if (reversed) {
      p.x = bounds().width - p.x;
      p.y = bounds().height - p.y;
    }
    return p;
  }
  /** Installs the board's grid (GridContainer contract). */
  public void setGrid(MapGrid mg) {
    grid = mg;
  }
  // Detaches the grid, but only if it is the one currently installed.
  public void removeGrid(MapGrid grid) {
    if (this.grid == grid) {
      this.grid = null;
    }
  }
  /** @return this board itself (GridContainer contract). */
  public Board getBoard() {
    return this;
  }
  /** @return the board's current pixel size, derived from bounds(). */
  public Dimension getSize() {
    return bounds().getSize();
  }
  /** @return the installed grid, or null when the board has none. */
  public MapGrid getGrid() {
    return grid;
  }
public Board copy() {
Board b = new Board();
b.build(getBuildElement(Builder.createNewDocument()));
return b;
}
  /**
   * @deprecated Images are now fixed automagically using {@link ImageOp}s.
   */
  @Deprecated
  public void fixImage(Component map) { }
  /**
   * @deprecated Images are now fixed automagically using {@link ImageOp}s.
   */
  @Deprecated
  public void fixImage() { }
public String locationName(Point p) {
return grid == null ? null : grid.locationName(localCoordinates(p));
}
public String localizedLocationName(Point p) {
return grid == null ? null : grid.localizedLocationName(localCoordinates(p));
}
public Point snapTo(Point p) {
return grid == null ? p : globalCoordinates(grid.snapTo(localCoordinates(p)));
}
/**
* @return true if the given point may not be a local location.
* I.e., if this grid will attempt to snap it to the nearest grid location.
*/
public boolean isLocationRestricted(Point p) {
return grid == null ? false : grid.isLocationRestricted(localCoordinates(p));
}
  /** @return the board image's file name, or null for an imageless board. */
  public String fileName() {
    return imageFile;
  }
  /**
   * @return Position of this board relative to the other boards (0,0) is the upper left, (0,1) is to the right, etc.
   */
  public Point relativePosition() {
    return pos;
  }
  /**
   * @return The (read-only) boundaries of this Board within the overall Map
   */
  public Rectangle bounds() {
    // Lazily replace the default size with the real image size (times
    // magnification) once, then cache via the fixedBoundaries flag.
    if (imageFile != null && boardImageOp != null && !fixedBoundaries) {
      boundaries.setSize(boardImageOp.getSize());
      if (magnification != 1.0) {
        boundaries.setSize((int)Math.round(magnification*boundaries.width),
                           (int)Math.round(magnification*boundaries.height));
      }
      fixedBoundaries = true;
    }
    // Defensive copy so callers cannot mutate our state.
    return new Rectangle(boundaries);
  }
/**
 * No-op retained for source compatibility.
 * @deprecated Bounds are now fixed automagically by {@link ImageOp}s.
 */
@Deprecated
protected void fixBounds() { }
/**
 * Translate the location of the board by the given number of pixels
 *
 * @param x horizontal offset in pixels
 * @param y vertical offset in pixels
 * @see #bounds()
 */
public void translate(int x, int y) {
  boundaries.translate(x, y);
}
/**
 * Set the location of this board
 *
 * @param x new horizontal position in pixels
 * @param y new vertical position in pixels
 * @see #bounds()
 */
public void setLocation(int x, int y) {
  boundaries.setLocation(x, y);
}
/** @return the reference-manual help page for the Board component */
public HelpFile getHelpFile() {
  return HelpFile.getReferenceManualPage("Board.htm");
}
/**
 * Removes board images from the {@link VASSAL.tools.DataArchive} cache
 * @deprecated Board images are removed automatically now, when under
 * memory pressure.
 */
@Deprecated
public void cleanUp() {
  if (imageFile != null) {
    GameModule.getGameModule().getDataArchive().unCacheImage("images/" + imageFile);
  }
  if (boardImage != null) {
    GameModule.getGameModule().getDataArchive().unCacheImage(boardImage);
    // Drop our own reference so the image can be garbage-collected.
    boardImage = null;
  }
}
/**
 * Cleans up {@link Board}s (by invoking {@link Board#cleanUp}) when a
 * game is closed
 * @deprecated Only used to cleanup <code>Board</code> images, which
 * is now handled automatically by the cache.
 */
@Deprecated
public static class Cleanup implements GameComponent {
  // Lazily-created singleton, see init()/getInstance().
  private static Cleanup instance;
  // Boards registered for cleanup when the current game ends.
  private Set<Board> toClean = new HashSet<Board>();
  // Tracks whether a game is currently in progress.
  private boolean gameStarted = false;

  /** Creates the singleton instance if it does not yet exist. */
  public static void init() {
    if (instance == null) {
      instance = new Cleanup();
    }
  }

  // Private: registers itself with the game state on construction.
  private Cleanup() {
    GameModule.getGameModule().getGameState().addGameComponent(this);
  }

  /** @return the singleton instance, or null if {@link #init()} was never called */
  public static Cleanup getInstance() {
    return instance;
  }

  /**
   * Mark this board as needing to be cleaned up when the game is closed
   *
   * @param b
   */
  public void addBoard(Board b) {
    toClean.add(b);
  }

  /** @return null; this component contributes nothing to a saved game */
  public Command getRestoreCommand() {
    return null;
  }

  /**
   * On the transition from "game running" to "game closed", cleans up and
   * forgets all registered boards.
   */
  public void setup(boolean gameStarting) {
    if (gameStarted && !gameStarting) {
      for (Board board : toClean) {
        board.cleanUp();
      }
      toClean.clear();
    }
    gameStarted = gameStarting;
  }
}
/** @return the current display magnification factor (1.0 = natural size) */
public double getMagnification() {
  return magnification;
}
/**
 * Sets the display magnification factor, invalidates the cached board
 * bounds, and immediately recomputes them.
 * @param magnification the new scale factor (1.0 = natural size)
 */
public void setMagnification(double magnification) {
  this.magnification = magnification;
  // Invalidate the cached size so bounds() rescales from the image.
  fixedBoundaries = false;
  bounds();
}
}
|
package edu.dynamic.dynamiz.parser;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.logging.Logger;
import edu.dynamic.dynamiz.controller.Command;
import edu.dynamic.dynamiz.controller.CommandAdd;
import edu.dynamic.dynamiz.controller.CommandDelete;
import edu.dynamic.dynamiz.controller.CommandMark;
import edu.dynamic.dynamiz.controller.CommandHelp;
import edu.dynamic.dynamiz.controller.CommandList;
import edu.dynamic.dynamiz.controller.CommandRedo;
import edu.dynamic.dynamiz.controller.CommandSearch;
import edu.dynamic.dynamiz.controller.CommandShow;
import edu.dynamic.dynamiz.controller.CommandType;
import edu.dynamic.dynamiz.controller.CommandUndo;
import edu.dynamic.dynamiz.controller.CommandUnmark;
import edu.dynamic.dynamiz.controller.CommandUpdate;
import edu.dynamic.dynamiz.structure.MyDate;
import edu.dynamic.dynamiz.structure.EventItem;
import edu.dynamic.dynamiz.structure.TaskItem;
import edu.dynamic.dynamiz.structure.ToDoItem;
/**
* This is a class which stores the information of the parsed command line given
* by the user.
*
* Represents a list of arguments parsed against an option argument.
*
* Also represents a list of commands parsed (?! if it is necessary)
*
* This allows querying of a boolean hasOption(String opt), and retrieving value
* of the option getOptionValue(String opt)
*
* @author nhan
*
*/
public class CommandLine {
    /** The {@link CommandType} parsed from the user's input. */
    private CommandType commandType;
    /** The parsed collection of options (flags with their values). */
    private Options options;
    /** The free-text parameter of the command (description, id list, etc.). */
    private String param;
    /** The concrete {@link Command} built from the fields above. */
    private Command command;
    /** Sentinel id passed on when an id string cannot be parsed. */
    private final int INVALID_ID = -1;

    /** A logger instance for this class*/
    private final static Logger LoggerCommandLine = Logger.getLogger(CommandLine.class.getName());

    // Message templates shared by logging and thrown IllegalArgumentExceptions.
    private final static String INVALID_ID_MSG = "Not a valid id given: %1$s";
    private final static String INVALID_COMMANDTYPE_MSG = "Not a valid alias to known CommandType: %1$s";
    private final static String INVALID_OPTIONTYPE_MSG = "Not a valid alias of known OptionType: %1$s";
    private final static String INVALID_DATE_INTERVAL_MSG = "Not a valid interval of date: from %1$s to %2$s";

    /** Creates an empty CommandLine; no Command is built. */
    public CommandLine() {
        this.commandType = null;
        this.options = null;
        this.param = null;
    }

    /**
     * Creates a CommandLine and immediately builds the matching {@link Command}.
     * @param cmdType the parsed command type
     * @param options the parsed option collection
     * @param param the free-text parameter of the command
     */
    public CommandLine(CommandType cmdType, Options options, String param) {
        this.commandType = cmdType;
        this.options = options;
        this.param = param;

        if (!initialiseCommand()) {
            LoggerCommandLine.severe("Command is not initialised!");
        }
    }

    /**
     * Dispatches on {@link #commandType} to build the concrete Command.
     * @return true if a parser exists for the command type, false otherwise
     */
    private boolean initialiseCommand() {
        switch (this.commandType) {
            case ADD:
                this.command = parseAdd();
                break;
            case DELETE:
                this.command = parseDelete();
                break;
            case UPDATE:
                this.command = parseUpdate();
                break;
            case LIST:
                this.command = parseList();
                break;
            case SEARCH:
                this.command = parseSearch();
                break;
            case UNDO:
                this.command = parseUndo();
                break;
            case REDO:
                this.command = parseRedo();
                break;
            case HELP:
                this.command = parseHelp();
                break;
            case MARK:
                this.command = parseMark();
                break;
            case UNMARK:
                this.command = parseUnmark();
                break;
            case SHOW:
                this.command = parseShow();
                break;
            case EXIT:
                this.command = parseExit();
                break;
            default:
                return false;
        }
        return true;
    }

    /**
     * Parsing CommandLine object into respective {@link CommandAdd} object
     * @return a parsed {@link CommandAdd} object
     * @throws IllegalArgumentException if the start date lies after the end date
     */
    private Command parseAdd() {
        ToDoItem commandItem = null;

        // Handling date
        boolean hasStart = options.hasOption(OptionType.START_TIME);
        boolean hasEnd = options.hasOption(OptionType.END_TIME);
        boolean hasBoth = hasStart && hasEnd;
        boolean hasOn = options.hasOption(OptionType.ON_TIME);

        MyDate startDate = null;
        MyDate endDate = null;

        if (hasStart) {
            startDate = Util.convertStringToMyDate(getFirstOptionValue(options, OptionType.START_TIME));
        }

        if (hasEnd) {
            endDate = Util.convertStringToMyDate(getFirstOptionValue(options, OptionType.END_TIME));
        }

        if (hasOn) {
            // An "on" date pins both start and end to the same calendar day,
            // overwriting any day/month/year already parsed into them.
            MyDate onDate = Util.convertStringToMyDate(getFirstOptionValue(options, OptionType.ON_TIME));
            int dd = onDate.getDayOfMonth();
            int mm = onDate.getMonth();
            int yy = onDate.getYear();

            if (startDate != null) {
                startDate.setDate(dd, mm, yy);
            } else {
                startDate = onDate;
            }

            if (endDate != null) {
                endDate.setDate(dd, mm, yy);
            } else {
                endDate = onDate;
            }
        }

        if (hasBoth || hasOn) {
            // Both ends known: build an event, provided the interval is valid.
            if (startDate.compareTo(endDate) <= 0) {
                commandItem = new EventItem(this.param, startDate, endDate);
            } else {
                throw new IllegalArgumentException(String.format(INVALID_DATE_INTERVAL_MSG,
                        startDate, endDate));
            }
        } else if (hasEnd) {
            // Deadline only: build a task. NOTE(review): a start date given
            // without an end date is silently dropped here — confirm intended.
            commandItem = new TaskItem(this.param, endDate);
        } else {
            commandItem = new ToDoItem(this.param);
        }

        // Handling Priority (if applicable)
        if (options.hasOption(OptionType.PRIORITY)) {
            int priority = Integer.parseInt(getFirstOptionValue(options, OptionType.PRIORITY));
            commandItem.setPriority(priority);
        }

        return new CommandAdd(commandItem);
    }

    /**
     * Parsing CommandLine object into respective {@link CommandDelete} object
     * @return a parsed {@link CommandDelete} object
     */
    private Command parseDelete() {
        int[] ids = Util.toIntArray(extractIdList(param));
        return new CommandDelete(ids);
    }

    /**
     * Parsing CommandLine object into respective {@link CommandList} object
     * @return a parsed {@link CommandList} object
     */
    private Command parseList() {
        // Parse Start and End Date
        List<MyDate> commandStartDateList = new ArrayList<MyDate>();
        List<MyDate> commandEndDateList = new ArrayList<MyDate>();
        List<MyDate> commandOnDateList = new ArrayList<MyDate>();
        List<Integer> commandPriorityList = new ArrayList<Integer>();
        List<OptionType> commandOrderList = new ArrayList<OptionType>();

        if (options.hasOption(OptionType.START_TIME)) {
            commandStartDateList = extractDateList(options, OptionType.START_TIME);
        }

        if (options.hasOption(OptionType.END_TIME)) {
            commandEndDateList = extractDateList(options, OptionType.END_TIME);
        }

        if (options.hasOption(OptionType.PRIORITY)) {
            commandPriorityList = extractPriorityList(options);
        }

        if (options.hasOption(OptionType.ORDER_BY)) {
            commandOrderList = extractOptionTypeList(options);
        }

        if (options.hasOption(OptionType.ON_TIME)) {
            // Each "on" date counts as both a start and an end date filter.
            commandOnDateList = extractDateList(options, OptionType.ON_TIME);
            if (!commandOnDateList.isEmpty()) {
                for (MyDate date: commandOnDateList) {
                    commandStartDateList.add(date);
                    commandEndDateList.add(date);
                }
            }
        }

        // Convert the collected lists to the array types CommandList expects;
        // absent filters are passed as null rather than empty arrays.
        int[] priorities = null;
        MyDate[] startDates = null;
        MyDate[] endDates = null;
        OptionType[] orderings = null;

        if (!commandStartDateList.isEmpty()) {
            startDates = commandStartDateList.toArray(new MyDate[commandStartDateList.size()]);
        }

        if (!commandEndDateList.isEmpty()) {
            endDates = commandEndDateList.toArray(new MyDate[commandEndDateList.size()]);
        }

        if (!commandPriorityList.isEmpty()) {
            priorities = new int[commandPriorityList.size()];
            for (int i = 0; i < commandPriorityList.size(); i++) {
                priorities[i] = commandPriorityList.get(i);
            }
        }

        if (!commandOrderList.isEmpty()) {
            orderings = commandOrderList.toArray(new OptionType[commandOrderList.size()]);
        }

        return new CommandList(priorities, startDates, endDates, orderings);
    }

    /**
     * Parsing CommandLine object into respective {@link CommandSearch} object
     * @return a parsed {@link CommandSearch} object
     */
    private Command parseSearch() {
        // Parse Start and End Date
        MyDate commandStartDate = null;
        MyDate commandEndDate = null;
        int commandPriority = OptionType.PRIORITY_UNCHANGED;
        String commandStatus = null;
        List<OptionType> commandOrderList= new ArrayList<OptionType>();

        if (options.hasOption(OptionType.START_TIME)) {
            commandStartDate = Util.convertStringToMyDate(getFirstOptionValue(options, OptionType.START_TIME));
        }

        if (options.hasOption(OptionType.END_TIME)) {
            commandEndDate = Util.convertStringToMyDate(getFirstOptionValue(options, OptionType.END_TIME));
        }

        if (options.hasOption(OptionType.PRIORITY)) {
            commandPriority = Integer.parseInt(getFirstOptionValue(options, OptionType.PRIORITY));
        }

        if (options.hasOption(OptionType.ORDER_BY)) {
            commandOrderList = extractOptionTypeList(options);
        }

        if (options.hasOption(OptionType.STATUS)) {
            commandStatus = getFirstOptionValue(options, OptionType.STATUS);
        }

        return new CommandSearch(param, commandPriority, commandStartDate, commandEndDate,
                commandStatus, commandOrderList.toArray(new OptionType[commandOrderList.size()]));
    }

    /**
     * Parsing CommandLine object into respective {@link CommandUndo} object
     * @return a parsed {@link CommandUndo} object
     */
    private Command parseUndo() {
        return new CommandUndo();
    }

    /**
     * Parsing CommandLine object into respective {@link CommandRedo} object
     * @return a parsed {@link CommandRedo} object
     */
    private Command parseRedo() {
        return new CommandRedo();
    }

    /**
     * Parsing CommandLine object into respective {@link CommandUpdate} object
     * @return a parsed {@link CommandUpdate} object
     * @throws IllegalArgumentException if the leading word of param is not a valid integer id
     */
    private Command parseUpdate() {
        // check param. If have more than just item ID, update the description
        String itemID = Util.getFirstWord(this.param);
        int id;
        try {
            id = Integer.parseInt(itemID);
        } catch (NumberFormatException e) {
            LoggerCommandLine.warning(String.format(INVALID_ID_MSG, itemID));
            throw new IllegalArgumentException(String.format(INVALID_ID_MSG, itemID));
        }

        // Everything after the id becomes the replacement description.
        String extraDescription = Util.stripFirstWord(this.param);

        MyDate commandStartDate = null;
        MyDate commandEndDate = null;
        int commandPriority = OptionType.PRIORITY_UNCHANGED;

        if (options.hasOption(OptionType.START_TIME)) {
            commandStartDate = Util.convertStringToMyDate(getFirstOptionValue(options, OptionType.START_TIME));
        }

        if (options.hasOption(OptionType.END_TIME)) {
            commandEndDate = Util.convertStringToMyDate(getFirstOptionValue(options, OptionType.END_TIME));
        }

        if (options.hasOption(OptionType.PRIORITY)) {
            commandPriority = Integer.parseInt(getFirstOptionValue(options, OptionType.PRIORITY));
        }

        return new CommandUpdate(id, extraDescription, commandPriority,
                commandStartDate, commandEndDate);
    }

    /**
     * Parsing CommandLine object into respective {@link CommandHelp} object.
     * An unrecognised or empty topic falls back to general help (null type).
     * @return a parsed {@link CommandHelp} object
     */
    private Command parseHelp() {
        try {
            if (!param.isEmpty()) {
                CommandType type = CommandType.fromString(param);
                return new CommandHelp(type);
            } else {
                return new CommandHelp(null);
            }
        } catch (IllegalArgumentException e) {
            // Unknown alias: warn and degrade gracefully to general help.
            LoggerCommandLine.warning(String.format(INVALID_COMMANDTYPE_MSG, param));
            return new CommandHelp(null);
        }
    }

    /**
     * Parsing CommandLine object into respective {@link CommandMark} object
     * @return a parsed {@link CommandMark} object
     */
    private Command parseMark() {
        int[] ids = Util.toIntArray(extractIdList(param));
        return new CommandMark(ids);
    }

    /**
     * Parsing CommandLine object into respective {@link CommandUnmark} object
     * @return a parsed {@link CommandUnmark} object
     */
    private Command parseUnmark() {
        int[] ids = Util.toIntArray(extractIdList(param));
        return new CommandUnmark(ids);
    }

    /**
     * Parsing CommandLine object into respective {@link CommandShow} object.
     * An unparseable id is logged and replaced with the INVALID_ID sentinel.
     * @return a parsed {@link CommandShow} object
     */
    private Command parseShow() {
        try {
            int id = Integer.parseInt(param);
            return new CommandShow(id);
        } catch (NumberFormatException e) {
            LoggerCommandLine.warning(String.format(INVALID_ID_MSG, param));
            return new CommandShow(INVALID_ID);
        }
    }

    /**
     * It is redundant to try and parse Exit command into an object
     * @return always null
     */
    private Command parseExit() {
        return null;
    }

    /**
     * Retrieve the first valid value of the last {@link Option} present in the collection of the given
     * {@link Options}
     *
     * @param commandOptions The collection of {@link Option}
     * @param optionType The {@link OptionType} of the {@link Option} that is being retrieved
     * @return the first valid value of the matching {@link Option}
     */
    private String getFirstOptionValue(Options commandOptions, OptionType optionType) {
        List<Option> optionList = commandOptions.getOptions(optionType);
        // Last occurrence of the option wins; take its first value.
        Option option = optionList.get(optionList.size() - 1);
        String optionStr = option.getValues().get(0);
        return optionStr;
    }

    /**
     * Retrieve a list of {@link MyDate} object from the given list of value.
     * Values that fail to convert to a date are silently skipped.
     *
     * @param options The collection of {@link Option} to extract from
     * @param dateType The {@link OptionType} of the {@link Option} that is being retrieved
     * @return a list of {@link MyDate} and/or {@link MyDateTime} objects extracted from the given collection.
     */
    public List<MyDate> extractDateList(Options options, OptionType dateType) {
        List<String> values = options.getOptions(dateType).get(0).getValues();
        List<MyDate> dateList = new ArrayList<MyDate>();
        for (String value: values) {
            MyDate date = Util.convertStringToMyDate(value);
            if (date != null) {
                dateList.add(date);
            }
        }
        return dateList;
    }

    /**
     * Retrieve a list of {@link OptionType.PRIORITY} integer values from the collection
     *
     * @param options The collection of {@link Option} to extract from
     * @return a list of {@link OptionType.PRIORITY} integer values extracted from the given collection.
     */
    public List<Integer> extractPriorityList(Options options) {
        List<String> values = options.getOptions(OptionType.PRIORITY).get(0).getValues();
        List<Integer> priorityList = new ArrayList<Integer>();
        for (String value: values) {
            Integer priority = Integer.parseInt(value);
            priorityList.add(priority);
        }
        return priorityList;
    }

    /**
     * Retrieve a list of {@link OptionType} values from the collection
     *
     * @param options The collection of {@link Option} to extract from
     * @return a list of matching {@link OptionType} values extracted from the given collection.
     * @throws IllegalArgumentException if any value is not a known OptionType alias
     */
    public List<OptionType> extractOptionTypeList(Options options) {
        List<String> values = options.getOptions(OptionType.ORDER_BY).get(0).getValues();
        List<OptionType> typeList = new ArrayList<OptionType>();
        for (String value: values) {
            try {
                OptionType type = OptionType.fromString(value);
                typeList.add(type);
            } catch (IllegalArgumentException e) {
                LoggerCommandLine.warning(String.format(INVALID_OPTIONTYPE_MSG, value));
                throw new IllegalArgumentException(String.format(INVALID_OPTIONTYPE_MSG, value));
            }
        }
        return typeList;
    }

    /**
     * Retrieve a List<Integer> from the given string.
     * Accepts single integers and ranges; duplicates are removed via a Set.
     *
     * @param idStr The string in which the number list is extracted from
     * @return a List<Integer> containing the values in the given string
     * @throws IllegalArgumentException if a token is neither an integer nor a valid range
     */
    public List<Integer> extractIdList(String idStr) {
        String[] idArray = idStr.split(Option.DEFAULT_DELIMITER);
        List<String> idStrList = Util.removeEmptyStringsInArray(idArray);
        Set<Integer> idSet = new HashSet<Integer>();
        for (String id: idStrList) {
            if (Util.isInteger(id)) {
                idSet.add(Integer.parseInt(id));
            } else {
                // Not a plain integer: try to interpret the token as a range.
                List<Integer> idList = Util.getNumberListFromRange(id);
                if (idList != null && !idList.isEmpty()) {
                    idSet.addAll(idList);
                } else {
                    LoggerCommandLine.warning(String.format(INVALID_ID_MSG, id));
                    throw new IllegalArgumentException(String.format(INVALID_ID_MSG, id));
                }
            }
        }

        return new ArrayList<Integer>(idSet);
    }

    /**
     * Retrieve an Integer[] array from the given string
     *
     * @param idStr The string in which the number array is extracted from
     * @return an Integer[] containing the values in the given string.
     */
    public Integer[] extractIdArray(String idStr) {
        List<Integer> idList = extractIdList(idStr);
        return idList.toArray(new Integer[idList.size()]);
    }

    /** @return the parsed command type */
    public CommandType getCommandType() {
        return commandType;
    }

    /** @param command the command type to set */
    public void setCommandType(CommandType command) {
        this.commandType = command;
    }

    /** @return the parsed option collection */
    public Options getOptions() {
        return options;
    }

    /** @param options the option collection to set */
    public void setOptions(Options options) {
        this.options = options;
    }

    /** @return the free-text parameter of the command */
    public String getParam() {
        return param;
    }

    /** @param param the free-text parameter to set */
    public void setParam(String param) {
        this.param = param;
    }

    /** @return the Command built by the parsing constructor, or null */
    public Command getCommand() {
        return command;
    }

    /** @param command the Command to set */
    public void setCommand(Command command) {
        this.command = command;
    }

    /** @return the number of options in the parsed option collection */
    public int getNumberOfOptions() {
        return options.getNumOfOptions();
    }

    @Override
    public String toString() {
        StringBuffer sb = new StringBuffer();
        sb.append("Command Type: " + commandType.toString() + "\n");
        sb.append("Value: " + param + "\n");
        sb.append("Options: \n" + options.toString());

        return sb.toString();
    }

    /**
     * This function will call the corresponding Command its execute. For
     * example, if the parsed CommandLine has the CommandType of Add. It will
     * call CommandAdd's execute.
     * @throws IllegalArgumentException if no Command has been initialised
     */
    public void execute() {
        if (command != null) {
            command.execute();
        } else {
            throw new IllegalArgumentException("Null command");
        }
    }
}
|
package edu.dynamic.dynamiz.structure;
/**
* Defines each item in the To-Do list in general.
* Natural ordering for each ToDoItem is done using lexicographical ordering of its id.
*
* Constructors
* ToDoItem(String description) //Creates a new instance of this item.
* ToDoItem(String description, int priority) //Creates a new instance of this item with the given priority level
* protected ToDoItem(String description, int priority, String status)
* ToDoItem(ToDoItem item) //Creates a new copy of the given item
*
* Public Methods
* int compareTo(ToDoItem item) //Compares this with the given item.
* boolean equals(Object obj) //Checks if this equals to the given object.
* String getDescription() //Gets the description of this item.
* String getFeedbackString() //Gets the feedback string format of this item.
 * String getId()			//Gets the ID of this item.
* int getPriority() //Gets the priority level of this item.
* String getStatus() //Gets the status of this item.
* void setDescription(String description) //Changes the description of this item.
* void setPriority(int priority) //Changes the priority of this item.
* void setStatus(String status) //Changes the status of this item.
* String toFileString() //Gets string representation of this item used in files.
* String toString() //Gets the string representation of this item.
*
* @author zixian
*/
public class ToDoItem implements Comparable<ToDoItem>{
private static final int DEFAULT_PRIORITY = 0;
protected static final String DEFAULT_STATUS = "pending";
public static final String STATUS_PENDING = DEFAULT_STATUS;
public static final String STATUS_INPROGRESS = "in progress";
public static final String STATUS_COMPLETED = "completed";
//Number of ToDoItems with the same alphabetical prefix
private static final int MAX_IDNUM = 99;
//Print formats
private static final String FORMAT_FEEDBACKSTRING = "ID: %1$s\n"+"Desc: %2$s\n"+"Priority: %3$d\n"+
"Status: %4$s";
private static final String FORMAT_FILESTRING = "%1$s; %2$d; %3$s;
private static final String FORMAT_PRINTSTRING = "%1$s %2$s %3$d %4$s
//ID is of the form idLetter followed by idNUm.
//Highest idNum possible is 99, after which idLetter will advance to the next alphabet.
private static char idLetter = 'A';
private static int idNum = 1;
//Main data members
protected String id, description, status;
protected int priority;
//Constructors
/**
* Creates a new instance of this item.
* @param description The description of this item.
*/
public ToDoItem(String description){
this(getNextId(), description, DEFAULT_PRIORITY, DEFAULT_STATUS);
}
/**
* Creates a new instance of this item with the given priority level.
* @param description The description of this object.
* @param priority The specified priority level of this item.
*/
public ToDoItem(String description, int priority){
this(getNextId(), description, priority, DEFAULT_STATUS);
}
protected ToDoItem(String id, String description, int priority, String status){
setId(id);
setDescription(description);
setPriority(priority);
setStatus(status);
}
//Copy constructor
/**
* Creates a new instance of the given item with the same data member values as the provided item.
* @param item The item to copy.
*/
public ToDoItem(ToDoItem item){
this(item.getId(), item.getDescription(), item.getPriority(), item.getStatus());
}
@Override
/**
* Compares this item with the given item by its natural ordering.
* @param The ToDoItem to compare with.
* @return Positive number if this.id is lexicographically greater than item.id,
* 0 if this.id.equals(item.id) is true, and negative number if this.id is lexicographically
* smaller than item.id.
*/
public int compareTo(ToDoItem item){
return id.compareTo(item.getId());
}
@Override
/**
* Checks if this equals the given object.
* @param obj The object to compare with.
* @return A boolean value given by (obj instanceof ToDoItem)? this.id.equals(((ToDoItem)obj).id): false;
*/
public boolean equals(Object obj){
if(obj instanceof ToDoItem){
ToDoItem temp = (ToDoItem)obj;
return id.equals(temp.getId());
}
return false;
}
/**
* Checks if the priority level is valid.
* @param priority The priority level to check.
* @return A boolean value such that (priority>=0)? true: false
*/
public static boolean isValidPriority(int priority){
return priority>0;
}
/**
* Gets the description of this item.
* @return The description string of this item.
*/
public String getDescription(){
return description;
}
/**
* Gets the ID of this item.
* @return The ID string of this object.
*/
public String getId(){
return id;
}
//Gets the next id to assign to a new ToDoItem.
private static String getNextId(){
StringBuilder newId = new StringBuilder(Character.toString(idLetter));
newId = newId.append(Integer.toString(idNum));
if(idNum==MAX_IDNUM){
idLetter++;
}
idNum = (idNum+1)%MAX_IDNUM;
return newId.toString();
}
/**
* Gets the priority of this item.
* @return The priority level of this item.
*/
public int getPriority(){
return priority;
}
/**
* Gets the status of this item.
* @return The status string of this item.
*/
public String getStatus(){
return status;
}
/**
* Changes this item's description.
* @param description The new description.
*/
public void setDescription(String description){
assert description!=null && !description.isEmpty();
this.description = description;
}
//Changes the ID of this item.
private void setId(String id){
assert id!=null && !id.isEmpty();
this.id = id;
}
public void setPriority(int priority) throws IllegalArgumentException{
if(priority<DEFAULT_PRIORITY){
throw new IllegalArgumentException();
}
this.priority = priority;
}
public void setStatus(String status) throws IllegalArgumentException{
assert status!=null && !status.isEmpty();
if(!status.equals(DEFAULT_STATUS) && !status.equals(STATUS_INPROGRESS) && !status.equals(STATUS_COMPLETED)){
throw new IllegalArgumentException();
}
this.status = status;
}
/**
* Gets the feedback string format of this item.
* @return The feedback string representation of this item.
*/
public String getFeedbackString(){
return String.format(FORMAT_FEEDBACKSTRING, id, description, priority, status);
}
/**
* Returns a string representation of this item to be displayed for feedback and confirmation.
* @return A formatted String representing this item.
*/
public String toFileString(){
return String.format(FORMAT_FILESTRING, description, priority, status);
}
@Override
/**
* Gets the string representation of this item.
* @return The String represetnation of this item.
*/
public String toString(){
return String.format(FORMAT_PRINTSTRING, id, description, priority, status);
}
}
|
package edu.ucla.cens.awserver.dao;
import javax.sql.DataSource;
import org.springframework.jdbc.core.JdbcTemplate;
/**
* Provides base classes with access to a JDBC DataSource and Spring JdbcTemplate.
*
* @author selsky
*/
public abstract class AbstractDao implements Dao {
	/** The JDBC DataSource backing this DAO; never null after construction. */
	private final DataSource dataSource;

	/** Spring template created over the DataSource at construction time. */
	private final JdbcTemplate jdbcTemplate;

	/**
	 * Creates a DAO bound to the given DataSource.
	 *
	 * @param dataSource the DataSource to run queries against
	 * @throws IllegalArgumentException if dataSource is null
	 */
	public AbstractDao(DataSource dataSource) {
		if(dataSource == null) {
			throw new IllegalArgumentException("a non-null DataSource is required");
		}
		this.dataSource = dataSource;
		this.jdbcTemplate = new JdbcTemplate(dataSource);
	}

	/** @return the DataSource this DAO was constructed with */
	protected DataSource getDataSource() {
		return this.dataSource;
	}

	/** @return the JdbcTemplate wrapping this DAO's DataSource */
	protected JdbcTemplate getJdbcTemplate() {
		return this.jdbcTemplate;
	}
}
|
package edu.wustl.common.treeApplet;
import java.awt.BorderLayout;
import java.awt.Container;
import java.awt.Dimension;
import java.awt.GridLayout;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import java.net.URLEncoder;
import java.util.List;
import java.util.Vector;
import javax.swing.ButtonGroup;
import javax.swing.JApplet;
import javax.swing.JPanel;
import javax.swing.JRadioButton;
import javax.swing.JScrollPane;
import javax.swing.JTree;
import edu.wustl.common.tree.AdvanceQueryTreeRenderer;
import edu.wustl.common.tree.GenerateTree;
import edu.wustl.common.tree.SpecimenTreeRenderer;
import edu.wustl.common.tree.StorageContainerRenderer;
import edu.wustl.common.util.global.Constants;
/**
* TreeApplet builds the applet for the tree representation
* of query result view.
* @author gautam_shetty
*/
public class TreeApplet extends JApplet
{
/**
* Initializes the applet.
*/
public void init()
{
ObjectInputStream in = null;
try
{
URL codeBase = getCodeBase();
String protocol = codeBase.getProtocol();
String host = codeBase.getHost();
int port = codeBase.getPort();
String pageOf = this.getParameter(Constants.PAGEOF);
String storageContainerType = null,propertyName = null, cdeName = null ,specimenType = null,specimenClass = null;
int treeType = Constants.TISSUE_SITE_TREE_ID;
//Sri: Added for selecting node in the storage tree
Long selectedNode = new Long(0);
String position = null;
if (pageOf.equals(Constants.PAGEOF_STORAGE_LOCATION))
{
storageContainerType = this.getParameter(Constants.STORAGE_CONTAINER_TYPE);
treeType = Constants.STORAGE_CONTAINER_TREE_ID;
}
else if(pageOf.equals(Constants.PAGEOF_SPECIMEN) || pageOf.equals(Constants.PAGEOF_MULTIPLE_SPECIMEN))
treeType = Constants.STORAGE_CONTAINER_TREE_ID;
else if (pageOf.equals(Constants.PAGEOF_QUERY_RESULTS))
treeType = Constants.QUERY_RESULTS_TREE_ID;
else if (pageOf.equals(Constants.PAGEOF_TISSUE_SITE))
{
propertyName = this.getParameter(Constants.PROPERTY_NAME);
cdeName = this.getParameter(Constants.CDE_NAME);
}
//Added by Ramya - To Display specimen hierarchy in ordering system module.
else if(pageOf.equals(Constants.PAGEOF_SPECIMEN_TREE))
{
propertyName = this.getParameter(Constants.PROPERTY_NAME);
treeType = Constants.SPECIMEN_TREE_ID;
specimenType = this.getParameter(Constants.SPECIMEN_TYPE);
specimenClass = this.getParameter(Constants.SPECIMEN_CLASS);
}
String session_id = this.getParameter("session_id");
System.out.println("session_id "+session_id);
// If storage container tree, take care of positions and parent container
// ID edit boxes.
if(treeType == Constants.STORAGE_CONTAINER_TREE_ID)
{
String selectedNodeStr = this.getParameter(Constants.STORAGE_CONTAINER_TO_BE_SELECTED);
if((null != selectedNodeStr) && (false == "".equals(selectedNodeStr))
&& ("null".equals(selectedNodeStr) == false))
{
try
{
selectedNode = Long.valueOf(selectedNodeStr);
}
catch(Exception ex)
{
//do nothing since default value of selectedNode is 0
ex.printStackTrace();
}
}
position = this.getParameter(Constants.STORAGE_CONTAINER_POSITION);
}
String applicationPath = codeBase.getPath();
// modify applicationPath String ...
if(applicationPath.indexOf('/',1)!=-1){ //indexOf returns -1 if no match found
String newApplicationPath=null;
newApplicationPath = applicationPath.substring(0,applicationPath.indexOf('/',1)+1);
applicationPath=newApplicationPath;
}
//Kapil: MAC ISSUE JDK 1.3.1
String urlSuffix = applicationPath+Constants.TREE_DATA_ACTION+";jsessionid="+session_id+"?"+Constants.PAGEOF+"="+URLEncoder.encode(pageOf);
if (pageOf.equals(Constants.PAGEOF_TISSUE_SITE) == true)
{
//Kapil: MAC ISSUE JDK 1.3.1
urlSuffix = urlSuffix + "&"+Constants.PROPERTY_NAME+"="+URLEncoder.encode(propertyName)+"&"+Constants.CDE_NAME+"="+URLEncoder.encode(cdeName);
}
//Added By Ramya.Construct urlSuffix when treeType = Constants.REQUEST_DETAILS_ID.
else if(pageOf.equals(Constants.PAGEOF_SPECIMEN_TREE))
{
urlSuffix = urlSuffix + "&" + Constants.PROPERTY_NAME + "=" + URLEncoder.encode(propertyName)+ "&" +Constants.SPECIMEN_TYPE+ "=" + URLEncoder.encode(specimenType);
urlSuffix = urlSuffix + "&" + Constants.SPECIMEN_CLASS + "=" + URLEncoder.encode(specimenClass);
}
System.out.println("URL......................................"+urlSuffix);
URL dataURL = new URL(protocol, host, port, urlSuffix);
//Establish connection with the TreeDataAction and get the JTree object.
URLConnection connection = dataURL.openConnection();
connection.setUseCaches(false);
in = new ObjectInputStream(connection.getInputStream());
System.out.println("After Connection......................................");
JTree tree = new JTree();
List disableSpecimenIdsList = null;
System.out.println("PageOf : "+pageOf);
Vector treeDataVector = null;
if (pageOf.equals(Constants.PAGEOF_STORAGE_LOCATION) || pageOf.equals(Constants.PAGEOF_SPECIMEN)
|| pageOf.equals(Constants.PAGEOF_TISSUE_SITE) || pageOf.equals(Constants.PAGEOF_MULTIPLE_SPECIMEN)
|| pageOf.equals(Constants.PAGEOF_SPECIMEN_TREE))
{
Vector dataVector = (Vector) in.readObject();
GenerateTree generateTree = new GenerateTree();
tree = generateTree.createTree(dataVector, treeType);
}
else
{
treeDataVector = (Vector) in.readObject();
disableSpecimenIdsList=(List)in.readObject();
GenerateTree generateTree = new GenerateTree();
tree = generateTree.createTree(treeDataVector, treeType);
}
Container contentPane = getContentPane();
contentPane.setLayout(new BorderLayout());
if (pageOf.equals(Constants.PAGEOF_QUERY_RESULTS))
{
//Preparing radio buttons for configuring different views.
JPanel radioButtonPanel = new JPanel(new GridLayout(2, 1));
JRadioButton spreadsheetViewRadioButton = new JRadioButton(
Constants.SPREADSHEET_VIEW);
spreadsheetViewRadioButton
.setActionCommand(Constants.SPREADSHEET_VIEW);
spreadsheetViewRadioButton.setSelected(true);
spreadsheetViewRadioButton.setPreferredSize(new Dimension(80, 40));
JRadioButton individualViewRadioButton = new JRadioButton(
Constants.OBJECT_VIEW);
individualViewRadioButton.setActionCommand(Constants.OBJECT_VIEW);
individualViewRadioButton.setPreferredSize(new Dimension(80, 40));
ButtonGroup radioButtonGroup = new ButtonGroup();
radioButtonGroup.add(spreadsheetViewRadioButton);
radioButtonGroup.add(individualViewRadioButton);
radioButtonPanel.add(spreadsheetViewRadioButton);
radioButtonPanel.add(individualViewRadioButton);
//Radio buttons finish.
//Put the radioButton panel on the Applet.
//Kapil: MAC ISSUE JDK 1.3.1
contentPane.add(radioButtonPanel,BorderLayout.NORTH);
// Add listeners for the tree.
QueryResultsTreeListener nodeSelectionListener = new QueryResultsTreeListener(
this.getCodeBase(), this.getAppletContext());
tree.addTreeSelectionListener(nodeSelectionListener);
nodeSelectionListener.setDisableSpecimenIds(disableSpecimenIdsList);
//Add listeners for the radio buttons.
spreadsheetViewRadioButton.addActionListener(nodeSelectionListener);
individualViewRadioButton.addActionListener(nodeSelectionListener);
tree.setCellRenderer(new AdvanceQueryTreeRenderer());
}
JPanel treePanel = new JPanel(new GridLayout(1, 0));
JScrollPane scroll = new JScrollPane(tree);
treePanel.add(scroll);
treePanel.setOpaque(true);
treePanel.setVisible(true);
if (pageOf.equals(Constants.PAGEOF_TISSUE_SITE))
{
// changed for double click event
CDETreeListener tissueSiteListener = new CDETreeListener();
tissueSiteListener.setAppletContext(this.getAppletContext());
tree.addMouseListener(tissueSiteListener);
}
else if(pageOf.equals(Constants.PAGEOF_SPECIMEN_TREE))
{
//Get the node type from request parameter.
SpecimenTreeListener specimenTreeListener = new SpecimenTreeListener(specimenType,specimenClass);
specimenTreeListener.setAppletContext(this.getAppletContext());
tree.addMouseListener(specimenTreeListener);
//tree.setCellRenderer(new SpecimenTreeRenderer(specimenType,specimenClass));
}
else if (pageOf.equals(Constants.PAGEOF_STORAGE_LOCATION) || (pageOf.equals(Constants.PAGEOF_SPECIMEN)) || pageOf.equals(Constants.PAGEOF_MULTIPLE_SPECIMEN))
{
StorageLocationViewListener viewListener
= new StorageLocationViewListener(this.getCodeBase(), this.getAppletContext());
viewListener.setStorageContainerType(storageContainerType);
viewListener.setPageOf(pageOf);
tree.addTreeSelectionListener(viewListener);
tree.setCellRenderer(new StorageContainerRenderer());
}
//Put the tree panel on the Applet.
//Kapil: MAC ISSUE JDK 1.3.1
contentPane.add(treePanel, BorderLayout.CENTER);
System.out.println("25Aug06 : Tree set");
//Sri: Pass the position of the container to the next level
// This is used to auto select the node
if(false == selectedNode.equals(new Long(0)))
{
urlSuffix = applicationPath+Constants.SHOW_STORAGE_CONTAINER_GRID_VIEW_ACTION+";jsessionid="+session_id
+ "?" + Constants.SYSTEM_IDENTIFIER + "=" + selectedNode.toString()
+ "&" + Constants.STORAGE_CONTAINER_TYPE + "=" + storageContainerType
+ "&" + Constants.STORAGE_CONTAINER_POSITION + "=" + position
+ "&" + Constants.PAGEOF + "=" + pageOf;
dataURL = new URL(protocol, host, port, urlSuffix);
this.getAppletContext().showDocument(dataURL,Constants.DATA_VIEW_FRAME);
}
}
catch (MalformedURLException malExp)
{
malExp.printStackTrace();
}
catch (IOException ioExp)
{
ioExp.printStackTrace();
}
catch (ClassNotFoundException classNotExp)
{
classNotExp.printStackTrace();
}
catch (Exception excp)
{
excp.printStackTrace();
}
finally
{
try
{
if (in != null)
{
in.close();
}
}
catch (IOException ioExp)
{
ioExp.printStackTrace();
}
}
}
}
|
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
// File created: 2011-06-23 13:22:53
package fi.tkk.ics.hadoop.bam.cli.plugins;
import java.io.File;
import java.io.InputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.ChecksumFileSystem;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapred.FileAlreadyExistsException;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import net.sf.samtools.util.BlockCompressedStreamConstants;
import fi.tkk.ics.hadoop.bam.custom.hadoop.InputSampler;
import fi.tkk.ics.hadoop.bam.custom.hadoop.TotalOrderPartitioner;
import fi.tkk.ics.hadoop.bam.custom.jargs.gnu.CmdLineParser;
import fi.tkk.ics.hadoop.bam.custom.samtools.BAMFileWriter;
import fi.tkk.ics.hadoop.bam.custom.samtools.SAMFileHeader;
import fi.tkk.ics.hadoop.bam.custom.samtools.SAMFileReader;
import static fi.tkk.ics.hadoop.bam.custom.jargs.gnu.CmdLineParser.Option.*;
import fi.tkk.ics.hadoop.bam.BAMInputFormat;
import fi.tkk.ics.hadoop.bam.KeyIgnoringBAMOutputFormat;
import fi.tkk.ics.hadoop.bam.SAMRecordWritable;
import fi.tkk.ics.hadoop.bam.cli.CLIPlugin;
import fi.tkk.ics.hadoop.bam.cli.Utils;
import fi.tkk.ics.hadoop.bam.util.Pair;
import fi.tkk.ics.hadoop.bam.util.Timer;
/**
 * CLI plugin that sorts a BAM file with a distributed Hadoop job.
 *
 * <p>The input is sampled to build a total-order partitioning, records are
 * shuffled and sorted by key, and each reduce task writes one BAM part file
 * into WORKDIR. With {@code --output-file} the parts are afterwards merged
 * into a single BAM file (header + parts + BGZF terminator) and removed.</p>
 */
public final class Sort extends CLIPlugin {
    private static final List<Pair<CmdLineParser.Option, String>> optionDescs
        = new ArrayList<Pair<CmdLineParser.Option, String>>();

    private static final CmdLineParser.Option
        verboseOpt    = new BooleanOption('v', "verbose"),
        outputFileOpt = new StringOption('o', "output-file=PATH");

    public Sort() {
        super("sort", "BAM sorting", "1.0", "WORKDIR INPATH", optionDescs,
            "Sorts the BAM file in INPATH in a distributed fashion using "+
            "Hadoop. Output parts are placed in WORKDIR.");
    }
    static {
        optionDescs.add(new Pair<CmdLineParser.Option, String>(
            verboseOpt, "tell the Hadoop job to be more verbose"));
        optionDescs.add(new Pair<CmdLineParser.Option, String>(
            outputFileOpt, "output a complete BAM file to the file PATH, "+
                           "removing the parts from WORKDIR"));
    }

    /**
     * Runs the distributed sort.
     *
     * @param parser parsed command line holding WORKDIR, INPATH and options
     * @return 0 on success; 3 on usage error; 4 on job/Hadoop failure;
     *         5 when the optional output merging fails
     */
    @Override protected int run(CmdLineParser parser) {
        final List<String> args = parser.getRemainingArgs();
        if (args.isEmpty()) {
            // BUGFIX: this argument is named WORKDIR in the usage string,
            // not OUTDIR; keep the error message consistent with it.
            System.err.println("sort :: WORKDIR not given.");
            return 3;
        }
        if (args.size() == 1) {
            System.err.println("sort :: INPATH not given.");
            return 3;
        }

        final String wrkDir = args.get(0),
                     in     = args.get(1),
                     out    = (String)parser.getOptionValue(outputFileOpt);

        final boolean verbose = parser.getBoolean(verboseOpt);

        final Path inPath     = new Path(in),
                   wrkDirPath = new Path(wrkDir);

        final String inFile = inPath.getName();

        final Configuration conf = getConf();

        // Used by SortOutputFormat to fetch the SAM header to output and to
        // name the output files, respectively.
        conf.set(SortOutputFormat.INPUT_PATH_PROP,  in);
        conf.set(SortOutputFormat.OUTPUT_NAME_PROP, inFile);

        final Timer t = new Timer();
        try {
            Utils.setSamplingConf(inPath, conf);

            // As far as I can tell there's no non-deprecated way of getting
            // this info. We can silence this warning but not the import.
            @SuppressWarnings("deprecation")
            final int maxReduceTasks =
                new JobClient(new JobConf(conf)).getClusterStatus()
                .getMaxReduceTasks();

            // Use 90% of the cluster's reduce slots, but always at least one.
            conf.setInt("mapred.reduce.tasks", Math.max(1, maxReduceTasks*9/10));

            final Job job = new Job(conf);

            job.setJarByClass  (Sort.class);
            // The identity Mapper: sorting happens entirely in the shuffle.
            job.setMapperClass (Mapper.class);
            job.setReducerClass(SortReducer.class);

            job.setMapOutputKeyClass(LongWritable.class);
            job.setOutputKeyClass   (NullWritable.class);
            job.setOutputValueClass (SAMRecordWritable.class);

            job.setInputFormatClass (BAMInputFormat.class);
            job.setOutputFormatClass(SortOutputFormat.class);

            FileInputFormat .setInputPaths(job, inPath);
            FileOutputFormat.setOutputPath(job, wrkDirPath);

            job.setPartitionerClass(TotalOrderPartitioner.class);

            // Sample the input so that the total-order partitioner can split
            // the key space evenly across reducers.
            System.out.println("sort :: Sampling...");
            t.start();

            InputSampler.<LongWritable,SAMRecordWritable>writePartitionFile(
                job,
                new InputSampler.IntervalSampler<LongWritable,SAMRecordWritable>(
                    0.01, 100));

            System.out.printf("sort :: Sampling complete in %d.%03d s.\n",
                              t.stopS(), t.fms());

            job.submit();

            System.out.println("sort :: Waiting for job completion...");
            t.start();

            if (!job.waitForCompletion(verbose)) {
                System.err.println("sort :: Job failed.");
                return 4;
            }

            System.out.printf("sort :: Job complete in %d.%03d s.\n",
                              t.stopS(), t.fms());

        } catch (IOException e) {
            System.err.printf("sort :: Hadoop error: %s\n", e);
            return 4;
        } catch (ClassNotFoundException e) { throw new RuntimeException(e); }
          catch (InterruptedException  e) { throw new RuntimeException(e); }

        if (out != null) try {
            System.out.println("sort :: Merging output...");
            t.start();

            final Path outPath = new Path(out);

            final FileSystem srcFS = wrkDirPath.getFileSystem(conf);
                  FileSystem dstFS =    outPath.getFileSystem(conf);

            // The checksummed local file system doesn't support append().
            if (dstFS instanceof LocalFileSystem
             && dstFS instanceof ChecksumFileSystem)
                dstFS = ((LocalFileSystem)dstFS).getRaw();

            // First, place the BAM header.
            final BAMFileWriter w =
                new BAMFileWriter(dstFS.create(outPath), new File(""));
            w.setSortOrder(SAMFileHeader.SortOrder.coordinate, true);

            final SAMFileReader r =
                new SAMFileReader(inPath.getFileSystem(conf).open(inPath));
            w.setHeader(r.getFileHeader());
            r.close();
            w.close();

            // Then, the BAM contents: concatenate the part files, whose
            // names are "<name>-NNNNNN" as produced by
            // SortOutputFormat.getDefaultWorkFile.
            final OutputStream outs = dstFS.append(outPath);

            final FileStatus[] parts = srcFS.globStatus(new Path(
                wrkDir, conf.get(SortOutputFormat.OUTPUT_NAME_PROP) +
                        "-[0-9][0-9][0-9][0-9][0-9][0-9]*"));

            {int i = 0;
            final Timer t2 = new Timer();
            for (final FileStatus part : parts) {
                t2.start();

                final InputStream ins = srcFS.open(part.getPath());
                IOUtils.copyBytes(ins, outs, conf, false);
                ins.close();

                System.out.printf("sort :: Merged part %d in %d.%03d s.\n",
                                  ++i, t2.stopS(), t2.fms());
            }}
            for (final FileStatus part : parts)
                srcFS.delete(part.getPath(), false);

            // Finally, the BGZF terminator so that the merged file is a
            // properly EOF-terminated BAM.
            outs.write(BlockCompressedStreamConstants.EMPTY_GZIP_BLOCK);
            outs.close();

            System.out.printf("sort :: Merging complete in %d.%03d s.\n",
                              t.stopS(), t.fms());

        } catch (IOException e) {
            System.err.printf("sort :: Output merging failed: %s\n", e);
            return 5;
        }
        return 0;
    }
}
/**
 * Reducer used by {@link Sort}: the shuffle has already ordered the records
 * by key, so this simply discards the key and emits every record unchanged.
 */
final class SortReducer
    extends Reducer<LongWritable,SAMRecordWritable,
                    NullWritable,SAMRecordWritable>
{
    @Override protected void reduce(
            LongWritable ignored, Iterable<SAMRecordWritable> records,
            Reducer<LongWritable,SAMRecordWritable,
                    NullWritable,SAMRecordWritable>.Context
                ctx)
        throws IOException, InterruptedException
    {
        // Forward each record as-is under a null key; ordering is preserved.
        for (final SAMRecordWritable record : records) {
            ctx.write(NullWritable.get(), record);
        }
    }
}
/**
 * Output format used by {@link Sort}: writes BAM (ignoring the key) and names
 * each part file after the original input plus a zero-padded task number.
 */
final class SortOutputFormat extends KeyIgnoringBAMOutputFormat<NullWritable> {
    public static final String INPUT_PATH_PROP  = "hadoopbam.sort.input.path",
                               OUTPUT_NAME_PROP = "hadoopbam.sort.output.name";

    @Override public RecordWriter<NullWritable,SAMRecordWritable>
        getRecordWriter(TaskAttemptContext context)
        throws IOException
    {
        // Lazily load the SAM header from the original input file the first
        // time a writer is requested.
        if (super.header == null) {
            final Configuration conf = context.getConfiguration();
            final Path input = new Path(conf.get(INPUT_PATH_PROP));
            readSAMHeaderFrom(input, input.getFileSystem(conf));
        }
        return super.getRecordWriter(context);
    }

    @Override public Path getDefaultWorkFile(
            TaskAttemptContext context, String ext)
        throws IOException
    {
        // Build "<inputName>-NNNNNN[.ext]" in the default work directory.
        final String baseName = context.getConfiguration().get(OUTPUT_NAME_PROP);
        final int    taskId   = context.getTaskAttemptID().getTaskID().getId();
        final String suffix   = ext.isEmpty() ? "" : "." + ext;
        final Path   workDir  = super.getDefaultWorkFile(context, ext).getParent();
        return new Path(workDir,
                        String.format("%s-%06d%s", baseName, taskId, suffix));
    }

    // Allow the output directory to exist.
    @Override public void checkOutputSpecs(JobContext job)
        throws FileAlreadyExistsException, IOException
    {}
}
|
package org.sejda.core.writer.model;
import java.awt.Color;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;

import javax.imageio.IIOImage;
import javax.imageio.ImageIO;
import javax.imageio.ImageTypeSpecifier;
import javax.imageio.ImageWriter;
import javax.imageio.metadata.IIOMetadata;
import javax.imageio.plugins.jpeg.JPEGImageWriteParam;
import javax.imageio.stream.ImageOutputStream;

import org.apache.commons.io.IOUtils;
import org.imgscalr.Scalr;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Element;

import com.sun.imageio.plugins.jpeg.JPEGImageWriter;
public class ImageOptimizer {

    private static final Logger LOG = LoggerFactory.getLogger(ImageOptimizer.class);

    /**
     * Takes an image and creates an optimized version of it.
     *
     * If the image is larger than maxWidthOrHeight pixels, it is downsized to fit the maxWidthOrHeight rectangle (keeping its aspect ratio). Image is saved as JPEG with specified
     * quality (1.0 is best/leave unchanged, 0.0 is worst). Image DPI is changed to dpi specified.
     *
     * @param bufferedImage     the source image; flushed before returning
     * @param quality           JPEG compression quality in [0.0, 1.0]
     * @param dpi               target density written into the JFIF metadata
     * @param maxWidthOrHeight  maximum allowed width/height in pixels
     * @return a temporary JPEG file (deleted on JVM exit)
     * @throws IOException if the image cannot be written
     */
    public static File optimize(BufferedImage bufferedImage, float quality, int dpi, int maxWidthOrHeight)
            throws IOException {
        File outputFile = File.createTempFile("pdfimage", ".jpeg");
        outputFile.deleteOnExit();
        FileOutputStream fos = new FileOutputStream(outputFile);
        try {
            if (bufferedImage.getHeight() > maxWidthOrHeight || bufferedImage.getWidth() > maxWidthOrHeight) {
                LOG.debug("Resizing image");
                bufferedImage = Scalr.resize(bufferedImage, Scalr.Method.SPEED, maxWidthOrHeight);
            }

            // PNG read fix when converting to JPEG: repaint onto an RGB
            // canvas with a white background so alpha doesn't corrupt output.
            BufferedImage imageRGB = new BufferedImage(bufferedImage.getWidth(), bufferedImage.getHeight(),
                    BufferedImage.TYPE_INT_RGB);
            imageRGB.createGraphics().drawImage(bufferedImage, 0, 0, Color.WHITE, null);

            // Use the public ImageWriter API instead of casting to the
            // JDK-internal com.sun.imageio JPEGImageWriter implementation.
            ImageWriter imageWriter = ImageIO.getImageWritersBySuffix("jpeg").next();
            ImageOutputStream ios = ImageIO.createImageOutputStream(fos);
            try {
                imageWriter.setOutput(ios);
                IIOMetadata imageMetaData = imageWriter.getDefaultImageMetadata(new ImageTypeSpecifier(imageRGB), null);

                try {
                    // new metadata: set the requested DPI in the JFIF header
                    Element tree = (Element) imageMetaData.getAsTree("javax_imageio_jpeg_image_1.0");
                    Element jfif = (Element) tree.getElementsByTagName("app0JFIF").item(0);
                    jfif.setAttribute("Xdensity", Integer.toString(dpi));
                    jfif.setAttribute("Ydensity", Integer.toString(dpi));
                } catch (Exception e) {
                    // Best effort: a failed DPI update must not abort the write.
                    LOG.warn("Failed to set DPI for image, metadata manipulation failed", e);
                }

                JPEGImageWriteParam jpegParams = (JPEGImageWriteParam) imageWriter.getDefaultWriteParam();
                jpegParams.setCompressionMode(JPEGImageWriteParam.MODE_EXPLICIT);
                jpegParams.setCompressionQuality(quality);

                imageWriter.write(imageMetaData, new IIOImage(imageRGB, null, null), jpegParams);
            } finally {
                // Widened from just the write() call: release the stream and
                // writer even if metadata/parameter setup throws.
                IOUtils.closeQuietly(ios);
                imageWriter.dispose();
            }
            return outputFile;
        } finally {
            IOUtils.closeQuietly(fos);
            bufferedImage.flush();
        }
    }
}
|
package org.sejda.impl.sambox.component;
import static java.util.Optional.ofNullable;
import static org.sejda.util.RequireUtils.requireNotNullArg;
import java.awt.geom.AffineTransform;
import java.io.IOException;
import java.util.function.Function;
import org.sejda.sambox.cos.COSArray;
import org.sejda.sambox.cos.COSBase;
import org.sejda.sambox.cos.COSName;
import org.sejda.sambox.cos.COSStream;
import org.sejda.sambox.pdmodel.PDPage;
import org.sejda.sambox.pdmodel.common.PDRectangle;
import org.sejda.sambox.pdmodel.common.PDStream;
import org.sejda.sambox.pdmodel.graphics.form.PDFormXObject;
/**
 * a {@link Function} capable of transforming a {@link PDPage} to a {@link PDFormXObject}
 *
 * @author Andrea Vacondio
 */
public class PageToFormXObject {

    /**
     * Wraps the page's (Flate-compressed) content stream into a form XObject
     * whose bounding box is the page's trim box (falling back to the media
     * box) and whose matrix compensates for page rotation and for boxes not
     * anchored at the origin.
     *
     * @param page the page to convert; must not be null
     * @return a {@link PDFormXObject} corresponding to the given {@link PDPage}
     * @throws IOException if the page contents cannot be read
     */
    public PDFormXObject apply(PDPage page) throws IOException {
        requireNotNullArg(page, "Cannot convert a null page");
        PDStream stream = getStream(page);
        PDFormXObject form = new PDFormXObject(stream);
        form.setResources(page.getResources());
        PDRectangle mediaBox = page.getMediaBox();
        // Prefer the trim box; fall back to the full media box when absent.
        PDRectangle boundingBox = ofNullable(page.getTrimBox()).orElse(mediaBox);
        // this comes from PDFBox Superimpose class
        AffineTransform at = form.getMatrix().createAffineTransform();
        // Shift content by the offset between media box and bounding box.
        at.translate(mediaBox.getLowerLeftX() - boundingBox.getLowerLeftX(),
                mediaBox.getLowerLeftY() - boundingBox.getLowerLeftY());
        // Undo the page rotation: for each standard rotation, translate the
        // origin first and then rotate clockwise by the same amount. NOTE:
        // these operations are order-sensitive.
        switch (page.getRotation()) {
        case 90:
            // at.scale(boundingBox.getWidth() / boundingBox.getHeight(),
            // boundingBox.getHeight() / boundingBox.getWidth());
            at.translate(0, boundingBox.getWidth());
            at.rotate(-Math.PI / 2.0);
            break;
        case 180:
            at.translate(boundingBox.getWidth(), boundingBox.getHeight());
            at.rotate(-Math.PI);
            break;
        case 270:
            // at.scale(boundingBox.getWidth() / boundingBox.getHeight(),
            // boundingBox.getHeight() / boundingBox.getWidth());
            at.translate(boundingBox.getHeight(), 0);
            at.rotate(-Math.PI * 1.5);
            break;
        default:
            // no additional transformations necessary
        }
        // Compensate for Crop Boxes not starting at 0,0
        at.translate(-boundingBox.getLowerLeftX(), -boundingBox.getLowerLeftY());
        // Only store a matrix when it actually differs from the identity.
        if (!at.isIdentity()) {
            form.setMatrix(at);
        }
        form.setBBox(new PDRectangle(boundingBox.getLowerLeftX(), boundingBox.getLowerLeftY(),
                boundingBox.getUpperRightX(), boundingBox.getUpperRightY()));
        return form;
    }

    // Wraps the page content in a PDStream re-encoded with FlateDecode.
    private PDStream getStream(PDPage page) throws IOException {
        return new PDStream(page.getContents(), COSName.FLATE_DECODE);
    }
}
|
package com.commercetools.pspadapter.payone;
import com.commercetools.pspadapter.payone.config.PayoneConfig;
import com.commercetools.pspadapter.payone.config.PropertyProvider;
import com.commercetools.pspadapter.payone.config.ServiceConfig;
import com.commercetools.pspadapter.payone.domain.payone.model.common.Notification;
import com.commercetools.pspadapter.payone.notification.NotificationDispatcher;
import com.commercetools.pspadapter.tenant.TenantConfig;
import com.commercetools.pspadapter.tenant.TenantFactory;
import com.commercetools.pspadapter.tenant.TenantPropertyProvider;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableMap;
import io.sphere.sdk.payments.queries.PaymentQuery;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.entity.ContentType;
import org.eclipse.jetty.http.HttpStatus;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import spark.Spark;
import spark.utils.CollectionUtils;
import javax.annotation.Nonnull;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletionStage;
import static com.commercetools.pspadapter.payone.util.PayoneConstants.PAYONE;
import static io.sphere.sdk.json.SphereJsonUtils.toJsonString;
import static io.sphere.sdk.json.SphereJsonUtils.toPrettyJsonString;
import static io.sphere.sdk.utils.CompletableFutureUtils.listOfFuturesToFutureOfList;
import static java.util.stream.Collectors.toList;
import static java.util.stream.Collectors.toMap;
/**
* @author fhaertig
* @author Jan Wolter
*/
public class IntegrationService {
public static final Logger LOG = LoggerFactory.getLogger(IntegrationService.class);
static final int ERROR_STATUS = HttpStatus.SERVICE_UNAVAILABLE_503;
static final int SUCCESS_STATUS = HttpStatus.OK_200;
static final String STATUS_KEY = "status";
static final String TENANTS_KEY = "tenants";
static final String APPLICATION_INFO_KEY = "applicationInfo";
private static final String HEROKU_ASSIGNED_PORT = "PORT";
private List<TenantFactory> tenantFactories = null;
private ServiceConfig serviceConfig = null;
/**
* This constructor is only used for testing proposes
*/
public IntegrationService(@Nonnull final ServiceConfig config, List<TenantFactory> factories) {
this.serviceConfig = config;
this.tenantFactories = factories;
}
public IntegrationService(@Nonnull final ServiceConfig config,
@Nonnull final PropertyProvider propertyProvider) {
this.serviceConfig = config;
this.tenantFactories = serviceConfig.getTenants().stream()
.map(tenantName -> new TenantPropertyProvider(tenantName, propertyProvider))
.map(tenantPropertyProvider -> new TenantConfig(tenantPropertyProvider,
new PayoneConfig(tenantPropertyProvider)))
.map(tenantConfig -> new TenantFactory(PAYONE, tenantConfig))
.collect(toList());
if (CollectionUtils.isEmpty(this.tenantFactories)) {
throw new IllegalArgumentException("Tenants list must be non-empty");
}
}
private static void initTenantServiceResources(TenantFactory tenantFactory) {
// create custom types
if (tenantFactory.getCustomTypeBuilder() != null) {
tenantFactory.getCustomTypeBuilder().run();
}
PaymentHandler paymentHandler = tenantFactory.getPaymentHandler();
NotificationDispatcher notificationDispatcher = tenantFactory.getNotificationDispatcher();
// register payment handler URL
String paymentHandlerUrl = tenantFactory.getPaymentHandlerUrl();
if (StringUtils.isNotEmpty(paymentHandlerUrl)) {
LOG.info("Register payment handler URL {}", paymentHandlerUrl);
Spark.get(paymentHandlerUrl, (req, res) -> {
final PaymentHandleResult paymentHandleResult = paymentHandler.handlePayment(req.params("id"));
if (!paymentHandleResult.body().isEmpty()) {
LOG.debug("--> Result body of ${getTenantName()}/commercetools/handle/payments/{}: {}", req.params(
"id"), paymentHandleResult.body());
}
res.status(paymentHandleResult.statusCode());
return res;
}, new HandlePaymentResponseTransformer());
}
// register Payone notifications URL
String payoneNotificationUrl = tenantFactory.getPayoneNotificationUrl();
if (StringUtils.isNotEmpty(payoneNotificationUrl)) {
LOG.info("Register payone notification URL {}", payoneNotificationUrl);
Spark.post(payoneNotificationUrl, (req, res) -> {
LOG.debug("<- Received POST from Payone: {}", req.body());
try {
final Notification notification = Notification.fromKeyValueString(req.body(), "\r?\n?&");
notificationDispatcher.dispatchNotification(notification);
} catch (Exception e) {
// Potential issues for this exception are:
// 1. req.body is mal-formed hence can't by parsed by Notification.fromKeyValueString
// 2. Invalid access secret values in the request (account id, key, portal id etc)
// 3. ConcurrentModificationException in case the respective payment could not be updated
// after two attempts due to concurrent modifications; a later retry might be successful
// 4. Execution timeout, if sphere client has not responded in time
// 5. unknown notification type
// Any other unexpected error.
LOG.error("Payone notification handling error. Request body: {}", req.body(), e);
res.status(400);
return "Payone notification handling error. See the logs. Requested body: " + req.body();
}
res.status(200);
return "TSOK";
});
}
}
/**
* @return Unmodifiable view of tenant factories list which are used for the service run.
*/
public List<TenantFactory> getTenantFactories() {
return Collections.unmodifiableList(tenantFactories);
}
public void start() {
initSparkService();
for (TenantFactory tenantFactory : tenantFactories) {
initTenantServiceResources(tenantFactory);
}
Spark.awaitInitialization();
}
private void initSparkService() {
Spark.port(port());
// This is a temporary jerry-rig for the load balancer to check connection with the service itself.
// For now it just returns a JSON response with status code, tenants list and static application info.
// It should be expanded to a more real health-checker service, which really performs PAYONE status check.
// But don't forget, a load balancer may call this URL very often (like 1 per sec),
// so don't make this request processor heavy, or implement is as independent background service.
LOG.info("Register /health URL");
LOG.info("Use /health?pretty to pretty-print output JSON");
Spark.get("/health", (req, res) -> {
ImmutableMap<String, Object> healthResponse = createHealthResponse(serviceConfig, tenantFactories);
res.status((Integer) healthResponse.getOrDefault(STATUS_KEY, ERROR_STATUS));
res.type(ContentType.APPLICATION_JSON.getMimeType());
return req.queryParams("pretty") != null ? toPrettyJsonString(healthResponse) :
toJsonString(healthResponse);
});
}
public void stop() {
Spark.stop();
}
public int port() {
final String environmentVariable = System.getenv(HEROKU_ASSIGNED_PORT);
if (!Strings.isNullOrEmpty(environmentVariable)) {
return Integer.parseInt(environmentVariable);
}
final String systemProperty = System.getProperty(HEROKU_ASSIGNED_PORT, "8080");
return Integer.parseInt(systemProperty);
}
private ImmutableMap<String, Object> createHealthResponse(@Nonnull final ServiceConfig serviceConfig,
List<TenantFactory> tenants) {
final ImmutableMap<String, String> applicationInfo = ImmutableMap.of(
"version", serviceConfig.getApplicationVersion(),
"title", serviceConfig.getApplicationName());
Map<String, CompletionStage<Integer>> tenantMap = checkTenantStatuses(tenants);
//resolve all completable stages
listOfFuturesToFutureOfList(new ArrayList<>(tenantMap.values())).join();
//unpack completable features
Map<String, Integer> statusMap = tenantMap.keySet().stream()
.collect(toMap(tenant -> tenant, tenant -> tenantMap.get(tenant).toCompletableFuture().join()));
return ImmutableMap.of(
STATUS_KEY, !statusMap.containsValue(ERROR_STATUS) ? SUCCESS_STATUS : ERROR_STATUS,
TENANTS_KEY, statusMap,
APPLICATION_INFO_KEY, applicationInfo);
}
private Map<String, CompletionStage<Integer>> checkTenantStatuses(List<TenantFactory> tenants) {
return tenants.stream().collect(toMap(TenantFactory::getTenantName, tenantFactory -> {
final String tenantName = tenantFactory.getTenantName();
return tenantFactory.getBlockingSphereClient().execute(PaymentQuery.of().withLimit(0l))
.handle((result, exception) -> {
if (result != null) {
return SUCCESS_STATUS;
}
LOG.error("Cannot query payments for the tenant {}", tenantName, exception);
return ERROR_STATUS;
});
}));
}
}
|
package org.slc.sli.common.constants;
/**
 * Defines for entity names exposed by API.
 *
 * Constants-only holder; not meant to be instantiated.
 */
public final class EntityNames {
    public static final String AGGREGATION = "aggregation";
    public static final String AGGREGATION_DEFINITION = "aggregationDefinition";
    public static final String ASSESSMENT = "assessment";
    public static final String ASSESSMENT_FAMILY = "assessmentFamily";
    public static final String ATTENDANCE = "attendance";
    public static final String BELL_SCHEDULE = "bellSchedule";
    public static final String COHORT = "cohort";
    public static final String COMPETENCY_LEVEL_DESCRIPTOR = "competencyLevelDescriptor";
    public static final String COMPETENCY_LEVEL_DESCRIPTOR_TYPE = "competencyLevelDescriptorType";
    public static final String COURSE = "course";
    public static final String DISCIPLINE_INCIDENT = "disciplineIncident";
    public static final String DISCIPLINE_ACTION = "disciplineAction";
    public static final String EDUCATION_ORGANIZATION = "educationOrganization";
    public static final String GRADEBOOK_ENTRY = "gradebookEntry";
    public static final String PARENT = "parent";
    public static final String PROGRAM = "program";
    public static final String REALM = "realm";
    public static final String REPORT_CARD = "reportCard";
    public static final String SCHOOL = "school";
    public static final String SECTION = "section";
    public static final String SESSION = "session";
    public static final String STAFF = "staff";
    public static final String STUDENT = "student";
    public static final String LEARNINGOBJECTIVE = "learningObjective";
    public static final String LEARNINGSTANDARD = "learningStandard";
    public static final String STUDENT_COMPETENCY = "studentCompetency";
    public static final String STUDENT_COMPETENCY_OBJECTIVE = "studentCompetencyObjective";
    public static final String STUDENT_SECTION_GRADEBOOK_ENTRY = "studentSectionGradebookEntry";
    public static final String TEACHER = "teacher";
    public static final String STUDENT_SCHOOL_ASSOCIATION = "studentSchoolAssociation";
    public static final String STUDENT_SECTION_ASSOCIATION = "studentSectionAssociation";
    public static final String STUDENT_PARENT_ASSOCIATION = "studentParentAssociation";
    public static final String TEACHER_SECTION_ASSOCIATION = "teacherSectionAssociation";
    public static final String STUDENT_TRANSCRIPT_ASSOCIATION = "studentTranscriptAssociation";
    public static final String STUDENT_DISCIPLINE_INCIDENT_ASSOCIATION = "studentDisciplineIncidentAssociation";
    public static final String STUDENT_PROGRAM_ASSOCIATION = "studentProgramAssociation";
    public static final String STAFF_PROGRAM_ASSOCIATION = "staffProgramAssociation";
    public static final String STUDENT_COHORT_ASSOCIATION = "studentCohortAssociation";
    public static final String STAFF_COHORT_ASSOCIATION = "staffCohortAssociation";

    // Utility class: prevent instantiation.
    private EntityNames() {
    }
}
|
package org.jgrapes.net;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.nio.ByteBuffer;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.security.cert.X509Certificate;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.FutureTask;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.net.ssl.ExtendedSSLSession;
import javax.net.ssl.SNIServerName;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLEngine;
import javax.net.ssl.SSLEngineResult;
import javax.net.ssl.SSLEngineResult.HandshakeStatus;
import javax.net.ssl.SSLEngineResult.Status;
import javax.net.ssl.SSLException;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
import org.jgrapes.core.Channel;
import org.jgrapes.core.ClassChannel;
import org.jgrapes.core.Component;
import org.jgrapes.core.Components;
import org.jgrapes.core.EventPipeline;
import org.jgrapes.core.annotation.Handler;
import org.jgrapes.core.annotation.HandlerDefinition.ChannelReplacements;
import org.jgrapes.io.IOSubchannel;
import org.jgrapes.io.events.Close;
import org.jgrapes.io.events.Closed;
import org.jgrapes.io.events.IOError;
import org.jgrapes.io.events.Input;
import org.jgrapes.io.events.OpenTcpConnection;
import org.jgrapes.io.events.Output;
import org.jgrapes.io.events.Purge;
import org.jgrapes.io.util.LinkedIOSubchannel;
import org.jgrapes.io.util.ManagedBuffer;
import org.jgrapes.io.util.ManagedBufferPool;
import org.jgrapes.net.events.Accepted;
import org.jgrapes.net.events.Connected;
/**
* A component that receives and send byte buffers on an
* encrypted channel and sends and receives the corresponding
* decrypted data on a plain channel.
*/
@SuppressWarnings({ "PMD.ExcessiveImports" })
public class SslCodec extends Component {
@SuppressWarnings("PMD.FieldNamingConventions")
private static final Logger logger
= Logger.getLogger(SslCodec.class.getName());
private final Channel encryptedChannel;
private final SSLContext sslContext;
    /**
     * Represents the encrypted channel in annotations.
     * Marker {@link ClassChannel} substituted at construction time with the
     * actual encrypted channel via {@code ChannelReplacements}.
     */
    private class EncryptedChannel extends ClassChannel {
    }
/**
* Creates a new codec that uses the given {@link SSLContext}.
*
* @param plainChannel the component's channel
* @param encryptedChannel the channel with the encrypted data
* @param sslContext the SSL context to use
*/
public SslCodec(Channel plainChannel, Channel encryptedChannel,
SSLContext sslContext) {
super(plainChannel, ChannelReplacements.create()
.add(EncryptedChannel.class, encryptedChannel));
this.encryptedChannel = encryptedChannel;
this.sslContext = sslContext;
}
    /**
     * Creates a new codec to be used as client.
     *
     * @param plainChannel the component's channel
     * @param encryptedChannel the channel with the encrypted data
     * @param dontValidate if `true` accept all kinds of certificates
     * @throws IllegalArgumentException if the SSL context cannot be created
     */
    @SuppressWarnings({ "PMD.DataflowAnomalyAnalysis", "PMD.CommentRequired",
        "PMD.ReturnEmptyArrayRatherThanNull",
        "PMD.UncommentedEmptyMethodBody" })
    public SslCodec(Channel plainChannel, Channel encryptedChannel,
            boolean dontValidate) {
        super(plainChannel, ChannelReplacements.create()
            .add(EncryptedChannel.class, encryptedChannel));
        this.encryptedChannel = encryptedChannel;
        try {
            // NOTE(review): "SSL" is a legacy protocol name; consider
            // requesting "TLS" explicitly — confirm compatibility needs.
            SSLContext sslContext = SSLContext.getInstance("SSL");
            if (dontValidate) {
                // Create a trust manager that does not validate certificate
                // chains. SECURITY: this disables all certificate checks and
                // must only be used for testing/self-signed setups.
                TrustManager[] trustAllCerts = new TrustManager[] {
                    new X509TrustManager() {
                        public X509Certificate[] getAcceptedIssuers() {
                            return null;
                        }

                        public void checkClientTrusted(
                                X509Certificate[] certs, String authType) {
                        }

                        public void checkServerTrusted(
                                X509Certificate[] certs, String authType) {
                        }
                    }
                };
                sslContext.init(null, trustAllCerts, null);
            } else {
                // null arguments select the JSSE default key/trust managers.
                sslContext.init(null, null, null);
            }
            this.sslContext = sslContext;
        } catch (NoSuchAlgorithmException | KeyManagementException e) {
            throw new IllegalArgumentException(e);
        }
    }
    /**
     * Creates a new downstream connection as {@link LinkedIOSubchannel}
     * of the network connection together with an {@link SSLEngine}.
     *
     * @param event the accepted event
     * @param encryptedChannel the channel with the encrypted data
     */
    @Handler(channels = EncryptedChannel.class)
    public void onAccepted(Accepted event, IOSubchannel encryptedChannel) {
        // The PlainChannel constructor links itself to the upstream channel
        // and configures its SSLEngine in server mode; no reference needed.
        new PlainChannel(event, encryptedChannel);
    }
    /**
     * Forward the connection request to the encrypted network.
     *
     * @param event the event
     */
    @Handler
    public void onOpenConnection(OpenTcpConnection event) {
        // Re-fire with the same target address on the encrypted side;
        // the resulting Connected event is handled by onConnected.
        fire(new OpenTcpConnection(event.address()), encryptedChannel);
    }
    /**
     * Creates a new downstream connection as {@link LinkedIOSubchannel}
     * of the network connection together with an {@link SSLEngine}.
     *
     * @param event the connected event
     * @param encryptedChannel the channel with the encrypted data
     */
    @Handler(channels = EncryptedChannel.class)
    public void onConnected(Connected event, IOSubchannel encryptedChannel) {
        // The PlainChannel constructor configures client mode and forwards
        // a Connected event downstream; no reference needed here.
        new PlainChannel(event, encryptedChannel);
    }
    /**
     * Handles encrypted data from upstream (the network). The data is
     * send through the {@link SSLEngine} and events are sent downstream
     * (and in the initial phases upstream) according to the conversion
     * results.
     *
     * @param event the event
     * @param encryptedChannel the channel for exchanging the encrypted data
     * @throws InterruptedException if the execution was interrupted
     * @throws SSLException if an SSL related problem occurs
     * @throws ExecutionException if the delegated SSL task fails
     */
    @Handler(channels = EncryptedChannel.class)
    public void onInput(
            Input<ByteBuffer> event, IOSubchannel encryptedChannel)
            throws InterruptedException, SSLException, ExecutionException {
        @SuppressWarnings({ "unchecked", "PMD.AvoidDuplicateLiterals" })
        final Optional<PlainChannel> plainChannel
            = (Optional<PlainChannel>) LinkedIOSubchannel
                .downstreamChannel(this, encryptedChannel);
        // Ignore input for channels not linked to this codec instance.
        if (plainChannel.isPresent()) {
            plainChannel.get().sendDownstream(event);
        }
    }
    /**
     * Handles a close event from the encrypted channel (client).
     *
     * @param event the event
     * @param encryptedChannel the channel for exchanging the encrypted data
     * @throws InterruptedException if the execution was interrupted
     * @throws SSLException if an SSL related problem occurs
     */
    @Handler(channels = EncryptedChannel.class)
    public void onClosed(Closed event, IOSubchannel encryptedChannel)
            throws SSLException, InterruptedException {
        @SuppressWarnings("unchecked")
        final Optional<PlainChannel> plainChannel
            = (Optional<PlainChannel>) LinkedIOSubchannel
                .downstreamChannel(this, encryptedChannel);
        // Propagate the close to the matching plain channel, if any.
        if (plainChannel.isPresent()) {
            plainChannel.get().upstreamClosed();
        }
    }
/**
* Forwards a {@link Purge} event downstream.
*
* @param event the event
* @param encryptedChannel the encrypted channel
*/
@Handler(channels = EncryptedChannel.class)
public void onPurge(Purge event, IOSubchannel encryptedChannel) {
@SuppressWarnings("unchecked")
final Optional<PlainChannel> plainChannel
= (Optional<PlainChannel>) LinkedIOSubchannel
.downstreamChannel(this, encryptedChannel);
if (plainChannel.isPresent()) {
plainChannel.get().purge();
}
}
    /**
     * Handles an {@link IOError} event from the encrypted channel (client)
     * by sending it downstream.
     *
     * @param event the event
     * @param encryptedChannel the channel for exchanging the encrypted data
     * @throws InterruptedException if the execution was interrupted
     * @throws SSLException if an SSL related problem occurs
     */
    @Handler(channels = EncryptedChannel.class)
    public void onIOError(IOError event, IOSubchannel encryptedChannel)
            throws SSLException, InterruptedException {
        @SuppressWarnings("unchecked")
        final Optional<PlainChannel> plainChannel
            = (Optional<PlainChannel>) LinkedIOSubchannel
                .downstreamChannel(this, encryptedChannel);
        // Wrap in a new IOError so the original event is preserved as cause.
        plainChannel.ifPresent(channel -> fire(new IOError(event), channel));
    }
/**
* Sends decrypted data through the engine and then upstream.
*
* @param event
* the event with the data
* @throws InterruptedException if the execution was interrupted
* @throws SSLException if some SSL related problem occurs
* @throws ExecutionException
*/
@Handler
public void onOutput(Output<ByteBuffer> event,
PlainChannel plainChannel)
throws InterruptedException, SSLException, ExecutionException {
if (plainChannel.hub() != this) {
return;
}
plainChannel.sendUpstream(event);
}
    /**
     * Forwards a close event upstream.
     *
     * @param event the close event
     * @param plainChannel the plain channel
     * @throws SSLException if an SSL related problem occurs
     * @throws InterruptedException if the execution was interrupted
     */
    @Handler
    public void onClose(Close event, PlainChannel plainChannel)
            throws InterruptedException, SSLException {
        // Only handle channels created by this codec instance.
        if (plainChannel.hub() != this) {
            return;
        }
        plainChannel.close(event);
    }
    /**
     * Represents the plain channel. Links the decrypted side of a
     * connection to its encrypted upstream channel and owns the
     * {@link SSLEngine} doing the conversion.
     */
    private class PlainChannel extends LinkedIOSubchannel {
        public SocketAddress localAddress;
        public SocketAddress remoteAddress;
        public SSLEngine sslEngine;
        // Pipeline on which all downstream (decrypted) events are fired.
        private EventPipeline downPipeline;
        // Pool for buffers holding decrypted data sent downstream.
        private ManagedBufferPool<ManagedBuffer<ByteBuffer>,
            ByteBuffer> downstreamPool;
        // Set once a Closed event has been fired downstream.
        private boolean isInputClosed;
        // Incomplete TLS record data kept between sendDownstream calls.
        private ByteBuffer carryOver;
        // Single-element array used as both flag and monitor to signal
        // processOutput that new input has been unwrapped (see processInput).
        private boolean[] inputProcessed = { false };
        /**
         * Instantiates a new plain channel from an accepted connection.
         *
         * @param event the event
         * @param upstreamChannel the upstream channel
         */
        public PlainChannel(Accepted event, IOSubchannel upstreamChannel) {
            super(SslCodec.this, channel(), upstreamChannel,
                newEventPipeline());
            localAddress = event.localAddress();
            remoteAddress = event.remoteAddress();
            init();
            // Accepted connection: this end acts as TLS server.
            sslEngine.setUseClientMode(false);
        }
        /**
         * Instantiates a new plain channel from an initiated connection.
         *
         * @param event the event
         * @param upstreamChannel the upstream channel
         */
        public PlainChannel(Connected event, IOSubchannel upstreamChannel) {
            super(SslCodec.this, channel(), upstreamChannel,
                newEventPipeline());
            localAddress = event.localAddress();
            remoteAddress = event.remoteAddress();
            init();
            // Initiated connection: this end acts as TLS client.
            sslEngine.setUseClientMode(true);
            // Forward downstream
            downPipeline.fire(
                new Connected(event.localAddress(), event.remoteAddress()),
                this);
        }
        // Creates the SSLEngine, the buffer pools and the downstream
        // pipeline for this channel.
        private void init() {
            if (remoteAddress instanceof InetSocketAddress) {
                // Pass peer host/port as hints (enables session reuse).
                sslEngine = sslContext.createSSLEngine(
                    ((InetSocketAddress) remoteAddress).getAddress()
                        .getHostAddress(),
                    ((InetSocketAddress) remoteAddress).getPort());
            } else {
                sslEngine = sslContext.createSSLEngine();
            }
            String channelName = Components.objectName(SslCodec.this)
                + "." + Components.objectName(this);
            // Create buffer pools. 50 bytes are added to the application
            // buffer size as headroom; presumably a workaround for engines
            // reporting a slightly too small size — TODO confirm.
            int decBufSize = sslEngine.getSession()
                .getApplicationBufferSize() + 50;
            downstreamPool = new ManagedBufferPool<>(ManagedBuffer::new,
                () -> {
                    return ByteBuffer.allocate(decBufSize);
                }, 2)
                .setName(channelName + ".downstream.buffers");
            int encBufSize = sslEngine.getSession().getPacketBufferSize();
            setByteBufferPool(new ManagedBufferPool<>(ManagedBuffer::new,
                () -> {
                    return ByteBuffer.allocate(encBufSize);
                }, 2)
                .setName(channelName + ".upstream.buffers"));
            downPipeline = newEventPipeline();
        }
        /**
         * Sends input downstream. Prepends any data carried over from a
         * previous incomplete TLS record, feeds the combined data through
         * {@link #processInput} and keeps any unconsumed remainder for
         * the next invocation.
         *
         * @param event the event
         * @throws SSLException the SSL exception
         * @throws InterruptedException the interrupted exception
         * @throws ExecutionException the execution exception
         */
        public void sendDownstream(Input<ByteBuffer> event)
                throws SSLException, InterruptedException, ExecutionException {
            ManagedBuffer<ByteBuffer> unwrapped = downstreamPool.acquire();
            ByteBuffer input = event.buffer().duplicate();
            if (carryOver != null) {
                if (carryOver.remaining() < input.remaining()) {
                    // Shouldn't happen with carryOver having packet size
                    // bytes left, have seen it happen nevertheless.
                    carryOver.flip();
                    ByteBuffer extCarryOver = ByteBuffer.allocate(
                        carryOver.remaining() + input.remaining());
                    extCarryOver.put(carryOver);
                    carryOver = extCarryOver;
                }
                carryOver.put(input);
                carryOver.flip();
                input = carryOver;
                carryOver = null;
            }
            // Main processing
            SSLEngineResult processingResult = processInput(unwrapped, input);
            // final message? Fire Closed downstream exactly once and wait
            // for its completion before marking the input side closed.
            if (processingResult.getStatus() == Status.CLOSED
                && !isInputClosed) {
                Closed evt = new Closed();
                downPipeline.fire(evt, this);
                evt.get();
                isInputClosed = true;
                return;
            }
            // Check if data from incomplete packet remains in input buffer
            if (input.hasRemaining()) {
                // Actually, packet buffer size should be sufficient,
                // but since this is hard to test and doesn't really matter...
                carryOver = ByteBuffer.allocate(input.remaining()
                    + sslEngine.getSession().getPacketBufferSize() + 50);
                carryOver.put(input);
            }
        }
        /**
         * Unwraps {@code input} into {@code unwrapped}, driving the SSL
         * handshake as required (delegated tasks, wrapping handshake
         * responses for upstream) and firing decrypted data downstream.
         *
         * @param unwrapped buffer receiving decrypted data
         * @param input encrypted data from the network
         * @return the last {@link SSLEngineResult} from unwrapping
         */
        @SuppressWarnings({ "PMD.CyclomaticComplexity", "PMD.NcssCount" })
        private SSLEngineResult processInput(
                ManagedBuffer<ByteBuffer> unwrapped, ByteBuffer input)
                throws SSLException, InterruptedException, ExecutionException {
            SSLEngineResult unwrapResult;
            while (true) {
                unwrapResult
                    = sslEngine.unwrap(input, unwrapped.backingBuffer());
                // Wake a processOutput invocation possibly waiting for
                // handshake input (see NEED_UNWRAP there).
                synchronized (inputProcessed) {
                    inputProcessed[0] = true;
                    inputProcessed.notifyAll();
                }
                // Handle any handshaking procedures
                switch (unwrapResult.getStatus() == Status.CLOSED
                    ? HandshakeStatus.NOT_HANDSHAKING
                    : unwrapResult.getHandshakeStatus()) {
                case NEED_TASK:
                    while (true) {
                        Runnable runnable = sslEngine.getDelegatedTask();
                        if (runnable == null) {
                            break;
                        }
                        // Having this handled by the response thread is
                        // probably not really necessary, but as the delegated
                        // task usually includes sending upstream...
                        @SuppressWarnings("PMD.AvoidInstantiatingObjectsInLoops")
                        FutureTask<Boolean> task
                            = new FutureTask<>(runnable, true);
                        upstreamChannel().responsePipeline()
                            .executorService().submit(task);
                        task.get();
                    }
                    continue;
                case NEED_WRAP:
                    // Produce handshake data and respond with it upstream.
                    ManagedBuffer<ByteBuffer> feedback
                        = upstreamChannel().byteBufferPool().acquire();
                    SSLEngineResult wrapResult = sslEngine.wrap(
                        ManagedBuffer.EMPTY_BYTE_BUFFER
                            .backingBuffer(),
                        feedback.backingBuffer());
                    upstreamChannel().respond(Output.fromSink(feedback, false));
                    if (wrapResult
                        .getHandshakeStatus() == HandshakeStatus.FINISHED) {
                        fireAccepted();
                    }
                    continue;
                case FINISHED:
                    fireAccepted();
                    // fall through
                case NEED_UNWRAP:
                    // sslEngine.unwrap sometimes returns NEED_UNWRAP in
                    // combination with CLOSED, though this doesn't really
                    // make sense. As unwrapping is what we do here anyway,
                    // continue unless no data is left.
                    if (unwrapResult.getStatus() == Status.BUFFER_UNDERFLOW
                        || unwrapResult.getStatus() == Status.CLOSED) {
                        break;
                    }
                    continue;
                default:
                    break;
                }
                // Just to make sure... (Initial allocation should be
                // big enough.)
                if (unwrapResult.getStatus() == Status.BUFFER_OVERFLOW) {
                    if (unwrapped.position() > 0) {
                        // forward data received up to now
                        downPipeline.fire(Input.fromSink(unwrapped,
                            sslEngine.isInboundDone()), this);
                    }
                    unwrapped = downstreamPool.acquire();
                    if (unwrapped.capacity() < sslEngine.getSession()
                        .getApplicationBufferSize() + 50) {
                        unwrapped.replaceBackingBuffer(ByteBuffer.allocate(
                            sslEngine.getSession()
                                .getApplicationBufferSize() + 50));
                    }
                    continue;
                }
                // If we get here, handshake has completed or no input is left
                if (unwrapResult.getStatus() != Status.OK) {
                    // Underflow, overflow or closed
                    break;
                }
            }
            if (unwrapped.position() == 0) {
                // Was only handshake
                unwrapped.unlockBuffer();
            } else {
                // forward data received
                downPipeline.fire(
                    Input.fromSink(unwrapped, sslEngine.isInboundDone()), this);
            }
            return unwrapResult;
        }
        // Fires an Accepted event downstream once the handshake completed,
        // including any SNI server names requested by the peer.
        @SuppressWarnings("PMD.DataflowAnomalyAnalysis")
        private void fireAccepted() {
            List<SNIServerName> snis = Collections.emptyList();
            if (sslEngine.getSession() instanceof ExtendedSSLSession) {
                snis = ((ExtendedSSLSession) sslEngine.getSession())
                    .getRequestedServerNames();
            }
            downPipeline.fire(new Accepted(
                localAddress, remoteAddress, true, snis), this);
        }
        /**
         * Send output upstream. Works on a duplicate of the event's buffer
         * so the original buffer's position remains untouched.
         *
         * @param event the event
         * @throws SSLException the SSL exception
         * @throws InterruptedException the interrupted exception
         * @throws ExecutionException the execution exception
         */
        public void sendUpstream(Output<ByteBuffer> event)
                throws SSLException, InterruptedException, ExecutionException {
            ByteBuffer output = event.buffer().backingBuffer().duplicate();
            processOutput(output);
        }
        /**
         * Wraps {@code output} through the SSL engine and responds with
         * the encrypted data upstream, driving handshake steps as needed.
         * May block waiting for handshake input from processInput.
         */
        @SuppressWarnings({ "PMD.DataflowAnomalyAnalysis",
            "PMD.CyclomaticComplexity", "PMD.NcssCount",
            "PMD.NPathComplexity" })
        private void processOutput(ByteBuffer output)
                throws InterruptedException, SSLException, ExecutionException {
            ManagedBuffer<ByteBuffer> wrapped
                = upstreamChannel().byteBufferPool().acquire();
            while (true) {
                SSLEngineResult wrapResult;
                while (true) {
                    // Cheap synchronization: no (relevant) input
                    inputProcessed[0] = false;
                    wrapResult
                        = sslEngine.wrap(output, wrapped.backingBuffer());
                    switch (wrapResult.getHandshakeStatus()) {
                    case NEED_TASK:
                        while (true) {
                            Runnable runnable = sslEngine.getDelegatedTask();
                            if (runnable == null) {
                                break;
                            }
                            runnable.run();
                        }
                        continue;
                    case NEED_UNWRAP:
                        if (wrapped.position() == 0) {
                            // Nothing to send, input required. Wait until
                            // input becomes available (signalled by
                            // processInput) and retry.
                            synchronized (inputProcessed) {
                                while (!inputProcessed[0]) {
                                    inputProcessed.wait();
                                }
                            }
                            continue;
                        }
                        break;
                    default:
                        break;
                    }
                    // Just to make sure... (Initial allocation should be
                    // big enough.)
                    if (wrapResult.getStatus() == Status.BUFFER_OVERFLOW) {
                        if (wrapped.position() > 0) {
                            // forward data received up to now
                            upstreamChannel().respond(Output.fromSink(wrapped,
                                sslEngine.isInboundDone()));
                        }
                        wrapped = upstreamChannel().byteBufferPool().acquire();
                        if (wrapped.capacity() < sslEngine.getSession()
                            .getApplicationBufferSize() + 50) {
                            wrapped.replaceBackingBuffer(ByteBuffer.allocate(
                                sslEngine.getSession()
                                    .getApplicationBufferSize() + 50));
                        }
                        continue;
                    }
                    // If we get here, handshake needs wrap or no output is
                    // left
                    if (wrapResult.getStatus() == Status.OK
                        || wrapResult.getStatus() == Status.CLOSED) {
                        break;
                    }
                }
                if (wrapped.position() == 0) {
                    if (output.hasRemaining()
                        && wrapResult.getStatus() != Status.CLOSED) {
                        // Nothing sent, but data remains, try again
                        continue;
                    }
                    // Nothing remains to be done, unlock buffer and quit
                    wrapped.unlockBuffer();
                    break;
                }
                // Something needs to be sent (handshake or data)
                upstreamChannel().respond(Output.fromSink(wrapped,
                    sslEngine.isInboundDone()));
                if (!output.hasRemaining()
                    || wrapResult.getStatus() == Status.CLOSED) {
                    // Nothing remains to be done
                    break;
                }
                // Was handshake (or partial content), get new buffer and try
                // again
                wrapped = upstreamChannel().byteBufferPool().acquire();
            }
        }
        /**
         * Close the connection. Drains the engine's outbound close
         * messages to the upstream channel, then closes upstream.
         *
         * @param event the event
         * @throws InterruptedException the interrupted exception
         * @throws SSLException the SSL exception
         */
        public void close(Close event)
                throws InterruptedException, SSLException {
            sslEngine.closeOutbound();
            while (!sslEngine.isOutboundDone()) {
                ManagedBuffer<ByteBuffer> feedback
                    = upstreamChannel().byteBufferPool().acquire();
                // Wrapping an empty buffer produces the close_notify data.
                sslEngine.wrap(ManagedBuffer.EMPTY_BYTE_BUFFER
                    .backingBuffer(), feedback.backingBuffer());
                upstreamChannel().respond(Output.fromSink(feedback, false));
            }
            upstreamChannel().respond(new Close());
        }
        /**
         * Forwards the {@link Closed} event downstream.
         *
         * @throws SSLException the SSL exception
         * @throws InterruptedException the interrupted exception
         */
        public void upstreamClosed()
                throws SSLException, InterruptedException {
            if (!isInputClosed) {
                // was not properly closed on SSL layer
                Closed evt = new Closed();
                newEventPipeline().fire(evt, this);
                evt.get();
            }
            try {
                sslEngine.closeInbound();
                while (!sslEngine.isOutboundDone()) {
                    ManagedBuffer<ByteBuffer> feedback
                        = upstreamChannel().byteBufferPool().acquire();
                    SSLEngineResult result = sslEngine.wrap(
                        ManagedBuffer.EMPTY_BYTE_BUFFER.backingBuffer(),
                        feedback.backingBuffer());
                    // This is required for/since JDK 11. It claims that
                    // outbound is not done, but doesn't produce any additional
                    // data.
                    if (result.getStatus() == Status.CLOSED
                        || feedback.position() == 0) {
                        feedback.unlockBuffer();
                        break;
                    }
                    upstreamChannel().respond(Output.fromSink(feedback, false));
                }
            } catch (SSLException e) {
                // Several clients (notably chromium) don't close the
                // connection properly. So nobody is really interested
                // in this message; log at the finest level only.
                logger.log(Level.FINEST, e.getMessage(), e);
            }
        }
        /**
         * Fire a {@link Purge} event downstream.
         */
        public void purge() {
            downPipeline.fire(new Purge(), this);
        }
    }
}
|
package org.postgresql.test.jdbc4.jdbc41;
import java.sql.*;
import java.util.Properties;
import junit.framework.TestCase;
import org.postgresql.test.TestUtil;
/**
 * Tests for {@link Connection#setSchema(String)} / {@link Connection#getSchema()}
 * (JDBC 4.1). Creates a set of schemas with plain, quoted, and mixed-case
 * names in {@code setUp} and drops them again in {@code tearDown}.
 */
public class SchemaTest extends TestCase
{
    private Connection _conn;

    public SchemaTest(String name)
    {
        super(name);
    }

    protected void setUp() throws Exception
    {
        _conn = TestUtil.openDB();
        // FIX: close the statement (was leaked before).
        Statement stmt = _conn.createStatement();
        try
        {
            stmt.execute("CREATE SCHEMA schema1");
            stmt.execute("CREATE SCHEMA schema2");
            stmt.execute("CREATE SCHEMA \"schema 3\"");
            stmt.execute("CREATE SCHEMA \"schema \"\"4\"");
            stmt.execute("CREATE SCHEMA \"schema '5\"");
            stmt.execute("CREATE SCHEMA \"UpperCase\"");
        }
        finally
        {
            stmt.close();
        }
        TestUtil.createTable(_conn, "schema1.table1", "id integer");
        TestUtil.createTable(_conn, "schema2.table2", "id integer");
        TestUtil.createTable(_conn, "\"UpperCase\".table3", "id integer");
    }

    protected void tearDown() throws SQLException
    {
        // FIX: close the statement and always close the connection, even
        // if one of the drops fails (both were previously skipped on error).
        try
        {
            _conn.setSchema(null);
            Statement stmt = _conn.createStatement();
            try
            {
                stmt.execute("DROP SCHEMA schema1 CASCADE");
                stmt.execute("DROP SCHEMA schema2 CASCADE");
                stmt.execute("DROP SCHEMA \"schema 3\" CASCADE");
                stmt.execute("DROP SCHEMA \"schema \"\"4\" CASCADE");
                stmt.execute("DROP SCHEMA \"schema '5\" CASCADE");
                stmt.execute("DROP SCHEMA \"UpperCase\" CASCADE");
            }
            finally
            {
                stmt.close();
            }
        }
        finally
        {
            TestUtil.closeDB(_conn);
        }
    }

    /**
     * Test that what you set is what you get
     */
    public void testGetSetSchema() throws SQLException
    {
        _conn.setSchema("schema1");
        assertEquals("schema1", _conn.getSchema());
        _conn.setSchema("schema2");
        assertEquals("schema2", _conn.getSchema());
        // Names needing quoting are returned in quoted form.
        _conn.setSchema("schema 3");
        assertEquals("\"schema 3\"", _conn.getSchema());
        _conn.setSchema("schema \"4");
        assertEquals("\"schema \"\"4\"", _conn.getSchema());
        _conn.setSchema("schema '5");
        assertEquals("\"schema '5\"", _conn.getSchema());
        _conn.setSchema("UpperCase");
        assertEquals("\"UpperCase\"", _conn.getSchema());
    }

    /**
     * Test that setting the schema allows to access objects of this schema
     * without prefix, hide objects from other schemas but doesn't prevent
     * to prefix-access to them.
     */
    public void testUsingSchema() throws SQLException
    {
        Statement stmt = _conn.createStatement();
        try
        {
            try
            {
                _conn.setSchema("schema1");
                stmt.executeQuery(TestUtil.selectSQL("table1", "*"));
                stmt.executeQuery(TestUtil.selectSQL("schema2.table2", "*"));
                try
                {
                    stmt.executeQuery(TestUtil.selectSQL("table2", "*"));
                    fail("Objects of schema2 should not be visible without prefix");
                }
                catch (SQLException e)
                {
                    // expected
                }

                _conn.setSchema("schema2");
                stmt.executeQuery(TestUtil.selectSQL("table2", "*"));
                stmt.executeQuery(TestUtil.selectSQL("schema1.table1", "*"));
                try
                {
                    stmt.executeQuery(TestUtil.selectSQL("table1", "*"));
                    fail("Objects of schema1 should not be visible without prefix");
                }
                catch (SQLException e)
                {
                    // expected
                }

                _conn.setSchema("UpperCase");
                stmt.executeQuery(TestUtil.selectSQL("table3", "*"));
                stmt.executeQuery(TestUtil.selectSQL("schema1.table1", "*"));
                try
                {
                    stmt.executeQuery(TestUtil.selectSQL("table1", "*"));
                    fail("Objects of schema1 should not be visible without prefix");
                }
                catch (SQLException e)
                {
                    // expected
                }
            }
            catch (SQLException e)
            {
                fail("Could not find expected schema elements: " + e.getMessage());
            }
        }
        finally
        {
            try
            {
                stmt.close();
            }
            catch (SQLException e)
            {
                // ignored: best-effort cleanup
            }
        }
    }

    /**
     * Test that get schema returns the schema with the highest priority
     * in the search path
     */
    public void testMultipleSearchPath() throws SQLException
    {
        Statement stmt = _conn.createStatement();
        try
        {
            stmt.execute("SET search_path TO schema1,schema2");
        }
        finally
        {
            try
            {
                stmt.close();
            }
            catch (SQLException e)
            {
                // ignored: best-effort cleanup
            }
        }
        assertEquals("schema1", _conn.getSchema());
    }

    public void testSchemaInProperties() throws Exception
    {
        Properties properties = new Properties();
        properties.setProperty("currentSchema", "schema1");
        Connection conn = TestUtil.openDB(properties);
        try
        {
            assertEquals("schema1", conn.getSchema());

            // FIX: close the statement (was leaked before).
            Statement stmt = conn.createStatement();
            try
            {
                stmt.executeQuery(TestUtil.selectSQL("table1", "*"));
                stmt.executeQuery(TestUtil.selectSQL("schema2.table2", "*"));
                try
                {
                    stmt.executeQuery(TestUtil.selectSQL("table2", "*"));
                    fail("Objects of schema2 should not be visible without prefix");
                }
                catch (SQLException e)
                {
                    // expected
                }
            }
            finally
            {
                stmt.close();
            }
        }
        finally
        {
            TestUtil.closeDB(conn);
        }
    }
}
|
package grafter_2.rdf;
import org.apache.http.*;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.client.params.ClientPNames;
import org.apache.http.client.params.CookiePolicy;
import org.apache.http.client.protocol.HttpClientContext;
import org.apache.http.conn.ConnectionPoolTimeoutException;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.params.BasicHttpParams;
import org.apache.http.params.CoreConnectionPNames;
import org.apache.http.params.HttpConnectionParams;
import org.apache.http.params.HttpParams;
import org.apache.http.util.EntityUtils;
import org.eclipse.rdf4j.OpenRDFException;
import org.eclipse.rdf4j.http.client.SPARQLProtocolSession;
import org.eclipse.rdf4j.http.client.SparqlSession;
import org.eclipse.rdf4j.http.protocol.UnauthorizedException;
import org.eclipse.rdf4j.http.protocol.error.ErrorInfo;
import org.eclipse.rdf4j.http.protocol.error.ErrorType;
import org.eclipse.rdf4j.query.*;
import org.eclipse.rdf4j.repository.RepositoryException;
import org.eclipse.rdf4j.rio.RDFParseException;
import org.eclipse.rdf4j.rio.UnsupportedRDFormatException;
import java.io.IOException;
import java.lang.reflect.Field;
import java.net.HttpURLConnection;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutorService;
/**
 * A {@link SPARQLProtocolSession} that adds Stardog-specific behaviour:
 * reasoning query parameters, a millisecond-based query timeout parameter,
 * and detection of Stardog query-timeout responses.
 */
public class SPARQLSession extends SPARQLProtocolSession /*SparqlSession*/ {

    private static final String INCLUDE_INFERRED_PARAM_NAME = "infer";
    private static final String STARDOG_INFERRED_PARAM_NAME = "reasoning";
    private static final String TIMEOUT_QUERY_PARAM_NAME = "timeout";

    /**
     * Creates a session for the given query and update endpoints.
     *
     * @param queryEndpointUrl URL of the SPARQL query endpoint
     * @param updateEndpointUrl URL of the SPARQL update endpoint
     * @param client the HTTP client to use
     * @param executor executor used by the base class for background work
     */
    public SPARQLSession(String queryEndpointUrl, String updateEndpointUrl, HttpClient client, ExecutorService executor) {
        super(client, executor);
        this.setQueryURL(queryEndpointUrl);
        this.setUpdateURL(updateEndpointUrl);
    }

    /**
     * Reads a (possibly private) field from {@code receiver} via reflection.
     *
     * @param cls the class declaring the field
     * @param receiver the object to read from (or null for static fields)
     * @param fieldName the name of the field
     * @return the field's value
     * @throws RuntimeException if the field does not exist or is inaccessible
     */
    @SuppressWarnings("unchecked")
    private static <T> T readField(Class<?> cls, Object receiver, String fieldName) {
        try {
            Field f = cls.getDeclaredField(fieldName);
            f.setAccessible(true);
            return (T) f.get(receiver);
        } catch (NoSuchFieldException ex) {
            // FIX: preserve the cause instead of discarding it.
            throw new RuntimeException(String.format("Field %s in class %s does not exist", fieldName, cls.getName()), ex);
        } catch (IllegalAccessException ex) {
            throw new RuntimeException(String.format("Field %s in class %s is not accessible", fieldName, cls.getName()), ex);
        }
    }

    @SuppressWarnings("deprecation")
    /**
     * Constructs the parameters to be used by the HTTP request. This is based on the params member of SparqlSession
     * which is configured within the constructor and by setConnectionTimeout in the base class.
     */
    private HttpParams getHttpParams() {
        BasicHttpParams params = new BasicHttpParams();
        params.setBooleanParameter(ClientPNames.HANDLE_REDIRECTS, true);
        params.setParameter(ClientPNames.COOKIE_POLICY, CookiePolicy.RFC_2109);
        // set timeouts:
        // - SO_TIMEOUT is the timeout between consecutive data packets received by the underlying connection
        // - CONNECTION_TIMEOUT is the time to establish the TCP connection
        // - CONN_MANAGER_TIMEOUT is the timeout for obtaining a connection from the connection pool
        int socketTimeout = (int) this.getConnectionTimeout();
        params.setIntParameter(CoreConnectionPNames.SO_TIMEOUT, socketTimeout);
        // NOTE(review): 100 ms connect timeout and 1 ms pool timeout look
        // like debug leftovers — confirm these are intentional.
        HttpConnectionParams.setConnectionTimeout(params, 100);
        params.setLongParameter(ClientPNames.CONN_MANAGER_TIMEOUT, 1);
        return params;
    }

    // Fetches the base class's private HttpClientContext via reflection.
    private HttpClientContext getHttpContext() {
        return readField(SPARQLProtocolSession.class, this, "httpContext");
    }

    /**
     * Inspects a HTTP response returned from the server and checks if it looks like a timeout.
     * @param response The HTTP response returned from the server
     * @return Whether the received response looks like a query timeout response
     */
    private boolean isStardogTimeoutResponse(HttpResponse response) {
        int statusCode = response.getStatusLine().getStatusCode();
        Header errorCodeHeader = response.getFirstHeader("SD-Error-Code");
        if (statusCode == HttpURLConnection.HTTP_INTERNAL_ERROR &&
                errorCodeHeader != null &&
                "QueryEval".equals(errorCodeHeader.getValue())) {
            HttpEntity entity = response.getEntity();
            try {
                // inspect the message in the response to see if it indicates a query timeout
                String body = EntityUtils.toString(entity);
                return body != null && body.contains("exceeded query timeout");
            } catch (IOException | ParseException ex) {
                // Unreadable body: treat as "not a timeout response".
                return false;
            }
        } else {
            return false;
        }
    }

    // Removes any timeout=... pairs added by the base class so they can be
    // replaced with Stardog's millisecond-based parameter.
    private static void removeTimeoutQueryParams(List<NameValuePair> queryPairs) {
        List<NameValuePair> toRemove = new ArrayList<NameValuePair>();
        for (NameValuePair pair : queryPairs) {
            if (TIMEOUT_QUERY_PARAM_NAME.equals(pair.getName())) {
                toRemove.add(pair);
            }
        }
        queryPairs.removeAll(toRemove);
    }

    @Override
    protected List<NameValuePair> getQueryMethodParameters(QueryLanguage ql, String query, String baseURI, Dataset dataset, boolean includeInferred, int maxQueryTime, Binding... bindings) {
        List<NameValuePair> pairs = super.getQueryMethodParameters(ql, query, baseURI, dataset, includeInferred, maxQueryTime, bindings);

        // SPARQLProtocolSession (super) only implements standard query
        // parameters so if we want to send reasoning parameters we have to add
        // them ourselves. We're adding both stardog's param, and rdf4j's param
        // because stardog just ignores `infer` and so this should work on any
        // server that uses `infer` and ignores `reasoning` as well.
        if (includeInferred) {
            pairs.add(new BasicNameValuePair(INCLUDE_INFERRED_PARAM_NAME, "true"));
            pairs.add(new BasicNameValuePair(STARDOG_INFERRED_PARAM_NAME, "true"));
        }

        // sesame adds a timeout=period_seconds query parameter if the maximum query time is set
        // remove this parameter and replace it with our own
        removeTimeoutQueryParams(pairs);

        // add timeout if specified (i.e. maxQueryTime > 0)
        if (maxQueryTime > 0) {
            // add Stardog timeout=period_ms query parameter
            // maxQueryTime is the maximum time in seconds whereas Stardog's timeout is measured in Milliseconds
            // FIX: avoid needless boxing; format the value directly.
            pairs.add(new BasicNameValuePair(TIMEOUT_QUERY_PARAM_NAME, String.valueOf(1000 * maxQueryTime)));
        }
        return pairs;
    }

    @SuppressWarnings("deprecation")
    @Override
    protected HttpResponse execute(HttpUriRequest method) throws IOException, OpenRDFException {
        // NOTE: the implementation of this method is based on SparqlSession.execute(HttpUriRequest)
        // This class cannot access the private HttpClientContext fields used by the base implementation
        // so fetches it using reflection(!). It also inspects the received response to check if it appears to indicate
        // a query timeout and throws a QueryInterruptedException in that case.
        HttpClient httpClient = getHttpClient();
        HttpClientContext httpContext = getHttpContext();
        boolean consume = true;

        HttpParams params = getHttpParams();
        method.setParams(params);

        HttpResponse response;
        try {
            response = httpClient.execute(method, httpContext);
        } catch (ConnectionPoolTimeoutException ex) {
            throw new QueryInterruptedException("Error executing query against remote endpoint.", ex);
        }

        try {
            int httpCode = response.getStatusLine().getStatusCode();
            if (httpCode >= 200 && httpCode < 300 || httpCode == HttpURLConnection.HTTP_NOT_FOUND) {
                consume = false;
                return response; // everything OK, control flow can continue
            } else if (isStardogTimeoutResponse(response)) {
                throw new QueryInterruptedException();
            } else {
                switch (httpCode) {
                case HttpURLConnection.HTTP_UNAUTHORIZED: // 401
                    throw new UnauthorizedException();
                case HttpURLConnection.HTTP_UNAVAILABLE: // 503
                    throw new QueryInterruptedException();
                default:
                    ErrorInfo errInfo = getErrorInfo(response);
                    // FIX: check for a missing error info BEFORE dereferencing
                    // it (previously the null check came after several
                    // getErrorType() calls and could never be reached).
                    if (errInfo == null) {
                        throw new RepositoryException("No Error Info Present, server may not have responded properly");
                    }
                    // Throw appropriate exception
                    if (errInfo.getErrorType() == ErrorType.MALFORMED_DATA) {
                        throw new RDFParseException(errInfo.getErrorMessage());
                    } else if (errInfo.getErrorType() == ErrorType.UNSUPPORTED_FILE_FORMAT) {
                        throw new UnsupportedRDFormatException(errInfo.getErrorMessage());
                    } else if (errInfo.getErrorType() == ErrorType.MALFORMED_QUERY) {
                        throw new MalformedQueryException(errInfo.getErrorMessage());
                    } else if (errInfo.getErrorType() == ErrorType.UNSUPPORTED_QUERY_LANGUAGE) {
                        throw new UnsupportedQueryLanguageException(errInfo.getErrorMessage());
                    } else {
                        throw new RepositoryException(errInfo.toString());
                    }
                }
            }
        } finally {
            if (consume) {
                EntityUtils.consumeQuietly(response.getEntity());
            }
        }
    }
}
|
package test;
import static org.junit.Assert.*;
import java.util.Vector;
import misc.PolyFunction;
import org.junit.Test;
import finders.FinderInterface;
import finders.InvalidFuncException;
import finders.LinearFinder;
import finders.NewtonFinder;
import finders.QuadricFinder;
/**
 * Tests for the root finders: linear, quadratic, and Newton's method on
 * {@link PolyFunction} polynomials. (Original comments translated from
 * German; "Probepruefung" = sample exam problem.)
 */
public class FinderTest {

    @Test public void testLinearFinder() {
        FinderInterface finder = new LinearFinder();

        // sample exam problem 2a
        // -2/3x + 5
        // expected root = 7.5
        PolyFunction f1 = new PolyFunction().
            setKoeff(1, -2.0/3.0).
            setKoeff(0, 5.0);

        try {
            assertEquals((Double) 7.5, finder.find(f1).firstElement());
        } catch (InvalidFuncException e) {
            fail();
        }
    }

    @Test public void testQuadricFinder() {
        FinderInterface finder = new QuadricFinder();

        // sample exam problem 2b
        // 1/2x^2 - 2x - 6
        // expected roots = -2, 6
        PolyFunction f1 = new PolyFunction().
            setKoeff(2, 0.5).
            setKoeff(1, -2.0).
            setKoeff(0, -6.0);

        try {
            Vector<Double> results = finder.find(f1);
            assertTrue(results.contains(-2.0));
            assertTrue(results.contains(6.0));
        } catch (InvalidFuncException e) {
            fail();
        }
    }

    @Test public void testNewtonFinder1() {
        FinderInterface finder = new NewtonFinder();

        // sample exam problem 2e
        // x^3 - 3x - 2
        // expected roots = -1, 2
        PolyFunction f1 = new PolyFunction().
            setKoeff(3, 1.0).
            setKoeff(1, -3.0).
            setKoeff(0, -2.0);

        try {
            Vector<Double> results = finder.find(f1);
            assertTrue(results.contains(-1.0));
            assertTrue(results.contains(2.0));
        } catch (InvalidFuncException e) {
            fail();
        }
    }

    @Test public void testNewtonFinder2() {
        FinderInterface finder = new NewtonFinder();

        // x^3: expected single root = 0
        PolyFunction f1 = new PolyFunction().
            setKoeff(3, 1.0);

        try {
            Vector<Double> results = finder.find(f1);
            assertEquals(1, results.size());
            assertEquals((Double) 0.0, results.firstElement());
        } catch (InvalidFuncException e) {
            fail();
        }
    }

    @Test public void testNewtonFinder3() {
        FinderInterface finder = new NewtonFinder();

        // the derivative has no root
        // -x^3 - 3x
        // expected single root = 0
        PolyFunction f1 = new PolyFunction().
            setKoeff(3, -1.0).
            setKoeff(1, -3.0);

        try {
            Vector<Double> results = finder.find(f1);
            assertEquals(1, results.size());
            assertEquals((Double) 0.0, results.firstElement());
        } catch (InvalidFuncException e) {
            fail();
        }
    }

    @Test public void testNewtonFinder4() {
        FinderInterface finder = new NewtonFinder();

        // -x^3: expected single root = 0
        PolyFunction f1 = new PolyFunction().
            setKoeff(3, -1.0);

        try {
            Vector<Double> results = finder.find(f1);
            assertEquals(1, results.size());
            assertEquals((Double) 0.0, results.firstElement());
        } catch (InvalidFuncException e) {
            fail();
        }
    }
}
|
package step.grid.client;
import java.io.Closeable;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.TimeoutException;
import javax.json.JsonObject;
import javax.ws.rs.ProcessingException;
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import org.glassfish.jersey.client.ClientProperties;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.jaxrs.json.JacksonJsonProvider;
import step.grid.AgentRef;
import step.grid.Grid;
import step.grid.Token;
import step.grid.TokenWrapper;
import step.grid.agent.handler.MessageHandler;
import step.grid.agent.handler.TokenHandlerPool;
import step.grid.io.InputMessage;
import step.grid.io.ObjectMapperResolver;
import step.grid.io.OutputMessage;
import step.grid.tokenpool.Identity;
import step.grid.tokenpool.Interest;
/**
 * Client for a {@link Grid} of agent tokens. Selects tokens from the grid,
 * dispatches {@link InputMessage}s either to a local in-process handler or to
 * a remote agent over HTTP (JAX-RS), and returns tokens to the grid when the
 * caller releases them.
 *
 * <p>Close this client when done to release the underlying JAX-RS client.
 */
public class GridClient implements Closeable {

    private static final Logger logger = LoggerFactory.getLogger(GridClient.class);

    /** Token attribute recording the id of the thread that last processed with the token. */
    public static final String SELECTION_CRITERION_THREAD = "#THREADID#";

    private final Grid adapterGrid;

    /** JAX-RS client used for calls to remote agents. Closed in {@link #close()}. */
    private Client client;

    /** Max wait (ms) for a token when no matching token is registered at all. */
    private long noMatchExistsTimeout = 10000;

    /** Max wait (ms) for a matching token to become available. */
    private long matchExistsTimeout = 60000;

    public GridClient(Grid adapterGrid) {
        super();
        this.adapterGrid = adapterGrid;
        client = ClientBuilder.newClient();
        client.register(ObjectMapperResolver.class);
        client.register(JacksonJsonProvider.class);
    }

    /**
     * Builds an {@link InputMessage} and dispatches it via the given token,
     * locally for local tokens and over HTTP otherwise. On success the token
     * is tagged with the calling thread's id (used as a selection criterion).
     *
     * @param callTimeout agent-side call timeout in ms
     * @return the agent's (or local handler's) output
     * @throws Exception if the local handler fails or the agent call fails
     */
    private OutputMessage processInput(TokenWrapper tokenWrapper, String function, JsonObject argument, String handler, Map<String, String> properties, int callTimeout) throws Exception {
        Token token = tokenWrapper.getToken();
        AgentRef agent = tokenWrapper.getAgent();

        InputMessage message = new InputMessage();
        message.setArgument(argument);
        message.setFunction(function);
        message.setTokenId(token.getId());
        message.setHandler(handler);
        message.setProperties(properties);
        message.setCallTimeout(callTimeout);

        OutputMessage output;
        if (token.isLocal()) {
            output = callLocalToken(message);
        } else {
            output = callAgent(agent, token, message);
        }

        // tag the token with the calling thread so subsequent selections can be thread-affine
        token.getAttributes().put(SELECTION_CRITERION_THREAD, Long.toString(Thread.currentThread().getId()));
        return output;
    }

    /** Dispatches the message to an in-process handler resolved from a fresh handler pool. */
    private OutputMessage callLocalToken(InputMessage message) throws Exception {
        TokenHandlerPool p = new TokenHandlerPool();
        MessageHandler h = p.get(message.getHandler());
        return h.handle(null, message);
    }

    /**
     * Creates a handle around a brand-new local (in-process) token. Local tokens
     * are never returned to the grid on {@link TokenHandle#release()}.
     */
    public TokenHandle getLocalToken() {
        Token localToken = new Token();
        localToken.setId(UUID.randomUUID().toString());
        localToken.setAgentid(Grid.LOCAL_AGENT);
        localToken.setAttributes(new HashMap<String, String>());
        localToken.setSelectionPatterns(new HashMap<String, Interest>());
        TokenWrapper tokenWrapper = new TokenWrapper(localToken, new AgentRef(Grid.LOCAL_AGENT, "localhost"));
        return new TokenHandle(tokenWrapper);
    }

    /** Selects any available token from the grid (no attribute or interest constraints). */
    public TokenHandle getToken() {
        TokenPretender tokenPretender = new TokenPretender(null, null);
        TokenWrapper tokenWrapper = getToken(tokenPretender);
        return new TokenHandle(tokenWrapper);
    }

    /**
     * Selects a token from the grid matching the given attributes and interests.
     *
     * @param attributes attributes the token must carry
     * @param interests selection criteria the token must satisfy
     */
    public TokenHandle getToken(Map<String, String> attributes, Map<String, Interest> interests) {
        TokenPretender tokenPretender = new TokenPretender(attributes, interests);
        TokenWrapper tokenWrapper = getToken(tokenPretender);
        return new TokenHandle(tokenWrapper);
    }

    /**
     * Handle to a selected token. Configure handler/properties/timeout, then call
     * {@link #process} (keep the token) or {@link #processAndRelease} (auto-return).
     */
    public class TokenHandle {

        TokenWrapper token;

        String handler = null;

        Map<String, String> properties = new HashMap<>();

        /** Default agent-side call timeout: 3 minutes. */
        int callTimeout = 180000;

        private TokenHandle(TokenWrapper token) {
            super();
            this.token = token;
        }

        public TokenHandle setCallTimeout(int callTimeout) {
            this.callTimeout = callTimeout;
            return this;
        }

        public TokenHandle setHandler(String handler) {
            this.handler = handler;
            return this;
        }

        /** Merges the given properties into this handle's properties. Null is a no-op. */
        public TokenHandle addProperties(Map<String, String> properties) {
            if (properties != null) {
                this.properties.putAll(properties);
            }
            return this;
        }

        public void setCurrentOwner(Object currentOwner) {
            token.setCurrentOwner(currentOwner);
        }

        /** Processes one call on this token; the token stays checked out. */
        public OutputMessage process(String function, JsonObject argument) throws Exception {
            return processInput(token, function, argument, handler, properties, callTimeout);
        }

        /** Processes one call and always returns the token to the grid, even on failure. */
        public OutputMessage processAndRelease(String function, JsonObject argument) throws Exception {
            try {
                return processInput(token, function, argument, handler, properties, callTimeout);
            } finally {
                release();
            }
        }

        public TokenWrapper getToken() {
            return token;
        }

        /** Returns the token to the grid. Local tokens are not grid-managed and are skipped. */
        public void release() {
            // constant-first equals avoids an NPE if the agent id is unset
            if (!Grid.LOCAL_AGENT.equals(token.getToken().getAgentid())) {
                returnAdapterTokenToRegister(token);
            }
        }
    }

    /**
     * POSTs the message to the remote agent's /process endpoint.
     *
     * @throws Exception if the agent answers with a non-200 status (the response
     *         body is included in the message), or on any transport failure
     */
    private OutputMessage callAgent(AgentRef agentRef, Token token, InputMessage message) throws Exception {
        // TODO get from config?
        int connectionTimeout = 3000;
        // margin added to the HTTP read timeout so it outlives the agent-side call timeout
        int callTimeoutOffset = 3000;

        String agentUrl = agentRef.getAgentUrl();

        Entity<InputMessage> entity = Entity.entity(message, MediaType.APPLICATION_JSON);
        Response response = client.target(agentUrl + "/process").request()
                .property(ClientProperties.READ_TIMEOUT, message.getCallTimeout() + callTimeoutOffset)
                .property(ClientProperties.CONNECT_TIMEOUT, connectionTimeout).post(entity);
        // note: the previous no-op "catch (ProcessingException e) { throw e; }" was removed
        try {
            if (response.getStatus() == 200) {
                return response.readEntity(OutputMessage.class);
            } else {
                String error = response.readEntity(String.class);
                throw new Exception("Error while calling agent with ref " + agentRef.toString() + ". HTTP Response: " + error);
            }
        } finally {
            response.close();
        }
    }

    /**
     * Blocking token selection against the grid.
     *
     * @throws RuntimeException if no matching token becomes available within the
     *         configured timeouts, or if the wait is interrupted
     */
    private TokenWrapper getToken(final Identity tokenPretender) {
        try {
            return adapterGrid.selectToken(tokenPretender, matchExistsTimeout, noMatchExistsTimeout);
        } catch (TimeoutException e) {
            String desc = "[attributes=" + tokenPretender.getAttributes() + ", selectionCriteria=" + tokenPretender.getInterests() + "]";
            // keep the TimeoutException as cause so the wait site appears in the stack trace
            throw new RuntimeException("Not able to find any available adapter matching " + desc, e);
        } catch (InterruptedException e) {
            // restore the interrupt flag before translating to unchecked
            Thread.currentThread().interrupt();
            throw new RuntimeException(e);
        }
    }

    private void returnAdapterTokenToRegister(TokenWrapper adapterToken) {
        adapterGrid.returnToken(adapterToken);
    }

    @Override
    public void close() {
        client.close();
    }
}
|
package gcm.gui.modelview.movie;
import gcm.gui.GCM2SBMLEditor;
import gcm.gui.modelview.movie.SerializableScheme;
import gcm.gui.schematic.ListChooser;
import gcm.gui.schematic.Schematic;
import gcm.gui.schematic.TreeChooser;
import gcm.gui.schematic.Utils;
import gcm.parser.GCMFile;
import gcm.util.GlobalConstants;
import main.Gui;
import parser.TSDParser;
import reb2sac.Reb2Sac;
import sbmleditor.MySpecies;
import util.Utility;
import com.google.gson.Gson;
import java.awt.BorderLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Vector;
import javax.swing.JButton;
import javax.swing.JComboBox;
import javax.swing.JFileChooser;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JSlider;
import javax.swing.JToggleButton;
import javax.swing.JToolBar;
import javax.swing.SwingConstants;
import javax.swing.Timer;
/**
 * Panel that plays back a saved simulation (a TSD time-series file) as a movie
 * on top of the model schematic: a timer advances a slider frame by frame and
 * the graph's species/component/grid appearances are updated from the data.
 * Also supports exporting the playback as a video via an external ffmpeg call,
 * and saving/loading color-scheme preferences as JSON.
 */
public class MovieContainer extends JPanel implements ActionListener {

	// suffixes appended to species IDs when storing scheme values
	public static final String COLOR_PREPEND = "_COLOR";
	public static final String MIN_PREPEND = "_MIN";
	public static final String MAX_PREPEND = "_MAX";

	// playback mode constants; 'mode' is only ever assigned these two references,
	// so the == comparisons below are safe
	private final String PLAYING = "playing";
	private final String PAUSED = "paused";
	private String mode = PLAYING;

	// delay between frames when the play timer runs
	public static final int FRAME_DELAY_MILLISECONDS = 20;

	private static final long serialVersionUID = 1L;

	private Schematic schematic;
	private Reb2Sac reb2sac;
	private GCMFile gcm;
	private Gui biosim;
	private GCM2SBMLEditor gcm2sbml;
	// parser for the currently loaded TSD file; null until a file is chosen
	private TSDParser parser;
	private Timer playTimer;
	private MovieScheme movieScheme;

	// slider position when AVI recording started; used to number output frames from 0
	private int initialSliderValue;

	private boolean isUIInitialized;
	private boolean isDirty = false;
	// destination chosen by the user for the AVI/mp4 export
	private String outputFilename = "";

	//movie toolbar/UI elements
	private JButton fileButton;
	private JButton playPauseButton;
	private JButton rewindButton;
	private JButton singleStepButton;
	private JButton clearButton;
	private JToggleButton aviButton;
	private JSlider slider;

	/**
	 * constructor
	 *
	 * @param reb2sac_
	 * @param gcm
	 * @param biosim
	 * @param gcm2sbml
	 */
	public MovieContainer(Reb2Sac reb2sac_, GCMFile gcm, Gui biosim, GCM2SBMLEditor gcm2sbml){

		super(new BorderLayout());

		JComboBox compartmentList = MySpecies.createCompartmentChoices(gcm.getSBMLDocument());
		schematic = new Schematic(gcm, biosim, gcm2sbml, false, this, null, gcm.getReactionPanel(), compartmentList);
		this.add(schematic, BorderLayout.CENTER);

		this.gcm = gcm;
		this.biosim = biosim;
		this.reb2sac = reb2sac_;
		this.gcm2sbml = gcm2sbml;
		this.movieScheme = new MovieScheme();

		// timer initially has delay 0; the real frame delay is set on play
		this.playTimer = new Timer(0, playTimerEventHandler);
		mode = PAUSED;
	}

	//TSD FILE METHODS

	/**
	 * returns a vector of strings of TSD filenames within a directory
	 * i don't know why it doesn't return a vector of strings
	 *
	 * @param directoryName directory for search for files in
	 * @return TSD filenames within the directory
	 */
	private Vector<Object> recurseTSDFiles(String directoryName){

		Vector<Object> filenames = new Vector<Object>();

		// first element is the directory's own name; nested Vectors represent subdirectories
		filenames.add(new File(directoryName).getName());

		for (String s : new File(directoryName).list()){

			String fullFileName = directoryName + File.separator + s;
			File f = new File(fullFileName);

			if(s.endsWith(".tsd") && f.isFile()){
				filenames.add(s);
			}
			else if(f.isDirectory()){
				filenames.add(recurseTSDFiles(fullFileName));
			}
		}

		return filenames;
	}

	/**
	 * opens a treechooser of the TSD files, then loads and parses the selected TSD file
	 *
	 * @throws ListChooser.EmptyListException
	 */
	private void prepareTSDFile(){

		pause();

		// if simID is present, go up one directory.
		String simPath = reb2sac.getSimPath();
		String simID = reb2sac.getSimID();

		if(!simID.equals("")){
			simPath = new File(simPath).getParent();
		}

		Vector<Object> filenames = recurseTSDFiles(simPath);

		String filename;

		try{
			filename = TreeChooser.selectFromTree(Gui.frame, filenames, "Choose a simulation file");
		}
		catch(TreeChooser.EmptyTreeException e){
			JOptionPane.showMessageDialog(Gui.frame, "Sorry, there aren't any simulation files. Please simulate then try again.");
			return;
		}

		// user cancelled the chooser
		if(filename == null)
			return;

		String fullFilePath = reb2sac.getRootPath() + filename;
		this.parser = new TSDParser(fullFilePath, false);

		// resize the slider to the number of loaded samples and rewind
		slider.setMaximum(parser.getNumSamples()-1);
		slider.setValue(0);

		biosim.log.addText(fullFilePath + " loaded. " +
				String.valueOf(parser.getData().size()) +
				" rows of data loaded.");

		loadPreferences();
	}

	//UI METHODS

	/**
	 * displays the schematic and the movie UI
	 */
	public void display(){

		schematic.display();

		// the toolbar is built lazily, on first display only
		if(isUIInitialized == false){
			this.addPlayUI();
			isUIInitialized = true;
		}
	}

	/**
	 * adds the toolbar at the bottom
	 */
	private void addPlayUI(){

		// Add the bottom menu bar
		JToolBar mt = new JToolBar();

		fileButton = Utils.makeToolButton("", "choose_simulation_file", "Choose Simulation", this);
		mt.add(fileButton);

		clearButton = Utils.makeToolButton("", "clearAppearances", "Clear Appearances", this);
		mt.add(clearButton);

		aviButton = new JToggleButton("Make AVI");
		aviButton.addActionListener(new ActionListener() {

			// toggling the AVI button starts/aborts a recording session:
			// on start it picks an output file, starts playback and freezes the
			// rest of the UI; on abort it cleans up frames and re-enables the UI
			public void actionPerformed(ActionEvent event) {

				if (aviButton.isSelected()) {

					if(parser == null){

						JOptionPane.showMessageDialog(Gui.frame, "Must first choose a simulation file.");
						aviButton.setSelected(false);
					}
					else {

						outputFilename = Utility.browse(Gui.frame, null, null, JFileChooser.FILES_ONLY, "Save AVI", -1);

						if (outputFilename == null || outputFilename.length() == 0) {

							aviButton.setSelected(false);
							return;
						}

						aviButton.setText("Stop AVI");

						pause();
						playPauseButtonPress();

						initialSliderValue = slider.getValue();

						//disable all buttons and stuff
						fileButton.setEnabled(false);
						playPauseButton.setEnabled(false);
						rewindButton.setEnabled(false);
						singleStepButton.setEnabled(false);
						clearButton.setEnabled(false);
						slider.setEnabled(false);
					}
				}
				else {

					aviButton.setSelected(false);
					aviButton.setText("Make AVI");

					//remove all image files
					removeJPGs();

					pause();
					slider.setValue(0);

					//enable the buttons and stuff
					fileButton.setEnabled(true);
					playPauseButton.setEnabled(true);
					rewindButton.setEnabled(true);
					singleStepButton.setEnabled(true);
					clearButton.setEnabled(true);
					slider.setEnabled(true);
				}
			}
		});
		mt.add(aviButton);

		mt.addSeparator();

		rewindButton = Utils.makeToolButton("movie" + File.separator + "rewind.png", "rewind", "Rewind", this);
		mt.add(rewindButton);

		singleStepButton = Utils.makeToolButton("movie" + File.separator + "single_step.png", "singlestep", "Single Step", this);
		mt.add(singleStepButton);

		playPauseButton = Utils.makeToolButton("movie" + File.separator + "play.png", "playpause", "Play", this);
		mt.add(playPauseButton);

		slider = new JSlider(SwingConstants.HORIZONTAL, 0, 100, 0);
		slider.setSnapToTicks(true);
		mt.add(slider);

		mt.setFloatable(false);

		this.add(mt, BorderLayout.SOUTH);
	}

	/**
	 * reloads the schematic's grid from file
	 * is called on an analysis view when the normal view/SBML is saved
	 */
	public void reloadGrid() {

		schematic.reloadGrid();
	}

	//EVENT METHODS

	/**
	 * event handler for when UI buttons are pressed.
	 */
	public void actionPerformed(ActionEvent event) {

		String command = event.getActionCommand();

		if(command.equals("rewind")){

			if(parser == null){
				JOptionPane.showMessageDialog(Gui.frame, "Must first choose a simulation file.");
			}
			else {
				slider.setValue(0);
				updateVisuals();
			}
		}
		else if(command.equals("playpause")){

			if(parser == null){
				JOptionPane.showMessageDialog(Gui.frame, "Must first choose a simulation file.");
			}
			else {
				playPauseButtonPress();
			}
		}
		else if(command.equals("singlestep")){

			if(parser == null){
				JOptionPane.showMessageDialog(Gui.frame, "Must first choose a simulation file.");
			}
			else {
				nextFrame();
			}
		}
		else if(command.equals("choose_simulation_file")){
			prepareTSDFile();
		}
		else if(command.equals("clearAppearances")){

			movieScheme.clearAppearances();
			schematic.getGraph().buildGraph();
			this.setIsDirty(true);
		}
		else{
			throw new Error("Unrecognized command '" + command + "'!");
		}
	}

	/**
	 * event handler for when the timer ticks
	 */
	ActionListener playTimerEventHandler = new ActionListener() {

		public void actionPerformed(ActionEvent evt) {
			nextFrame();
		}
	};

	//MOVIE CONTROL METHODS

	/**
	 * switches between play/pause modes
	 *
	 * Called whenever the play/pause button is pressed, or when the system needs to
	 * pause the movie (such as at the end)
	 */
	private void playPauseButtonPress(){

		if(mode == PAUSED){

			// restart from the beginning when the movie has already ended
			if(slider.getValue() >= slider.getMaximum()-1)
				slider.setValue(0);

			playTimer.setDelay(FRAME_DELAY_MILLISECONDS);
			Utils.setIcon(playPauseButton, "movie" + File.separator + "pause.png");
			playTimer.start();
			mode = PLAYING;
		}
		else{

			Utils.setIcon(playPauseButton, "movie" + File.separator + "play.png");
			playTimer.stop();
			mode = PAUSED;
		}
	}

	/**
	 * calls playpausebuttonpress to pause the movie
	 */
	private void pause(){

		if(mode == PLAYING)
			playPauseButtonPress();
	}

	/**
	 * advances the movie to the next frame
	 */
	private void nextFrame(){

		//if the user wants output, print it to file
		if (aviButton.isSelected() && slider.getValue() > 0) {

			outputJPG();

			//if the simulation ends, generate the AVI file using ffmpeg
			//also, remove all of the image files created
			if (slider.getValue() + 1 >= slider.getMaximum()){

				outputAVI();

				aviButton.setSelected(false);
				aviButton.setText("Make AVI");

				//remove all image files
				pause();
				slider.setValue(0);

				//enable the buttons and stuff
				fileButton.setEnabled(true);
				playPauseButton.setEnabled(true);
				rewindButton.setEnabled(true);
				singleStepButton.setEnabled(true);
				clearButton.setEnabled(true);
				slider.setEnabled(true);
			}
		}

		slider.setValue(slider.getValue()+1);

		// auto-pause when the end of the data is reached
		if (slider.getValue() >= slider.getMaximum()){
			pause();
		}

		updateVisuals();
	}

	/**
	 * updates the visual appearance of cells on the graph (ie, species, components, etc.)
	 * gets called when the timer ticks
	 */
	private void updateVisuals(){

		if(parser == null){
			throw new Error("NoSimFileChosen");
		}

		int frameIndex = slider.getValue();

		if(frameIndex < 0 || frameIndex > parser.getNumSamples()-1){
			throw new Error("Invalid slider value! It is outside the data range!");
		}

		HashMap<String, ArrayList<Double>> speciesTSData = parser.getHashMap();

		//loop through the species and set their appearances
		for(String speciesID : gcm.getSpecies().keySet()){

			//make sure this species has data in the TSD file
			if(speciesTSData.containsKey(speciesID)){

				//get the component's appearance and send it to the graph for updating
				MovieAppearance speciesAppearance =
					movieScheme.getAppearance(speciesID, GlobalConstants.SPECIES, frameIndex, speciesTSData);

				if (speciesAppearance != null)
					schematic.getGraph().setSpeciesAnimationValue(speciesID, speciesAppearance);
			}
		}

		//loop through the components and set their appearances
		for(String compID : gcm.getComponents().keySet()){

			//get the component's appearance and send it to the graph for updating
			MovieAppearance compAppearance =
				movieScheme.getAppearance(compID, GlobalConstants.COMPONENT, frameIndex, speciesTSData);

			if (compAppearance != null)
				schematic.getGraph().setComponentAnimationValue(compID, compAppearance);
		}

		//if there's a grid to set the appearance of
		if (gcm.getGrid().isEnabled()) {

			//loop through all grid locations and set appearances
			for (int row = 0; row < gcm.getGrid().getNumRows(); ++row) {
				for (int col = 0; col < gcm.getGrid().getNumCols(); ++col) {

					String gridID = "ROW" + row + "_COL" + col;

					//get the component's appearance and send it to the graph for updating
					MovieAppearance gridAppearance =
						movieScheme.getAppearance(gridID, GlobalConstants.GRID_RECTANGLE, frameIndex, speciesTSData);

					if (gridAppearance != null)
						schematic.getGraph().setGridRectangleAnimationValue(gridID, gridAppearance);
				}
			}
		}

		schematic.getGraph().refresh();
	}

	/**
	 * creates an AVI using JPG frames of the simulation
	 */
	private void outputAVI() {

		// on Windows, File.separator is "\" which must be doubled for use in the
		// string operations below
		String separator = "";

		if (File.separator.equals("\\"))
			separator = "\\\\";
		else
			separator = File.separator;

		String path = "";
		String movieName = "";

		// split the user-chosen output file into directory and base name
		if (outputFilename.contains(separator)) {

			path = outputFilename.substring(0, outputFilename.lastIndexOf(separator));
			movieName = outputFilename.substring(outputFilename.lastIndexOf(separator)+1, outputFilename.length());
		}

		// strip the extension; note the output is actually written as .mp4 below
		if (movieName.contains(".")) {
			movieName = movieName.substring(0, movieName.indexOf("."));
		}

		String args = "";

		//if we're on windows, add "cmd" to the front of the command line argument
		if (System.getProperty("os.name").contains("Windows")) {
			args += "cmd ";
		}

		//args for ffmpeg
		// NOTE(review): built as a single command string and run via Runtime.exec(String);
		// paths containing spaces would break the argument splitting — confirm before reuse
		args +=
			"ffmpeg " + "-y " +
			"-r " + "5 " +
			"-b " + "5000k " + //"-sameq -flags +ilme+ildct -flags +alt -top 1 " +
			"-i " + reb2sac.getRootPath() + separator + "%09d.jpg " +
			path + separator + movieName + ".mp4";

		//run ffmpeg to generate the AVI movie file
		try {

			Process p = Runtime.getRuntime().exec(args, null, new File(reb2sac.getRootPath()));

			String line = "";

			// drain ffmpeg's stderr into the log (also prevents the process from blocking)
			BufferedReader input =
				new BufferedReader(new InputStreamReader(p.getErrorStream()));

			while ((line = input.readLine()) != null) {
				biosim.log.addText(line);
			}

			removeJPGs();
		}
		catch (IOException e) {
			e.printStackTrace();
		}
	}

	/**
	 * creates a JPG of the current graph frame
	 */
	private void outputJPG() {

		String separator = "";

		if (File.separator.equals("\\"))
			separator = "\\\\";
		else
			separator = File.separator;

		// frames are numbered from 0 relative to where recording started,
		// zero-padded to 9 digits to match the ffmpeg input pattern
		String filenum = String.format("%09d", slider.getValue() - initialSliderValue);
		schematic.outputFrame(reb2sac.getRootPath() + separator + filenum + ".jpg");
	}

	/**
	 * removes all created JPGs
	 */
	private void removeJPGs() {

		String separator = "";

		if (File.separator.equals("\\"))
			separator = "\\\\";
		else
			separator = File.separator;

		//remove all created jpg files
		for (int jpgNum = 0; jpgNum <= slider.getMaximum(); ++jpgNum) {

			String jpgNumString = String.format("%09d", jpgNum);

			String jpgFilename =
				reb2sac.getRootPath() + separator + jpgNumString + ".jpg";
			File jpgFile = new File(jpgFilename);

			if (jpgFile != null && jpgFile.exists() && jpgFile.canWrite())
				jpgFile.delete();
		}
	}

	//PREFERENCES METHODS

	/**
	 * outputs the preferences file
	 */
	public void savePreferences(){

		// serialize the species schemes to JSON next to the simulation output
		Gson gson = new Gson();
		String out = gson.toJson(this.movieScheme.getAllSpeciesSchemes());

		String fullPath = getPreferencesFullPath();

		FileOutputStream fHandle;

		try {
			fHandle = new FileOutputStream(fullPath);
		}
		catch (FileNotFoundException e) {

			e.printStackTrace();
			JOptionPane.showMessageDialog(Gui.frame, "An error occured opening preferences file " + fullPath + "\nmessage: " + e.getMessage());
			return;
		}

		try {
			fHandle.write(out.getBytes());
		}
		catch (IOException e) {

			e.printStackTrace();
			JOptionPane.showMessageDialog(Gui.frame, "An error occured writing the preferences file " + fullPath + "\nmessage: " + e.getMessage());
		}

		try {
			fHandle.close();
		}
		catch (IOException e) {

			e.printStackTrace();
			JOptionPane.showMessageDialog(Gui.frame, "An error occured closing the preferences file " + fullPath + "\nmessage: " + e.getMessage());
			return;
		}

		biosim.log.addText("file saved to " + fullPath);

		this.gcm2sbml.saveParams(false, "", true);
	}

	/**
	 * loads the preferences file if it exists and stores its values into the movieScheme object.
	 */
	public void loadPreferences(){

		// load the prefs file if it exists
		String fullPath = getPreferencesFullPath();
		String json = null;

		try {
			json = TSDParser.readFileToString(fullPath);
		}
		catch (IOException e) {
			// a missing preferences file is expected; fall through to defaults
		}

		if(json == null){

			if (movieScheme == null ||
					movieScheme.getAllSpeciesSchemes().length == 0)
				movieScheme = new MovieScheme();
		}
		else{

			Gson gson = new Gson();

			try{

				SerializableScheme[] speciesSchemes = gson.fromJson(json, SerializableScheme[].class);

				//if there's already a scheme, keep it
				if (movieScheme == null ||
						movieScheme.getAllSpeciesSchemes().length == 0) {

					movieScheme = new MovieScheme();
					movieScheme.populate(speciesSchemes, parser.getSpecies());
				}
			}
			catch(Exception e){
				biosim.log.addText("An error occured trying to load the preferences file " + fullPath + " ERROR: " + e.toString());
			}
		}
	}

	//GET/SET METHODS

	public boolean getIsDirty(){
		return isDirty;
	}

	public void setIsDirty(boolean value) {
		isDirty = value;
	}

	public TSDParser getTSDParser() {
		return parser;
	}

	public GCM2SBMLEditor getGCM2SBMLEditor() {
		return gcm2sbml;
	}

	// preferences live in the simulation directory as schematic_preferences.json
	private String getPreferencesFullPath(){

		String path = reb2sac.getSimPath();
		String fullPath = path + File.separator + "schematic_preferences.json";
		return fullPath;
	}

	public Schematic getSchematic() {
		return schematic;
	}

	public MovieScheme getMovieScheme() {
		return movieScheme;
	}

	public GCMFile getGCM() {
		return gcm;
	}
}
|
package org.mockitousage.stubbing;
import static org.mockito.Mockito.*;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import org.mockito.exceptions.verification.SmartNullPointerException;
import org.mockito.exceptions.verification.WantedButNotInvoked;
import org.mockitousage.IMethods;
import org.mockitoutil.TestBase;
/**
 * Tests for the RETURNS_SMART_NULLS answer: unstubbed calls return "smart
 * nulls" that throw a descriptive {@link SmartNullPointerException} (pointing
 * to the unstubbed call) when dereferenced, instead of a bare NPE, while
 * ordinary types still get empty default values.
 */
public class SmartNullsStubbingTest extends TestBase {

    private IMethods mock;

    @Before
    public void setup() {
        mock = mock(IMethods.class, Mockito.RETURNS_SMART_NULLS);
    }

    public IMethods unstubbedMethodInvokedHere(IMethods mock) {
        return mock.iMethodsReturningMethod();
    }

    /** The smart NPE message must name the method where the unstubbed call happened. */
    @Test
    public void shouldSmartNPEPointToUnstubbedCall() throws Exception {
        IMethods methods = unstubbedMethodInvokedHere(mock);
        try {
            methods.simpleMethod();
            fail();
        } catch (SmartNullPointerException e) {
            assertContains("unstubbedMethodInvokedHere(", e.getMessage());
        }
    }

    interface Bar {
        void boo();
    }

    class Foo {
        Foo getSomeClass() {
            return null;
        }

        Bar getSomeInterface() {
            return null;
        }

        Bar getBarWithParams(int x, String y) {
            return null;
        }

        void boo() {}
    }

    @Test
    public void shouldThrowSmartNPEWhenMethodReturnsClass() throws Exception {
        // local Foo mock renamed to avoid shadowing the IMethods field
        Foo fooMock = mock(Foo.class, RETURNS_SMART_NULLS);
        Foo foo = fooMock.getSomeClass();
        try {
            foo.boo();
            fail();
        } catch (SmartNullPointerException e) {
            // expected: dereferencing a smart null for a class return type
        }
    }

    @Test
    public void shouldThrowSmartNPEWhenMethodReturnsInterface() throws Exception {
        Foo fooMock = mock(Foo.class, RETURNS_SMART_NULLS);
        Bar bar = fooMock.getSomeInterface();
        try {
            bar.boo();
            fail();
        } catch (SmartNullPointerException e) {
            // expected: dereferencing a smart null for an interface return type
        }
    }

    /** Smart nulls only apply to object returns; primitives/collections keep empty defaults. */
    @Test
    public void shouldReturnOrdinaryEmptyValuesForOrdinaryTypes() throws Exception {
        IMethods mock = mock(IMethods.class, RETURNS_SMART_NULLS);

        assertEquals("", mock.stringReturningMethod());
        assertEquals(0, mock.intReturningMethod());
        // assertTrue instead of assertEquals(true, ...) — idiomatic and clearer on failure
        assertTrue(mock.listReturningMethod().isEmpty());
        assertEquals(0, mock.arrayReturningMethod().length);
    }

    /** Verification failure messages must be able to toString() a smart null safely. */
    @Test
    public void shouldNotThrowSmartNullPointerOnToString() {
        Object smartNull = mock.objectReturningMethod();
        try {
            verify(mock).simpleMethod(smartNull);
            fail();
        } catch (WantedButNotInvoked e) {
            // expected: verification fails, but without a SmartNullPointerException
        }
    }

    @Test
    public void shouldNotThrowSmartNullPointerOnObjectMethods() {
        Object smartNull = mock.objectReturningMethod();
        smartNull.toString();
    }

    /** The smart NPE message must include the arguments of the unstubbed call. */
    @Test
    public void shouldShowParameters() {
        Foo foo = mock(Foo.class, RETURNS_SMART_NULLS);
        Bar smartNull = foo.getBarWithParams(10, "yes sir");

        try {
            //TODO: make sure the message is clear
            smartNull.boo();
            fail();
        } catch (Exception e) {
            assertContains("yes sir", e.getMessage());
        }
    }
}
|
package org.voltdb;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.nio.ByteBuffer;
import java.util.concurrent.ExecutionException;
import org.voltcore.utils.DBBPool;
import org.voltcore.utils.DBBPool.BBContainer;
import org.voltdb.iv2.SpScheduler.DurableUniqueIdListener;
import org.voltdb.licensetool.LicenseApi;
import com.google_voltpatches.common.collect.ImmutableMap;
/**
* Stub class that provides a gateway to the InvocationBufferServer when
* DR is enabled. If no DR, then it acts as a noop stub.
*
*/
public class PartitionDRGateway implements DurableUniqueIdListener {

    // Record types in a DR binary log stream.
    // NOTE(review): ordinal order is significant — reportDRConflict maps raw ints
    // from the EE via DRRecordType.values()[action]; do not reorder.
    public enum DRRecordType {
        INSERT, DELETE, UPDATE, BEGIN_TXN, END_TXN, TRUNCATE_TABLE, DELETE_BY_INDEX, UPDATE_BY_INDEX;
    }

    // Role of a row inside a conflict report.
    public static enum DRRowType {
        EXISTING_ROW,
        EXPECTED_ROW,
        NEW_ROW
    }

    public static enum DRConflictResolutionFlag {
        ACCEPT_CHANGE,
        CONVERGENT
    }

    // Keep sync with EE DRConflictType at types.h
    // NOTE(review): CONSTRIANT_VIOLATION is a typo for CONSTRAINT_VIOLATION, but the
    // name/ordinal must stay in sync with the EE enum — do not rename without a
    // coordinated change there.
    public static enum DRConflictType {
        NO_CONFLICT,
        CONSTRIANT_VIOLATION,
        EXPECTED_ROW_MISSING,
        EXPECTED_ROW_TIMESTAMP_MISMATCH
    }

    // partitionId -> gateway; replaced wholesale via copy-on-write in getInstance
    // (see the thread-safety note there)
    public static ImmutableMap<Integer, PartitionDRGateway> m_partitionDRGateways = ImmutableMap.of();

    /**
     * Load the full subclass if it should, otherwise load the
     * noop stub.
     * @param partitionId partition id
     * @param overflowDir
     * @return Instance of PartitionDRGateway
     */
    public static PartitionDRGateway getInstance(int partitionId,
                                                 ProducerDRGateway producerGateway,
                                                 StartAction startAction)
    {
        final VoltDBInterface vdb = VoltDB.instance();
        LicenseApi api = vdb.getLicenseApi();
        final boolean licensedToDR = api.isDrReplicationAllowed();

        // if this is a primary cluster in a DR-enabled scenario
        // try to load the real version of this class
        PartitionDRGateway pdrg = null;
        if (licensedToDR && producerGateway != null) {
            pdrg = tryToLoadProVersion();
        }
        // fall back to this noop stub when unlicensed or the pro class is absent
        if (pdrg == null) {
            pdrg = new PartitionDRGateway();
        }

        // init the instance and return
        try {
            pdrg.init(partitionId, producerGateway, startAction);
        } catch (Exception e) {
            VoltDB.crashLocalVoltDB(e.getMessage(), false, e);
        }

        // Regarding apparent lack of thread safety: this is called serially
        // while looping over the SPIs during database initialization
        assert !m_partitionDRGateways.containsKey(partitionId);
        ImmutableMap.Builder<Integer, PartitionDRGateway> builder = ImmutableMap.builder();
        builder.putAll(m_partitionDRGateways);
        builder.put(partitionId, pdrg);
        m_partitionDRGateways = builder.build();

        return pdrg;
    }

    // Reflectively load the enterprise implementation if it is on the classpath.
    // Returns null on any failure (class absent, no default ctor, ...) — the empty
    // catch is the deliberate community-edition fallback, not a swallowed error.
    private static PartitionDRGateway tryToLoadProVersion()
    {
        try {
            Class<?> pdrgiClass = null;
            pdrgiClass = Class.forName("org.voltdb.dr2.PartitionDRGatewayImpl");
            Constructor<?> constructor = pdrgiClass.getConstructor();
            Object obj = constructor.newInstance();
            return (PartitionDRGateway) obj;
        } catch (Exception e) {
        }
        return null;
    }

    // empty methods for community edition
    protected void init(int partitionId,
                        ProducerDRGateway producerGateway,
                        StartAction startAction) throws IOException, ExecutionException, InterruptedException
    {}

    public void onSuccessfulProcedureCall(long txnId, long uniqueId, int hash,
                                          StoredProcedureInvocation spi,
                                          ClientResponseImpl response) {}

    public void onSuccessfulMPCall(long spHandle, long txnId, long uniqueId, int hash,
                                   StoredProcedureInvocation spi,
                                   ClientResponseImpl response) {}

    // Noop variant still takes ownership of the buffer: it registers and discards
    // the container so the native memory is released. Returns -1 (no sequence ack).
    public long onBinaryDR(int partitionId, long startSequenceNumber, long lastSequenceNumber,
                           long lastSpUniqueId, long lastMpUniqueId, ByteBuffer buf) {
        final BBContainer cont = DBBPool.wrapBB(buf);
        DBBPool.registerUnsafeMemory(cont.address());
        cont.discard();
        return -1;
    }

    @Override
    public void lastUniqueIdsMadeDurable(long spUniqueId, long mpUniqueId) {}

    // Community-edition stub: reports no conflict handling (returns 0).
    public int processDRConflict(int partitionId, int remoteClusterId, long remoteTimestamp, String tableName, DRRecordType action,
                                 DRConflictType deleteConflict, ByteBuffer existingMetaTableForDelete, ByteBuffer existingTupleTableForDelete,
                                 ByteBuffer expectedMetaTableForDelete, ByteBuffer expectedTupleTableForDelete,
                                 DRConflictType insertConflict, ByteBuffer existingMetaTableForInsert, ByteBuffer existingTupleTableForInsert,
                                 ByteBuffer newMetaTableForInsert, ByteBuffer newTupleTableForInsert) {
        return 0;
    }

    // Static entry point called from native/EE code: routes a DR buffer to the
    // gateway registered for the partition. Crashes the node if none exists.
    public static long pushDRBuffer(
            int partitionId,
            long startSequenceNumber,
            long lastSequenceNumber,
            long lastSpUniqueId,
            long lastMpUniqueId,
            ByteBuffer buf) {
        final PartitionDRGateway pdrg = m_partitionDRGateways.get(partitionId);
        if (pdrg == null) {
            VoltDB.crashLocalVoltDB("No PRDG when there should be", true, null);
        }
        return pdrg.onBinaryDR(partitionId, startSequenceNumber, lastSequenceNumber, lastSpUniqueId, lastMpUniqueId, buf);
    }

    public void forceAllDRNodeBuffersToDisk(final boolean nofsync) {}

    // Static entry point for conflict reports: translates the EE's raw int codes
    // into enum values by ordinal (hence the "keep sync" notes above) and delegates.
    public static int reportDRConflict(int partitionId, int remoteClusterId, long remoteTimestamp, String tableName, int action,
                                       int deleteConflict, ByteBuffer existingMetaTableForDelete, ByteBuffer existingTupleTableForDelete,
                                       ByteBuffer expectedMetaTableForDelete, ByteBuffer expectedTupleTableForDelete,
                                       int insertConflict, ByteBuffer existingMetaTableForInsert, ByteBuffer existingTupleTableForInsert,
                                       ByteBuffer newMetaTableForInsert, ByteBuffer newTupleTableForInsert) {
        final PartitionDRGateway pdrg = m_partitionDRGateways.get(partitionId);
        if (pdrg == null) {
            VoltDB.crashLocalVoltDB("No PRDG when there should be", true, null);
        }
        return pdrg.processDRConflict(partitionId, remoteClusterId, remoteTimestamp, tableName, DRRecordType.values()[action],
                DRConflictType.values()[deleteConflict], existingMetaTableForDelete, existingTupleTableForDelete,
                expectedMetaTableForDelete, expectedTupleTableForDelete,
                DRConflictType.values()[insertConflict], existingMetaTableForInsert, existingTupleTableForInsert,
                newMetaTableForInsert, newTupleTableForInsert);
    }
}
|
package com.github.sinemetu1;
import static org.junit.Assert.*;
import org.apache.log4j.Logger;
import org.json.JSONArray;
import org.json.JSONObject;
import org.junit.Test;
import com.github.sinemetu1.Wallet;
public class WalletTest {
private static Logger log = Logger.getLogger(WalletTest.class);
//private final static int version = 1040000;
//private final static int protocolVersion = 60003;
//private final static int walletVersion = 60000;
/**
 * Verifies the getinfo RPC: no error, positive version numbers,
 * a zero balance and an empty error string.
 */
@Test
public void testGetInfo() throws Exception {
    final Wallet wallet = new Wallet();
    final JSONObject response = new JSONObject(wallet.getInfo());
    assertEquals(JSONObject.NULL, response.get("error"));
    final JSONObject info = response.getJSONObject("result");
    assertTrue(info.getInt("version") > 0);
    assertTrue(info.getInt("protocolversion") > 0);
    assertTrue(info.getInt("walletversion") > 0);
    assertEquals(0.00000000, info.getDouble("balance"), 0);
    assertEquals("", info.getString("errors"));
}
/**
 * Verifies the getpeerinfo RPC: no error, and each peer entry carries
 * non-negative version/banscore fields plus a readable "inbound" flag.
 */
@Test
public void testGetPeerInfo() throws Exception {
    final Wallet wallet = new Wallet();
    final JSONObject response = new JSONObject(wallet.getPeerInfo());
    assertEquals(JSONObject.NULL, response.get("error"));
    final JSONArray peers = response.getJSONArray("result");
    for (int i = 0; i < peers.length(); i++) {
        final JSONObject peer = peers.getJSONObject(i);
        assertTrue(peer.getInt("version") >= 0);
        assertTrue(peer.getInt("banscore") >= 0);
        peer.getBoolean("inbound"); // must be present and boolean-typed
    }
}
/**
 * Verifies the getblockcount RPC: null id/error and a non-negative count.
 */
@Test
public void testGetBlockCount() throws Exception {
    final Wallet wallet = new Wallet();
    final JSONObject response = new JSONObject(wallet.getBlockCount());
    assertEquals(JSONObject.NULL, response.get("id"));
    assertEquals(JSONObject.NULL, response.get("error"));
    assertTrue(response.getInt("result") >= 0);
}
@Test
public void testGetConnectionCount() throws Exception {
Wallet w = new Wallet();
String actual = w.getConnectionCount();
JSONObject actualJSONObject = new JSONObject(actual);
assertEquals(JSONObject.NULL, actualJSONObject.get("error"));
assertEquals(JSONObject.NULL, actualJSONObject.get("id"));
assertTrue(actualJSONObject.getInt("result") >= 0);
}
@Test
public void testGetDifficulty() throws Exception {
Wallet w = new Wallet();
String actual = w.getDifficulty();
JSONObject actualJSONObject = new JSONObject(actual);
assertEquals(JSONObject.NULL, actualJSONObject.get("error"));
assertEquals(JSONObject.NULL, actualJSONObject.get("id"));
assertTrue(actualJSONObject.getInt("result") >= 0);
}
@Test
public void testGetGenerate() throws Exception {
Wallet w = new Wallet();
String actual = w.getDifficulty();
JSONObject actualJSONObject = new JSONObject(actual);
assertEquals(JSONObject.NULL, actualJSONObject.get("error"));
assertEquals(JSONObject.NULL, actualJSONObject.get("id"));
assertTrue(actualJSONObject.getInt("result") >= 0);
}
@Test
public void testGetHashesPerSec() throws Exception {
Wallet w = new Wallet();
String actual = w.getHashesPerSec();
JSONObject actualJSONObject = new JSONObject(actual);
assertEquals(JSONObject.NULL, actualJSONObject.get("error"));
assertEquals(JSONObject.NULL, actualJSONObject.get("id"));
assertTrue(actualJSONObject.getInt("result") >= 0);
}
@Test
public void testGetMiningInfo() throws Exception {
Wallet w = new Wallet();
String actual = w.getMiningInfo();
JSONObject actualJSONObject = new JSONObject(actual);
JSONObject result = actualJSONObject.getJSONObject("result");
assertEquals(JSONObject.NULL, actualJSONObject.get("error"));
assertEquals(JSONObject.NULL, actualJSONObject.get("id"));
assertTrue(result.getInt("pooledtx") >= 0);
assertEquals("", result.getString("errors"));
assertTrue(result.getInt("currentblocktx") >= 0);
assertEquals(false, result.getBoolean("generate"));
assertEquals(false, result.getBoolean("testnet"));
assertTrue(result.getInt("currentblocksize") >= 0);
assertTrue(result.getInt("hashespersec") >= 0);
assertTrue(result.getDouble("difficulty") >= 0);
assertTrue(result.getDouble("blocks") >= 0);
assertTrue(result.getLong("networkhashps") >= 0);
}
@Test
public void testGetRawMemPool() throws Exception {
Wallet w = new Wallet();
String actual = w.getRawMemPool();
JSONObject actualJSONObject = new JSONObject(actual);
assertEquals(JSONObject.NULL, actualJSONObject.get("error"));
assertEquals(JSONObject.NULL, actualJSONObject.get("id"));
JSONArray result = actualJSONObject.getJSONArray("result");
for (int idx = 0; idx < result.length(); idx++) {
String pool = result.getString(idx);
assertTrue(!pool.equals(""));
}
}
@Test
public void testHelp() throws Exception {
Wallet w = new Wallet();
String actual = w.help("");
JSONObject actualJSONObject = new JSONObject(actual);
assertEquals(JSONObject.NULL, actualJSONObject.get("id"));
String result = actualJSONObject.getString("result");
assertTrue(!result.equals(""));
}
@Test
public void testKeyPoolRefill() throws Exception {
Wallet w = new Wallet();
String actual = w.keyPoolRefill();
JSONObject actualJSONObject = new JSONObject(actual);
assertEquals(JSONObject.NULL, actualJSONObject.get("id"));
assertEquals(JSONObject.NULL, actualJSONObject.get("result"));
assertEquals(JSONObject.NULL, actualJSONObject.get("error"));
}
}
|
package core.time;
import org.junit.Test;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.Month;
import java.time.format.DateTimeFormatter;
import java.util.Locale;
import static org.junit.Assert.assertEquals;
/**
 * Exercises {@link DateTimeFormatter} formatting and parsing for the ISO
 * local date/time constants and for custom patterns.
 *
 * <p>BUG FIX: the original asserted {@code assertEquals(actual, expected)}.
 * JUnit's convention is {@code assertEquals(expected, actual)}; the reversed
 * order made failure messages read backwards ("expected &lt;actual&gt; but
 * was &lt;expected&gt;"). All assertions now use the correct order.</p>
 */
public class DateTimeFormatterTest {

    /** ISO_LOCAL_DATE can be applied from the formatter or from the date itself. */
    @Test
    public void formatDate() {
        DateTimeFormatter formatter = DateTimeFormatter.ISO_LOCAL_DATE;
        LocalDate date = LocalDate.of(2015, Month.OCTOBER, 30);
        // Format date from formatter or from date
        assertEquals("2015-10-30", formatter.format(date));
        assertEquals("2015-10-30", date.format(formatter));
    }

    /** ISO_LOCAL_TIME renders nanoseconds (1000 ns = one microsecond). */
    @Test
    public void formatTime() {
        DateTimeFormatter formatter = DateTimeFormatter.ISO_LOCAL_TIME;
        LocalTime time = LocalTime.of(10, 30, 15, 1000);
        // Format time from formatter or from time
        assertEquals("10:30:15.000001", formatter.format(time));
        assertEquals("10:30:15.000001", time.format(formatter));
    }

    /** ISO_LOCAL_DATE_TIME joins the date and time parts with 'T'. */
    @Test
    public void formatDateTime() {
        DateTimeFormatter formatter = DateTimeFormatter.ISO_LOCAL_DATE_TIME;
        LocalDateTime dateTime = LocalDateTime.of(2015, Month.OCTOBER, 30, 10, 30, 15, 1000);
        // Format dateTime from formatter or from dateTime
        assertEquals("2015-10-30T10:30:15.000001", formatter.format(dateTime));
        assertEquals("2015-10-30T10:30:15.000001", dateTime.format(formatter));
    }

    /** Parsing an ISO date string yields the equivalent LocalDate. */
    @Test
    public void parseDate() {
        DateTimeFormatter formatter = DateTimeFormatter.ISO_LOCAL_DATE;
        assertEquals(LocalDate.of(2015, Month.OCTOBER, 30), LocalDate.parse("2015-10-30", formatter));
    }

    /** Parsing an ISO time string yields the equivalent LocalTime. */
    @Test
    public void parseTime() {
        DateTimeFormatter formatter = DateTimeFormatter.ISO_LOCAL_TIME;
        assertEquals(LocalTime.of(10, 30, 15), LocalTime.parse("10:30:15", formatter));
    }

    /** Parsing an ISO date-time string yields the equivalent LocalDateTime. */
    @Test
    public void parseDateTime() {
        DateTimeFormatter formatter = DateTimeFormatter.ISO_LOCAL_DATE_TIME;
        LocalDateTime dateTime = LocalDateTime.of(2015, Month.OCTOBER, 30, 10, 30, 15);
        assertEquals(dateTime, LocalDateTime.parse("2015-10-30T10:30:15", formatter));
    }

    /** Pattern letters y/M/d control zero-padding and month style. */
    @Test
    public void formatDateWithCustomFormatter() {
        LocalDate date = LocalDate.of(2016, Month.JANUARY, 1);
        checkDateCustomFormatter(date, "yy/M/d", "16/1/1");
        checkDateCustomFormatter(date, "yy/M/dd", "16/1/01");
        checkDateCustomFormatter(date, "yy/MM/dd", "16/01/01");
        checkDateCustomFormatter(date, "yyyy/MMM/dd", "2016/Jan/01");
    }

    /** Formats {@code date} with {@code pattern} (US locale) and compares to {@code expected}. */
    private void checkDateCustomFormatter(LocalDate date, String pattern, String expected) {
        DateTimeFormatter formatter = DateTimeFormatter.ofPattern(pattern, Locale.US);
        assertEquals(expected, date.format(formatter));
    }

    /** Pattern letters h/m/s/S control zero-padding and fractional seconds. */
    @Test
    public void formatTimeWithCustomFormatter() {
        LocalTime time = LocalTime.of(1, 5, 5, 1000);
        checkCustomTimeFormatter(time, "h:m:s", "1:5:5");
        checkCustomTimeFormatter(time, "h:m:ss", "1:5:05");
        checkCustomTimeFormatter(time, "h:mm:ss", "1:05:05");
        checkCustomTimeFormatter(time, "hh:mm:ss", "01:05:05");
        checkCustomTimeFormatter(time, "hh:mm:ss.SSS", "01:05:05.000");
        checkCustomTimeFormatter(time, "hh:mm:ss.SSSSSS", "01:05:05.000001");
    }

    /** Formats {@code time} with {@code pattern} (US locale) and compares to {@code expected}. */
    private void checkCustomTimeFormatter(LocalTime time, String pattern, String expected) {
        DateTimeFormatter formatter = DateTimeFormatter.ofPattern(pattern, Locale.US);
        assertEquals(expected, time.format(formatter));
    }
}
|
package genie.content.format.meta.cpp;
import genie.content.model.mclass.MClass;
import genie.content.model.mnaming.MNameComponent;
import genie.content.model.mnaming.MNameRule;
import genie.content.model.mnaming.MNamer;
import genie.content.model.mownership.MOwner;
import genie.content.model.mprop.MProp;
import genie.content.model.mtype.MType;
import genie.content.model.mtype.MTypeHint;
import genie.engine.file.WriteStats;
import genie.engine.format.*;
import genie.engine.model.Ident;
import genie.engine.model.Item;
import modlan.utils.Strings;
import java.util.Collection;
import java.util.TreeMap;
public class FMetaDef
extends GenericFormatterTask
{
public FMetaDef(
FormatterCtx aInFormatterCtx, FileNameRule aInFileNameRule, Indenter aInIndenter, BlockFormatDirective aInHeaderFormatDirective, BlockFormatDirective aInCommentFormatDirective, String aInName, boolean aInIsUserFile, WriteStats aInStats
)
{
super(aInFormatterCtx,
aInFileNameRule,
aInIndenter,
aInHeaderFormatDirective,
aInCommentFormatDirective,
aInName,
aInIsUserFile,
aInStats);
}
public void generate()
{
out.println(0, "namespace opflex");
out.println(0, "{");
out.println(1, "namespace modb");
out.println(1, "{");
out.printHeaderComment(
2,
new String[]
{
"A base fixture that defines a simple object model"
});
out.println(2, "class MDFixture");
out.println(2, "{");
out.println(3, "public:");
out.println();
out.println(4, "MDFixture() :");
out.println(5, "md(");
out.println(6, "list_of");
//MClass lRoot = MClass.getContainmentRoot();
for (Item lIt : MClass.MY_CAT.getNodes().getItemsList())
{
MClass lClass = (MClass) lIt;
if (lClass.isConcrete())
{
genMo(7, lClass);
}
}
out.println(5, " )
out.println(4, "{");
out.println(4, "}");
out.println();
out.println(4, "ModelMetadata md;");
out.println(3, "private:");
out.println(2, "}; // MDFixture");
out.println(1, "} // namespace modb");
out.println(0, "} // namespace opflex");
}
public static String getClassType(MClass aIn)
{
if (isPolicy(aIn))
{
return "ClassInfo::POLICY";
}
else if (isObservable(aIn))
{
return "ClassInfo::OBSERVABLE";
}
else if (isRelationshipSource(aIn))
{
return "ClassInfo::RELATIONSHIP";
}
else if (isRelationshipTarget(aIn))
{
return "ClassInfo::INVERSE_RELATIONSHIP";
}
else if (isRelationshipResolver(aIn))
{
return "ClassInfo::RESOLVER";
}
else
{
return "ClassInfo::LOCAL_ONLY";
}
}
public static boolean isPolicy(MClass aIn)
{
return aIn.isSubclassOf("policy/Component") || aIn.isSubclassOf("policy/Definition");
}
public static boolean isObservable(MClass aIn)
{
return aIn.isSubclassOf("observer/Component") || aIn.isSubclassOf("observer/Definition"); //TODO: WHAT SHOULD THESE CLASSES BE?
}
public static boolean isRelationshipSource(MClass aIn)
{
return aIn.isSubclassOf("relator/Source");
}
public static boolean isRelationshipTarget(MClass aIn)
{
return aIn.isSubclassOf("relator/Target");
}
public static boolean isRelationshipResolver(MClass aIn)
{
return aIn.isSubclassOf("relator/Resolver");
}
public static String getOwner(MClass aIn)
{
Collection<MOwner> lOwners = aIn.findOwners();
return lOwners.isEmpty() ? "default" : lOwners.iterator().next().getLID().getName();
}
private void genMo(int aInIndent, MClass aInClass)
{
out.println(aInIndent, '(');
out.println(aInIndent + 1, "ClassInfo(" + aInClass.getGID().getId() + ", " + getClassType(aInClass) + ", \"" + aInClass.getFullConcatenatedName() + "\", \"" + getOwner(aInClass) + "\",");
genProps(aInIndent + 2, aInClass);
genNamingProps(aInIndent + 2, aInClass);
out.println(aInIndent + 2, ")");
out.println(aInIndent, ')');
}
private void genProps(int aInIndent, MClass aInClass)
{
//boolean hasDesc = aInClass.hasProps() || aInClass.hasContained();
TreeMap<String,MProp> lProps = new TreeMap<String, MProp>();
aInClass.findProp(lProps,false);
TreeMap<Ident,MClass> lConts = new TreeMap<Ident, MClass>();
aInClass.getContainsClasses(lConts, true, true);
if (lProps.size() + lConts.size() == 0)
{
out.println(aInIndent, "std::vector<PropertyInfo>(),");
}
else
{
int lCount = 0;
out.println(aInIndent, "list_of");
// HANLDE PROPS
for (MProp lProp : lProps.values())
{
MType lPropType = lProp.getType(false);
MType lPrimitiveType = lPropType.getBuiltInType();
MTypeHint lHint = lPrimitiveType.getTypeHint();
int lLocalId = (++lCount);
out.println(aInIndent + 1, "(PropertyInfo(" + lLocalId + ", \"" + lProp.getLID().getName() + "\", PropertyInfo::" + lPrimitiveType.getLID().getName().toUpperCase() + ", PropertyInfo::SCALAR)) // " + lProp.toString());
propIds.put(lProp.getLID().getName(),lLocalId);
}
// HANDLE CONTAINED CLASSES
for (MClass lContained : lConts.values())
{
out.println(aInIndent + 1, "(PropertyInfo(" + (++lCount) + ", \"" + lContained.getFullConcatenatedName() + "\", PropertyInfo::COMPOSITE, " + lContained.getGID().getId() + ")) // " + lContained.toString());
}
out.println(aInIndent + 1, ",");
}
}
private MNameRule getNamingRule(MClass aInClass)
{
Collection<MNameRule> lNrs = aInClass.findNamingRules();
return lNrs.isEmpty() ? null : lNrs.iterator().next();
}
private void genNamingProps(int aInIndent, MClass aInClass)
{
MNameRule lNr = getNamingRule(aInClass);
if (null == lNr)
{
out.println(aInIndent, "std::vector<PropertyInfo>() // no naming rule; assume cardinality of 1 in any containment rule");
}
else
{
Collection<MNameComponent> lComps = lNr.getComponents();
int lNamePropsCount = 0;
for (MNameComponent lIt : lComps)
{
if (lIt.hasPropName())
{
lNamePropsCount++;
}
}
if (0 == lNamePropsCount)
{
out.println(aInIndent, "std::vector<PropertyInfo>() // no naming props in rule " + lNr + "; assume cardinality of 1");
}
else
{
out.println(aInIndent, "list_of // " + lNr);
for (MNameComponent lIt : lComps)
{
if (lIt.hasPropName())
{
MProp lProp = aInClass.findProp(lIt.getPropName(),false);
if (null != lProp)
{
out.println(aInIndent + 1, "(" + propIds.get(lProp.getLID().getName()) + ") //" + lProp + " of name component " + lIt);
}
}
}
}
}
}
private TreeMap<String, Integer> propIds = new TreeMap<String, Integer>();
}
|
package ie.tcd.slscs.kfclone;
import ie.tcd.slscs.ngramtool.NGram;
import org.junit.Rule;
import org.junit.Test;
import static org.junit.Assert.*;
import org.junit.rules.TemporaryFolder;
import java.io.*;
import java.util.ArrayList;
import java.util.List;
/**
 * Tests round-tripping of n-gram lists through {@code ImpExp}.
 *
 * <p>The constructor builds a three-line tab-separated fixture file in a
 * temporary folder and opens an input stream over it for the tests to read.</p>
 */
public class ImpExpTest {
    private TemporaryFolder tempFolder;
    private File tempFile;
    private OutputStream fos;
    private OutputStreamWriter osw;
    private BufferedWriter bw;
    private List<NGram> expList;   // expected parse result of the fixture file
    private FileInputStream readBackIn;

    public ImpExpTest() {
        expList = new ArrayList<NGram>();
        expList.add(new NGram("this is a", 6));
        expList.add(new NGram("is a small", 6));
        expList.add(new NGram("a small test", 6));
        try {
            tempFolder = new TemporaryFolder();
            tempFolder.create();
            tempFile = tempFolder.newFile("test.txt");
            fos = new FileOutputStream(tempFile);
            osw = new OutputStreamWriter(fos);
            bw = new BufferedWriter(osw);
            bw.write("this is a\t6");
            bw.newLine();
            bw.write("is a small\t6");
            bw.newLine();
            bw.write("a small test\t6");
            bw.newLine();
            bw.close();
            osw.close();
            fos.close();
            // BUG FIX: open the read-back stream only AFTER the writers are
            // closed, so the buffered content is guaranteed to have been
            // flushed to disk before anything tries to read it. (It was
            // previously opened before the first write.)
            readBackIn = new FileInputStream(tempFile);
        } catch (Exception e) {
            // Fail fast instead of printing and continuing: a broken fixture
            // would otherwise surface later as a confusing NullPointerException.
            throw new RuntimeException("failed to create n-gram test fixture", e);
        }
    }

    // FIXME: why does this fail?
    @Test
    public void load() throws Exception {
        List<NGram> out = ImpExp.load(readBackIn);
        for (NGram n : out) {
            System.out.println(n.getText() + " : " + n.getCount());
        }
        //assert(out.equals(expList));
    }
}
|
package mousio.etcd4j;
import java.io.IOException;
import java.net.URI;
import java.util.List;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.CancellationException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import mousio.client.retry.RetryWithExponentialBackOff;
import mousio.etcd4j.promises.EtcdResponsePromise;
import mousio.etcd4j.responses.EtcdAuthenticationException;
import mousio.etcd4j.responses.EtcdException;
import mousio.etcd4j.responses.EtcdHealthResponse;
import mousio.etcd4j.responses.EtcdKeyAction;
import mousio.etcd4j.responses.EtcdKeysResponse;
import mousio.etcd4j.responses.EtcdLeaderStatsResponse;
import mousio.etcd4j.responses.EtcdMembersResponse;
import mousio.etcd4j.responses.EtcdSelfStatsResponse;
import mousio.etcd4j.responses.EtcdStoreStatsResponse;
import mousio.etcd4j.responses.EtcdVersionResponse;
import mousio.etcd4j.transport.EtcdNettyClient;
import mousio.etcd4j.transport.EtcdNettyConfig;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/**
 * Performs tests on a real server at local address. All actions are performed in "etcd4j_test" dir
 */
public class TestFunctionality {
    // Client under test; recreated for every test by setUp().
    private EtcdClient etcd;

    /** Creates the client and clears out any state left over from a previous run. */
    @Before
    public void setUp() throws Exception {
        this.etcd = new EtcdClient();
        this.etcd.setRetryHandler(new RetryWithExponentialBackOff(20, 4, 10000));
        try {
            etcd.deleteDir("/etcd4j_test").recursive().send().get();
            etcd.deleteDir("/etcd4j_testGetAll_1").recursive().send().get();
            etcd.deleteDir("/etcd4j_testGetAll_2").recursive().send().get();
        } catch (EtcdException | IOException e) {
            // Best-effort cleanup: the directories may simply not exist yet.
        }
    }

    /** Deletes all test directories and closes the client. */
    @After
    public void tearDown() throws Exception {
        try {
            etcd.deleteDir("/etcd4j_test").recursive().send().get();
            etcd.deleteDir("/etcd4j_testGetAll_1").recursive().send().get();
            etcd.deleteDir("/etcd4j_testGetAll_2").recursive().send().get();
        } catch (EtcdException | IOException e) {
            // Best-effort cleanup: the directories may already be gone.
        }
    }

    /**
     * Test version (deprecated string endpoint)
     *
     * @throws Exception
     */
    @Test
    public void testOldVersion() {
        String version = etcd.getVersion();
        assertNotNull(version);
        assertTrue(version.contains("etcd"));
    }

    /**
     * Test version (structured endpoint; server and cluster must be 2.x or 3.x)
     *
     * @throws Exception
     */
    @Test
    public void testVersion() {
        EtcdVersionResponse version = etcd.version();
        assertNotNull(version);
        assertTrue(version.server.startsWith("2.") || version.server.startsWith("3."));
        assertTrue(version.cluster.startsWith("2.") || version.cluster.startsWith("3."));
    }

    /**
     * Test Self Stats
     *
     * @throws Exception
     */
    @Test
    public void testSelfStats() {
        EtcdSelfStatsResponse stats = etcd.getSelfStats();
        assertNotNull(stats);
        assertNotNull(stats.getLeaderInfo());
        // On a single-node cluster this node must be its own leader.
        assertEquals(stats.getId(), stats.getLeaderInfo().getLeader());
    }

    /**
     * Test leader Stats
     *
     * @throws Exception
     */
    @Test
    public void testLeaderStats() {
        EtcdLeaderStatsResponse stats = etcd.getLeaderStats();
        assertNotNull(stats);
        // stats
        assertNotNull(stats.getLeader());
        assertNotNull(stats.getFollowers());
        // Single-node test cluster: the leader has no followers.
        assertEquals(stats.getFollowers().size(), 0);
    }

    /**
     * Test Store Stats
     *
     * @throws Exception
     */
    @Test
    public void testStoreStats() {
        EtcdStoreStatsResponse stats = etcd.getStoreStats();
        assertNotNull(stats);
    }

    /**
     * Test Members
     *
     * @throws Exception
     */
    @Test
    public void testMembers() {
        EtcdMembersResponse members = etcd.getMembers();
        assertNotNull(members);
        assertTrue(members.getMembers().size() >= 1);
    }

    /**
     * Test Health
     *
     * @throws Exception
     */
    @Test
    public void testHealth() {
        EtcdHealthResponse health = etcd.getHealth();
        assertNotNull(health);
        assertTrue(health.getHealth().equals("true"));
    }

    /** A 1 ms timeout must expire before the put completes. */
    @Test
    public void testTimeout() throws IOException, EtcdException, EtcdAuthenticationException {
        try {
            etcd.put("etcd4j_test/fooTO", "bar").timeout(1, TimeUnit.MILLISECONDS).send().get();
            fail();
        } catch (TimeoutException e) {
            // Should time out
        }
    }

    /**
     * Simple value tests: set/create/update/compare-and-swap/get/delete.
     */
    @Test
    public void testKey() throws IOException, EtcdException, EtcdAuthenticationException, TimeoutException {
        EtcdKeysResponse response = etcd.put("etcd4j_test/foo", "bar").send().get();
        assertEquals(EtcdKeyAction.set, response.action);
        response = etcd.put("etcd4j_test/foo2", "bar").prevExist(false).send().get();
        assertEquals(EtcdKeyAction.create, response.action);
        response = etcd.put("etcd4j_test/foo", "bar1").ttl(40).prevExist(true).send().get();
        assertEquals(EtcdKeyAction.update, response.action);
        assertNotNull(response.node.expiration);
        response = etcd.put("etcd4j_test/foo", "bar2").prevValue("bar1").send().get();
        assertEquals(EtcdKeyAction.compareAndSwap, response.action);
        response = etcd.put("etcd4j_test/foo", "bar3").prevIndex(response.node.modifiedIndex).send().get();
        assertEquals(EtcdKeyAction.compareAndSwap, response.action);
        response = etcd.get("etcd4j_test/foo").consistent().send().get();
        assertEquals("bar3", response.node.value);
        // Test slash before key
        response = etcd.get("/etcd4j_test/foo").consistent().send().get();
        assertEquals("bar3", response.node.value);
        response = etcd.delete("etcd4j_test/foo").send().get();
        assertEquals(EtcdKeyAction.delete, response.action);
    }

    /**
     * Error tests: both paths must raise etcd error 100 (key not found).
     */
    @Test
    public void testError() throws IOException, EtcdAuthenticationException, TimeoutException {
        try {
            etcd.get("etcd4j_test/barf").send().get();
        } catch (EtcdException e) {
            assertEquals(100, e.errorCode);
        }
        try {
            etcd.put("etcd4j_test/barf", "huh").prevExist(true).send().get();
        } catch (EtcdException e) {
            assertEquals(100, e.errorCode);
        }
    }

    /**
     * Refresh test: refreshing keeps the node identity but extends the TTL.
     */
    @Test
    public void testRefreshTtl() throws IOException, EtcdException, EtcdAuthenticationException, TimeoutException {
        EtcdKeysResponse initialResponse = etcd.put("etcd4j_test/foo", "bar").ttl(60).send().get();
        assertEquals(EtcdKeyAction.set, initialResponse.action);
        final EtcdKeysResponse refreshedResponse = etcd.refresh("etcd4j_test/foo", 120).send().get();
        assertEquals(initialResponse.node.createdIndex, refreshedResponse.node.createdIndex);
        assertTrue("expected ttl to be updated", refreshedResponse.node.ttl > 60);
    }

    /**
     * Tests redirect by sending a key with too many slashes.
     */
    @Test
    public void testRedirect() throws IOException, EtcdException, EtcdAuthenticationException, TimeoutException {
        etcd.put("etcd4j_test/redirect", "bar").send().get();
        // Test redirect with a double slash
        EtcdKeysResponse response = etcd.get("//etcd4j_test/redirect").consistent().send().get();
        assertEquals("bar", response.node.value);
    }

    /**
     * Directory tests: create, read, nest, expire, and recursively delete dirs.
     */
    @Test
    public void testDir() throws IOException, EtcdException, EtcdAuthenticationException, TimeoutException {
        EtcdKeysResponse r = etcd.putDir("etcd4j_test/foo_dir").send().get();
        assertEquals(r.action, EtcdKeyAction.set);
        r = etcd.getDir("etcd4j_test/foo_dir").consistent().send().get();
        assertEquals(r.action, EtcdKeyAction.get);
        // Test slash before key
        r = etcd.getDir("/etcd4j_test/foo_dir").send().get();
        assertEquals(r.action, EtcdKeyAction.get);
        r = etcd.put("etcd4j_test/foo_dir/foo", "bar").send().get();
        assertEquals(r.node.value, "bar");
        r = etcd.putDir("etcd4j_test/foo_dir/foo_subdir").ttl(20).send().get();
        assertEquals(r.action, EtcdKeyAction.set);
        assertNotNull(r.node.expiration);
        r = etcd.deleteDir("etcd4j_test/foo_dir").recursive().send().get();
        assertEquals(r.action, EtcdKeyAction.delete);
    }

    /**
     * Recursive get/delete over a nested key tree.
     */
    @Test
    public void testRecursive() throws IOException, EtcdException, EtcdAuthenticationException, TimeoutException {
        etcd.put("etcd4j_test/nested/root/key-1", "key1").send().get();
        etcd.put("etcd4j_test/nested/root/node-1/key-2", "key2").send().get();
        etcd.put("etcd4j_test/nested/root/node-1/child/key-3", "key3").send().get();
        etcd.put("etcd4j_test/nested/root/node-2/key-4", "key4").send().get();
        EtcdKeysResponse r;
        r = etcd.get("etcd4j_test/nested").recursive().timeout(10, TimeUnit.SECONDS).send().get();
        // One child ("root") which itself has three children (key-1, node-1, node-2).
        assertEquals(1, r.node.nodes.size());
        assertEquals(3, r.node.nodes.get(0).nodes.size());
        r = etcd.deleteDir("etcd4j_test/nested").recursive().send().get();
        assertEquals(r.action, EtcdKeyAction.delete);
    }

    /**
     * In order key tests: POST creates keys named after their created index.
     */
    @Test
    public void testInOrderKeys() throws IOException, EtcdException, EtcdAuthenticationException, TimeoutException {
        EtcdKeysResponse r = etcd.post("etcd4j_test/queue", "Job1").send().get();
        assertEquals(r.action, EtcdKeyAction.create);
        r = etcd.post("etcd4j_test/queue", "Job2").ttl(20).send().get();
        assertEquals(r.action, EtcdKeyAction.create);
        r = etcd.get(r.node.key).consistent().send().get();
        assertTrue(r.node.key.endsWith(r.node.createdIndex+""));
        assertEquals(r.node.value, "Job2");
        r = etcd.get("etcd4j_test/queue").consistent().recursive().sorted().send().get();
        assertEquals(2, r.node.nodes.size());
        assertEquals("Job2", r.node.nodes.get(1).value);
        r = etcd.deleteDir("etcd4j_test/queue").recursive().send().get();
        assertEquals(r.action, EtcdKeyAction.delete);
    }

    /**
     * Wait-for-change test: a watcher promise completes once the key is put.
     */
    @Test
    public void testWait() throws IOException, EtcdException, EtcdAuthenticationException, InterruptedException, TimeoutException {
        EtcdResponsePromise<EtcdKeysResponse> p = etcd.get("etcd4j_test/test").waitForChange().send();
        // Ensure the change is received after the listen command is received.
        new Timer().schedule(new TimerTask() {
            @Override
            public void run() {
                try {
                    etcd.put("etcd4j_test/test", "changed").send().get();
                } catch (IOException | EtcdException | EtcdAuthenticationException | TimeoutException e) {
                    fail();
                }
            }
        }, 20);
        EtcdKeysResponse r = p.get();
        assertEquals("changed", r.node.value);
    }

    /** A watcher with a 1 s timeout and no change must raise TimeoutException. */
    @Test(expected = TimeoutException.class)
    public void testWaitTimeout() throws IOException, EtcdException, EtcdAuthenticationException, InterruptedException, TimeoutException {
        etcd.get("etcd4j_test/test").waitForChange().timeout(1, TimeUnit.SECONDS).send().get();
        // get should have thrown TimeoutException
        fail();
    }

    /** A 15 kB value forces the HTTP content to be transferred chunked. */
    @Test(timeout = 1000)
    public void testChunkedData() throws IOException, EtcdException, EtcdAuthenticationException, TimeoutException {
        //creating very long key to force content to be chunked
        StringBuilder stringBuilder = new StringBuilder(15000);
        for (int i = 0; i < 15000; i++) {
            stringBuilder.append("a");
        }
        EtcdKeysResponse response = etcd.put("etcd4j_test/foo", stringBuilder.toString()).send().get();
        assertEquals(EtcdKeyAction.set, response.action);
    }

    /** Closing the client must cancel an outstanding watch promise. */
    @Test
    public void testIfCleanClose() throws IOException, EtcdException, EtcdAuthenticationException, TimeoutException {
        EtcdClient client = new EtcdClient();
        client.setRetryHandler(new RetryWithExponentialBackOff(20, 4, 1000));
        EtcdResponsePromise<EtcdKeysResponse> p = client.get("etcd4j_test/test").waitForChange().send();
        client.close();
        try {
            p.get();
            fail();
        } catch (IOException e){
            // should be catched because connection was canceled
            if (!(e.getCause() instanceof CancellationException)) {
                fail();
            }
        }
    }

    /** getAll sees nothing on a clean store and both roots after two puts. */
    @Test
    public void testGetAll() throws IOException, EtcdException, EtcdAuthenticationException, TimeoutException {
        List<EtcdKeysResponse.EtcdNode> nodes;
        EtcdClient client = new EtcdClient();
        nodes = client.getAll().timeout(30, TimeUnit.SECONDS).send().get().getNode().getNodes();
        assertNotNull(nodes);
        assertEquals(0, nodes.size());
        client.put("etcd4j_testGetAll_1/foo1", "bar").prevExist(false).send().get();
        client.put("etcd4j_testGetAll_2/foo1", "bar").prevExist(false).send().get();
        nodes = client.getAll().timeout(30, TimeUnit.SECONDS).send().get().getNode().getNodes();
        assertNotNull(nodes);
        assertEquals(2, nodes.size());
    }

    /** A directory with 2000 children requires a raised netty frame size to read. */
    @Test
    public void testGetHugeDir() throws IOException, EtcdException, EtcdAuthenticationException, TimeoutException {
        EtcdNettyConfig config = new EtcdNettyConfig();
        config.setMaxFrameSize(1024 * 1024); // Desired max size
        EtcdNettyClient nettyClient = new EtcdNettyClient(config, URI.create("http://localhost:4001"));
        EtcdClient client = new EtcdClient(nettyClient);
        for (int i = 0; i < 2000; i++) {
            client.put("/etcd4j_test/huge-dir/node-" + i, "bar").send().get();
        }
        List<EtcdKeysResponse.EtcdNode> nodes;
        nodes = client.getDir("/etcd4j_test/huge-dir/").send().get().getNode().getNodes();
        assertNotNull(nodes);
        assertEquals(2000, nodes.size());
    }
}
|
package org.almibe.codeeditor.demo;
import javafx.application.Application;
import javafx.scene.Parent;
import javafx.scene.Scene;
import javafx.scene.control.*;
import javafx.scene.control.Button;
import javafx.scene.layout.BorderPane;
import javafx.scene.layout.HBox;
import javafx.scene.layout.VBox;
import javafx.stage.Stage;
import org.almibe.codeeditor.CodeMirrorEditor;
import org.almibe.codeeditor.CodeEditor;
import java.nio.file.Paths;
/**
 * Small JavaFX application demonstrating the {@link CodeEditor} API backed by
 * a CodeMirror implementation: read-only toggling, mode/theme switching, and
 * getting/setting editor content.
 */
public class Demo extends Application {
    private final CodeEditor codeEditor = new CodeMirrorEditor();

    @Override
    public void start(Stage primaryStage) throws Exception {
        BorderPane root = new BorderPane();
        root.setCenter(codeEditor.getWidget());

        VBox controlPanel = new VBox();
        controlPanel.getChildren().addAll(readOnlyControls(), modeControls(), themeControls(), contentControls());
        root.setBottom(controlPanel);

        primaryStage.setScene(new Scene(root));
        codeEditor.init(Paths.get("src/test/resources/html/editor.html").toUri());
        //codeEditor.setReadOnly(true); <-- this will crash
        // Editor calls are only safe once initialization completes, hence the listeners.
        codeEditor.editorInitializedProperty().addListener(change -> codeEditor.setReadOnly(true));
        codeEditor.editorInitializedProperty().addListener(change -> codeEditor.setReadOnly(false));
        codeEditor.editorInitializedProperty().addListener(change -> codeEditor.setContent("Test content?!?@!?@!?@!?@!?"));
        primaryStage.show();
    }

    /** Two buttons toggling the editor between read-only and editable. */
    private Parent readOnlyControls() {
        Button lock = new Button("Set Read Only");
        Button unlock = new Button("Set Editable");
        lock.setOnAction(event -> codeEditor.setReadOnly(true));
        unlock.setOnAction(event -> codeEditor.setReadOnly(false));
        HBox row = new HBox();
        row.getChildren().addAll(lock, unlock);
        return row;
    }

    /** Text field plus button for switching the editor's language mode. */
    private Parent modeControls() {
        HBox row = new HBox();
        TextField modeField = new TextField();
        Button applyMode = new Button("Set Mode");
        applyMode.setOnAction(event -> codeEditor.setMode(modeField.getText()));
        row.getChildren().addAll(modeField, applyMode);
        return row;
    }

    /** Text field plus button for switching the editor's colour theme. */
    private Parent themeControls() {
        HBox row = new HBox();
        TextField themeField = new TextField();
        Button applyTheme = new Button("Set Theme");
        applyTheme.setOnAction(event -> codeEditor.setTheme(themeField.getText()));
        row.getChildren().addAll(themeField, applyTheme);
        return row;
    }

    /** Text area with buttons to push its text into, or pull it from, the editor. */
    private Parent contentControls() {
        HBox row = new HBox();
        TextArea contentArea = new TextArea();
        Button push = new Button("Set Content");
        Button pull = new Button("Get Content");
        push.setOnAction(event -> codeEditor.setContent(contentArea.getText()));
        pull.setOnAction(event -> contentArea.setText(codeEditor.getContent()));
        row.getChildren().addAll(contentArea, push, pull);
        return row;
    }

    public static void main(String[] args) {
        Demo.launch();
    }
}
|
package org.supercsv.ext.tool;
import java.io.PrintWriter;
import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.sql.Time;
import java.sql.Timestamp;
import java.text.DecimalFormat;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import org.supercsv.cellprocessor.CellProcessorAdaptor;
import org.supercsv.cellprocessor.ift.CellProcessor;
import org.supercsv.util.CsvContext;
/**
* Utility methods and constants for tests.
*
* @since 1.2
* @author T.TSUCHIE
*
*/
public class TestUtils {
/** Shared dummy context (line 1, column 2, row 3) for exercising cell processors in tests. */
public static final CsvContext ANONYMOUS_CSVCONTEXT = new CsvContext(1, 2, 3);
/**
 * Creates an {@link Integer} from its decimal string representation.
 * @param value the string to parse; a valid signed decimal integer
 * @return the parsed value
 * @throws NumberFormatException if {@code value} is not a parsable integer
 */
public static Integer toInteger(final String value) {
    // Integer.valueOf may reuse a cached instance; parseInt would autobox anyway.
    return Integer.valueOf(value);
}
/**
 * Creates a {@link Date} at midnight (00:00:00.000) on the given day.
 * @param year calendar year
 * @param month month of year, starting at 1 (January)
 * @param dayOfMonth day of the month
 * @return the corresponding date
 */
public static Date toDate(final int year, final int month, final int dayOfMonth) {
    final int midnight = 0;
    return toDate(year, month, dayOfMonth, midnight, midnight, midnight);
}
/**
 * Creates a {@link Date} with second precision (milliseconds forced to zero).
 * @param year calendar year
 * @param month month of year, starting at 1 (January)
 * @param dayOfMonth day of the month
 * @param hour hour of day (0-23)
 * @param minute minute of hour
 * @param second second of minute
 * @return the corresponding date in the default time zone
 */
public static Date toDate(final int year, final int month, final int dayOfMonth,
        final int hour, final int minute, final int second) {
    final Calendar calendar = Calendar.getInstance();
    // clear() zeroes every field (including milliseconds) before setting.
    calendar.clear();
    calendar.set(year, month - 1, dayOfMonth, hour, minute, second);
    return calendar.getTime();
}
/** Creates a {@link java.sql.Date} at midnight on the given day (month starts at 1). */
public static java.sql.Date toSqlDate(final int year, final int month, final int dayOfMonth) {
    final Date utilDate = toDate(year, month, dayOfMonth);
    return new java.sql.Date(utilDate.getTime());
}
/**
 * Creates a {@link Timestamp} with millisecond precision.
 * @param month month of year, starting at 1 (January)
 */
public static Timestamp toTimestamp(final int year, final int month, final int dayOfMonth,
        final int hour, final int minute, final int second, final int millsecond) {
    final Calendar calendar = Calendar.getInstance();
    // clear() zeroes every field before the explicit sets below.
    calendar.clear();
    calendar.set(year, month - 1, dayOfMonth, hour, minute, second);
    calendar.set(Calendar.MILLISECOND, millsecond);
    return new Timestamp(calendar.getTimeInMillis());
}
/**
 * Creates a {@link Time} for the given time of day, anchored on the epoch
 * date 1970-01-01 with milliseconds forced to zero.
 */
public static Time toTime(final int hour, final int minute, final int second) {
    final Calendar calendar = Calendar.getInstance();
    calendar.clear();
    calendar.set(1970, Calendar.JANUARY, 1, hour, minute, second);
    return new Time(calendar.getTimeInMillis());
}
/** Returns a copy of {@code date} shifted forward by the given number of days. */
public static Date plusDays(final Date date, final int daysToAdd) {
    final Calendar calendar = Calendar.getInstance();
    calendar.setTime(date);
    calendar.add(Calendar.DAY_OF_MONTH, daysToAdd);
    return calendar.getTime();
}
/** Returns a copy of {@code date} shifted back by the given number of days. */
public static Date minusDays(final Date date, final int daysToSubstract) {
    // Subtracting is adding the negated amount; Calendar.add handles negatives.
    return plusDays(date, -daysToSubstract);
}
/** Returns a copy of {@code date} shifted forward by the given number of hours. */
public static Date plusHours(final Date date, final int hoursToAdd) {
    final Calendar calendar = Calendar.getInstance();
    calendar.setTime(date);
    calendar.add(Calendar.HOUR_OF_DAY, hoursToAdd);
    return calendar.getTime();
}
/**
 * Returns a copy of {@code date} shifted back by the given number of hours.
 */
public static Date minusHours(final Date date, final int hoursToSubstract) {
    // Subtraction expressed as negative addition; reuse the addition helper.
    return plusHours(date, -hoursToSubstract);
}
/**
 * Returns a copy of {@code date} shifted forward by the given number of seconds.
 */
public static Date plusSeconds(final Date date, final int secondsToAdd) {
    final Calendar calendar = Calendar.getInstance();
    calendar.setTime(date);
    calendar.add(Calendar.SECOND, secondsToAdd);
    return calendar.getTime();
}
/**
 * Returns a copy of {@code date} shifted back by the given number of seconds.
 */
public static Date minusSeconds(final Date date, final int secondsToSubstract) {
    // Subtraction expressed as negative addition; reuse the addition helper.
    return plusSeconds(date, -secondsToSubstract);
}
/**
 * Returns a copy of {@code date} shifted forward by the given number of
 * milliseconds. (Method name spelling kept for source compatibility.)
 */
public static Date plusMillseconds(final Date date, final int millsecondsToAdd) {
    final Calendar calendar = Calendar.getInstance();
    calendar.setTime(date);
    calendar.add(Calendar.MILLISECOND, millsecondsToAdd);
    return calendar.getTime();
}
/**
 * Returns a copy of {@code date} shifted back by the given number of
 * milliseconds. (Method name spelling kept for source compatibility.)
 */
public static Date minusMillseconds(final Date date, final int millsecondsToSubstract) {
    // Subtraction expressed as negative addition; reuse the addition helper.
    return plusMillseconds(date, -millsecondsToSubstract);
}
/**
 * Returns all annotations declared on the named field of {@code clazz},
 * including non-public fields.
 * @throws RuntimeException wrapping the NoSuchFieldException if the field is absent
 */
public static Annotation[] getAnnotations(final Class<?> clazz, final String fieldName) {
    try {
        final Field target = clazz.getDeclaredField(fieldName);
        target.setAccessible(true); // allow reading private fields
        return target.getAnnotations();
    } catch (ReflectiveOperationException e) {
        throw new RuntimeException(e);
    }
}
/**
 * Prints the CellProcessor chain to standard output, preceded by a banner
 * containing {@code message}. Does nothing for a null processor.
 */
public static void printCellProcessorChain(final CellProcessor cellProcessor, final String message) {
    if (cellProcessor == null) {
        return; // nothing to print
    }
    System.out.printf("======= print CellProcessor chain structures. :: %s ========\n", message);
    printCellProcessorChain(cellProcessor, new PrintWriter(System.out));
    System.out.println();
}
/**
 * Writes the chain of CellProcessors to {@code writer}, one per line,
 * indenting each wrapped processor by one additional space.
 * <p>
 * The chain is walked by reflectively reading CellProcessorAdaptor's private
 * {@code next} field; traversal stops silently (best-effort) if that read fails.
 * Null input is a no-op.
 */
public static void printCellProcessorChain(final CellProcessor cellProcessor, final PrintWriter writer) {
    if (cellProcessor == null) {
        return; // original do-while dereferenced null; guard instead
    }
    String index = "";
    CellProcessor cp = cellProcessor;
    do {
        // The original if/else printed the same thing in both branches; collapsed.
        writer.printf("%s%s\n", index, cp.getClass().getName());
        writer.flush();
        // Advance to the wrapped processor via the private 'next' field.
        try {
            if (cp instanceof CellProcessorAdaptor) {
                Field field = CellProcessorAdaptor.class.getDeclaredField("next");
                field.setAccessible(true);
                cp = (CellProcessor) field.get(cp);
            } else {
                break;
            }
        } catch (ReflectiveOperationException e) {
            return; // best-effort: stop printing on reflection failure
        }
        index += " ";
    } while (cp != null);
}
/**
 * Formats {@code value} with the given {@link SimpleDateFormat} pattern.
 * A fresh formatter is created per call because SimpleDateFormat is not thread-safe.
 */
public static final String format(final Date value, final String pattern) {
    return new SimpleDateFormat(pattern).format(value);
}
/**
 * Formats {@code value} with the given {@link DecimalFormat} pattern.
 * A fresh formatter is created per call because DecimalFormat is not thread-safe.
 */
public static final String format(final Number value, final String pattern) {
    return new DecimalFormat(pattern).format(value);
}
}
|
package potaufeu;
import static org.junit.Assert.*;
import static potaufeu.PathMatcherFactory.*;
import java.io.*;
import java.lang.reflect.*;
import java.nio.file.*;
import java.nio.file.attribute.*;
import java.util.*;
import java.util.function.*;
import org.junit.*;
import org.junit.Test;
import org.junit.rules.*;
import junit.framework.*;
import potaufeu.OptionSet.*;
/**
 * Unit tests for the static factory methods of {@code PathMatcherFactory}.
 * Uses a fresh temporary directory with one .txt and one .xml file so that
 * name/extension/path matchers can discriminate between the two.
 */
public final class PathMatcherFactoryTest {
// Temporary directory, recreated for every test method by the JUnit rule.
@Rule
public TemporaryFolder tmpFolder = new TemporaryFolder();
private Path path1;
private Path path2;
// Creates the two fixture files before each test.
@Before
public void createFile() throws IOException {
this.path1 = tmpFolder.newFile("test1.txt").toPath();
this.path2 = tmpFolder.newFile("test2.xml").toPath();
}
// Invokes the private constructor reflectively — coverage for the utility class.
@Test
public void testPathMatcherFactory() throws Exception {
Constructor<?> ctor = PathMatcherFactory.class.getDeclaredConstructor();
ctor.setAccessible(true);
ctor.newInstance();
}
// One matcher is produced per input string.
@Test
public void testToPathMatchers() {
List<String> a = Arrays.asList("aaa", "bbb");
assertEquals(a.size(), toPathMatchers(a, x -> f -> true).size());
}
// Matcher count follows the number of --exclude options parsed.
@SuppressWarnings("deprecation")
@Test
public void testExclusionMatchers() throws Exception {
Parser parser = new Parser();
assertEquals(0, exclusionMatchers(parser.parse("aaa")).size());
assertEquals(1, exclusionMatchers(parser.parse("--exclude", "aaa")).size());
assertEquals(2, exclusionMatchers(parser.parse("--exclude", "aaa", "--exclude", "bbb")).size());
}
// Exclusion matcher accepts paths NOT containing the pattern.
@SuppressWarnings("deprecation")
@Test
public void testExclusionMatcher() {
PathMatcher f = exclusionMatcher("test2");
assertTrue(f.matches(path1));
assertFalse(f.matches(path2));
}
// Matcher count follows the number of --name options parsed.
@SuppressWarnings("deprecation")
@Test
public void testNameMatchers() throws Exception {
Parser parser = new Parser();
assertEquals(0, nameMatchers(parser.parse("aaa")).size());
assertEquals(1, nameMatchers(parser.parse("--name", "aaa")).size());
assertEquals(2, nameMatchers(parser.parse("--name", "aaa", "--name", "bbb")).size());
}
// Name matcher accepts paths whose file name contains the pattern.
@SuppressWarnings("deprecation")
@Test
public void testNameMatcher() {
PathMatcher f = nameMatcher("test1");
assertTrue(f.matches(path1));
assertFalse(f.matches(path2));
}
// Extension matchers only exist when an argument starts with a dot.
@Test
public void testExtensionMatchers() throws Exception {
Parser parser = new Parser();
assertFalse(extensionMatchers(parser.parse("java")).isPresent());
assertTrue(extensionMatchers(parser.parse(".java")).isPresent());
assertTrue(extensionMatchers(parser.parse(".java", ".xml")).isPresent());
assertTrue(extensionMatchers(parser.parse(".java,xml")).isPresent());
}
// Extension matcher accepts paths with the exact extension.
@Test
public void testExtensionMatcher() {
PathMatcher f = extensionMatcher("txt").get();
assertTrue(f.matches(path1));
assertFalse(f.matches(path2));
}
// Every bare (non-option) argument yields one path matcher.
@SuppressWarnings("deprecation")
@Test
public void testPathMatchers() throws Exception {
Parser parser = new Parser();
assertEquals(0, pathMatchers(parser.parse()).size());
assertEquals(1, pathMatchers(parser.parse("aaa")).size());
assertEquals(2, pathMatchers(parser.parse("aaa", "bbb")).size());
}
// Path matcher works on absolute paths; the separator is derived at runtime
// so the test passes on both Unix ('/') and Windows ('\') platforms.
@SuppressWarnings("deprecation")
@Test
public void testPathMatcher() {
final String slash = Paths.get("/").toString().replaceFirst("^.*(.)$", "$1");
PathMatcher f = pathMatcher(slash + "test1");
assertTrue(f.matches(path1.toAbsolutePath()));
assertFalse(f.matches(path2.toAbsolutePath()));
}
// --file / -F produce a regular-file-type matcher that accepts both fixtures.
@Test
public void testFileTypeMatchers() throws Exception {
Parser parser = new Parser();
assertEquals(0, fileTypeMatchers(parser.parse()).size());
assertEquals(0, fileTypeMatchers(parser.parse("aaa")).size());
assertEquals(1, fileTypeMatchers(parser.parse("--file")).size());
assertEquals(1, fileTypeMatchers(parser.parse("-F")).size());
PathMatcher f = fileTypeMatchers(parser.parse("-F")).get(0);
assertTrue(f.matches(path1));
assertTrue(f.matches(path2));
}
// Matcher count follows the number of --size options parsed.
@Test
public void testFileSizeMatchers() throws Exception {
Parser parser = new Parser();
assertEquals(0, fileSizeMatchers(parser.parse("size")).size());
assertEquals(1, fileSizeMatchers(parser.parse("--size", "111")).size());
assertEquals(2, fileSizeMatchers(parser.parse("--size", "111", "--size", "222KB")).size());
}
// Size expressions: "n" exact-or-above? "-n" at most, "n-" at least, "a-b" range;
// path2 is written 14 bytes of XML so the two fixtures differ in size.
@Test
public void testFileSizeMatcher() throws IOException {
Files.write(path2, Arrays.asList("<xml>", "</xml>"), StandardOpenOption.WRITE);
PathMatcher f1 = fileSizeMatcher("5");
assertFalse(f1.matches(path1));
assertTrue(f1.matches(path2));
PathMatcher f2 = fileSizeMatcher("-5");
assertTrue(f2.matches(path1));
assertFalse(f2.matches(path2));
PathMatcher f3 = fileSizeMatcher("12-");
assertFalse(f3.matches(path1));
assertTrue(f3.matches(path2));
PathMatcher f4a = fileSizeMatcher("13-15");
assertFalse(f4a.matches(path1));
assertTrue(f4a.matches(path2));
PathMatcher f4b = fileSizeMatcher("0-15");
assertTrue(f4b.matches(path1));
assertTrue(f4b.matches(path2));
PathMatcher f4c = fileSizeMatcher("0-0");
assertTrue(f4c.matches(path1));
assertFalse(f4c.matches(path2));
assertEquals("java.lang.IllegalArgumentException: min > max: 3-2",
getExceptionAsString(() -> fileSizeMatcher("3-2")));
}
// Matcher count follows the number of --ctime options parsed.
@Test
public void testCtimeMatchers() throws Exception {
Parser parser = new Parser();
assertEquals(0, ctimeMatchers(parser.parse("ctime")).size());
assertEquals(1, ctimeMatchers(parser.parse("--ctime", "2015")).size());
assertEquals(2, ctimeMatchers(parser.parse("--ctime", "2015-", "--ctime", "-2017")).size());
}
// Matcher count follows the number of --mtime options parsed.
@Test
public void testMtimeMatchers() throws Exception {
Parser parser = new Parser();
assertEquals(0, ctimeMatchers(parser.parse("mtime")).size());
assertEquals(1, mtimeMatchers(parser.parse("--mtime", "2015")).size());
assertEquals(2, mtimeMatchers(parser.parse("--mtime", "2015-", "--mtime", "-2017")).size());
}
// Matcher count follows the number of --atime options parsed.
@Test
public void testAtimeMatchers() throws Exception {
Parser parser = new Parser();
assertEquals(0, atimeMatchers(parser.parse("atime")).size());
assertEquals(1, atimeMatchers(parser.parse("--atime", "2015")).size());
assertEquals(2, atimeMatchers(parser.parse("--atime", "2015-", "--atime", "-2017")).size());
}
// Time expressions parallel the size ones: "yyyy[MM[dd[HH]]]", open and closed
// ranges. The two fixtures get known mtimes so each range can be pinned.
@Test
public void testFileTimeMatcher() throws Exception {
Files.setLastModifiedTime(path1, FileTime.fromMillis(TimePoint.millis("201304030000")));
Files.setLastModifiedTime(path2, FileTime.fromMillis(TimePoint.millis("201305120000")));
Parser parser = new Parser();
OptionSet opts = parser.parse("aaa");
final long now = opts.createdTime;
final ToLongFunction<Path> toLong = FileAttributeFormatter::mtime;
PathMatcher f1a = fileTimeMatcher("2012", toLong, now);
assertFalse(f1a.matches(path1));
assertFalse(f1a.matches(path2));
PathMatcher f1b = fileTimeMatcher("2013", toLong, now);
assertTrue(f1b.matches(path1));
assertTrue(f1b.matches(path2));
PathMatcher f1c = fileTimeMatcher("2014", toLong, now);
assertFalse(f1c.matches(path1));
assertFalse(f1c.matches(path2));
PathMatcher f2 = fileTimeMatcher("201305-", toLong, now);
assertFalse(f2.matches(path1));
assertTrue(f2.matches(path2));
PathMatcher f3 = fileTimeMatcher("-201304", toLong, now);
assertTrue(f3.matches(path1));
assertFalse(f3.matches(path2));
PathMatcher f4a = fileTimeMatcher("20130403-20130511", toLong, now);
assertTrue(f4a.matches(path1));
assertFalse(f4a.matches(path2));
PathMatcher f4b = fileTimeMatcher("20130403-20130512", toLong, now);
assertTrue(f4b.matches(path1));
assertTrue(f4b.matches(path2));
PathMatcher f4c = fileTimeMatcher("2013040312-201305", toLong, now);
assertFalse(f4c.matches(path1));
assertTrue(f4c.matches(path2));
assertEquals("java.lang.IllegalArgumentException: min > max: 2015-2014",
getExceptionAsString(() -> fileTimeMatcher("2015-2014", toLong, now)));
}
// Small SAM type so helper below can take a throwing action as a lambda.
@FunctionalInterface
interface ActionWithThrowsException {
void perform() throws Exception;
}
// Runs the action and returns the thrown exception's toString();
// fails the test if no exception is thrown.
static String getExceptionAsString(ActionWithThrowsException action) {
try {
action.perform();
throw new AssertionFailedError();
} catch (Exception e) {
return e.toString();
}
}
}
|
package seedu.typed.model.task;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import seedu.typed.commons.exceptions.IllegalValueException;
import seedu.typed.model.tag.Tag;
import seedu.typed.model.tag.UniqueTagList;
/**
* Unit testing for Task class 98.3%
* @author YIM CHIA HUI
*
*/
public class TaskTest {
// Fixture objects built once per test in setUp(); the null values are used
// by the assertion-error tests below.
private Name nullName;
private Name name;
private Name name2;
private Tag tag;
private Tag tag2;
private Date nullDate;
private Date date;
private boolean isCompleted;
private boolean isCompleted2;
private Task test;
private UniqueTagList tagList;
private UniqueTagList tagList2;
@Rule
public ExpectedException thrown = ExpectedException.none();
// Builds the shared fixtures. IllegalValueException is only printed because
// the inputs here are known-valid; a failure would surface in the tests anyway.
@Before
public void setUp() {
try {
nullName = null;
nullDate = null;
tag = new Tag("work");
tag2 = new Tag("friends");
name = new Name("Meet John");
name2 = new Name("Meet Honey");
date = new Date("12/12/2017");
isCompleted = false;
isCompleted2 = false;
test = new Task(name, date, new UniqueTagList());
tagList = new UniqueTagList();
tagList2 = new UniqueTagList();
tagList2.add(tag2);
tagList.add(tag);
} catch (IllegalValueException e) {
e.printStackTrace();
}
}
// A task equals itself (reflexive equals).
@Test
public void equals_same_success() {
Task test = new TaskBuilder().setName(name).build();
assertTrue(test.equals(test));
}
// Two distinct tasks with identical fields are equal; compares against the
// shared 'test' field built in setUp().
@Test
public void equals_notSameButSimilar_success() {
Task test2 = new TaskBuilder().setName(name).setTags(new UniqueTagList()).setDate(date).build();
assertTrue(test.equals(test2));
}
// A task is never equal to an object of a different type.
@Test
public void equals_notSameAndNotSameInstance_false() {
Task test = new TaskBuilder().setName(name).build();
assertFalse(test.equals(name));
}
// Constructing with a null name trips the constructor's assertion.
@Test
public void task_nameNull_assertError() {
thrown.expect(AssertionError.class);
Task test = new Task(nullName, date, new UniqueTagList());
test.setName(name);
}
// The copy constructor produces an equal task.
@Test
public void task_task_success() {
Task test2 = new Task(test);
assertTrue(test.equals(test2));
}
@Test
public void setName_valid_success() {
Task test = new TaskBuilder().setName(name).build();
test.setName(name2);
assertTrue(test.getName().equals(name2));
}
// Setting a null name trips the setter's assertion.
@Test
public void setName_nameNull_assertError() {
thrown.expect(AssertionError.class);
Task test = new TaskBuilder().setName(name).build();
test.setName(nullName);
}
// Setting a null date trips the setter's assertion.
@Test
public void setDate_dateNull_assertError() {
thrown.expect(AssertionError.class);
Task test = new TaskBuilder().setName(name).build();
test.setDate(nullDate);
}
@Test
public void setDate_valid_success() {
Task test = new TaskBuilder().setName(name).build();
test.setDate(date);
assertTrue(test.getDate().equals(date));
}
@Test
public void getTags_valid_success() {
Task test = new TaskBuilder().setName(name).setTags(tagList).build();
assertTrue(test.getTags().equals(tagList));
}
@Test
public void setTags_valid_success() {
Task test = new TaskBuilder().setName(name).build();
test.setTags(tagList2);
assertTrue(test.getTags().equals(tagList2));
}
// resetData copies every field from the source task.
@Test
public void resetData_valid_success() {
Task test1 = new TaskBuilder().setName(name).build();
test1.resetData(test);
assertTrue(test1.equals(test));
}
// resetData(null) trips the method's assertion.
@Test
public void resetDate_null_assertError() {
thrown.expect(AssertionError.class);
Task test = new TaskBuilder().setName(name).build();
test.resetData(null);
}
// Pins the exact toString format, including its leading space.
@Test
public void toString_valid_success() {
assertEquals(test.toString(), " Name: Meet John Date: 12/12/2017 Completed: false Tags: ");
}
}
|
package org.apache.lenya.cms.ac;
import org.apache.avalon.framework.configuration.ConfigurationException;
import org.apache.lenya.cms.publication.Publication;
import org.apache.lenya.cms.publication.PublicationFactory;
import junit.framework.TestCase;
/**
* @author egli
*
*
*/
public class LDAPUserTest extends TestCase {
/**
 * Constructor for LDAPUserTest.
 * @param arg0 a <code>String</code> test name passed to JUnit
 */
public LDAPUserTest(String arg0) {
super(arg0);
}
/**
 * Runs this test class with the text-mode JUnit runner.
 * @param args an array of <code>String</code>
 */
public static void main(String[] args) {
junit.textui.TestRunner.run(LDAPUserTest.class);
}
/**
 * @see TestCase#setUp()
 */
protected void setUp() throws Exception {
super.setUp();
}
/**
 * @see TestCase#tearDown()
 */
protected void tearDown() throws Exception {
super.tearDown();
}
/**
 * get a publication
 * <p>
 * NOTE(review): the servlet context path is hard-coded to one developer's
 * machine, so these tests only run in that environment — consider making
 * it configurable.
 *
 * @return a <code>Publication</code>
 */
final public Publication getPublication() {
String publicationId = "default";
String servletContextPath =
"/home/egli/build/jakarta-tomcat-4.1.21-LE-jdk14/webapps/lenya/";
return PublicationFactory.getPublication(
publicationId,
servletContextPath);
}
/**
 * Create and save an ldap user, together with editor/admin groups and roles
 * that the user is added to.
 *
 * @param userName name of the user
 * @param email of the user
 * @param ldapId ldap id of the user
 * @throws AccessControlException if the creating or the saving fails
 */
final public void createAndSaveUser(
String userName,
String email,
String ldapId)
throws AccessControlException {
Publication publication = getPublication();
String editorGroupName = "editorGroup";
String adminGroupName = "adminGroup";
String editorRoleName = "editorRole";
String adminRoleName = "adminRole";
FileRole editorRole = new FileRole(publication, editorRoleName);
FileRole adminRole = new FileRole(publication, adminRoleName);
FileGroup editorGroup = new FileGroup(publication, editorGroupName);
FileGroup adminGroup = new FileGroup(publication, adminGroupName);
LDAPUser user = null;
try {
user = new LDAPUser(publication, userName, email, ldapId);
} catch (ConfigurationException e) {
throw new AccessControlException("Could not create user", e);
}
editorRole.save();
adminRole.save();
editorGroup.addRole(editorRole);
user.addGroup(editorGroup);
// admin group carries both roles; user belongs to both groups
adminGroup.addRole(editorRole);
adminGroup.addRole(adminRole);
editorGroup.save();
adminGroup.save();
user.addGroup(adminGroup);
user.save();
}
/**
 * Test loading an LDAPUser
 *
 * @param userName the name of the user
 * @return an <code>LDAPUser</code>
 * @throws AccessControlException if the loading fails
 */
final public LDAPUser loadUser(String userName)
throws AccessControlException {
Publication publication = getPublication();
UserManager manager = UserManager.instance(publication);
return (LDAPUser) manager.getUser(userName);
}
// Disabled test kept for reference: full-name lookup requires a live LDAP server.
// final public void testGetFullName() throws AccessControlException {
// String userName = "felix";
// createAndSaveUser(userName, "felix@wyona.com", "m400032");
// LDAPUser user = null;
// user = loadUser(userName);
// assertNotNull(user);
// String fullName = user.getFullName();
// assertTrue(fullName.equals(" Felix Maeder - Wayona"));
/**
 * Test the setter of the full name
 */
final public void testSetFullName() {
// the setFullName method is supposed to do nothing
}
// Disabled test kept for reference: authentication requires a live LDAP server.
// final public void testAuthenticate() throws AccessControlException {
// String userName = "felix";
// createAndSaveUser(userName, "felix@wyona.com", "m400032");
// User user = null;
// user = loadUser(userName);
// assertNotNull(user);
// assertTrue(user.authenticate("sekret"));
/**
 * Test the ldap id getter
 *
 * @throws AccessControlException if the test fails
 */
final public void testGetLdapId() throws AccessControlException {
String userName = "felix";
String ldapId = "m400032";
createAndSaveUser(userName, "felix@wyona.com", ldapId);
LDAPUser user = null;
user = loadUser(userName);
assertNotNull(user);
assertEquals(ldapId, user.getLdapId());
}
/**
 * Test setting the ldap id: change it, save, reload, and verify persistence.
 *
 * @throws AccessControlException if the test fails
 */
final public void testSetLdapId() throws AccessControlException {
String userName = "felix";
String newLdapId = "foo";
createAndSaveUser(userName, "felix@wyona.com", "bar");
LDAPUser user = null;
user = loadUser(userName);
assertNotNull(user);
user.setLdapId(newLdapId);
user.save();
// reload from storage to prove the change was persisted
user = null;
user = loadUser(userName);
assertNotNull(user);
assertEquals(newLdapId, user.getLdapId());
}
/**
 * Test save
 *
 * @throws AccessControlException if the test fails
 */
final public void testSave() throws AccessControlException {
String userName = "felix";
createAndSaveUser(userName, "felix@wyona.com", "m400032");
User user = null;
user = loadUser(userName);
assertNotNull(user);
}
/**
 * Test the deletion of a ldap user
 *
 */
final public void testDelete() {
//TODO Implement delete().
}
}
|
package uk.me.graphe.server.database;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import uk.me.graphe.server.database.dbitems.*;
import uk.me.graphe.shared.Edge;
import uk.me.graphe.shared.Vertex;
import uk.me.graphe.shared.graphmanagers.OTGraphManager2d;
import uk.me.graphe.shared.graphmanagers.OTGraphManager2dImpl;
import uk.me.graphe.shared.messages.operations.AddNodeOperation;
import uk.me.graphe.shared.messages.operations.CompositeOperation;
import uk.me.graphe.shared.messages.operations.EdgeOperation;
import uk.me.graphe.shared.messages.operations.GraphOperation;
import uk.me.graphe.shared.messages.operations.MoveNodeOperation;
import uk.me.graphe.shared.messages.operations.NodeOperation;
import com.google.code.morphia.Datastore;
import com.google.code.morphia.Morphia;
import com.mongodb.Mongo;
public class DatabaseImpl implements Database{
private Mongo mMongo;
private Morphia mMorphia = new Morphia();
private Datastore mData;
public DatabaseImpl () {
try {
mMongo = new Mongo();
} catch (UnknownHostException e) {
return;
}
mData = mMorphia.createDatastore(mMongo, "graphs");
}
@Override
public void delete(int key) {
// TODO Auto-generated method stub
}
@Override
public OTGraphManager2d retrieve(int key) {
List<OTGraphManager2dStore> retrieves = mData.find(OTGraphManager2dStore.class, "id=", key).asList();
if (retrieves.size() != 1)
throw new Error("Could not locate item");
OTGraphManager2dStore retrieve = retrieves.get(0);
OTGraphManager2d toReturn = new OTGraphManager2dImpl(retrieve.getId());
toReturn.setStateId(retrieve.getStateid());
return toReturn;
}
@Override
public int store(OTGraphManager2d manager) {
OTGraphManager2dStore toStore = new OTGraphManager2dStore(manager);
List<GraphDB> storedOperations = new ArrayList<GraphDB>();
CompositeOperation history = manager.getCompleteHistory();
List<GraphOperation> operations = history.asIndividualOperations();
for (GraphOperation item : operations) {
GraphDB toAdd = null;
int itemId = item.getHistoryId();
if (item.isEdgeOperation()) {
EdgeOperation edgeOp = item.asEdgeOperation();
Edge edge = edgeOp.getEdge();
DBEdge storeEdge = new DBEdge(edge, new DBVertex(edge.getFromVertex()), new DBVertex(edge.getToVertex()));
if (edgeOp.createsEdge(edge)) {
toAdd = new AddEdgeDB(storeEdge);
}
if (edgeOp.deletesEdge(edge)) {
toAdd = new DeleteEdgeDB(storeEdge);
}
}
if (item.isNodeOperation()) {
NodeOperation nodeOp = item.asNodeOperation();
Vertex vertex = nodeOp.getNode();
DBVertex storeVertex = new DBVertex(vertex);
if (nodeOp.createsNode(vertex)) {
AddNodeOperation addOp = (AddNodeOperation) nodeOp;
toAdd = new AddNodeDB(storeVertex, addOp.getX(), addOp.getY());
}
if(nodeOp.deletesNode(vertex)) {
toAdd = new DeleteNodeDB(storeVertex);
}
if(nodeOp.movesNode(vertex)) {
MoveNodeOperation moveOp = (MoveNodeOperation) nodeOp;
toAdd = new MoveNodeDB(storeVertex, moveOp.getToX(), moveOp.getToY());
}
}
if (item.isNoOperation()) {
toAdd = new NoOpDB();
}
toAdd.setHistoryId(itemId);
storedOperations.add(toAdd);
}
toStore.setmOps(storedOperations);
mData.save(toStore);
return toStore.getId();
}
}
|
package com.kaylerrenslow.armaplugin.lang.sqf.psi.codestyle;
import com.intellij.openapi.editor.colors.TextAttributesKey;
import com.intellij.openapi.fileTypes.SyntaxHighlighter;
import com.intellij.openapi.options.colors.AttributesDescriptor;
import com.intellij.openapi.options.colors.ColorDescriptor;
import com.intellij.openapi.options.colors.ColorSettingsPage;
import com.kaylerrenslow.armaplugin.ArmaPluginIcons;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.Icon;
import java.util.HashMap;
import java.util.Map;
/**
* @author Kayler
* @since 12/19/2017
*/
/**
 * IntelliJ color-settings page for SQF: registers the attribute descriptors
 * shown in Settings &gt; Editor &gt; Color Scheme and supplies the highlighted
 * demo snippet.
 */
public class SQFColorSettingsPage implements ColorSettingsPage {
// Maps demo-text tag names to highlighting attributes.
private static final Map<String, TextAttributesKey> map = new HashMap<>();
static {
// Things placed in map can be referenced in sample code.
// Example: <magicVariable>_x</magicVariable> will give _x SQFSyntaxHighligher.MAGIC_VAR highlighting
map.put("magicVariable", SQFSyntaxHighlighter.MAGIC_VAR);
map.put("controlStructureCommand", SQFSyntaxHighlighter.CONTROL_STRUCTURE_COMMAND);
}
// One descriptor per configurable color; the display names appear in the settings UI.
private static final AttributesDescriptor[] ATTR_DESCRIPTORS = new AttributesDescriptor[]{
new AttributesDescriptor("Global Variable", SQFSyntaxHighlighter.GLOBAL_VAR),
new AttributesDescriptor("Local Variable", SQFSyntaxHighlighter.LOCAL_VAR),
new AttributesDescriptor("Magic Variable", SQFSyntaxHighlighter.MAGIC_VAR),
new AttributesDescriptor("Command", SQFSyntaxHighlighter.COMMAND),
new AttributesDescriptor("Comment", SQFSyntaxHighlighter.COMMENT),
new AttributesDescriptor("String", SQFSyntaxHighlighter.STRING),
new AttributesDescriptor("Number", SQFSyntaxHighlighter.NUM),
new AttributesDescriptor("Operator", SQFSyntaxHighlighter.OPERATOR),
new AttributesDescriptor("Parentheses", SQFSyntaxHighlighter.PAREN),
new AttributesDescriptor("Braces", SQFSyntaxHighlighter.BRACE),
new AttributesDescriptor("Brackets", SQFSyntaxHighlighter.BRACKET),
new AttributesDescriptor("Comma", SQFSyntaxHighlighter.COMMA),
new AttributesDescriptor("Control Structure Commands", SQFSyntaxHighlighter.CONTROL_STRUCTURE_COMMAND),
};
@Nullable
@Override
public Icon getIcon() {
return ArmaPluginIcons.ICON_SQF;
}
@NotNull
@Override
public SyntaxHighlighter getHighlighter() {
return new SQFSyntaxHighlighter();
}
// Sample SQF script rendered in the preview pane; <tag>...</tag> spans use the
// extra highlighting registered in 'map' above.
@NotNull
@Override
public String getDemoText() {
return "/*\n" +
" This script does absolutely nothing useful.\n" +
"*/\n" +
"\n" +
"disableSerialization; //disable the serialization\n" +
"\n" +
"[] spawn\n" +
"{\n" +
" private[\"_arr\", \"_localVar\"];\n" +
"\n" +
" _localVariable = 'single quote string';\n" +
" meaningOfLife = 42;\n" +
"\n" +
" <controlStructureCommand>if</controlStructureCommand> (1==1 and 2==2 && 42==42) then {\n" +
" hint \"42 is equal to 42\";\n" +
" _arr = [2e2, 3.1415926535, missionConfigFile];\n" +
" };\n" +
"\n" +
" _localVar = 2 + 2;\n" +
"\n" +
" {\n" +
" <magicVariable>_x</magicVariable> setDamage 1;\n" +
" } <controlStructureCommand>forEach</controlStructureCommand> units group player;\n" +
"\n" +
" <controlStructureCommand>switch</controlStructureCommand> (meaningOfLife) <controlStructureCommand>do</controlStructureCommand> {\n" +
" <controlStructureCommand>case</controlStructureCommand> 42: { hint \"meaning of life is good\"; };\n" +
" <controlStructureCommand>default</controlStructureCommand> { hint \"meaning of life is wrong\"; };\n" +
" };\n" +
"\n" +
"};";
}
@Nullable
@Override
public Map<String, TextAttributesKey> getAdditionalHighlightingTagToDescriptorMap() {
return map;
}
@NotNull
@Override
public AttributesDescriptor[] getAttributeDescriptors() {
return ATTR_DESCRIPTORS;
}
@NotNull
@Override
public ColorDescriptor[] getColorDescriptors() {
return ColorDescriptor.EMPTY_ARRAY;
}
@NotNull
@Override
public String getDisplayName() {
return "SQF";
}
}
|
package com.synaptian.smoketracker.habits.contentprovider;
import java.util.Arrays;
import java.util.HashSet;

import android.content.ContentProvider;
import android.content.ContentResolver;
import android.content.ContentUris;
import android.content.ContentValues;
import android.content.UriMatcher;
import android.database.Cursor;
import android.database.SQLException;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteQueryBuilder;
import android.net.Uri;
import android.text.TextUtils;

import com.synaptian.smoketracker.habits.database.EventTable;
import com.synaptian.smoketracker.habits.database.GoalTable;
import com.synaptian.smoketracker.habits.database.HabitDatabaseHelper;
import com.synaptian.smoketracker.habits.database.HabitTable;
public class MyHabitContentProvider extends ContentProvider {
// database
private HabitDatabaseHelper database;
// Used for the UriMacher
private static final int HABITS = 10;
private static final int HABIT_ID = 20;
private static final int GOALS = 30;
private static final int GOAL_ID = 40;
private static final int EVENTS = 50;
private static final int EVENT_ID = 60;
private static final String AUTHORITY = "com.synaptian.smoketracker.habits.contentprovider";
private static final String HABITS_PATH = "habits";
public static final Uri HABITS_URI = Uri.parse("content://" + AUTHORITY + "/" + HABITS_PATH);
private static final String GOALS_PATH = "goals";
public static final Uri GOALS_URI = Uri.parse("content://" + AUTHORITY + "/" + GOALS_PATH);
private static final String EVENTS_PATH = "events";
public static final Uri EVENTS_URI = Uri.parse("content://" + AUTHORITY + "/" + EVENTS_PATH);
public static final String HABIT_CONTENT_TYPE = ContentResolver.CURSOR_DIR_BASE_TYPE + "/habits";
public static final String HABIT_CONTENT_ITEM_TYPE = ContentResolver.CURSOR_ITEM_BASE_TYPE + "/habit";
public static final String GOAL_CONTENT_TYPE = ContentResolver.CURSOR_DIR_BASE_TYPE + "/goals";
public static final String GOAL_CONTENT_ITEM_TYPE = ContentResolver.CURSOR_ITEM_BASE_TYPE + "/goal";
public static final String EVENT_CONTENT_TYPE = ContentResolver.CURSOR_DIR_BASE_TYPE + "/events";
public static final String EVENT_CONTENT_ITEM_TYPE = ContentResolver.CURSOR_ITEM_BASE_TYPE + "/event";
private static final UriMatcher sURIMatcher = new UriMatcher(UriMatcher.NO_MATCH);
static {
sURIMatcher.addURI(AUTHORITY, HABITS_PATH, HABITS);
sURIMatcher.addURI(AUTHORITY, HABITS_PATH + "/#", HABIT_ID);
sURIMatcher.addURI(AUTHORITY, GOALS_PATH, GOALS);
sURIMatcher.addURI(AUTHORITY, GOALS_PATH + "/#", GOAL_ID);
sURIMatcher.addURI(AUTHORITY, EVENTS_PATH, EVENTS);
sURIMatcher.addURI(AUTHORITY, EVENTS_PATH + "/#", EVENT_ID);
}
@Override
public boolean onCreate() {
database = new HabitDatabaseHelper(getContext());
return false;
}
@Override
public Cursor query(Uri uri, String[] projection, String selection, String[] selectionArgs, String sortOrder) {
// Using SQLiteQueryBuilder instead of query() method
SQLiteQueryBuilder queryBuilder = new SQLiteQueryBuilder();
String groupBy = null;
int uriType = sURIMatcher.match(uri);
switch (uriType) {
case HABIT_ID:
queryBuilder.appendWhere(HabitTable.COLUMN_ID + "=" + uri.getLastPathSegment());
case HABITS:
groupBy = HabitTable.TABLE_HABIT + HabitTable.COLUMN_ID;
queryBuilder.setTables(HabitTable.TABLE_HABIT + " LEFT OUTER JOIN " + EventTable.TABLE_EVENT
+ " ON " + HabitTable.TABLE_HABIT + "." + HabitTable.COLUMN_ID + " = " + EventTable.TABLE_EVENT + "." + EventTable.COLUMN_HABIT_ID);
break;
case GOAL_ID:
queryBuilder.appendWhere(GoalTable.TABLE_GOAL + "." + GoalTable.COLUMN_ID + "=" + uri.getLastPathSegment());
case GOALS:
queryBuilder.appendWhere(GoalTable.TABLE_GOAL + "." + GoalTable.COLUMN_HABIT_ID + "=" + HabitTable.TABLE_HABIT + "." + HabitTable.COLUMN_ID);
queryBuilder.setTables(GoalTable.TABLE_GOAL + "," + HabitTable.TABLE_HABIT);
break;
case EVENT_ID:
queryBuilder.appendWhere(EventTable.TABLE_EVENT + "." + EventTable.COLUMN_ID + "=" + uri.getLastPathSegment());
case EVENTS:
queryBuilder.appendWhere(EventTable.TABLE_EVENT + "." + EventTable.COLUMN_HABIT_ID + "=" + HabitTable.TABLE_HABIT + "." + HabitTable.COLUMN_ID);
queryBuilder.setTables(EventTable.TABLE_EVENT + "," + HabitTable.TABLE_HABIT);
break;
default:
throw new IllegalArgumentException("Unknown URI: " + uri);
}
SQLiteDatabase db = database.getWritableDatabase();
Cursor cursor = queryBuilder.query(db, projection, selection, selectionArgs, groupBy, null, sortOrder);
// Make sure that potential listeners are getting notified
cursor.setNotificationUri(getContext().getContentResolver(), uri);
return cursor;
}
@Override
public String getType(Uri uri) {
return null;
}
@Override
public Uri insert(Uri uri, ContentValues values) {
int uriType = sURIMatcher.match(uri);
SQLiteDatabase sqlDB = database.getWritableDatabase();
Uri returnUri;
long id = 0;
switch (uriType) {
case HABITS:
id = sqlDB.insert(HabitTable.TABLE_HABIT, null, values);
returnUri = Uri.parse(HABITS_PATH + "/" + id);
break;
case GOALS:
id = sqlDB.insert(GoalTable.TABLE_GOAL, null, values);
returnUri = Uri.parse(GOALS_PATH + "/" + id);
break;
case EVENTS:
id = sqlDB.insert(EventTable.TABLE_EVENT, null, values);
returnUri = Uri.parse(EVENTS_PATH + "/" + id);
break;
default:
throw new IllegalArgumentException("Unknown URI: " + uri);
}
getContext().getContentResolver().notifyChange(uri, null);
return returnUri;
}
@Override
public int delete(Uri uri, String selection, String[] selectionArgs) {
int uriType = sURIMatcher.match(uri);
SQLiteDatabase sqlDB = database.getWritableDatabase();
int rowsDeleted = 0;
switch (uriType) {
case HABITS:
rowsDeleted = sqlDB.delete(HabitTable.TABLE_HABIT, selection, selectionArgs);
break;
case HABIT_ID:
String id = uri.getLastPathSegment();
if (TextUtils.isEmpty(selection)) {
rowsDeleted = sqlDB.delete(HabitTable.TABLE_HABIT, HabitTable.COLUMN_ID + "=" + id, null);
} else {
rowsDeleted = sqlDB.delete(HabitTable.TABLE_HABIT, HabitTable.COLUMN_ID + "=" + id + " and " + selection, selectionArgs);
}
break;
case GOALS:
rowsDeleted = sqlDB.delete(GoalTable.TABLE_GOAL, selection, selectionArgs);
break;
case GOAL_ID:
id = uri.getLastPathSegment();
if (TextUtils.isEmpty(selection)) {
rowsDeleted = sqlDB.delete(GoalTable.TABLE_GOAL, GoalTable.COLUMN_ID + "=" + id, null);
} else {
rowsDeleted = sqlDB.delete(GoalTable.TABLE_GOAL, GoalTable.COLUMN_ID + "=" + id + " and " + selection, selectionArgs);
}
break;
case EVENTS:
rowsDeleted = sqlDB.delete(EventTable.TABLE_EVENT, selection, selectionArgs);
break;
case EVENT_ID:
id = uri.getLastPathSegment();
if (TextUtils.isEmpty(selection)) {
rowsDeleted = sqlDB.delete(EventTable.TABLE_EVENT, EventTable.COLUMN_ID + "=" + id, null);
} else {
rowsDeleted = sqlDB.delete(EventTable.TABLE_EVENT, EventTable.COLUMN_ID + "=" + id + " and " + selection, selectionArgs);
}
break;
default:
throw new IllegalArgumentException("Unknown URI: " + uri);
}
getContext().getContentResolver().notifyChange(uri, null);
return rowsDeleted;
}
/**
 * Updates rows addressed by the given content URI.
 * <p>
 * Previously only the habit URIs were handled here even though
 * {@code insert} and {@code delete} support goals and events as well; the
 * missing GOALS/GOAL_ID/EVENTS/EVENT_ID cases are added for consistency.
 * Single-row URIs bind the row id as a SQL parameter rather than
 * concatenating it into the WHERE clause. Observers are notified afterwards.
 *
 * @param uri           content URI identifying the table (and optionally a row)
 * @param values        new column values to apply
 * @param selection     optional WHERE clause supplied by the caller
 * @param selectionArgs bind values for {@code selection}
 * @return number of rows updated
 * @throws IllegalArgumentException if the URI does not match a known pattern
 */
@Override
public int update(Uri uri, ContentValues values, String selection,
        String[] selectionArgs) {
    int uriType = sURIMatcher.match(uri);
    SQLiteDatabase sqlDB = database.getWritableDatabase();
    int rowsUpdated = 0;
    switch (uriType) {
        case HABITS:
            rowsUpdated = sqlDB.update(HabitTable.TABLE_HABIT, values, selection, selectionArgs);
            break;
        case HABIT_ID:
            rowsUpdated = updateSingleRow(sqlDB, HabitTable.TABLE_HABIT, HabitTable.COLUMN_ID,
                    uri.getLastPathSegment(), values, selection, selectionArgs);
            break;
        case GOALS:
            rowsUpdated = sqlDB.update(GoalTable.TABLE_GOAL, values, selection, selectionArgs);
            break;
        case GOAL_ID:
            rowsUpdated = updateSingleRow(sqlDB, GoalTable.TABLE_GOAL, GoalTable.COLUMN_ID,
                    uri.getLastPathSegment(), values, selection, selectionArgs);
            break;
        case EVENTS:
            rowsUpdated = sqlDB.update(EventTable.TABLE_EVENT, values, selection, selectionArgs);
            break;
        case EVENT_ID:
            rowsUpdated = updateSingleRow(sqlDB, EventTable.TABLE_EVENT, EventTable.COLUMN_ID,
                    uri.getLastPathSegment(), values, selection, selectionArgs);
            break;
        default:
            throw new IllegalArgumentException("Unknown URI: " + uri);
    }
    getContext().getContentResolver().notifyChange(uri, null);
    return rowsUpdated;
}

/**
 * Updates the row whose id equals the URI's last path segment, binding the id
 * as a SQL parameter ({@code =?}) to avoid injection through the URI. Any
 * caller-supplied selection is AND-ed on, with its bind values appended after
 * the id.
 */
private static int updateSingleRow(SQLiteDatabase db, String table, String idColumn,
        String id, ContentValues values, String selection, String[] selectionArgs) {
    String where = idColumn + "=?";
    String[] args;
    if (TextUtils.isEmpty(selection)) {
        args = new String[] { id };
    } else {
        where += " and " + selection;
        int extra = (selectionArgs == null) ? 0 : selectionArgs.length;
        args = new String[extra + 1];
        args[0] = id;
        if (extra > 0) {
            System.arraycopy(selectionArgs, 0, args, 1, extra);
        }
    }
    return db.update(table, values, where, args);
}
}
|
/**
@author Andrew McCallum <a href="mailto:mccallum@cs.umass.edu">mccallum@cs.umass.edu</a>
*/
package cc.mallet.pipe.iterator;
import java.io.*;
import java.util.Iterator;
import java.util.regex.*;
import cc.mallet.pipe.Pipe;
import cc.mallet.types.*;
/** Iterate over groups of lines of text, separated by lines that
    match a regular expression. For example, the WSJ BaseNP data
    consists of sentences with one word per line, each sentence
    separated by a blank line. If the "boundary" line is to be
    included in the group, it is placed at the end of the group. */
public class LineGroupIterator implements Iterator<Instance>
{
    // Reader positioned at the next unconsumed line of the input.
    LineNumberReader reader;
    // Lines matching this pattern delimit groups.
    Pattern lineBoundaryRegex;
    // When true, boundary lines are dropped; otherwise kept with their group.
    boolean skipBoundary;
    //boolean putBoundaryLineAtEnd; // Not yet implemented
    // Buffered-ahead group text; null once the input is exhausted.
    String nextLineGroup;
    // Boundary line associated with nextLineGroup (used as Instance source).
    String nextBoundary;
    String nextNextBoundary;
    int groupIndex = 0;
    boolean putBoundaryInSource = true;

    public LineGroupIterator (Reader input, Pattern lineBoundaryRegex, boolean skipBoundary)
    {
        this.reader = new LineNumberReader (input);
        this.lineBoundaryRegex = lineBoundaryRegex;
        this.skipBoundary = skipBoundary;
        setNextLineGroup();
    }

    /** Returns the next group's text without consuming it; null when exhausted. */
    public String peekLineGroup () {
        return nextLineGroup;
    }

    /** Reads ahead to buffer the next line group into nextLineGroup. */
    private void setNextLineGroup ()
    {
        // StringBuilder instead of StringBuffer: there is no concurrent access
        // here, so the unsynchronized builder is the right choice.
        StringBuilder sb = new StringBuilder ();
        String line;
        if (!skipBoundary && nextBoundary != null)
            sb.append(nextBoundary).append('\n');
        while (true) {
            try {
                line = reader.readLine();
            } catch (IOException e) {
                throw new RuntimeException (e);
            }
            if (line == null) {
                break;
            } else if (lineBoundaryRegex.matcher (line).matches()) {
                if (sb.length() > 0) {
                    // Finished a group; remember this boundary for the next one.
                    this.nextBoundary = this.nextNextBoundary;
                    this.nextNextBoundary = line;
                    break;
                } else { // The first line of the file.
                    if (!skipBoundary) sb.append(line).append('\n');
                    this.nextNextBoundary = line;
                }
            } else {
                sb.append(line);
                sb.append('\n');
            }
        }
        if (sb.length() == 0)
            this.nextLineGroup = null;
        else
            this.nextLineGroup = sb.toString();
    }

    public Instance next ()
    {
        // Honor the Iterator contract: fail loudly when exhausted rather than
        // relying on an assert, which is disabled unless the JVM runs with -ea.
        if (nextLineGroup == null)
            throw new java.util.NoSuchElementException ("No more line groups");
        Instance carrier = new Instance (nextLineGroup, null, "linegroup"+groupIndex++,
                putBoundaryInSource ? nextBoundary : null);
        setNextLineGroup ();
        return carrier;
    }

    public boolean hasNext () { return nextLineGroup != null; }

    public void remove () {
        // Iterator.remove is specified to throw UnsupportedOperationException
        // when removal is unsupported; IllegalStateException is reserved for
        // "next() not yet called" situations.
        throw new UnsupportedOperationException ("This Iterator<Instance> does not support remove().");
    }
}
|
package dr.app.beauti.components.tipdatesampling;
import dr.app.beauti.options.*;
import dr.app.beauti.types.OperatorType;
import dr.app.beauti.types.PriorScaleType;
import dr.app.beauti.types.PriorType;
import dr.app.beauti.types.TipDateSamplingType;
import dr.evolution.util.Taxon;
import dr.evolution.util.TaxonList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* @author Andrew Rambaut
* @version $Id$
*/
public class TipDateSamplingComponentOptions implements ComponentOptions {

    private final BeautiOptions options;

    public TipDateSamplingComponentOptions(final BeautiOptions options) {
        this.options = options;
    }

    /** Registers the joint tip-date parameter and the scale operator acting on it. */
    public void createParameters(final ModelOptions modelOptions) {
        modelOptions.createNonNegativeParameterInfinitePrior("treeModel.tipDates", "date of specified tips",
                PriorScaleType.TIME_SCALE, 1.0);
        modelOptions.createScaleOperator("treeModel.tipDates", modelOptions.demoTuning, 3.0);
    }

    /**
     * Adds the tip-date parameters matching the current sampling mode: one
     * parameter per taxon for individual sampling, the shared
     * "treeModel.tipDates" parameter for joint sampling, nothing otherwise.
     */
    public void selectParameters(final ModelOptions modelOptions, final List<Parameter> params) {
        switch (tipDateSamplingType) {
            case SAMPLE_INDIVIDUALLY: {
                TaxonList taxa = getTaxonSet();
                for (int index = 0; index < taxa.getTaxonCount(); index++) {
                    params.add(parameterForTaxon(taxa.getTaxon(index)));
                }
                break;
            }
            case SAMPLE_JOINT:
                params.add(modelOptions.getParameter("treeModel.tipDates"));
                break;
            default:
                // NO_SAMPLING and any other mode: nothing to add.
                break;
        }
    }

    /** Lazily builds and caches the per-taxon age parameter. */
    private Parameter parameterForTaxon(final Taxon taxon) {
        Parameter parameter = tipDateParameters.get(taxon);
        if (parameter == null) {
            // Use the taxon's "height" attribute as the starting value when present.
            double height = 0.0;
            Object attribute = taxon.getAttribute("height");
            if (attribute != null) {
                height = (Double) attribute;
            }
            parameter = new Parameter.Builder("age(" + taxon.getId() + ")", "sampled age of taxon, " + taxon.getId())
                    .scaleType(PriorScaleType.TIME_SCALE).prior(PriorType.UNIFORM_PRIOR).initial(height).isNonNegative(true).build();
            parameter.setPriorEdited(true);
            tipDateParameters.put(taxon, parameter);
        }
        return parameter;
    }

    public void selectStatistics(final ModelOptions modelOptions, final List<Parameter> stats) {
        // no statistics
    }

    /**
     * Adds the operators matching the current sampling mode: a random-walk
     * operator per taxon for individual sampling, the shared scale operator
     * for joint sampling, nothing otherwise.
     */
    public void selectOperators(final ModelOptions modelOptions, final List<Operator> ops) {
        switch (tipDateSamplingType) {
            case SAMPLE_INDIVIDUALLY: {
                TaxonList taxa = getTaxonSet();
                for (int index = 0; index < taxa.getTaxonCount(); index++) {
                    ops.add(operatorForTaxon(taxa.getTaxon(index)));
                }
                break;
            }
            case SAMPLE_JOINT:
                ops.add(modelOptions.getOperator("treeModel.tipDates"));
                break;
            default:
                break;
        }
    }

    /** Lazily builds and caches the per-taxon random-walk operator. */
    private Operator operatorForTaxon(final Taxon taxon) {
        Operator operator = tipDateOperators.get(taxon);
        if (operator == null) {
            Parameter parameter = tipDateParameters.get(taxon);
            operator = new Operator.Builder("age(" + taxon.getId() + ")", "", parameter, OperatorType.RANDOM_WALK, 1.0, 1.0).build();
            tipDateOperators.put(taxon, operator);
        }
        return operator;
    }

    /** Returns the restricted taxon set when one is configured, otherwise all taxa. */
    public TaxonList getTaxonSet() {
        return (tipDateSamplingTaxonSet != null) ? tipDateSamplingTaxonSet : options.taxonList;
    }

    public TipDateSamplingType tipDateSamplingType = TipDateSamplingType.NO_SAMPLING;
    public TaxonList tipDateSamplingTaxonSet = null;

    private Map<Taxon, Parameter> tipDateParameters = new HashMap<Taxon, Parameter>();
    private Map<Taxon, Operator> tipDateOperators = new HashMap<Taxon, Operator>();
}
|
package edu.psu.compbio.seqcode.projects.naomi.multiscalesignal;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.TreeSet;
import org.apache.commons.math3.analysis.polynomials.PolynomialFunction;
import org.apache.commons.math3.distribution.NormalDistribution;
import edu.psu.compbio.seqcode.deepseq.experiments.ExptConfig;
import edu.psu.compbio.seqcode.genome.GenomeConfig;
import edu.psu.compbio.seqcode.genome.location.Region;
import edu.psu.compbio.seqcode.projects.naomi.utilities.MapUtility;
import edu.psu.compbio.seqcode.projects.seed.SEEDConfig;
/**
* Segmentation Tree
*
* Methods for MultiScaleSignalRepresentation
* Probabilistic Multiscale Image Segmentation, Vincken et al. IEEE (1997)
*
* @author naomi yamada
*
**/
public class SegmentationTree {

    protected GenomeConfig gconfig;
    protected ExptConfig econfig;
    protected SEEDConfig sconfig;
    // Number of scale-space levels to build.
    protected int numScale;

    /*********************
     * Gaussian scale space and window parameters
     */
    final static double DELTA_TAU = 0.5*Math.log(2);
    final static double MINIMUM_VALUE = Math.pow(10, -100); //arbitrary minimum value; I cannot use Double.MIN_VALUE because it can become zero
    // I have to determine P_MIN value carefully because P_MIN will substantially affect Gaussian window size
    // final static double P_MIN = Math.pow(10,-3);
    final static double P_MIN = Math.pow(10,-2);
    //P_BIN is to determine window size of growing bin
    final static double P_BIN = 0.4995;
    final static double K_MIN = 1/Math.sqrt(1-Math.exp(-2*DELTA_TAU));
    final static double K_N = Math.ceil(K_MIN);

    /*********************
     * Linkage parameters
     */
    final static double WEIGHT_I = 1.00;
    final static double WEIGHT_G = 0.0000001;
    final static double WEIGHT_M = 1000;

    public SegmentationTree(GenomeConfig gcon, ExptConfig econ, SEEDConfig scon, int scale){
        gconfig = gcon;
        econfig = econ;
        sconfig = scon;
        numScale = scale;
    }

    /**
     * Builds the multiscale segmentation tree (Vincken et al., IEEE 1997):
     * for each scale, Gaussian-blurs the signal, links each bin ("kid") to its
     * best parent within a search radius, and records the surviving parent
     * nodes per scale.
     *
     * @param currchromBinSize number of valid bins in gaussianBlur for this chromosome
     * @param gaussianBlur     per-bin signal; column 0 holds the previous scale, column 1 the current one
     * @param linkageMap       kid-to-parent links; mutated in place across scales
     * @param DImax            maximum intensity difference, used to normalize the intensity score
     * @param trailingZero     ground-volume value stored for node 0 (presumably leading-zero count — TODO confirm)
     * @param zeroEnd          used for the last node's ground volume (presumably trailing-zero offset — TODO confirm)
     * @return map from scale index to the set of parent nodes at that scale
     */
    protected Map <Integer, Set<Integer>> buildTree (int currchromBinSize, float[][] gaussianBlur, Map <Integer, Integer> linkageMap, float DImax, int trailingZero, int zeroEnd){

        Map<Integer,Set<Integer>> segmentationTree =new HashMap<Integer,Set<Integer>>();
        // NOTE(review): this stores a live keySet() view of linkageMap, which is
        // cleared below; the scale-0 entry is re-put from startingNodes at the end
        // as a workaround.
        segmentationTree.put(0, linkageMap.keySet());
        System.out.println("curr Scale 0 size, printing from segmentationTree "+segmentationTree.get(0).size());
        // Defensive copy of the initial node set, used to restore scale 0 later.
        Set<Integer> startingNodes = new TreeSet<Integer>(segmentationTree.get(0));
        // Map<Region, HashMap<Integer,Set<Integer>>> segmentationTree = new HashMap<Region, HashMap<Integer, Set<Integer>>>();

        /*********************
         * Matrices parameters
         */
        double[] sigma = new double[numScale];
        double[] radius = new double[numScale];
        for (int i = 0; i<numScale;i++){
            sigma[i] = 1;
            radius[i] = 1;
        }

        for (int n = 1; n<numScale; n++){
            final long gaussianStartTime = System.currentTimeMillis();

            //sigma calculation
            sigma[n] = Math.exp(n*DELTA_TAU);
            // create normal distribution with mean zero and sigma[n]
            NormalDistribution normDistribution = new NormalDistribution(0.00,sigma[n]);
            //take inverse CDF based on the normal distribution using probability
            double inverseCDF = normDistribution.inverseCumulativeProbability(P_MIN);
            double binInverseCDF = normDistribution.inverseCumulativeProbability(P_BIN);
            int windowSize = (int) (-Math.round(inverseCDF)*2+1);
            float binWindowSize = -Math.round(binInverseCDF)*2+1;

            //window calculation based on Gaussian(normal) distribution with sigma, mean=zero,x=X[i]
            double window[] = new double[windowSize];
            double windowSum = 0;
            for (int i = 0;i<windowSize;i++){
                window[i] = normDistribution.density(Math.round(inverseCDF)+i);
                windowSum = windowSum+window[i];
            }
            double normalizedWindow[]=new double[windowSize];
            for (int i = 0;i<windowSize;i++)
                normalizedWindow[i] = window[i]/windowSum;

            float fchromBinSize = currchromBinSize;
            double polyCoeffi[] = new double [(int) Math.ceil(fchromBinSize/binWindowSize)];
            System.out.println("binWindowSize is "+binWindowSize+"\tpolyCoeffi length is "+(int) Math.ceil(fchromBinSize/binWindowSize));

            //copy from column[1] to column[0];this procedure need to be repeated for each iteration of scale
            // copy from column[1] to array to store polynomial coefficient
            for (int i = 0 ; i<currchromBinSize; i++){
                gaussianBlur[i][0]=gaussianBlur[i][1];
                polyCoeffi[(int) Math.floor(((float) i)/binWindowSize)] += gaussianBlur[i][1]/binWindowSize;
            }
            // Replace exact zeros so the polynomial convolution never produces
            // all-zero coefficients.
            for (int i = 0; i < Math.ceil(fchromBinSize/binWindowSize); i++){
                if (polyCoeffi[i] == 0)
                    polyCoeffi[i]=MINIMUM_VALUE;
            }

            // Gaussian smoothing implemented as polynomial multiplication
            // (convolution of the binned signal with the normalized window).
            PolynomialFunction poly1 = new PolynomialFunction(polyCoeffi);
            PolynomialFunction poly2 = new PolynomialFunction(normalizedWindow);
            PolynomialFunction polyMultiplication=poly1.multiply(poly2);
            double coefficients[]= polyMultiplication.getCoefficients();

            //taking mid point of polynomial coefficients
            int coeffiMid = (int) Math.floor(((float) coefficients.length)/ 2.0);

            System.out.println("currchromBin Size is : "+currchromBinSize+"\twindowSize is: "+windowSize+
                    "\tpolyCoeffi length is "+polyCoeffi.length+"\tcoefficients length is: "+coefficients.length);

            //copy Gaussian blur results to the column[1] without increasing bin size
            // for (int i = 0; i<currchromBinSize;i++){
            // if (currchromBinSize % 2 ==0 && coefficients.length % 2 == 1){
            // gaussianBlur[i][1]=(float) coefficients[polyMid-currchromBinSize/2+i+1];
            // }else{
            // gaussianBlur[i][1]=(float) coefficients[polyMid-currchromBinSize/2+i];

            // copy Gaussian blur results to the column[1] with increasing bin size
            for (int i = 0; i<currchromBinSize;i++){
                if (polyCoeffi.length % 2 ==0 && coefficients.length % 2 == 1)
                    gaussianBlur[i][1]=(float) coefficients[(int) (coeffiMid-Math.floor((fchromBinSize/2-i)/binWindowSize))+1];
                else
                    gaussianBlur[i][1]=(float) coefficients[(int) (coeffiMid-Math.floor((fchromBinSize/2-i)/binWindowSize))];
            }

            // Debug sampling of the blurred signal.
            // FIX: the upper bound was hard-coded to 11516987 (apparently one
            // specific chromosome's bin count), which throws
            // ArrayIndexOutOfBoundsException for smaller chromosomes; bound by
            // the actual bin count instead.
            for (int i = 0; i < currchromBinSize; i += 200000)
                System.out.println(gaussianBlur[i][0]+" : "+gaussianBlur[i][1]);

            final long gaussianEndTime = System.currentTimeMillis();
            System.out.println("scale "+n+" Gausisian blur execusion time "+ (gaussianEndTime-gaussianStartTime));

            /***************
             * Search Volume
             */
            double tempRadius;
            if (n==1){
                tempRadius = sigma[n];
            }else{
                tempRadius = Math.sqrt(Math.pow(sigma[n],2)-Math.pow(sigma[n-1], 2));
            }
            radius[n] = Math.ceil(K_MIN*tempRadius);

            int DCPsize = (int) (Math.round(radius[n])*2+1);
            int dcp[] = new int[DCPsize];
            double distanceFactor[] = new double[DCPsize];
            double affectionDistance;
            double denom = -2*(Math.pow(sigma[n], 2)-Math.pow(sigma[n-1],2));
            for (int i = 0; i<DCPsize;i++){
                dcp[i] = (int) -Math.round(radius[n])+i;
                // applying equation 7 in Vincken(1997)
                affectionDistance=Math.exp(Math.pow(dcp[i], 2)/denom)/Math.exp(Math.pow(0.5*sigma[n],2)/denom);
                //applying equation 8 in Vincken (1997)
                if (Math.abs(dcp[i]) > 0.5*sigma[n]){distanceFactor[i]= affectionDistance;}
                else{distanceFactor[i] = 1.0000;}
            }

            /***************
             * Linkage Loop
             */
            final long linkageLoopStart = System.currentTimeMillis();
            // TreeMap<Integer, Integer> GvParents = new TreeMap<Integer,Integer>();
            TreeMap<Integer, Integer> GvParents = new TreeMap<Integer,Integer>(linkageMap);

            //First iteration only consider intensity differences between parent and kid and connect to the ones with the least difference.
            //From the second iteration, we consider ground volume = number of nodes that parents are linked to the kids
            //From third iteration, we increase the weight of the ground volume by 1e-7.
            //Vincken paper said after 3-4 iteration, there would be no significant difference.
            double groundVC = 0;
            double groundVPmax = 0;
            double tempScore = 0;

            //updating ground volume and iterating to encourage convergence
            for (int counter = 0; counter<5; counter++){
                if (counter != 0){
                    for (Integer parent : GvParents.keySet()){
                        if ( GvParents.get(parent) > groundVPmax)
                            groundVPmax = GvParents.get(parent);
                    }
                }
                // look for parents within the windowSize
                for (Integer kid : linkageMap.keySet()){
                    if (counter ==0 || groundVPmax == 0){groundVC = 0.00;}
                    else{ groundVC = (WEIGHT_I+WEIGHT_G*counter)*GvParents.get(linkageMap.get(kid))/groundVPmax;}

                    double intensityDiffScore = 0;
                    for (int i = 0; i<DCPsize; i++){
                        if (GvParents.containsKey(kid+dcp[i])){
                            tempScore = distanceFactor[i]*((1- Math.abs(gaussianBlur[kid][0] - gaussianBlur[kid+dcp[i]][1])/DImax)+groundVC);
                            if (tempScore > intensityDiffScore){
                                intensityDiffScore = tempScore;
                                linkageMap.put(kid,(kid+dcp[i]));
                                //test if (counter ==0){linkageMap.put(kid,(kid+dcp[i]));}
                                //test else{
                                // if(GvParents.containsKey(kid+dcp[i])){linkageMap.put(kid,(kid+dcp[i]));}
                                // if(linkageMap.containsValue(kid+dcp[i])){linkageMap.put(kid,(kid+dcp[i]));}
                                //test }
                            }
                        }
                    }
                }

                //test
                // if (currchromBinSize > 20000000){
                // System.out.println("current Chrom is: "+currChrom.getChrom());
                // System.out.println("printing linkangeMap content");
                // for (Map.Entry<Integer, Integer> entry : linkageMap.entrySet()){
                // System.out.println("Key: "+entry.getKey()+" Value: "+entry.getValue());

                // Rebuild the parent ground volumes from the (sorted) links:
                // each parent's value is the distance to the previous parent.
                GvParents.clear();
                Integer lastParent = 0;
                Map<Integer, Integer> sortedLinkageMap = new HashMap<Integer,Integer> (MapUtility.sortByValue(linkageMap));
                for (Integer parent : sortedLinkageMap.values()){
                    GvParents.put(parent, (parent-lastParent));
                    lastParent = parent;
                }
                GvParents.put(0, trailingZero);
                GvParents.put(gaussianBlur.length-1,gaussianBlur.length-zeroEnd-1);
            }

            // Collapse the links: every surviving parent becomes its own kid for
            // the next scale.
            Map<Integer, Integer> sortedLinkageMap = new HashMap<Integer,Integer> (MapUtility.sortByValue(linkageMap));
            linkageMap.clear();
            for (Integer parent : sortedLinkageMap.values()){
                linkageMap.put(parent, parent);
            }

            //for each scaleNum, add the parents to the segmentationTree
            final long linkageLoopEnd = System.currentTimeMillis();
            System.out.println("linkage Loop excusion time "+( linkageLoopEnd -linkageLoopStart));
            segmentationTree.put(n, GvParents.keySet());
        }//end of scale space iteration

        // scale zero is getting overwriting with the parents of the last scale; I'm overwriting the scale zero with initial nodesest for quick fix
        segmentationTree.put(0, startingNodes);

        for (Integer scale : segmentationTree.keySet()){
            System.out.println("current scale is: "+scale);
            Set<Integer> sortedNodeSet = new TreeSet<Integer>(segmentationTree.get(scale));
            System.out.println("current nodeset size is: "+sortedNodeSet.size());
            // for (Integer node : sortedNodeSet)
            // System.out.println(node);
        }
        return segmentationTree;
    }
}
|
package ar.edu.unrc.exa.dc.dose2016.riocuartobandasderock.dao;
import java.util.List;
//import java.util.Date;
import ar.edu.unrc.exa.dc.dose2016.riocuartobandasderock.model.Album;
import ar.edu.unrc.exa.dc.dose2016.riocuartobandasderock.model.Song;
/**
 * Data-access interface for {@link Album} entities: lookups by several album
 * attributes plus create/update/delete operations.
 *
 * @author Dose Team 2016
 */
public interface AlbumDAO {
/**
 * Finds a single album by its database identifier.
 *
 * @param id the album's identifier
 * @return the matching album, or presumably null when none exists — verify against implementations
 */
public Album findById(int id);
/**
 * @return All albums
 */
public List<Album> getAllAlbums();
/**
 * Finds albums recorded by the named band.
 *
 * @param bandName name of the band
 * @return the albums of that band
 */
public List<Album> findByBandName(String bandName);
/**
 * Finds a single album by its title.
 *
 * @param name the album title
 * @return the matching album, or presumably null when none exists — verify against implementations
 */
public Album findByName(String name);
/**
 * Finds albums released in the given year.
 *
 * @param year release year
 * @return the albums from that year
 */
public List<Album> findByYear(int year);
/**
 * Finds albums of the given genre.
 *
 * @param genere the genre (note: spelling follows the model's field name)
 * @return the albums in that genre
 */
public List<Album> findByGenere(String genere);
/**
 * Finds albums published by the given record label.
 *
 * @param recordLabel the record label
 * @return the albums of that label
 */
public List<Album> findByRecordLabel(String recordLabel);
/**
 * Finds albums made by the given producer.
 *
 * @param producer the producer
 * @return the albums of that producer
 */
public List<Album> findByProducer(String producer);
/**
 * Finds albums of the given total duration.
 *
 * @param duration total duration (units defined by the Album model — confirm)
 * @return the albums of that duration
 */
public List<Album> findByDuration(int duration);
/**
 * Finds albums containing the given song.
 *
 * @param song the song to look for
 * @return the albums that contain it
 */
public List<Album> findBySong(Song song);
// /**
// * @param releaseDate
// * @return
// */
// public List<Album> findByReleaseDate(Date releaseDate);
// /**
// * @param recordDate
// * @return
// */
// public List<Album> findByRecordDate(Date recordDate);
/**
 * Persists a new album.
 *
 * @param album the album to create
 */
public void createAlbum(Album album);
/**
 * Saves changes to an existing album.
 *
 * @param album the album to update
 */
public void updateAlbum(Album album);
/**
 * Removes an album.
 *
 * @param album the album to delete
 */
public void deleteAlbum(Album album);
}
|
package com.InfinityRaider.AgriCraft.compatibility.minetweaker;
import com.InfinityRaider.AgriCraft.api.v1.BlockWithMeta;
import com.InfinityRaider.AgriCraft.api.v1.GrowthRequirement;
import com.InfinityRaider.AgriCraft.farming.GrowthRequirementHandler;
import com.google.common.base.Joiner;
import minetweaker.IUndoableAction;
import minetweaker.MineTweakerAPI;
import minetweaker.api.item.IItemStack;
import minetweaker.api.minecraft.MineTweakerMC;
import net.minecraft.item.ItemBlock;
import net.minecraft.item.ItemSeeds;
import net.minecraft.item.ItemStack;
import stanhebben.zenscript.annotations.ZenClass;
import stanhebben.zenscript.annotations.ZenMethod;
import java.util.ArrayList;
import java.util.List;
public class Growing {
/** Provides functionality to add and remove fertile soils. */
@ZenClass("mods.agricraft.growing.FertileSoils")
public static class FertileSoils {

    /** Whitelists a single soil block. */
    @ZenMethod
    public static void add(IItemStack soil) {
        add(new IItemStack[]{soil});
    }

    /** Whitelists several soil blocks at once; all must be ItemBlocks. */
    @ZenMethod
    public static void add(IItemStack[] soils) {
        ItemStack[] stacks = MineTweakerMC.getItemStacks(soils);
        if (!areValidSoils(stacks)) {
            MineTweakerAPI.logError("Error adding soils to the whitelist. All provided items must be of type ItemBlock.");
            return;
        }
        MineTweakerAPI.apply(new AddAction(stacks));
    }

    /** Removes a single soil block from the whitelist. */
    @ZenMethod
    public static void remove(IItemStack soil) {
        remove(new IItemStack[]{soil});
    }

    /** Removes several soil blocks at once; all must be ItemBlocks. */
    @ZenMethod
    public static void remove(IItemStack[] soils) {
        ItemStack[] stacks = MineTweakerMC.getItemStacks(soils);
        if (!areValidSoils(stacks)) {
            MineTweakerAPI.logError("Error removing soils from the whitelist. All provided items must be of type ItemBlock.");
            return;
        }
        MineTweakerAPI.apply(new RemoveAction(stacks));
    }

    /**
     * @return False, if one of the provided ItemStacks is not of type ItemBlock, true otherwise
     */
    private static boolean areValidSoils(ItemStack[] soils) {
        for (ItemStack stack : soils) {
            if (stack.getItem() instanceof ItemBlock) {
                continue;
            }
            return false;
        }
        return true;
    }

    /** Converts validated ItemBlock stacks into BlockWithMeta entries. */
    private static List<BlockWithMeta> toBlockList(ItemStack[] stacks) {
        List<BlockWithMeta> blocks = new ArrayList<BlockWithMeta>();
        for (ItemStack stack : stacks) {
            blocks.add(new BlockWithMeta(((ItemBlock) stack.getItem()).field_150939_a, stack.getItemDamage()));
        }
        return blocks;
    }

    /** Undoable action that adds soils to the whitelist. */
    private static class AddAction implements IUndoableAction {
        private final List<BlockWithMeta> soils;

        public AddAction(ItemStack[] soils) {
            this.soils = toBlockList(soils);
        }

        @Override
        public void apply() {
            GrowthRequirementHandler.addAllToSoilWhitelist(soils);
        }

        @Override
        public boolean canUndo() {
            return true;
        }

        @Override
        public void undo() {
            GrowthRequirementHandler.removeAllFromSoilWhitelist(soils);
        }

        @Override
        public String describe() {
            return "Adding soils [" + Joiner.on(", ").join(soils) + "] to whitelist.";
        }

        @Override
        public String describeUndo() {
            return "Removing previously added soils [" + Joiner.on(", ").join(soils) + "] from the whitelist.";
        }

        @Override
        public Object getOverrideKey() {
            return null;
        }
    }

    /** Undoable action that removes soils from the whitelist. */
    private static class RemoveAction implements IUndoableAction {
        private final List<BlockWithMeta> soils;

        public RemoveAction(ItemStack[] soils) {
            this.soils = toBlockList(soils);
        }

        @Override
        public void apply() {
            GrowthRequirementHandler.removeAllFromSoilWhitelist(soils);
        }

        @Override
        public boolean canUndo() {
            return true;
        }

        @Override
        public void undo() {
            GrowthRequirementHandler.addAllToSoilWhitelist(soils);
        }

        @Override
        public String describe() {
            return "Removing soils [" + Joiner.on(", ").join(soils) + "] from the whitelist.";
        }

        @Override
        public String describeUndo() {
            return "Adding previously removed soils [" + Joiner.on(", ").join(soils) + "] to the whitelist.";
        }

        @Override
        public Object getOverrideKey() {
            return null;
        }
    }
}
/**Provides functionality to set or clear a specific soil for a plant*/
@ZenClass("mods.agricraft.growing.Soil")
public static class Soil {
@ZenMethod
public static void set(IItemStack seed, IItemStack soil) {
ItemStack seedStack = MineTweakerMC.getItemStack(seed);
ItemStack soilStack = MineTweakerMC.getItemStack(soil);
String error = "Invalid first argument: has to be a seed";
boolean success = seedStack.getItem()!=null && seedStack.getItem() instanceof ItemSeeds;
if(success) {
error = "Invalid second argument: has to be a block";
success = soilStack.getItem()!=null && soilStack.getItem() instanceof ItemBlock;
if(success) {
MineTweakerAPI.apply(new SetAction(seedStack, new BlockWithMeta(((ItemBlock) soilStack.getItem()).field_150939_a, soilStack.getItemDamage())));
}
}
if(!success) {
MineTweakerAPI.logError("Error when trying to set soil: "+error);
}
}
@ZenMethod
public static void clear(IItemStack seed) {
ItemStack seedStack = MineTweakerMC.getItemStack(seed);
if(seedStack.getItem()!=null && seedStack.getItem() instanceof ItemSeeds) {
MineTweakerAPI.apply(new SetAction(seedStack, null));
}
else {
MineTweakerAPI.logError("Error when trying to set soil: Invalid argument: has to be a seed");
}
}
private static class SetAction implements IUndoableAction {
private final ItemStack seedStack;
private final ItemSeeds seed;
private final int meta;
private final BlockWithMeta soil;
private BlockWithMeta oldSoil;
public SetAction(ItemStack seed, BlockWithMeta block) {
this.seedStack = seed;
this.seed = (ItemSeeds) seed.getItem();
this.meta = seed.getItemDamage();
this.soil = block;
}
@Override
public void apply() {
GrowthRequirement growthReq = GrowthRequirementHandler.getGrowthRequirement(seed, meta);
oldSoil = growthReq.getSoil();
growthReq.setSoil(soil);
}
@Override
public boolean canUndo() {
return true;
}
@Override
public void undo() {
GrowthRequirement growthReq = GrowthRequirementHandler.getGrowthRequirement(seed, meta);
growthReq.setSoil(oldSoil);
}
@Override
public String describe() {
String soilText = soil != null ? soil.toStack().getDisplayName() : "DEFAULT";
return "Setting soil for " + seedStack.getDisplayName() + " to " + soilText;
}
@Override
public String describeUndo() {
String soilText = oldSoil != null ? oldSoil.toStack().getDisplayName() : "DEFAULT";
return "Reverting soil for " + seedStack.getDisplayName() + " to " + soilText;
}
@Override
public Object getOverrideKey() {
return null;
}
}
}
/** Provides functionality to set the light level requirement for a plant. */
@ZenClass("mods.agricraft.growing.Brightness")
public static class Brightness {

    /** Sets the [min, max) brightness range required by the given seed's plant. */
    @ZenMethod public static void set(IItemStack seed, int min, int max) {
        ItemStack seedStack = MineTweakerMC.getItemStack(seed);
        final String prefix = "Error when trying to set brightness: ";
        // Sequential guards, checked in the same order as before.
        if (!(seedStack.getItem() instanceof ItemSeeds)) {
            MineTweakerAPI.logError(prefix + "Invalid first argument: has to be a seed");
            return;
        }
        if (min < 0) {
            MineTweakerAPI.logError(prefix + "Invalid second argument: has to be larger than or equal to 0");
            return;
        }
        if (max <= min) {
            MineTweakerAPI.logError(prefix + "maximum should be higher than the minimum");
            return;
        }
        if (max > 16) {
            MineTweakerAPI.logError(prefix + "Invalid third argument: has to be smaller than or equal to 16");
            return;
        }
        MineTweakerAPI.apply(new SetAction(seedStack, min, max));
    }

    /** Undoable action applying a brightness range to a plant. */
    private static class SetAction implements IUndoableAction {
        private final ItemSeeds seed;
        private final int meta;
        private final int min;
        private final int max;
        // Previous range, captured on apply() so undo() can restore it.
        private int[] old;

        public SetAction(ItemStack stack, int min, int max) {
            this.seed = (ItemSeeds) stack.getItem();
            this.meta = stack.getItemDamage();
            this.min = min;
            this.max = max;
        }

        @Override
        public void apply() {
            GrowthRequirement growthReq = GrowthRequirementHandler.getGrowthRequirement(seed, meta);
            old = growthReq.getBrightnessRange();
            growthReq.setBrightnessRange(min, max);
        }

        @Override
        public boolean canUndo() {
            return true;
        }

        @Override
        public void undo() {
            GrowthRequirementHandler.getGrowthRequirement(seed, meta).setBrightnessRange(old[0], old[1]);
        }

        /** Display name of the affected seed stack. */
        private String seedName() {
            return new ItemStack(seed, 1, meta).getDisplayName();
        }

        @Override
        public String describe() {
            return "Setting brightness range of " + seedName() + " to [" + min + ", " + max + "[";
        }

        @Override
        public String describeUndo() {
            return "Resetting brightness range of " + seedName() + " to [" + old[0] + ", " + old[1] + "[";
        }

        @Override
        public Object getOverrideKey() {
            return null;
        }
    }
}
/** Provides functionality to set or clear a base block requirement for a plant. */
@ZenClass("mods.agricraft.growing.BaseBlock")
public static class BaseBlock {

    /**
     * Requires a block for the given seed's plant, either directly below
     * (type 1) or nearby (type 2); {@code oreDict} matches any ore-dictionary
     * equivalent of the block.
     */
    @ZenMethod
    public static void set(IItemStack seed, IItemStack base, int type, boolean oreDict) {
        if (type < 1 || type > 2) {
            MineTweakerAPI.logError("Type needs to be either 1 (below) or 2 (nearby)");
            return;
        }
        ItemStack seedIS = MineTweakerMC.getItemStack(seed);
        if (seedIS == null || !(seedIS.getItem() instanceof ItemSeeds)) {
            MineTweakerAPI.logError("Seeds has to be non-null and of type ItemSeeds.");
            return;
        }
        ItemStack baseIS = MineTweakerMC.getItemStack(base);
        if (baseIS == null || !(baseIS.getItem() instanceof ItemBlock)) {
            // Fixed typo in the user-facing message ("ot type" -> "of type").
            MineTweakerAPI.logError("Base has to be non-null and of type ItemBlock.");
            return;
        }
        BlockWithMeta baseWM = new BlockWithMeta(((ItemBlock) baseIS.getItem()).field_150939_a, baseIS.getItemDamage());
        GrowthRequirement.RequirementType reqType = type == 1 ? GrowthRequirement.RequirementType.BELOW
                : GrowthRequirement.RequirementType.NEARBY;
        MineTweakerAPI.apply(new SetAction(seedIS, baseWM, reqType, oreDict));
    }

    /** Clears any base block requirement for the given seed's plant. */
    @ZenMethod
    public static void clear(IItemStack seed) {
        ItemStack seedIS = MineTweakerMC.getItemStack(seed);
        if (seedIS == null || !(seedIS.getItem() instanceof ItemSeeds)) {
            MineTweakerAPI.logError("Seeds has to be non-null and of type ItemSeeds.");
            return;
        }
        MineTweakerAPI.apply(new SetAction(seedIS, null, GrowthRequirement.RequirementType.NONE, false));
    }

    /** Undoable action applying (or clearing) a base block requirement. */
    private static class SetAction implements IUndoableAction {
        private final ItemStack seedStack;
        private final ItemSeeds seed;
        private final int seedMeta;
        private final BlockWithMeta base;
        private final GrowthRequirement.RequirementType type;
        private final boolean oreDict;
        // Previous state, captured on apply() so undo() can restore it.
        private BlockWithMeta oldReqBlock;
        private GrowthRequirement.RequirementType oldRequiredType;
        private boolean oldReqBlockIsOreDict;

        public SetAction(ItemStack seed, BlockWithMeta base, GrowthRequirement.RequirementType type, boolean oreDict) {
            this.seedStack = seed;
            this.seed = (ItemSeeds) seed.getItem();
            this.seedMeta = seed.getItemDamage();
            this.base = base;
            this.type = type;
            this.oreDict = oreDict;
        }

        @Override
        public void apply() {
            GrowthRequirement growthReq = GrowthRequirementHandler.getGrowthRequirement(seed, seedMeta);
            oldReqBlock = growthReq.getRequiredBlock();
            oldRequiredType = growthReq.getRequiredType();
            oldReqBlockIsOreDict = growthReq.isOreDict();
            growthReq.setRequiredBlock(base, type, oreDict);
        }

        @Override
        public boolean canUndo() {
            return true;
        }

        @Override
        public void undo() {
            GrowthRequirement growthReq = GrowthRequirementHandler.getGrowthRequirement(seed, seedMeta);
            growthReq.setRequiredBlock(oldReqBlock, oldRequiredType, oldReqBlockIsOreDict);
        }

        @Override
        public String describe() {
            String blockString = base != null ? base.getBlock().getLocalizedName() : "DEFAULT";
            return "Setting base block requirement for seed " + seedStack.getDisplayName() + " to "
                    + blockString + " (" + type.toString() + ")";
        }

        @Override
        public String describeUndo() {
            String blockString = oldReqBlock != null ? oldReqBlock.getBlock().getLocalizedName() : "DEFAULT";
            return "Resetting base block requirement for seed " + seedStack.getDisplayName() + " to "
                    + blockString + " (" + oldRequiredType.toString() + ")";
        }

        @Override
        public Object getOverrideKey() {
            return null;
        }
    }
}
}
|
package info.tregmine.commands;
import java.util.List;
import static org.bukkit.ChatColor.*;
import org.bukkit.Server;
import org.bukkit.World;
import org.bukkit.scheduler.BukkitScheduler;
import org.bukkit.entity.Horse;
import org.bukkit.potion.PotionEffect;
import org.bukkit.potion.PotionEffectType;
import info.tregmine.Tregmine;
import info.tregmine.api.TregminePlayer;
import info.tregmine.api.Rank;
import info.tregmine.api.math.Distance;
/**
 * Implements the /tp command: teleports the sender to a named player, after
 * rank, visibility, teleport-shield, world and distance checks, on a delay
 * taken from the sender's rank.
 */
public class TeleportCommand extends AbstractCommand
{
    /** Delayed task that performs the actual teleport (and moves a ridden horse along). */
    private static class TeleportTask implements Runnable
    {
        private TregminePlayer to;
        private TregminePlayer from;

        public TeleportTask(TregminePlayer to, TregminePlayer from)
        {
            this.to = to;
            this.from = from;
        }

        @Override
        public void run()
        {
            // If the teleporting player is riding a horse, bring it along.
            Horse horse = null;
            if ((from.getVehicle() != null) && (from.getVehicle() instanceof Horse)){
                horse = (Horse)from.getVehicle();
            }
            if (horse != null){
                horse.eject();
                horse.teleport(to.getLocation());
                from.teleport(to.getLocation());
                horse.setPassenger(from.getDelegate());
            } else {
                from.teleport(to.getLocation());
            }
            // Brief invulnerability after arrival.
            from.setNoDamageTicks(200);
            if (!from.getRank().canDoHiddenTeleport()) {
                to.sendMessage(AQUA + from.getName() + " teleported to you!");
                PotionEffect ef =
                    new PotionEffect(PotionEffectType.BLINDNESS, 60, 100);
                from.addPotionEffect(ef);
            }
        }
    }

    public TeleportCommand(Tregmine tregmine)
    {
        super(tregmine, "tp");
    }

    /**
     * Handles "/tp <player>". Returns false only on wrong argument count so
     * the usage text is shown; all other outcomes return true.
     */
    @Override
    public boolean handlePlayer(TregminePlayer player, String[] args)
    {
        Rank rank = player.getRank();
        if (args.length != 1) {
            return false;
        }
        if (!rank.canTeleport()) {
            return true;
        }

        Server server = tregmine.getServer();
        BukkitScheduler scheduler = server.getScheduler();

        String name = args[0];
        List<TregminePlayer> candidates = tregmine.matchPlayer(name);
        if (candidates.size() != 1) {
            player.sendMessage(RED + "Can't find user.");
            return true;
        }

        TregminePlayer target = candidates.get(0);
        // Invisible players must stay undiscoverable: fail silently.
        if (target.hasFlag(TregminePlayer.Flags.INVISIBLE)) {
            return true;
        }
        if (target.hasFlag(TregminePlayer.Flags.TPSHIELD) &&
            !player.getRank().canOverrideTeleportShield()) {
            player.sendMessage(RED + target.getName() + AQUA +
                "'s teloptical deflector absorbed all motion. " +
                "Teleportation failed.");
            target.sendMessage(player.getName() + AQUA +
                "'s teleportation spell " +
                "cannot bypass your sophisticated defenses.");
            return true;
        }

        World sourceWorld = player.getWorld();
        World targetWorld = target.getWorld();
        String targetWorldName = targetWorld.getName();
        String sourceWorldName = sourceWorld.getName();
        if (!sourceWorldName.equalsIgnoreCase(targetWorldName) &&
            !rank.canTeleportBetweenWorlds()) {
            player.sendMessage(RED + "The user is in another world called "
                + BLUE + targetWorld.getName() + ".");
            // FIX: previously execution fell through here and the teleport was
            // scheduled anyway, making the cross-world permission check
            // ineffective. Abort after the denial message.
            return true;
        }

        double distance = Distance.calc2d(player.getLocation(),
            target.getLocation());
        if (distance <= rank.getTeleportDistanceLimit()) {
            player.sendMessage(AQUA + "You started teleport to " +
                target.getName() + AQUA + " in " + BLUE +
                targetWorld.getName() + ".");
            scheduler.scheduleSyncDelayedTask(
                tregmine,
                new TeleportTask(target, player),
                rank.getTeleportTimeout());
        }
        else {
            player.sendMessage(RED
                + "Your teleportation spell is not strong "
                + "enough for the longer distances.");
        }

        return true;
    }
}
|
package com.assertthat.selenium_shutterbug.utils.image.model;
import com.assertthat.selenium_shutterbug.utils.file.FileUtil;
import lombok.Getter;
import java.awt.*;
import java.awt.image.BufferedImage;
import java.io.File;
/**
 * Wrapper around a {@link BufferedImage} providing dimension checks,
 * per-pixel comparison with a tolerated deviation, and generation of a
 * visual difference image highlighting mismatching pixels.
 */
@Getter
public class ImageData {

    /** Packed ARGB value of pure red, used to mark differing pixels. */
    private final int RED_RGB = new Color(255, 0, 0).getRGB();

    private final BufferedImage image;
    private final int width;
    private final int height;

    public ImageData(BufferedImage image) {
        this.image = image;
        this.width = image.getWidth(null);
        this.height = image.getHeight(null);
    }

    /** @return true when the two images differ in width or height. */
    public boolean notEqualsDimensions(ImageData imageData) {
        return !equalsDimensions(imageData);
    }

    private boolean equalsDimensions(ImageData imageData) {
        return this.width == imageData.width && this.height == imageData.height;
    }

    /**
     * Compares the two images pixel by pixel; when they differ beyond the
     * deviation, a diff image is written to
     * {@code pathDifferenceImageFileName + ".png"}.
     *
     * @param imageData image to compare against (assumed same dimensions)
     * @param deviation tolerated normalized difference in [0, 1]
     * @param pathDifferenceImageFileName output path without extension
     * @return true when the images are equal within the deviation
     */
    public boolean equalsEachPixelsWithCreateDifferencesImage(ImageData imageData, double deviation, String pathDifferenceImageFileName) {
        return equalsEachPixelsWithCreateDifferencesImage(imageData.image, deviation, pathDifferenceImageFileName);
    }

    private boolean equalsEachPixelsWithCreateDifferencesImage(BufferedImage image, double deviation, String pathDifferenceImageFileName) {
        boolean isEqual = equalsEachPixels(image, deviation);
        if (!isEqual) {
            createDifferencesImage(image, pathDifferenceImageFileName);
        }
        return isEqual;
    }

    /**
     * Writes a PNG where equal pixels keep this image's color and differing
     * pixels are masked to red.
     */
    private void createDifferencesImage(BufferedImage image, String pathDifferenceImageFileName) {
        BufferedImage output = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);
        for (int y = 0; y < height; y++) {
            for (int x = 0; x < width; x++) {
                int rgb1 = this.image.getRGB(x, y);
                int rgb2 = image.getRGB(x, y);
                if (rgb1 != rgb2) {
                    // ANDing with RED_RGB keeps only the red channel of the
                    // original pixel, so differences render as shades of red.
                    // NOTE(review): original comment mentioned dark/light red
                    // marking moved content — presumably intentional; confirm.
                    output.setRGB(x, y, RED_RGB & rgb1);
                } else {
                    output.setRGB(x, y, rgb1);
                }
            }
        }
        FileUtil.writeImage(output, "png", new File(pathDifferenceImageFileName + ".png"));
    }

    /**
     * @param imageData image to compare against (assumed same dimensions)
     * @param deviation tolerated normalized difference in [0, 1]
     * @return true when the normalized pixel difference is within deviation
     */
    public boolean equalsEachPixels(ImageData imageData, double deviation) {
        return equalsEachPixels(imageData.image, deviation);
    }

    private boolean equalsEachPixels(BufferedImage image, double deviation) {
        double p = calculatePixelsDifference(image);
        return p == 0 || p <= deviation;
    }

    /**
     * Sums the absolute per-channel (R, G, B) differences over all pixels
     * and normalizes to [0, 1] (0 = identical, 1 = maximal difference).
     */
    private double calculatePixelsDifference(BufferedImage image) {
        long diff = 0;
        for (int y = 0; y < height; y++) {
            for (int x = 0; x < width; x++) {
                int rgb1 = this.image.getRGB(x, y);
                int rgb2 = image.getRGB(x, y);
                diff += Math.abs(((rgb1 >> 16) & 0xff) - ((rgb2 >> 16) & 0xff)); // red
                diff += Math.abs(((rgb1 >> 8) & 0xff) - ((rgb2 >> 8) & 0xff));   // green
                diff += Math.abs((rgb1 & 0xff) - (rgb2 & 0xff));                 // blue
            }
        }
        // BUG FIX: compute the divisor in double arithmetic. The original
        // "width * height * 3" multiplied in int and could overflow for very
        // large images before being widened to double.
        double n = (double) width * height * 3;
        return diff / n / 255.0;
    }
}
|
package com.buuz135.industrial.tile.animal;
import com.buuz135.industrial.proxy.FluidsRegistry;
import com.buuz135.industrial.tile.WorkingAreaElectricMachine;
import net.minecraft.entity.passive.EntityCow;
import net.minecraft.entity.passive.EntitySheep;
import net.minecraft.init.Items;
import net.minecraft.inventory.Slot;
import net.minecraft.item.EnumDyeColor;
import net.minecraft.item.ItemStack;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.math.AxisAlignedBB;
import net.minecraft.util.math.BlockPos;
import net.minecraftforge.fluids.FluidStack;
import net.minecraftforge.fluids.IFluidTank;
import net.minecraftforge.items.ItemHandlerHelper;
import net.minecraftforge.items.ItemStackHandler;
import net.ndrei.teslacorelib.containers.BasicTeslaContainer;
import net.ndrei.teslacorelib.containers.FilteredSlot;
import net.ndrei.teslacorelib.gui.BasicTeslaGuiContainer;
import net.ndrei.teslacorelib.gui.IGuiContainerPiece;
import net.ndrei.teslacorelib.gui.TiledRenderedGuiPiece;
import net.ndrei.teslacorelib.inventory.BoundingRectangle;
import net.ndrei.teslacorelib.inventory.ColoredItemHandler;
import java.util.List;
/**
 * Machine tile that harvests renewable animal resources within its working
 * area: shears unsheared sheep into the item output slots and milks every
 * cow in range into the milk tank.
 */
public class AnimalResourceHarvesterTile extends WorkingAreaElectricMachine {

    // 3x4 grid of output slots receiving sheared drops.
    private ItemStackHandler outItems;
    // Tank filled with milk produced by nearby cows.
    private IFluidTank milkTank;

    public AnimalResourceHarvesterTile() {
        // Tile type id derived from the class name so it is stable per class.
        super(AnimalResourceHarvesterTile.class.getName().hashCode());
    }

    @Override
    protected void initializeInventories() {
        super.initializeInventories();
        // 8000 mB milk tank rendered as a white column in the GUI.
        milkTank = this.addFluidTank(FluidsRegistry.MILK,8000, EnumDyeColor.WHITE,"Milk tank",new BoundingRectangle(50, 25, 18, 54));
        outItems = new ItemStackHandler(3 * 4);
        // NOTE(review): the "Fish output" label looks like a copy-paste from
        // a fish-harvester tile; presumably it should name animal products.
        // Left unchanged here because it is a runtime string.
        this.addInventory(new ColoredItemHandler(outItems, EnumDyeColor.ORANGE, "Fish output", new BoundingRectangle(18 * 5+3, 25, 18 * 6, 18 * 3)) {
            @Override
            public boolean canInsertItem(int slot, ItemStack stack) {
                // Output-only inventory: nothing may be piped in.
                return false;
            }

            @Override
            public boolean canExtractItem(int slot) {
                return true;
            }

            @Override
            public List<Slot> getSlots(BasicTeslaContainer container) {
                List<Slot> slots = super.getSlots(container);
                BoundingRectangle box = this.getBoundingBox();
                // Lay out the 12 slots as a 3-row x 4-column grid.
                int i = 0;
                for (int y = 0; y < 3; y++) {
                    for (int x = 0; x < 4; x++) {
                        slots.add(new FilteredSlot(this.getItemHandlerForContainer(), i, box.getLeft() + 1 + x * 18, box.getTop() + 1 + y * 18));
                        ++i;
                    }
                }
                return slots;
            }

            @Override
            public List<IGuiContainerPiece> getGuiContainerPieces(BasicTeslaGuiContainer container) {
                List<IGuiContainerPiece> pieces = super.getGuiContainerPieces(container);
                BoundingRectangle box = this.getBoundingBox();
                // Background tiles drawn behind the 4x3 slot grid.
                pieces.add(new TiledRenderedGuiPiece(box.getLeft(), box.getTop(), 18, 18,
                        4, 3,
                        BasicTeslaGuiContainer.MACHINE_BACKGROUND, 108, 225, EnumDyeColor.ORANGE));
                return pieces;
            }
        });
        this.addInventoryToStorage(outItems, "animal_resource_harvester_out");
    }

    @Override
    public AxisAlignedBB getWorkingArea() {
        // Area of radius r in front of the machine, h blocks high, offset
        // one block past the facing side.
        int r = 2;
        int h = 2;
        EnumFacing f = this.getFacing().getOpposite();
        BlockPos corner1 = new BlockPos(0, 0, 0).offset(f, r + 1);
        return this.getBlockType().getSelectedBoundingBox(this.world.getBlockState(this.pos), this.world, this.pos).offset(corner1).expand(r, 0, r).setMaxY(this.getPos().getY() + h);
    }

    @Override
    protected float performWork() {
        // Shear at most one unsheared sheep per work cycle, then stop.
        List<EntitySheep> animals = this.world.getEntitiesWithinAABB(EntitySheep.class, getWorkingArea());
        for (EntitySheep sheep : animals) {
            if (!sheep.getSheared()) {
                List<ItemStack> stacks = sheep.onSheared(new ItemStack(Items.SHEARS), this.world, null, 0);
                for (ItemStack stack : stacks){
                    ItemHandlerHelper.insertItem(outItems,stack,false);
                }
                return 1;
            }
        }
        // No sheep to shear: milk every cow in range (1000 mB per cow).
        List<EntityCow> cows = this.world.getEntitiesWithinAABB(EntityCow.class, getWorkingArea());
        milkTank.fill(new FluidStack(FluidsRegistry.MILK,cows.size()*1000),true);
        return 1;
    }
}
|
package com.freetymekiyan.algorithms.level.hard;
import java.util.ArrayList;
import java.util.List;
/**
* 282. Expression Add Operators
* <p>
* Given a string that contains only digits 0-9 and a target value, return all possibilities to add binary operators
* (not unary) +, -, or * between the digits so they evaluate to the target value.
* <p>
* Examples:
* "123", 6 -> ["1+2+3", "1*2*3"]
* "232", 8 -> ["2*3+2", "2+3*2"]
* "105", 5 -> ["1*0+5","10-5"]
* "00", 0 -> ["0+0", "0-0", "0*0"]
* "3456237490", 9191 -> []
* <p>
* Company Tags: Google, Facebook
* Tags: Divide and Conquer
* Similar Problems: (M) Evaluate Reverse Polish Notation, (H) Basic Calculator, (M) Basic Calculator II, (M) Different
* Ways to Add Parentheses
*/
public class ExpressionAddOperators {

    /**
     * Backtracking entry point. Tries every way to split {@code num} into
     * operands and join them with +, - or *, collecting every expression
     * that evaluates to {@code target}.
     *
     * @param num    digit string (0-9 only) to insert operators into
     * @param target value the expression must evaluate to
     * @return all valid expressions over num that evaluate to target
     */
    public List<String> addOperators(String num, int target) {
        List<String> res = new ArrayList<>();
        dfs(res, num.toCharArray(), target, 0, new StringBuilder(), 0, 0);
        return res;
    }

    /**
     * Backtracking worker using a shared StringBuilder to avoid creating a
     * new String per recursive call.
     * <p>
     * State carried between calls: the current position in num, the formula
     * built so far, its evaluated value, and — because * binds tighter than
     * + or - — the value of the most recently appended multiplicative term.
     * On a "*" branch, the previous term is subtracted out of eval and
     * re-added multiplied by the current operand.
     *
     * @param exprs  result expressions found so far
     * @param num    original number as a char array (faster than charAt)
     * @param target the target value to find
     * @param start  starting index of the next operand in num
     * @param expr   expression built so far
     * @param eval   evaluated value of expr (long to delay overflow)
     * @param multed value of the last multiplicative term in expr
     */
    private void dfs(List<String> exprs, char[] num, int target, int start, StringBuilder expr, long eval,
                     long multed) {
        if (start == num.length) { // Reached the end of num.
            if (target == eval) { // Found target.
                exprs.add(expr.toString());
            }
            return;
        }
        long cur = 0;
        for (int end = start; end < num.length; end++) {
            if (num[start] == '0' && end != start) { // No multi-digit operand may start with 0.
                break;
            }
            cur = 10 * cur + (num[end] - '0'); // Build the operand incrementally.
            int len = expr.length();
            if (start == 0) { // First operand: no operator in front of it.
                dfs(exprs, num, target, end + 1, expr.append(cur), cur, cur);
                expr.setLength(len); // Backtrack: reset the builder.
            } else {
                dfs(exprs, num, target, end + 1, expr.append("+").append(cur), eval + cur, cur);
                expr.setLength(len);
                dfs(exprs, num, target, end + 1, expr.append("-").append(cur), eval - cur, -cur);
                expr.setLength(len);
                // Multiplication binds tighter: undo the previous term
                // (multed), then add multed * cur in its place.
                dfs(exprs, num, target, end + 1, expr.append("*").append(cur), eval - multed + multed * cur,
                        multed * cur);
                expr.setLength(len);
            }
        }
    }

    /**
     * More concise backtracking variant. Builds a new String per call, so it
     * allocates more than {@link #addOperators(String, int)}.
     *
     * @param num    digit string (0-9 only) to insert operators into
     * @param target value the expression must evaluate to
     * @return all valid expressions over num that evaluate to target
     */
    public List<String> addOperators2(String num, int target) {
        List<String> res = new ArrayList<>();
        dfs(num, target, 0, "", 0, 0, res);
        return res;
    }

    /**
     * Worker for {@link #addOperators2}. Same state as the other dfs, with m
     * playing the role of the last multiplicative term.
     */
    private void dfs(String num, int target, long eval, String expr, long m, int index, List<String> exprs) {
        if (index == num.length()) {
            if (eval == target) {
                exprs.add(expr);
            }
            return;
        }
        for (int end = index + 1; end <= num.length(); end++) {
            if (num.charAt(index) == '0' && end != index + 1) { // No multi-digit operand may start with 0.
                break;
            }
            // parseLong instead of valueOf: avoids a needless Long box.
            long n = Long.parseLong(num.substring(index, end));
            if (index == 0) {
                dfs(num, target, eval + n, "" + n, n, end, exprs);
            } else {
                dfs(num, target, eval + n, expr + "+" + n, n, end, exprs);
                dfs(num, target, eval - n, expr + "-" + n, -n, end, exprs);
                dfs(num, target, eval - m + m * n, expr + "*" + n, m * n, end, exprs);
            }
        }
    }
}
|
package com.github.davidmoten.rx.internal.operators;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.EOFException;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.RandomAccessFile;
import java.util.Collection;
import java.util.Iterator;
import java.util.concurrent.atomic.AtomicLong;
import com.github.davidmoten.rx.buffertofile.DataSerializer;
import com.github.davidmoten.util.Preconditions;
/**
 * Single-producer single-consumer queue that spills serialized elements to a
 * backing file. Writes accumulate in an in-memory write buffer that is
 * flushed to the file when full; reads are served from a read buffer refilled
 * from the file, or directly from the write buffer once the reader catches up
 * with the writer.
 *
 * NOTE(review): the unsynchronized read-side fields rely on exactly one
 * producer and one consumer thread — confirm with callers.
 */
class FileBasedSPSCQueue<T> implements QueueWithResources<T> {

    // --- read-side state, owned by the single consumer thread ---
    int readBufferPosition = 0;
    long readPosition = 0;
    final byte[] readBuffer;
    int readBufferLength = 0;
    // --- write-side state; writeBuffer contents guarded by writeLock ---
    final byte[] writeBuffer;
    final File file;
    final DataSerializer<T> serializer;
    // Number of elements currently queued (offered but not yet polled).
    final AtomicLong size;
    // File offset up to which data has been flushed to disk.
    volatile long writePosition;
    // Next free index within writeBuffer.
    volatile int writeBufferPosition;
    final Object writeLock = new Object();
    // guarded by accessLock
    private FileAccessor accessor;
    private final Object accessLock = new Object();
    private final DataOutputStream output;
    private final DataInputStream input;
    private volatile boolean unsubscribed = false;

    FileBasedSPSCQueue(int bufferSizeBytes, File file, DataSerializer<T> serializer) {
        Preconditions.checkArgument(bufferSizeBytes > 0, "bufferSizeBytes must be greater than zero");
        Preconditions.checkNotNull(file);
        Preconditions.checkNotNull(serializer);
        this.readBuffer = new byte[bufferSizeBytes];
        this.writeBuffer = new byte[bufferSizeBytes];
        try {
            // Ensure the backing file exists before opening accessors on it.
            file.getParentFile().mkdirs();
            file.createNewFile();
            this.file = file;
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        this.accessor = new FileAccessor(file);
        this.serializer = serializer;
        this.size = new AtomicLong(0);
        this.output = new DataOutputStream(new QueueWriter());
        this.input = new DataInputStream(new QueueReader());
    }

    /** Pair of handles on the backing file: one for writing, one for reading. */
    private final static class FileAccessor {
        final RandomAccessFile fWrite;
        final RandomAccessFile fRead;

        FileAccessor(File file) {
            try {
                this.fWrite = new RandomAccessFile(file, "rw");
                this.fRead = new RandomAccessFile(file, "r");
            } catch (FileNotFoundException e) {
                throw new RuntimeException(e);
            }
        }

        public void close() {
            try {
                fWrite.close();
                fRead.close();
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }
    }

    /**
     * OutputStream facade used by the serializer. Bytes accumulate in
     * writeBuffer; when it fills, the whole buffer is written to the file at
     * writePosition, which then advances by one buffer length.
     */
    private final class QueueWriter extends OutputStream {

        @Override
        public void write(int b) throws IOException {
            if (writeBufferPosition < writeBuffer.length) {
                writeBuffer[writeBufferPosition] = (byte) b;
                writeBufferPosition++;
            } else synchronized (writeLock) {
                // Buffer full: flush it, then start a new buffer with this byte.
                accessor.fWrite.seek(writePosition);
                accessor.fWrite.write(writeBuffer);
                writeBuffer[0] = (byte) b;
                writeBufferPosition = 1;
                writePosition += writeBuffer.length;
            }
        }
    }

    // Shared sentinel: thrown to signal "queue empty" without paying for a
    // fresh stack trace on every empty poll.
    private static final EOFException EOF = new EOFException();

    /**
     * InputStream facade used by the deserializer. Serves bytes from the
     * read buffer, refilling from the file; once the reader has consumed all
     * flushed data it reads straight out of the in-memory write buffer,
     * retrying if the writer flushed concurrently.
     */
    private final class QueueReader extends InputStream {

        @Override
        public int read() throws IOException {
            if (size.get() == 0) {
                throw EOF;
            } else {
                if (readBufferPosition < readBufferLength) {
                    byte b = readBuffer[readBufferPosition];
                    readBufferPosition++;
                    return toUnsignedInteger(b);
                } else {
                    // before reading more from file we see if we can emit
                    // directly from the writeBuffer by checking if the read
                    // position is past the write position
                    while (true) {
                        long wp;
                        int wbp;
                        synchronized (writeLock) {
                            // Snapshot the writer's flushed offset and
                            // in-buffer position atomically.
                            wp = writePosition;
                            wbp = writeBufferPosition;
                        }
                        long over = wp - readPosition;
                        if (over > 0) {
                            // read position is not past the write position
                            readBufferLength = (int) Math.min(readBuffer.length, over);
                            synchronized (accessLock) {
                                if (accessor == null) {
                                    // Resources were freed; reopen lazily.
                                    accessor = new FileAccessor(file);
                                }
                                accessor.fRead.seek(readPosition);
                                // NOTE(review): read(...) may return fewer
                                // bytes than requested — confirm the flushed
                                // region is always fully readable here.
                                accessor.fRead.read(readBuffer, 0, readBufferLength);
                            }
                            readPosition += readBufferLength;
                            readBufferPosition = 1;
                            return toUnsignedInteger(readBuffer[0]);
                        } else {
                            // read position is at or past the write position
                            int index = -(int) over;
                            if (index >= writeBuffer.length) {
                                throw EOF;
                            } else {
                                int b = toUnsignedInteger(writeBuffer[index]);
                                final boolean writeBufferUnchanged;
                                synchronized (writeLock) {
                                    // Valid only if the writer did not flush
                                    // between the snapshot and now.
                                    writeBufferUnchanged = wp == writePosition && wbp == writeBufferPosition;
                                    // if (writeBufferUnchanged) {
                                    // // reset write buffer a bit and the readPosition so that we avoid writing
                                    // // the full contents of the write buffer
                                    // if (index >= writeBuffer.length / 2 && index < writeBufferPosition) {
                                    // System.arraycopy(writeBuffer, index + 1, writeBuffer, 0,
                                    // writeBufferPosition - index - 1);
                                    // writeBufferPosition -= index + 1;
                                    // readPosition = writePosition;
                                    // } else {
                                    // readPosition++;
                                }
                                if (writeBufferUnchanged) {
                                    readPosition++;
                                    return b;
                                }
                                // Writer flushed concurrently; retry the loop.
                            }
                        }
                    }
                }
            }
        }
    }

    // Converts a signed byte to its unsigned int value (0-255).
    private static int toUnsignedInteger(byte b) {
        return b & 0x000000FF;
    }

    @Override
    public void unsubscribe() {
        if (unsubscribed) {
            return;
        }
        unsubscribed = true;
        // Close the file handles, then delete the backing file.
        synchronized (accessLock) {
            if (accessor != null) {
                accessor.close();
                accessor = null;
            }
        }
        if (!file.delete()) {
            throw new RuntimeException("could not delete file " + file);
        }
    }

    @Override
    public boolean isUnsubscribed() {
        return unsubscribed;
    }

    @Override
    public boolean offer(T t) {
        try {
            serializer.serialize(output, t);
            size.incrementAndGet();
            return true;
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    @Override
    public T poll() {
        try {
            T t = serializer.deserialize(input, Integer.MAX_VALUE);
            size.decrementAndGet();
            return t;
        } catch (EOFException e) {
            // Queue empty (shared EOF sentinel thrown by QueueReader).
            return null;
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    @Override
    public boolean isEmpty() {
        return size.get() == 0;
    }

    @Override
    public void freeResources() {
        // Close file handles; QueueReader reopens them lazily if needed.
        synchronized (accessLock) {
            if (accessor != null) {
                accessor.close();
            }
            accessor = null;
        }
    }

    @Override
    public long resourcesSize() {
        // Bytes flushed to disk so far.
        return writePosition;
    }

    // The remaining Queue/Collection operations are intentionally
    // unsupported; only offer/poll/isEmpty are part of this queue's contract.
    @Override
    public T element() {
        throw new UnsupportedOperationException();
    }

    @Override
    public T peek() {
        throw new UnsupportedOperationException();
    }

    @Override
    public int size() {
        throw new UnsupportedOperationException();
    }

    @Override
    public boolean add(T e) {
        throw new UnsupportedOperationException();
    }

    @Override
    public T remove() {
        throw new UnsupportedOperationException();
    }

    @Override
    public boolean contains(Object o) {
        throw new UnsupportedOperationException();
    }

    @Override
    public Iterator<T> iterator() {
        throw new UnsupportedOperationException();
    }

    @Override
    public Object[] toArray() {
        throw new UnsupportedOperationException();
    }

    @SuppressWarnings("hiding")
    @Override
    public <T> T[] toArray(T[] a) {
        throw new UnsupportedOperationException();
    }

    @Override
    public boolean remove(Object o) {
        throw new UnsupportedOperationException();
    }

    @Override
    public boolean containsAll(Collection<?> c) {
        throw new UnsupportedOperationException();
    }

    @Override
    public boolean addAll(Collection<? extends T> c) {
        throw new UnsupportedOperationException();
    }

    @Override
    public boolean removeAll(Collection<?> c) {
        throw new UnsupportedOperationException();
    }

    @Override
    public boolean retainAll(Collection<?> c) {
        throw new UnsupportedOperationException();
    }

    @Override
    public void clear() {
        throw new UnsupportedOperationException();
    }
}
|
package com.github.dockerjava.jaxrs.util;
import java.io.EOFException;
import java.io.IOException;
import java.nio.charset.Charset;
import javax.ws.rs.client.ClientRequestContext;
import javax.ws.rs.client.ClientResponseContext;
import javax.ws.rs.client.ClientResponseFilter;
import javax.ws.rs.core.MediaType;
import org.apache.commons.io.IOUtils;
import com.github.dockerjava.api.BadRequestException;
import com.github.dockerjava.api.ConflictException;
import com.github.dockerjava.api.DeviceIsBusyException;
import com.github.dockerjava.api.DockerException;
import com.github.dockerjava.api.InternalServerErrorException;
import com.github.dockerjava.api.NotAcceptableException;
import com.github.dockerjava.api.NotFoundException;
import com.github.dockerjava.api.NotModifiedException;
import com.github.dockerjava.api.UnauthorizedException;
/**
* This {@link ClientResponseFilter} implementation detects http status codes and throws {@link DockerException}s
*
* @author marcus
*
*/
public class ResponseStatusExceptionFilter implements ClientResponseFilter {

    /**
     * Maps HTTP status codes of a response to the corresponding
     * {@link DockerException} subtype. Successful statuses pass through.
     */
    @Override
    public void filter(ClientRequestContext requestContext, ClientResponseContext responseContext) throws IOException {
        final int status = responseContext.getStatus();

        // Successful responses require no action.
        if (status == 200 || status == 201 || status == 204) {
            return;
        }

        // Exactly one exception is thrown below, so the body is read once.
        final String message = getBodyAsMessage(responseContext);
        if (status == 304) {
            throw new NotModifiedException(message);
        }
        if (status == 400) {
            throw new BadRequestException(message);
        }
        if (status == 401) {
            throw new UnauthorizedException(message);
        }
        if (status == 404) {
            throw new NotFoundException(message);
        }
        if (status == 406) {
            throw new NotAcceptableException(message);
        }
        if (status == 409) {
            throw new ConflictException(message);
        }
        if (status == 500) {
            throw new InternalServerErrorException(message);
        }
        if (status == 599) {
            throw new DeviceIsBusyException(message);
        }
        throw new DockerException(message, status);
    }

    /**
     * Reads the response entity as a string, honoring the charset declared
     * in the Content-Type header when present.
     *
     * @param responseContext response whose body should be read
     * @return the body as text, or null when there is no entity, the content
     *         length is unknown, or the stream ends prematurely
     * @throws IOException on read failures other than a premature EOF
     */
    public String getBodyAsMessage(ClientResponseContext responseContext)
            throws IOException {
        if (!responseContext.hasEntity()) {
            return null;
        }
        final int contentLength = responseContext.getLength();
        if (contentLength == -1) {
            // Length unknown; the original contract returns null here.
            return null;
        }
        final byte[] body = new byte[contentLength];
        try {
            IOUtils.readFully(responseContext.getEntityStream(), body);
        } catch (EOFException e) {
            return null;
        }

        Charset charset = null;
        final MediaType mediaType = responseContext.getMediaType();
        if (mediaType != null) {
            final String charsetName = mediaType.getParameters().get("charset");
            if (charsetName != null) {
                try {
                    charset = Charset.forName(charsetName);
                } catch (Exception e) {
                    // Unknown/invalid charset name: fall back to the default.
                }
            }
        }
        if (charset == null) {
            charset = Charset.defaultCharset();
        }
        return new String(body, charset);
    }
}
|
package com.github.dylon.liblevenshtein.levenshtein;
import lombok.AccessLevel;
import lombok.NonNull;
import lombok.Setter;
import lombok.experimental.Accessors;
import lombok.experimental.FieldDefaults;
import it.unimi.dsi.fastutil.PriorityQueue;
import it.unimi.dsi.fastutil.chars.CharIterator;
import com.github.dylon.liblevenshtein.collection.dawg.ITransitionFunction;
import com.github.dylon.liblevenshtein.collection.dawg.IFinalFunction;
@Setter
@Accessors(fluent=true)
@FieldDefaults(level=AccessLevel.PROTECTED)
public abstract class AbstractTransducer<DictionaryNode>
  implements ITransducer<DictionaryNode> {

  /**
   * Default, maximum number of spelling errors candidates may have from the
   * query term.
   */
  int defaultMaxDistance = Integer.MAX_VALUE;

  /**
   * Returns state-transition functions for specific, max edit distances
   */
  ILevenshteinTransitionFunctionFactory stateTransitionFactory;

  /**
   * Returns instances of some, generic collection that is used to store
   * spelling candidates for the query term.
   */
  ICandidateCollectionFactory<DictionaryNode> candidatesFactory;

  /**
   * Returns instances of priority queues used for tracking the dictionary,
   * spelling candidates most-similar to the query term.
   */
  INearestCandidatesFactory<DictionaryNode> nearestCandidatesFactory;

  /**
   * Returns instances of a data structure used for maintaining information
   * regarding each step in intersecting the dictionary automaton with the
   * Levenshtein automaton.
   */
  IIntersectionFactory<DictionaryNode> intersectionFactory;

  /**
   * Determines the minimum distance at which a Levenshtein state may be
   * considered from the query term, based on its length.
   */
  IDistanceFunction minDistance;

  /**
   * Returns whether a dictionary node is the final character in some term.
   */
  IFinalFunction<DictionaryNode> isFinal;

  /**
   * Transition function for dictionary nodes.
   */
  ITransitionFunction<DictionaryNode> dictionaryTransition;

  /**
   * State at which to begin traversing the Levenshtein automaton.
   */
  int[][] initialState;

  /**
   * Root node of the dictionary, at which to begin searching for spelling
   * candidates.
   */
  DictionaryNode dictionaryRoot;

  /** Pools instances of characteristic vectors, indexed by vector length. */
  private boolean[][] characteristicVectors = new boolean[32][];

  /**
   * Returns the characteristic vector of the term, from its characters between
   * index i and index k. The characteristic vector contains true at each index
   * where the corresponding character of the term is the value of x, and false
   * elsewhere.
   * @param x char to find all occurrences of in the relevant substring of term
   * @param term Term in which to find all occurrences of the character, x
   * @param k Length of the substring of term to examine
   * @param i Base-index of the substring of term to examine
   * @return Characteristic vector marking where x appears in the relevant
   * substring of term.
   */
  private boolean[] characteristicVector(
      final char x,
      final String term,
      final int k,
      final int i) {
    boolean[] characteristicVector;
    if (k >= characteristicVectors.length) {
      // Grow the pool: double its size, or go to 2k when k is larger still.
      final int m = characteristicVectors.length << 1;
      final int n = (m > k) ? m : (k << 1);
      // NOTE(review): this local deliberately shadows the field of the same
      // name; the enlarged copy is published to the field below.
      final boolean[][] characteristicVectors = new boolean[n][];
      for (int i_2 = 0; i_2 < this.characteristicVectors.length; ++i_2) {
        characteristicVectors[i_2] = this.characteristicVectors[i_2];
      }
      characteristicVector = new boolean[k];
      characteristicVectors[k] = characteristicVector;
      this.characteristicVectors = characteristicVectors;
    }
    else {
      characteristicVector = characteristicVectors[k];
      if (null == characteristicVector) {
        // Lazily allocate the vector for this length on first use.
        characteristicVector = new boolean[k];
        characteristicVectors[k] = characteristicVector;
      }
    }
    // Mark each position of term[i .. i+k) that equals x. The pooled array
    // is overwritten in full, so stale contents are harmless.
    for (int j = 0; j < k; ++j) {
      characteristicVector[j] = (x == term.charAt(i + j));
    }
    return characteristicVector;
  }

  /**
   * Sets the default, maximum number of spelling errors candidates may have
   * from the query term.
   * @param defaultMaxDistance Default, maximum number of spelling errors
   * candidates may have from the query term.
   * @throws IllegalArgumentException when defaultMaxDistance is negative
   */
  public void defaultMaxDistance(final int defaultMaxDistance) {
    if (defaultMaxDistance < 0) {
      throw new IllegalArgumentException(
          "defaultMaxDistance must be non-negative");
    }
    this.defaultMaxDistance = defaultMaxDistance;
  }

  /**
   * {@inheritDoc}
   */
  @Override
  public ICandidateCollection<DictionaryNode> transduce(
      @NonNull final String term) {
    return transduce(term, defaultMaxDistance);
  }

  /**
   * {@inheritDoc}
   */
  @Override
  public ICandidateCollection<DictionaryNode> transduce(
      @NonNull final String term,
      final int maxDistance) {
    if (maxDistance < 0) {
      throw new IllegalArgumentException(
          "maxDistance must be non-negative: " + maxDistance);
    }

    final int termLength = term.length();
    final ILevenshteinTransitionFunction stateTransition =
      stateTransitionFactory.build(maxDistance);
    final ICandidateCollection<DictionaryNode> candidates =
      candidatesFactory.build();

    // so results can be ranked by similarity to the query term, etc.
    final PriorityQueue<Intersection<DictionaryNode>> nearestCandidates =
      nearestCandidatesFactory.build(term);

    // Seed the search with the dictionary root paired with the Levenshtein
    // automaton's initial state.
    nearestCandidates.enqueue(
        intersectionFactory.build(
          "",
          dictionaryRoot,
          initialState,
          minDistance.at(initialState, termLength)));

    // Width of the characteristic-vector window: 2 * maxDistance + 1.
    final int a = (maxDistance << 1) + 1;

    Intersection<DictionaryNode> intersection = null;

    try {
      // Best-first traversal of the intersection of the dictionary automaton
      // with the Levenshtein automaton.
      while (!nearestCandidates.isEmpty()) {
        intersection = nearestCandidates.dequeue();
        final String candidate = intersection.candidate();
        final DictionaryNode dictionaryNode = intersection.dictionaryNode();
        final int[][] levenshteinState = intersection.levenshteinState();
        final int i = levenshteinState[0][0];
        final int b = termLength - i;
        // Clamp the window to the characters remaining in the term.
        final int k = (a < b) ? a : b;

        final CharIterator labels = dictionaryTransition.of(dictionaryNode);
        while (labels.hasNext()) {
          final char label = labels.nextChar();
          final DictionaryNode nextDictionaryNode =
            dictionaryTransition.of(dictionaryNode, label);
          final boolean[] characteristicVector =
            characteristicVector(label, term, k, i);
          final int[][] nextLevenshteinState =
            stateTransition.of(levenshteinState, /*given*/ characteristicVector);
          // A null next state means this path exceeds maxDistance: prune it.
          if (null != nextLevenshteinState) {
            final String nextCandidate = candidate + label;
            final int distance =
              minDistance.at(nextLevenshteinState, termLength);
            enqueueAll(
                nearestCandidates,
                candidates,
                nextCandidate,
                nextDictionaryNode,
                nextLevenshteinState,
                distance,
                maxDistance);
          }
        }
      }
    }
    catch (final QueueFullException exception) {
      // Nothing to do, this was expected (early termination) ...
    }
    finally {
      // Return pooled resources regardless of how the search ended.
      if (null != intersection) {
        intersectionFactory.recycle(intersection);
      }

      nearestCandidatesFactory.recycle(nearestCandidates);
      stateTransitionFactory.recycle(stateTransition);
    }

    return candidates;
  }

  /**
   * Enqueues into the results collection, candidates, all of the spelling
   * candidates corresponding to the dictionary node.
   * @param nearestCandidates Maintains which nodes to explore next
   * @param candidates Collection of spelling candidates
   * @param candidate Prefix (maybe whole term) of some spelling candidate
   * @param dictionaryNode Current node in the dictionary automaton
   * @param levenshteinState Current state in the Levenshtein automaton
   * @param distance Minimum distance corresponding to levenshteinState
   * @param maxDistance Maximum number of spelling errors candidates may have
   * @throws QueueFullException When the results queue can no longer accept
   * spelling candidates. This signifies that the transducer should return
   * immediately.
   */
  protected abstract void enqueueAll(
      PriorityQueue<Intersection<DictionaryNode>> nearestCandidates,
      ICandidateCollection<DictionaryNode> candidates,
      String candidate,
      DictionaryNode dictionaryNode,
      int[][] levenshteinState,
      int distance,
      int maxDistance);

  /**
   * Specifies when transduce(...) should return early. This is thrown
   * (optionally) from enqueueAll(...) when not all the candidate terms where
   * queued into the results.
   */
  protected static class QueueFullException extends RuntimeException {
    static final long serialVersionUID = 1L;
  }
}
|
package com.github.koraktor.steamcondenser.steam.sockets;
import java.io.IOException;
import java.net.InetAddress;
import java.util.ArrayList;
import java.util.concurrent.TimeoutException;
import java.util.logging.Logger;
import com.github.koraktor.steamcondenser.exceptions.RCONBanException;
import com.github.koraktor.steamcondenser.exceptions.RCONNoAuthException;
import com.github.koraktor.steamcondenser.exceptions.SteamCondenserException;
import com.github.koraktor.steamcondenser.exceptions.UncompletePacketException;
import com.github.koraktor.steamcondenser.steam.packets.SteamPacket;
import com.github.koraktor.steamcondenser.steam.packets.SteamPacketFactory;
import com.github.koraktor.steamcondenser.steam.packets.rcon.RCONGoldSrcRequestPacket;
import com.github.koraktor.steamcondenser.steam.packets.rcon.RCONGoldSrcResponsePacket;
/**
* @author Sebastian Staudt
*/
public class GoldSrcSocket extends QuerySocket
{
private boolean isHLTV;
private long rconChallenge = -1;
/**
*
* @param ipAddress
* @param portNumber
* @throws IOException
*/
public GoldSrcSocket(InetAddress ipAddress, int portNumber)
throws IOException
{
super(ipAddress, portNumber);
this.isHLTV = false;
}
/**
*
* @param ipAddress
* @param portNumber
* @param isHLTV
* @throws IOException
*/
public GoldSrcSocket(InetAddress ipAddress, int portNumber, boolean isHLTV)
throws IOException
{
super(ipAddress, portNumber);
this.isHLTV = isHLTV;
}
/**
* @return The SteamPacket received from the server
* @throws IOException
* @throws com.github.koraktor.steamcondenser.exceptions.SteamCondenserException
* @throws TimeoutException
*/
public SteamPacket getReply()
throws IOException, SteamCondenserException, TimeoutException
{
int bytesRead;
SteamPacket packet;
bytesRead = this.receivePacket(1400);
if(this.packetIsSplit()) {
byte[] splitData;
int packetCount, packetNumber;
int requestId;
byte packetNumberAndCount;
ArrayList<byte[]> splitPackets = new ArrayList<byte[]>();
do {
// Parsing of split packet headers
requestId = Integer.reverseBytes(this.buffer.getInt());
packetNumberAndCount = this.buffer.get();
packetCount = packetNumberAndCount & 0xF;
packetNumber = (packetNumberAndCount >> 4) + 1;
// Caching of split packet Data
splitData = new byte[this.buffer.remaining()];
this.buffer.get(splitData);
splitPackets.ensureCapacity(packetCount);
splitPackets.add(packetNumber - 1, splitData);
Logger.getLogger("global").info("Received packet #" + packetNumber + " of " + packetCount + " for request ID " + requestId + ".");
// Receiving the next packet
if(splitPackets.size() < packetCount) {
try {
bytesRead = this.receivePacket();
}
catch(TimeoutException e) {
bytesRead = 0;
}
}
else {
bytesRead = 0;
}
} while(bytesRead > 0 && this.packetIsSplit());
packet = SteamPacketFactory.reassemblePacket(splitPackets);
}
else {
packet = this.getPacketFromData();
}
Logger.getLogger("global").info("Received packet of type \"" + packet.getClass().getSimpleName() + "\"");
return packet;
}
/**
* Sends a RCON command with the specified password to a GoldSrc server
* @param password RCON password to use
* @param command RCON command to send to the server
* @return The response send by the server
* @throws IOException
* @throws TimeoutException
* @throws SteamCondenserException
* @throws UncompletePacketException
*/
public String rconExec(String password, String command)
throws IOException, TimeoutException, SteamCondenserException
{
if(this.rconChallenge == -1 || this.isHLTV) {
this.rconGetChallenge();
}
this.rconSend("rcon " + this.rconChallenge + " " + password + " " + command);
String response;
if(this.isHLTV) {
try {
response = ((RCONGoldSrcResponsePacket)this.getReply()).getResponse();
}
catch(TimeoutException e) {
response = "";
}
}
else {
response = ((RCONGoldSrcResponsePacket)this.getReply()).getResponse();
}
if(response.trim().equals("Bad rcon_password")) {
throw new RCONNoAuthException();
} else if(response.trim().equals("You have been banned from this server")) {
throw new RCONBanException();
}
String responsePart;
try {
do {
responsePart = ((RCONGoldSrcResponsePacket)this.getReply()).getResponse();
response += responsePart;
} while(true);
}
catch(TimeoutException e) {
}
return response;
}
/**
* @throws SteamCondenserException
* @throws NumberFormatException
*
*/
public void rconGetChallenge()
throws IOException, TimeoutException, NumberFormatException, SteamCondenserException
{
this.rconSend("challenge rcon");
String response = ((RCONGoldSrcResponsePacket)this.getReply()).getResponse().trim();
if(response.equals("You have been banned from this server.")) {
throw new RCONBanException();
}
this.rconChallenge = Long.valueOf(response.substring(14));
}
    /**
     * Wraps a raw RCON command string into a request packet and sends it.
     *
     * @param command the raw command line to transmit
     * @throws IOException if sending fails
     */
    private void rconSend(String command)
        throws IOException
    {
        this.send(new RCONGoldSrcRequestPacket(command));
    }
}
|
package com.hyperwallet.clientsdk.model;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@XmlRootElement
@XmlAccessorType(XmlAccessType.FIELD)
public class HyperwalletWebhookNotification extends HyperwalletBaseMonitor {
public static enum Type {
USER_CREATED("USERS.CREATED"),
USER_UPDATED("USERS.UPDATED"),
USER_UPDATED_STATUS_ACTIVATED("USERS.UPDATED.STATUS.ACTIVATED"),
USER_UPDATED_STATUS_LOCKED("USERS.UPDATED.STATUS.LOCKED"),
USER_UPDATED_STATUS_FROZEN("USERS.UPDATED.STATUS.FROZEN"),
USER_UPDATED_STATUS_DE_ACTIVATED("USERS.UPDATED.STATUS.DE_ACTIVATED"),
BANK_ACCOUNT_CREATED("USERS.BANK_ACCOUNTS.CREATED"),
BANK_ACCOUNT_UPDATED("USERS.BANK_ACCOUNTS.UPDATED"),
BANK_ACCOUNT_STATUS_ACTIVATED("USERS.BANK_ACCOUNTS.UPDATED.STATUS.ACTIVATED"),
BANK_ACCOUNT_STATUS_VERIFIED("USERS.BANK_ACCOUNTS.UPDATED.STATUS.VERIFIED"),
BANK_ACCOUNT_STATUS_INVALID("USERS.BANK_ACCOUNTS.UPDATED.STATUS.INVALID"),
BANK_ACCOUNT_STATUS_DE_ACTIVATED("USERS.BANK_ACCOUNTS.UPDATED.STATUS.DE_ACTIVATED"),
PREPAID_CARD_CREATED("USERS.PREPAID_CARDS.CREATED"),
PREPAID_CARD_UPDATED("USERS.PREPAID_CARDS.UPDATED"),
PREPAID_CARD_STATUS_QUEUED("USERS.PREPAID_CARDS.UPDATED.STATUS.QUEUED"),
PREPAID_CARD_STATUS_PRE_ACTIVATED("USERS.PREPAID_CARDS.UPDATED.STATUS.PRE_ACTIVATED"),
PREPAID_CARD_STATUS_ACTIVATED("USERS.PREPAID_CARDS.UPDATED.STATUS.ACTIVATED"),
PREPAID_CARD_STATUS_DECLINED("USERS.PREPAID_CARDS.UPDATED.STATUS.DECLINED"),
PREPAID_CARD_STATUS_SUSPENDED("USERS.PREPAID_CARDS.UPDATED.STATUS.SUSPENDED"),
PREPAID_CARD_STATUS_LOST_OR_STOLEN("USERS.PREPAID_CARDS.UPDATED.STATUS.LOST_OR_STOLEN"),
PREPAID_CARD_STATUS_DE_ACTIVATED("USERS.PREPAID_CARDS.UPDATED.STATUS.DE_ACTIVATED"),
PREPAID_CARD_STATUS_COMPLIANCE_HOLD("USERS.PREPAID_CARDS.UPDATED.STATUS.COMPLIANCE_HOLD"),
PREPAID_CARD_STATUS_KYC_HOLD("USERS.PREPAID_CARDS.UPDATED.STATUS.KYC_HOLD"),
PREPAID_CARD_STATUS_LOCKED("USERS.PREPAID_CARDS.UPDATED.STATUS.LOCKED"),
PAPER_CHECK_CREATED("USERS.PAPER_CHECKS.CREATED"),
PAPER_CHECK_UPDATED("USERS.PAPER_CHECKS.UPDATED"),
PAPER_CHECK_STATUS_ACTIVATED("USERS.PAPER_CHECKS.UPDATED.STATUS.ACTIVATED"),
PAPER_CHECK_STATUS_VERIFIED("USERS.PAPER_CHECKS.UPDATED.STATUS.VERIFIED"),
PAPER_CHECK_STATUS_INVALID("USERS.PAPER_CHECKS.UPDATED.STATUS.INVALID"),
PAPER_CHECK_STATUS_DE_ACTIVATED("USERS.PAPER_CHECKS.UPDATED.STATUS.DE_ACTIVATED"),
PAYMENT_CREATED("PAYMENTS.CREATED");
static Map<String, Type> eventTypeMap = new HashMap<String, Type>();
static {
for (Type eventType : Type.values()) {
eventTypeMap.put(eventType.toString(), eventType);
}
}
public static Type getType(String type) {
return eventTypeMap.get(type);
}
private String type;
Type(String type) {
this.type = type;
}
@Override
public String toString() {
return this.type;
}
}
private String token;
private String type;
private Date createdOn;
private Object object;
private List<HyperwalletLink> links;
public String getToken() {
return token;
}
public void setToken(String token) {
this.token = token;
}
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
public Date getCreatedOn() {
return createdOn;
}
public void setCreatedOn(Date createdOn) {
this.createdOn = createdOn;
}
public Object getObject() {
return object;
}
public void setObject(Object object) {
this.object = object;
}
public List<HyperwalletLink> getLinks() {
return links;
}
public void setLinks(List<HyperwalletLink> links) {
addField("links", links);
this.links = links;
}
public HyperwalletWebhookNotification links(List<HyperwalletLink> links) {
addField("links", links);
this.links = links;
return this;
}
public HyperwalletWebhookNotification clearLinks() {
clearField("links");
this.links = null;
return this;
}
}
|
package com.kastkode.springsandwich.filter.internal;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.stereotype.Component;
import org.springframework.web.context.WebApplicationContext;
import org.springframework.web.method.HandlerMethod;
import org.springframework.web.servlet.handler.HandlerInterceptorAdapter;
import com.kastkode.springsandwich.filter.annotation.After;
import com.kastkode.springsandwich.filter.annotation.AfterElement;
import com.kastkode.springsandwich.filter.annotation.Before;
import com.kastkode.springsandwich.filter.annotation.BeforeElement;
import com.kastkode.springsandwich.filter.api.AfterHandler;
import com.kastkode.springsandwich.filter.api.BeforeHandler;
import com.kastkode.springsandwich.filter.api.Flow;
import org.springframework.web.servlet.HandlerInterceptor;
import org.springframework.web.servlet.ModelAndView;
@Component
public class InterceptDelegator extends HandlerInterceptorAdapter {

    @Autowired
    ApplicationContext appContext;

    /**
     * Runs all @Before interceptors declared on the controller class and on
     * the handler method itself, in that order. Processing stops at the first
     * interceptor that does not return Flow.CONTINUE.
     */
    @Override
    public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) throws Exception {
        if (!(handler instanceof HandlerMethod)) {
            return true;
        }
        HandlerMethod method = (HandlerMethod) handler;
        Before classLevel = method.getMethod().getDeclaringClass().getAnnotation(Before.class);
        Before methodLevel = method.getMethod().getAnnotation(Before.class);
        // Class-level chain runs first; short-circuit skips the method-level
        // chain entirely when a class-level interceptor aborts the request.
        boolean proceed = preHandleInterceptors(classLevel, request, response, method)
                && preHandleInterceptors(methodLevel, request, response, method);
        if (!proceed) {
            return false;
        }
        //currently just returns true, but who knows what necessary side-effects may be added in future
        return super.preHandle(request, response, handler);
    }

    /**
     * Runs all @After interceptors declared on the controller class and on
     * the handler method itself, in that order.
     */
    @Override
    public void postHandle(HttpServletRequest request, HttpServletResponse response, Object handler, ModelAndView modelAndView) throws Exception {
        if (!(handler instanceof HandlerMethod)) {
            return;
        }
        HandlerMethod method = (HandlerMethod) handler;
        postHandleInterceptors(method.getMethod().getDeclaringClass().getAnnotation(After.class),
                request, response, method, modelAndView);
        postHandleInterceptors(method.getMethod().getAnnotation(After.class),
                request, response, method, modelAndView);
        //currently a nop, but who knows what required side-effects may be added in future
        super.postHandle(request, response, handler, modelAndView);
    }

    /** Runs one @Before chain; true when every handler returned Flow.CONTINUE. */
    private boolean preHandleInterceptors(Before interceptors, HttpServletRequest request, HttpServletResponse response,
            HandlerMethod handlerMethod) throws Exception {
        if (interceptors == null) {
            return true;
        }
        for (BeforeElement element : interceptors.value()) {
            BeforeHandler handlerBean = appContext.getBean(element.value());
            if (handlerBean.handle(request, response, handlerMethod, element.flags()) != Flow.CONTINUE) {
                return false;
            }
        }
        return true;
    }

    /** Runs one @After chain; handlers cannot abort at this stage. */
    private void postHandleInterceptors(After interceptors, HttpServletRequest request, HttpServletResponse response,
            HandlerMethod handlerMethod, ModelAndView modelAndView) throws Exception {
        if (interceptors == null) {
            return;
        }
        for (AfterElement element : interceptors.value()) {
            appContext.getBean(element.value()).handle(request, response, handlerMethod, modelAndView, element.flags());
        }
    }
}
|
package se.raddo.raddose3D;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.util.HashMap;
import java.util.Map;
/**
* Holds X-ray cross section information for all elements of the periodic table.
* The ElementDatabase class is a Singleton. Its constructor is not publically
* accessible.
* To obtain an instance of the ElementDatabase class, call the getInstance()
* function.
*/
public class ElementDatabase {
/**
* Reference to the singleton instance of ElementDatabase.
*/
private static ElementDatabase singleton;
/**
* Map of all Element objects in the database.
*/
private final Map<Object, Element> elements;
/**
* List of available fields in the database file.
*/
public static enum DatabaseFields {
/** K edge in Angstroms. */
EDGE_K(2),
/** L edge in Angstroms. */
EDGE_L(3),
/** M edge in Angstroms. */
EDGE_M(4),
/** K coefficients in polynomial expansion. */
K_COEFF_0(5), K_COEFF_1(6), K_COEFF_2(7), K_COEFF_3(8),
/** L coefficients in polynomial expansion. */
L_COEFF_0(9), L_COEFF_1(10), L_COEFF_2(11), L_COEFF_3(12),
/** M coefficients in polynomial expansion. */
M_COEFF_0(13), M_COEFF_1(14), M_COEFF_2(15), M_COEFF_3(16),
/** N coefficients for polynomial expansion. */
N_COEFF_0(17), N_COEFF_1(18), N_COEFF_2(19), N_COEFF_3(20),
/** Atomic weight. */
ATOMIC_WEIGHT(23),
/** Coherent coefficients for polynomial expansion. */
COHERENT_COEFF_0(24), COHERENT_COEFF_1(25),
/** Coherent coefficients for polynomial expansion. */
COHERENT_COEFF_2(26), COHERENT_COEFF_3(27),
/** Incoherent coefficients for polynomial expansion. */
INCOHERENT_COEFF_0(28), INCOHERENT_COEFF_1(29),
/** Incoherent coefficients for polynomial expansion. */
INCOHERENT_COEFF_2(30), INCOHERENT_COEFF_3(31),
L2(36),
L3(37);
/**
* The position of each element in the database line. Index starting at 0.
*/
private final int field;
/**
* Initialization of each enum type entry.
*
* @param fieldnumber
* The position of this element in the database line. Index
* starting at 0.
*/
DatabaseFields(final int fieldnumber) {
field = fieldnumber;
}
/**
* Returns the position of this element in the database line.
*
* @return
* Position of this element in the database line. Index starting at
* 0.
*/
private int fieldNumber() {
return field;
}
}
/**
* Location of MuCalcConstants library.
*/
private static final String MUCALC_FILE = "constants/MuCalcConstants.txt";
/** Position of the atomic number in the database file. */
private static final int ATOMIC_NUMBER = 0;
/** Position of the element name in the database file. */
private static final int ELEMENT_NAME = 1;
/**
* Protected constructor of ElementDatabase. This reads in and parses the
* constant file and creates the element map.
* To obtain an instance of the ElementDatabase class, call the getInstance()
* function.
*/
protected ElementDatabase() {
elements = new HashMap<Object, Element>();
InputStreamReader isr;
try {
isr = locateConstantsFile();
} catch (IOException e) {
throw new RuntimeException("Error accessing element database file "
+ MUCALC_FILE, e);
}
BufferedReader br = new BufferedReader(isr);
// Read in constants file, consider some kind of error checking
try {
String line;
String[] components;
Map<DatabaseFields, Double> elementInfo =
new HashMap<DatabaseFields, Double>();
while ((line = br.readLine()) != null) {
// ignore commented out lines.
if (Character.toString(line.charAt(0)).equals("
continue;
}
// array containing all those numbers from the calculator file
components = line.split("\t", -1);
// Setting all the properties of the new atom.
// component[x] where the values of x are in order
// as listed in the constants file.
int atomicNumber = Integer.valueOf(components[ATOMIC_NUMBER]);
elementInfo.clear();
for (DatabaseFields df : DatabaseFields.values()) {
if ("".equals(components[df.fieldNumber()])) {
elementInfo.put(df, null);
} else {
elementInfo.put(df, Double.valueOf(components[df.fieldNumber()]));
}
}
Element elem = new Element(components[ELEMENT_NAME], atomicNumber,
elementInfo);
elements.put(components[ELEMENT_NAME].toLowerCase(), elem);
elements.put(atomicNumber, elem);
}
br.close();
isr.close();
} catch (IOException e) {
throw new RuntimeException("Error accessing element database file "
+ MUCALC_FILE, e);
}
}
/**
* Try to locate MUCALC_FILE. This may be in the class path (ie. within a .jar
* file), or in the file system.
*
* @return
* InputStreamReader pointing to the correct resource.
* @throws FileNotFoundException
* The file could not be found.
* @throws UnsupportedEncodingException
* The file charset cannot be interpreted.
*/
private InputStreamReader locateConstantsFile()
throws UnsupportedEncodingException, FileNotFoundException {
// Try to find it within class path;
InputStream is = getClass().getResourceAsStream("/" + MUCALC_FILE);
if (is == null) {
// If it is not within the class path, try via the file system.
is = new FileInputStream(MUCALC_FILE);
}
return new InputStreamReader(is, "US-ASCII");
}
/**
* Returns an instance of the element database. The true constructor of
* ElementDatabase is private, as ElementDatabase is a Singleton.
*
* @return
* Instance of the element database.
*/
@SuppressWarnings("PMD.AvoidSynchronizedAtMethodLevel")
public static synchronized ElementDatabase getInstance() {
if (singleton == null) {
singleton = new ElementDatabase();
}
return singleton;
}
/**
* Returns the Element object associated with the chemical element with z
* protons.
*
* @param z
* atomic number
* @return
* associated Element object
*/
public Element getElement(final int z) {
return elements.get(z);
}
/**
* Returns the Element object associated with the specified chemical element.
*
* @param name
* name of a chemical element
* @return
* associated Element object
*/
public Element getElement(final String name) {
return elements.get(name.toLowerCase());
}
}
|
package com.kiselev.reflection.ui.bytecode.assembly.build;
import com.kiselev.reflection.ui.bytecode.assembly.build.constant.Constants;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.security.ProtectionDomain;
import java.util.ArrayList;
import java.util.List;
public class AgentBuilder {

    /** @return a fresh, empty agent-jar builder. */
    public static Builder getBuilder() {
        return new Builder();
    }

    /**
     * Fluent builder that packages an agent class (plus any attached classes)
     * into a jar file by shelling out to the external "jar" tool.
     */
    public static class Builder {

        private String commandPattern = "%s %s %s %s %s";
        private String agentName = "agent.jar";
        private Class<?> agentClass;
        private String manifestName = "MANIFEST.mf";
        private List<Class<?>> attachedClasses = new ArrayList<>();

        /** Sets the jar file name; the ".jar" suffix is appended when missing. */
        public Builder addAgentName(String agentName) {
            if (!agentName.endsWith(Constants.Suffix.JAR_SUFFIX)) {
                agentName = agentName + Constants.Suffix.JAR_SUFFIX;
            }
            this.agentName = agentName;
            return this;
        }

        /** Adds an extra class to be packaged alongside the agent class. */
        public Builder addClass(Class<?> attachedClass) {
            this.attachedClasses.add(attachedClass);
            return this;
        }

        /** Sets the manifest name; the ".mf" suffix is appended when missing. */
        public Builder addManifest(String manifestName) {
            if (!manifestName.endsWith(Constants.Suffix.MANIFEST_SUFFIX)) {
                manifestName = manifestName + Constants.Suffix.MANIFEST_SUFFIX;
            }
            this.manifestName = manifestName;
            return this;
        }

        /** Sets the premain/agentmain class. Mandatory before build(). */
        public Builder addAgentClass(Class<?> agentClass) {
            this.agentClass = agentClass;
            return this;
        }

        /**
         * Builds the agent jar and returns the path it was written to.
         *
         * @return absolute path of the created agent jar
         */
        public String build() {
            createAgentJar();
            return retrieveAgentPath();
        }

        /**
         * Invokes the external "jar" tool and waits for it to terminate.
         * Runtime.exec() is asynchronous, so without waitFor() the subsequent
         * file polling raced against the still-running jar process.
         */
        private void createAgentJar() {
            if (agentClass == null) {
                throw new RuntimeException("Agent class cannot be null");
            }
            try {
                // NOTE(review): Runtime.exec(String) tokenizes on whitespace;
                // paths containing spaces will break. ProcessBuilder with an
                // argument list would be more robust.
                Process process = Runtime.getRuntime().exec(convertCommand());
                process.waitFor();
            } catch (IOException exception) {
                throw new RuntimeException(exception);
            } catch (InterruptedException exception) {
                Thread.currentThread().interrupt();
                throw new RuntimeException(exception);
            }
        }

        /** Assembles the full "jar ..." command line. */
        private String convertCommand() {
            return String.format(commandPattern,
                    Constants.Command.JAR_EXE_COMMAND,
                    Constants.Flag.JAR_FLAG,
                    agentName,
                    convertManifestPath(),
                    convertAttachedClasses());
        }

        /** Resolves the manifest file from the classpath (META-INF/). */
        private String convertManifestPath() {
            URL resource = getClass().getClassLoader().getResource("META-INF" + File.separatorChar + manifestName);
            if (resource == null) {
                throw new RuntimeException("Manifest file cannot be null");
            }
            return resource.getPath();
        }

        /**
         * Builds the "-C dir file" argument list for the agent class and all
         * attached classes.
         * NOTE(review): this mutates attachedClasses (adds agentClass), so
         * calling build() twice would duplicate the entry - confirm builders
         * are single-use.
         */
        private String convertAttachedClasses() {
            StringBuilder builder = new StringBuilder();
            attachedClasses.add(agentClass);
            for (Class<?> clazz : attachedClasses) {
                ProtectionDomain domain = clazz.getProtectionDomain();
                if (domain != null) {
                    String classFileProtectionDomain = domain.getCodeSource().getLocation().getPath().substring(1).replace("/", File.separator);
                    String classFilePath = clazz.getName().replace(".", File.separator) + Constants.Suffix.CLASS_FILE_SUFFIX;
                    builder.append(Constants.Flag.JAR_C_FLAG).append(Constants.Symbols.GAP)
                            .append(classFileProtectionDomain).append(Constants.Symbols.GAP)
                            .append(classFilePath).append(Constants.Symbols.GAP);
                }
            }
            return builder.toString();
        }

        /** Waits for the jar to appear and returns its path. */
        private String retrieveAgentPath() {
            String agentJarPath = System.getProperty(Constants.Properties.HOME_DIR) + File.separator + agentName;
            waitForCreationOfFile(agentJarPath);
            return agentJarPath;
        }

        /**
         * Polls for the file to appear. The original busy-wait spun a CPU core
         * with no sleep and no timeout; poll politely and give up after a
         * bounded time so a failed jar invocation cannot hang forever.
         */
        private void waitForCreationOfFile(String fileName) {
            File file = new File(fileName);
            long deadline = System.currentTimeMillis() + 30000L;
            while (!file.exists()) {
                if (System.currentTimeMillis() > deadline) {
                    throw new RuntimeException("Timed out waiting for creation of file: " + fileName);
                }
                try {
                    Thread.sleep(50L);
                } catch (InterruptedException exception) {
                    Thread.currentThread().interrupt();
                    throw new RuntimeException(exception);
                }
            }
        }
    }
}
|
package com.neon.intellij.plugins.gitlab.view.issues;
import com.intellij.icons.AllIcons;
import com.intellij.openapi.diagnostic.Logger;
import com.neon.intellij.plugins.gitlab.controller.editor.GLIssueVirtualFile;
import com.neon.intellij.plugins.gitlab.model.EditableView;
import com.neon.intellij.plugins.gitlab.model.gitlab.GIPIssue;
import info.clearthought.layout.TableLayout;
import info.clearthought.layout.TableLayoutConstraints;
import org.jetbrains.annotations.NotNull;
import javax.swing.*;
/**
 * Swing editor panel for a single GitLab issue: a title field, a description
 * area and save/close buttons. Saving writes the edited values back into the
 * backing GLIssueVirtualFile.
 */
public class GLIssueEditorView extends JPanel implements EditableView<GIPIssue, GIPIssue> {
    private static final Logger LOG = Logger.getInstance("gitlab");
    private final JButton buttonSave = new JButton( "save", AllIcons.Actions.Menu_saveall );
    private final JButton buttonClose = new JButton( "close", AllIcons.Actions.Close );
    private final JLabel labelTitle = new JLabel( "Title" );
    private final JTextField textTitle = new JTextField();
    private final JLabel labelDescription = new JLabel( "Description" );
    private final JTextArea textDescription = new JTextArea( 5, 20 );
    // The virtual file being edited and the issue model it wraps.
    private GLIssueVirtualFile virtualFile;
    private GIPIssue model;
    public GLIssueEditorView( @NotNull final GLIssueVirtualFile vf ) {
        this.virtualFile = vf;
        this.model = vf.getIssue();
        setupComponents();
        setupLayout();
        // Populate the fields from the issue carried by the virtual file.
        fill( vf.getIssue() );
    }
    /** Wires component behaviour: word wrap and the save/close actions. */
    private void setupComponents() {
        textDescription.setWrapStyleWord( true );
        textDescription.setLineWrap( true );
        // Save pushes the edited model back into the virtual file, then closes.
        buttonSave.addActionListener(e -> {
            virtualFile.setIssue( save() );
            virtualFile.saveAndClose();
        });
        buttonClose.addActionListener(e -> virtualFile.close());
    }
    /**
     * Lays out the panel with TableLayout: a 2x2 field grid (labels left,
     * inputs right) above a centered button row.
     */
    private void setupLayout() {
        JScrollPane dp = new JScrollPane( textDescription, JScrollPane.VERTICAL_SCROLLBAR_AS_NEEDED, JScrollPane.HORIZONTAL_SCROLLBAR_NEVER );
        TableLayout layoutFields = new TableLayout(
                new double[]{TableLayout.MINIMUM, TableLayout.FILL},
                new double[]{TableLayout.MINIMUM, TableLayout.FILL}
        );
        layoutFields.setHGap( 5 );
        layoutFields.setVGap( 5 );
        JPanel fieldsPanel = new JPanel( layoutFields );
        fieldsPanel.add( labelTitle, new TableLayoutConstraints( 0, 0, 0, 0 ) );
        fieldsPanel.add( textTitle, new TableLayoutConstraints( 1, 0, 1, 0 ) );
        fieldsPanel.add( labelDescription, new TableLayoutConstraints( 0, 1, 0, 1, TableLayout.LEFT, TableLayout.TOP ) );
        fieldsPanel.add( dp, new TableLayoutConstraints( 1, 1, 1, 1 ) );
        TableLayout layoutButtons = new TableLayout(
                new double[]{TableLayout.MINIMUM, TableLayout.MINIMUM},
                new double[]{TableLayout.MINIMUM}
        );
        layoutButtons.setHGap( 5 );
        JPanel panelBottom = new JPanel( layoutButtons );
        panelBottom.add( buttonSave, new TableLayoutConstraints( 0, 0, 0, 0 ) );
        panelBottom.add( buttonClose, new TableLayoutConstraints( 1, 0, 1, 0 ) );
        // Outer layout: 5px side margins, fields fill, buttons take minimum.
        TableLayout layout = new TableLayout(
                new double[]{ 5, TableLayout.FILL, 5 },
                new double[]{ TableLayout.FILL, TableLayout.MINIMUM }
        );
        layout.setHGap( 5 );
        layout.setVGap( 5 );
        this.setLayout( layout );
        this.add( fieldsPanel, new TableLayoutConstraints(1, 0, 1, 0 ) );
        this.add( panelBottom, new TableLayoutConstraints(1, 1, 1, 1, TableLayout.CENTER, TableLayout.CENTER) );
    }
    /** Empties both input fields. */
    private void clear() {
        textTitle.setText( "" );
        textDescription.setText( "" );
    }
    /** Replaces the field contents with the given issue's title/description. */
    @Override
    public void fill( GIPIssue issue ) {
        clear();
        textTitle.setText( issue.title );
        textDescription.setText( issue.description );
    }
    /** Copies the edited field values into the model and returns it. */
    @Override
    public GIPIssue save() {
        model.title = textTitle.getText();
        model.description = textDescription.getText();
        return model;
    }
}
|
package com.nhl.link.move.extractor.model;
import java.util.Map;
import com.nhl.link.move.RowAttribute;
/**
* An {@link ExtractorModel} decorator that allows a model to inherit some
* properties from the parent {@link ExtractorModelContainer}.
*
* @since 1.4
*/
public class ContainerAwareExtractorModel implements ExtractorModel {

    // Both collaborators are mandatory and never change after construction;
    // final makes the decorator immutable (and thus safely shareable).
    private final ExtractorModelContainer parent;
    private final ExtractorModel delegate;

    /**
     * @param parent the container whose values serve as fallbacks
     * @param delegate the wrapped model whose own values take precedence
     */
    public ContainerAwareExtractorModel(ExtractorModelContainer parent, ExtractorModel delegate) {
        this.delegate = delegate;
        this.parent = parent;
    }

    /** Name always comes from the delegate; never inherited. */
    @Override
    public String getName() {
        return delegate.getName();
    }

    /** Delegate's timestamp when set (&gt; 0), otherwise the container's. */
    @Override
    public long getLoadedOn() {
        long loadedOn = delegate.getLoadedOn();
        return loadedOn > 0 ? loadedOn : parent.getLoadedOn();
    }

    /** Delegate's type when non-null, otherwise the container's. */
    @Override
    public String getType() {
        String type = delegate.getType();
        return type != null ? type : parent.getType();
    }

    /** Delegate's connector id when non-null, otherwise the container's. */
    @Override
    public String getConnectorId() {
        String connectorId = delegate.getConnectorId();
        return connectorId != null ? connectorId : parent.getConnectorId();
    }

    /** Properties are never inherited from the container. */
    @Override
    public Map<String, String> getProperties() {
        return delegate.getProperties();
    }

    /** Attributes are never inherited from the container. */
    @Override
    public RowAttribute[] getAttributes() {
        return delegate.getAttributes();
    }
}
|
package jolie.lang.parse;
import jolie.lang.parse.context.ParsingContext;
import java.lang.reflect.Array;
import jolie.lang.Constants;
import jolie.lang.parse.ast.AndConditionNode;
import jolie.lang.parse.ast.AssignStatement;
import jolie.lang.parse.ast.DocumentationComment;
import jolie.lang.parse.ast.CompareConditionNode;
import jolie.lang.parse.ast.CompensateStatement;
import jolie.lang.parse.ast.ConstantIntegerExpression;
import jolie.lang.parse.ast.ConstantRealExpression;
import jolie.lang.parse.ast.ConstantStringExpression;
import jolie.lang.parse.ast.CorrelationSetInfo;
import jolie.lang.parse.ast.CurrentHandlerStatement;
import jolie.lang.parse.ast.DeepCopyStatement;
import jolie.lang.parse.ast.EmbeddedServiceNode;
import jolie.lang.parse.ast.ExecutionInfo;
import jolie.lang.parse.ast.ExitStatement;
import jolie.lang.parse.ast.ExpressionConditionNode;
import jolie.lang.parse.ast.ForEachStatement;
import jolie.lang.parse.ast.ForStatement;
import jolie.lang.parse.ast.IfStatement;
import jolie.lang.parse.ast.InstallFixedVariableExpressionNode;
import jolie.lang.parse.ast.InstallStatement;
import jolie.lang.parse.ast.IsTypeExpressionNode;
import jolie.lang.parse.ast.LinkInStatement;
import jolie.lang.parse.ast.LinkOutStatement;
import jolie.lang.parse.ast.NDChoiceStatement;
import jolie.lang.parse.ast.NotConditionNode;
import jolie.lang.parse.ast.NotificationOperationStatement;
import jolie.lang.parse.ast.NullProcessStatement;
import jolie.lang.parse.ast.OneWayOperationDeclaration;
import jolie.lang.parse.ast.OneWayOperationStatement;
import jolie.lang.parse.ast.OrConditionNode;
import jolie.lang.parse.ast.OutputPortInfo;
import jolie.lang.parse.ast.ParallelStatement;
import jolie.lang.parse.ast.PointerStatement;
import jolie.lang.parse.ast.PostDecrementStatement;
import jolie.lang.parse.ast.PostIncrementStatement;
import jolie.lang.parse.ast.PreDecrementStatement;
import jolie.lang.parse.ast.PreIncrementStatement;
import jolie.lang.parse.ast.ProductExpressionNode;
import jolie.lang.parse.ast.Program;
import jolie.lang.parse.ast.RequestResponseOperationDeclaration;
import jolie.lang.parse.ast.RequestResponseOperationStatement;
import jolie.lang.parse.ast.RunStatement;
import jolie.lang.parse.ast.Scope;
import jolie.lang.parse.ast.SequenceStatement;
import jolie.lang.parse.ast.InputPortInfo;
import jolie.lang.parse.ast.SolicitResponseOperationStatement;
import jolie.lang.parse.ast.DefinitionCallStatement;
import jolie.lang.parse.ast.DefinitionNode;
import jolie.lang.parse.ast.InstallFunctionNode;
import jolie.lang.parse.ast.InterfaceDefinition;
import jolie.lang.parse.ast.OLSyntaxNode;
import jolie.lang.parse.ast.SpawnStatement;
import jolie.lang.parse.ast.SumExpressionNode;
import jolie.lang.parse.ast.SynchronizedStatement;
import jolie.lang.parse.ast.ThrowStatement;
import jolie.lang.parse.ast.TypeCastExpressionNode;
import jolie.lang.parse.ast.UndefStatement;
import jolie.lang.parse.ast.ValueVectorSizeExpressionNode;
import jolie.lang.parse.ast.VariableExpressionNode;
import jolie.lang.parse.ast.VariablePathNode;
import jolie.lang.parse.ast.WhileStatement;
import jolie.lang.parse.ast.types.TypeDefinitionLink;
import jolie.lang.parse.ast.types.TypeInlineDefinition;
import jolie.util.Pair;
/** Builds an optimized version of an OL parse tree.
*
* @author Fabrizio Montesi
*/
public class OLParseTreeOptimizer
{
/**
* TODO Optimize expressions and conditions
*
*/
private static class OptimizerVisitor implements OLVisitor
{
		// The rebuilt (optimized) program being assembled.
		private final Program program;
		// Result side-channel: each visit method leaves its optimized node
		// here instead of returning it (OLVisitor methods return void).
		private OLSyntaxNode currNode;
		public OptimizerVisitor( ParsingContext context )
		{
			program = new Program( context );
		}
		/** Optimizes the given tree and returns the newly built Program. */
		public Program optimize( Program p )
		{
			visit( p );
			return program;
		}
		public void visit( Program p )
		{
			// Visit every top-level node; each visit appends its optimized
			// form to the new program via program.addChild().
			for( OLSyntaxNode node : p.children() ) {
				node.accept( this );
			}
		}
		// Execution metadata carries no sub-terms to optimize; copy as-is.
		public void visit( ExecutionInfo p )
		{
			program.addChild( p );
		}
		// Correlation-set metadata carries no sub-terms to optimize; copy as-is.
		public void visit( CorrelationSetInfo p )
		{
			program.addChild( p );
		}
		public void visit( OutputPortInfo p )
		{
			// OutputPortInfo is mutable: optimize the protocol configuration
			// in place, then keep the same port node.
			if ( p.protocolConfiguration() != null ) {
				p.protocolConfiguration().accept( this );
				p.setProtocolConfiguration( currNode );
			}
			program.addChild( p );
		}
		public void visit( InputPortInfo p )
		{
			// Unlike OutputPortInfo, the protocol configuration of an input
			// port is fixed at construction, so a new InputPortInfo must be
			// built around the optimized configuration; the operations map and
			// interface list are carried over unchanged.
			if ( p.protocolConfiguration() != null ) {
				p.protocolConfiguration().accept( this );
				InputPortInfo iport =
					new InputPortInfo(
						p.context(),
						p.id(),
						p.location(),
						p.protocolId(),
						currNode,
						p.aggregationList(),
						p.redirectionMap()
					);
				iport.operationsMap().putAll( p.operationsMap() );
				iport.getInterfaceList().addAll( p.getInterfaceList() );
				program.addChild( iport );
			} else {
				program.addChild( p );
			}
		}
		// Operation declarations contain no behaviour to optimize:
		// intentionally no-ops.
		public void visit( OneWayOperationDeclaration decl )
		{}
		public void visit( RequestResponseOperationDeclaration decl )
		{}
		// Embedded services are opaque to the optimizer; copy as-is.
		public void visit( EmbeddedServiceNode n )
		{
			program.addChild( n );
		}
public void visit( DefinitionNode procedure )
{
procedure.body().accept( this );
program.addChild( new DefinitionNode( procedure.context(), procedure.id(), currNode ) );
}
		public void visit( ParallelStatement stm )
		{
			// A single-child parallel composition is redundant: unwrap it by
			// optimizing the child directly (currNode becomes that result).
			if ( stm.children().size() > 1 ) {
				ParallelStatement tmp = new ParallelStatement( stm.context() );
				for( OLSyntaxNode node : stm.children() ) {
					node.accept( this );
					// Flatten nested parallel compositions into this level and
					// drop nullProcess children, which are neutral in parallel.
					if ( currNode instanceof ParallelStatement ) {
						ParallelStatement curr = (ParallelStatement) currNode;
						for( OLSyntaxNode subNode : curr.children() )
							tmp.addChild( subNode );
					} else if ( !( currNode instanceof NullProcessStatement ) ) {
						tmp.addChild( currNode );
					}
				}
				/*
				 * If we ended up with an empty composition, return nullProcess
				 */
				if ( tmp.children().isEmpty() ) {
					currNode = new NullProcessStatement( stm.context() );
				} else {
					currNode = tmp;
				}
			} else {
				stm.children().get( 0 ).accept( this );
			}
		}
		public void visit( SequenceStatement stm )
		{
			// Mirrors visit(ParallelStatement): unwrap single-child sequences,
			// flatten nested sequences, and drop neutral nullProcess children.
			if ( stm.children().size() > 1 ) {
				SequenceStatement tmp = new SequenceStatement( stm.context() );
				for( OLSyntaxNode node : stm.children() ) {
					node.accept( this );
					if ( currNode instanceof SequenceStatement ) {
						SequenceStatement curr = (SequenceStatement) currNode;
						for( OLSyntaxNode subNode : curr.children() )
							tmp.addChild( subNode );
					} else if ( !( currNode instanceof NullProcessStatement ) ) {
						tmp.addChild( currNode );
					}
				}
				/*
				 * If we ended up with an empty composition, return nullProcess
				 */
				if ( tmp.children().isEmpty() ) {
					currNode = new NullProcessStatement( stm.context() );
				} else {
					currNode = tmp;
				}
			} else {
				stm.children().get( 0 ).accept( this );
			}
		}
		public void visit( NDChoiceStatement stm )
		{
			// Rebuild the non-deterministic choice with each input-guard /
			// branch-body pair optimized; an empty choice degenerates to
			// nullProcess.
			if ( stm.children().size() > 0 ) {
				NDChoiceStatement tmp = new NDChoiceStatement( stm.context() );
				for( Pair< OLSyntaxNode, OLSyntaxNode > pair : stm.children() ) {
					pair.key().accept( this );
					OLSyntaxNode n = currNode;
					pair.value().accept( this );
					tmp.addChild( new Pair< OLSyntaxNode, OLSyntaxNode >( n, currNode ) );
				}
				currNode = tmp;
			} else {
				currNode = new NullProcessStatement( stm.context() );
			}
			//} else {
				/*SequenceStatement sequence = new SequenceStatement();
				Pair< OLSyntaxNode, OLSyntaxNode > pair = stm.children().get( 0 );
				sequence.addChild( pair.key() );
				sequence.addChild( pair.value() );
				sequence.accept( this );
			}*/
		}
public void visit( IfStatement n )
{
IfStatement stm = new IfStatement( n.context() );
OLSyntaxNode condition;
for( Pair< OLSyntaxNode, OLSyntaxNode > pair : n.children() ) {
pair.key().accept( this );
condition = currNode;
pair.value().accept( this );
stm.addChild( new Pair< OLSyntaxNode, OLSyntaxNode >( condition, currNode ) );
}
if ( n.elseProcess() != null ) {
n.elseProcess().accept( this );
stm.setElseProcess( currNode );
}
currNode = stm;
}
		// The following visits rebuild their node with all sub-terms
		// optimized; optimizePath/optimizeNode tolerate null sub-terms.
		public void visit( SpawnStatement n )
		{
			currNode = new SpawnStatement(
				n.context(),
				optimizePath( n.indexVariablePath() ),
				optimizeNode( n.upperBoundExpression() ),
				optimizePath( n.inVariablePath() ),
				optimizeNode( n.body() )
			);
		}
		public void visit( WhileStatement n )
		{
			currNode = new WhileStatement(
				n.context(),
				optimizeNode( n.condition() ),
				optimizeNode( n.body() )
			);
		}
		public void visit( ForStatement n )
		{
			currNode = new ForStatement(
				n.context(),
				optimizeNode( n.init() ),
				optimizeNode( n.condition() ),
				optimizeNode( n.post() ),
				optimizeNode( n.body() )
			);
		}
		public void visit( ForEachStatement n )
		{
			currNode = new ForEachStatement(
				n.context(),
				optimizePath( n.keyPath() ),
				optimizePath( n.targetPath() ),
				optimizeNode( n.body() )
			);
		}
		public void visit( VariablePathNode n )
		{
			// Rebuild the path element by element, optimizing both the name
			// expression (key) and the index expression (value) of each step.
			VariablePathNode varPath = new VariablePathNode( n.context(), n.isGlobal() );
			for( Pair< OLSyntaxNode, OLSyntaxNode > node : n.path() ) {
				varPath.append( new Pair< OLSyntaxNode, OLSyntaxNode >( optimizeNode( node.key() ), optimizeNode( node.value() ) ) );
			}
			currNode = varPath;
		}
		/**
		 * Null-tolerant helper: optimizes a variable path and returns the
		 * result (null in, null out).
		 */
		private VariablePathNode optimizePath( VariablePathNode n )
		{
			if ( n == null ) {
				return null;
			}
			n.accept( this );
			return (VariablePathNode)currNode;
		}
		/**
		 * Null-tolerant helper: optimizes any node and returns the result via
		 * the currNode side-channel (null in, null out).
		 */
		private OLSyntaxNode optimizeNode( OLSyntaxNode n )
		{
			if ( n == null ) {
				return null;
			}
			n.accept( this );
			return currNode;
		}
public void visit( RequestResponseOperationStatement n )
{
OLSyntaxNode outputExpression = null;
if ( n.outputExpression() != null ) {
n.outputExpression().accept( this );
outputExpression = currNode;
}
currNode =
new RequestResponseOperationStatement(
n.context(),
n.id(),
optimizePath( n.inputVarPath() ),
outputExpression,
optimizeNode( n.process() ) );
}
		public void visit( Scope n )
		{
			// Wrap the optimized body in a new scope with the same fault name.
			n.body().accept( this );
			currNode = new Scope( n.context(), n.id(), currNode );
		}
		public void visit( InstallStatement n )
		{
			// Optimize each installed fault/compensation handler body.
			currNode = new InstallStatement( n.context(), optimizeInstallFunctionNode( n.handlersFunction() ) );
		}
private InstallFunctionNode optimizeInstallFunctionNode( InstallFunctionNode n )
{
if ( n == null ) {
return null;
}
Pair< String, OLSyntaxNode >[] pairs =
(Pair< String, OLSyntaxNode >[]) Array.newInstance( Pair.class, n.pairs().length );
int i = 0;
for( Pair< String, OLSyntaxNode > pair : n.pairs() ) {
pair.value().accept( this );
pairs[ i++ ] = new Pair< String, OLSyntaxNode >( pair.key(), currNode );
}
return new InstallFunctionNode( pairs );
}
public void visit( SynchronizedStatement n )
{
n.body().accept( this );
currNode = new SynchronizedStatement( n.context(), n.id(), currNode );
}
public void visit( CompensateStatement n ) { currNode = n; }
public void visit( ThrowStatement n )
{
if ( n.expression() == null ) {
currNode = null;
} else {
n.expression().accept( this );
}
currNode = new ThrowStatement( n.context(), n.id(), currNode );
}
	/** Rebuilds a one-way input statement, optimizing the input path. */
	public void visit( OneWayOperationStatement n )
	{
		currNode = new OneWayOperationStatement(
			n.context(),
			n.id(),
			optimizePath( n.inputVarPath() )
		);
	}

	/** Rebuilds a notification, optimizing the optional output expression. */
	public void visit( NotificationOperationStatement n )
	{
		OLSyntaxNode outputExpression = null;
		if ( n.outputExpression() != null ) {
			n.outputExpression().accept( this );
			outputExpression = currNode;
		}
		currNode = new NotificationOperationStatement(
			n.context(),
			n.id(),
			n.outputPortId(),
			outputExpression
		);
	}

	/**
	 * Rebuilds a solicit-response call, optimizing the optional output
	 * expression, the input path and the install handlers.
	 */
	public void visit( SolicitResponseOperationStatement n )
	{
		OLSyntaxNode outputExpression = null;
		if ( n.outputExpression() != null ) {
			n.outputExpression().accept( this );
			outputExpression = currNode;
		}
		currNode = new SolicitResponseOperationStatement(
			n.context(),
			n.id(),
			n.outputPortId(),
			outputExpression,
			optimizePath( n.inputVarPath() ),
			optimizeInstallFunctionNode( n.handlersFunction() )
		);
	}
	/** Link-in statements carry no optimizable children; kept as-is. */
	public void visit( LinkInStatement n ) { currNode = n; }

	/** Link-out statements carry no optimizable children; kept as-is. */
	public void visit( LinkOutStatement n ) { currNode = n; }

	/** Rebuilds an assignment, optimizing the target path and the expression. */
	public void visit( AssignStatement n )
	{
		currNode = new AssignStatement(
			n.context(),
			optimizePath( n.variablePath() ),
			optimizeNode( n.expression() )
		);
	}

	/** Rebuilds a deep-copy statement, optimizing both paths. */
	public void visit( DeepCopyStatement n )
	{
		currNode = new DeepCopyStatement(
			n.context(),
			optimizePath( n.leftPath() ),
			optimizePath( n.rightPath() )
		);
	}

	/** Rebuilds an alias (pointer) statement, optimizing both paths. */
	public void visit( PointerStatement n )
	{
		currNode = new PointerStatement(
			n.context(),
			optimizePath( n.leftPath() ),
			optimizePath( n.rightPath() )
		);
	}

	/** Definition calls carry no optimizable children; kept as-is. */
	public void visit( DefinitionCallStatement n ) { currNode = n; }

	/**
	 * Optimizes an OR condition: children are optimized in place, and an
	 * OR with a single child is unwrapped to that child (the wrapper node
	 * adds nothing in that case).
	 */
	public void visit( OrConditionNode n )
	{
		if ( n.children().size() > 1 ) {
			OrConditionNode ret = new OrConditionNode( n.context() );
			for( OLSyntaxNode child : n.children() ) {
				child.accept( this );
				ret.addChild( currNode );
			}
			currNode = ret;
		} else {
			n.children().get( 0 ).accept( this );
		}
	}

	/**
	 * Optimizes an AND condition; mirrors the OR case, including the
	 * single-child unwrapping.
	 */
	public void visit( AndConditionNode n )
	{
		if ( n.children().size() > 1 ) {
			AndConditionNode ret = new AndConditionNode( n.context() );
			for( OLSyntaxNode child : n.children() ) {
				child.accept( this );
				ret.addChild( currNode );
			}
			currNode = ret;
		} else {
			n.children().get( 0 ).accept( this );
		}
	}
	/** Rebuilds a NOT condition, optimizing the negated condition. */
	public void visit( NotConditionNode n )
	{
		n.condition().accept( this );
		currNode = new NotConditionNode( n.context(), currNode );
	}

	/** Rebuilds a comparison, optimizing both operands; the operator is kept. */
	public void visit( CompareConditionNode n )
	{
		n.leftExpression().accept( this );
		OLSyntaxNode leftExpression = currNode;
		n.rightExpression().accept( this );
		currNode = new CompareConditionNode( n.context(), leftExpression, currNode, n.opType() );
	}

	/** Rebuilds an expression-as-condition wrapper, optimizing the expression. */
	public void visit( ExpressionConditionNode n )
	{
		currNode = new ExpressionConditionNode(
			n.context(),
			optimizeNode( n.expression() )
		);
	}

	/** Integer constants are immutable leaves; kept as-is. */
	public void visit( ConstantIntegerExpression n ) { currNode = n; }

	/** Real constants are immutable leaves; kept as-is. */
	public void visit( ConstantRealExpression n ) { currNode = n; }

	/**
	 * String constants are rebuilt with an interned value so identical
	 * literals share a single String instance (a memory optimization).
	 */
	public void visit( ConstantStringExpression n )
	{
		currNode = new ConstantStringExpression( n.context(), n.value().intern() );
	}

	/**
	 * Optimizes a product: each operand is optimized and re-attached with
	 * its original operator (multiply/divide/modulus); a single-operand
	 * product is unwrapped to the operand itself.
	 */
	public void visit( ProductExpressionNode n )
	{
		if ( n.operands().size() > 1 ) {
			ProductExpressionNode ret = new ProductExpressionNode( n.context() );
			for( Pair< Constants.OperandType, OLSyntaxNode > pair : n.operands() ) {
				pair.value().accept( this );
				if ( pair.key() == Constants.OperandType.MULTIPLY ) {
					ret.multiply( currNode );
				} else if ( pair.key() == Constants.OperandType.DIVIDE ) {
					ret.divide( currNode );
				} else if ( pair.key() == Constants.OperandType.MODULUS ) {
					ret.modulo( currNode );
				}
			}
			currNode = ret;
		} else {
			n.operands().iterator().next().value().accept( this );
		}
	}

	/**
	 * Optimizes a sum; mirrors the product case with add/subtract
	 * operators and the same single-operand unwrapping.
	 */
	public void visit( SumExpressionNode n )
	{
		if ( n.operands().size() > 1 ) {
			SumExpressionNode ret = new SumExpressionNode( n.context() );
			for( Pair< Constants.OperandType, OLSyntaxNode > pair : n.operands() ) {
				pair.value().accept( this );
				if ( pair.key() == Constants.OperandType.ADD ) {
					ret.add( currNode );
				} else {
					ret.subtract( currNode );
				}
			}
			currNode = ret;
		} else {
			n.operands().iterator().next().value().accept( this );
		}
	}
	/** Rebuilds a variable read, optimizing its path. */
	public void visit( VariableExpressionNode n )
	{
		currNode = new VariableExpressionNode(
			n.context(),
			optimizePath( n.variablePath() )
		);
	}

	/** Rebuilds an install-time fixed variable read, optimizing its path. */
	public void visit( InstallFixedVariableExpressionNode n )
	{
		currNode = new InstallFixedVariableExpressionNode(
			n.context(),
			optimizePath( n.variablePath() )
		);
	}

	/** nullProcess is a leaf; kept as-is. */
	public void visit( NullProcessStatement n ) { currNode = n; }

	/** exit is a leaf; kept as-is. */
	public void visit( ExitStatement n ) { currNode = n; }

	/** Run statements carry no optimizable children; kept as-is. */
	public void visit( RunStatement n ) { currNode = n; }

	/** Type declarations are hoisted directly into the output program. */
	public void visit( TypeInlineDefinition n )
	{
		program.addChild( n );
	}

	/** Type definition links are hoisted directly into the output program. */
	public void visit( TypeDefinitionLink n )
	{
		program.addChild( n );
	}

	/** Rebuilds a value-vector size expression, optimizing the path. */
	public void visit( ValueVectorSizeExpressionNode n )
	{
		currNode = new ValueVectorSizeExpressionNode(
			n.context(),
			optimizePath( n.variablePath() )
		);
	}

	/** Rebuilds a pre-increment, optimizing the target path. */
	public void visit( PreIncrementStatement n )
	{
		currNode = new PreIncrementStatement(
			n.context(),
			optimizePath( n.variablePath() )
		);
	}

	/** Rebuilds a post-increment, optimizing the target path. */
	public void visit( PostIncrementStatement n )
	{
		currNode = new PostIncrementStatement(
			n.context(),
			optimizePath( n.variablePath() )
		);
	}

	/** Rebuilds a pre-decrement, optimizing the target path. */
	public void visit( PreDecrementStatement n )
	{
		currNode = new PreDecrementStatement(
			n.context(),
			optimizePath( n.variablePath() )
		);
	}

	/** Rebuilds a post-decrement, optimizing the target path. */
	public void visit( PostDecrementStatement n )
	{
		currNode = new PostDecrementStatement(
			n.context(),
			optimizePath( n.variablePath() )
		);
	}

	/** Rebuilds an undef statement, optimizing the target path. */
	public void visit( UndefStatement n )
	{
		currNode = new UndefStatement(
			n.context(),
			optimizePath( n.variablePath() )
		);
	}

	/** Rebuilds a type-test expression, optimizing the tested path. */
	public void visit( IsTypeExpressionNode n )
	{
		currNode = new IsTypeExpressionNode(
			n.context(),
			n.type(),
			optimizePath( n.variablePath() )
		);
	}

	/** Rebuilds a type cast, optimizing the casted expression. */
	public void visit( TypeCastExpressionNode n )
	{
		currNode = new TypeCastExpressionNode(
			n.context(),
			n.type(),
			optimizeNode( n.expression() )
		);
	}

	/** Current-handler statements are leaves; kept as-is. */
	public void visit( CurrentHandlerStatement n ) { currNode = n; }

	/** Interface definitions are hoisted directly into the output program. */
	public void visit( InterfaceDefinition n )
	{
		program.addChild( n );
	}

	/** No-op: documentation comments are not transformed by this visitor. */
	public void visit( DocumentationComment n ){}
}
private Program originalProgram;
public OLParseTreeOptimizer( Program originalProgram )
{
this.originalProgram = originalProgram;
}
public Program optimize()
{
return (new OptimizerVisitor( originalProgram.context() )).optimize( originalProgram );
}
}
|
package com.opengamma.sesame.marketdata;
import org.threeten.bp.LocalDate;
import com.opengamma.core.config.ConfigSource;
import com.opengamma.core.historicaltimeseries.HistoricalTimeSeriesSource;
import com.opengamma.engine.marketdata.spec.FixedHistoricalMarketDataSpecification;
import com.opengamma.engine.marketdata.spec.MarketDataSpecification;
import com.opengamma.sesame.engine.ComponentMap;
import com.opengamma.util.ArgumentChecker;
/**
 * Creates a {@link MarketDataProviderFunction} given a {@link MarketDataSpecification}.
 */
public class SpecificationMarketDataFactory implements MarketDataFactory {

  // TODO do we want to reuse MarketDataSpecification or replace it?
  private final MarketDataSpecification _marketDataSpecification;

  /**
   * Creates a factory for the supplied specification.
   *
   * @param marketDataSpecification the specification, not null; must be a
   *   {@link FixedHistoricalMarketDataSpecification}
   */
  public SpecificationMarketDataFactory(MarketDataSpecification marketDataSpecification) {
    _marketDataSpecification = ArgumentChecker.notNull(marketDataSpecification, "marketDataSpecification");
    if (!(_marketDataSpecification instanceof FixedHistoricalMarketDataSpecification)) {
      throw new IllegalArgumentException("Only FixedHistoricalMarketDataSpecification is currently supported");
    }
  }

  @Override
  public MarketDataProviderFunction create(ComponentMap components) {
    FixedHistoricalMarketDataSpecification fixedSpec =
        (FixedHistoricalMarketDataSpecification) _marketDataSpecification;
    LocalDate snapshotDate = fixedSpec.getSnapshotDate();
    ConfigSource configSource = components.getComponent(ConfigSource.class);
    HistoricalTimeSeriesSource timeSeriesSource = components.getComponent(HistoricalTimeSeriesSource.class);
    // NOTE(review): data source, data field and provider names are hard-coded,
    // presumably Bloomberg-specific defaults — confirm before reusing elsewhere.
    HistoricalRawMarketDataSource rawDataSource =
        new HistoricalRawMarketDataSource(timeSeriesSource, snapshotDate, "BLOOMBERG", "Market_Value");
    return new EagerMarketDataProvider(rawDataSource, configSource, "BloombergLiveData");
  }
}
|
package edu.neu.ccs.pyramid.classification.logistic_regression;
import org.apache.commons.math3.distribution.UniformRealDistribution;
import org.apache.mahout.math.DenseVector;
import org.apache.mahout.math.Vector;
import org.apache.mahout.math.VectorView;
import java.io.*;
import java.util.ArrayList;
import java.util.List;
/**
 * Weight parameters for a multi-class linear/logistic model.
 *
 * <p>All parameters live in one flat vector of size
 * {@code (numFeatures + 1) * numClasses}: for each class {@code k}, position
 * {@code (numFeatures + 1) * k} holds the bias and the following
 * {@code numFeatures} positions hold the feature weights.
 *
 * <p>The Mahout {@code Vector} is not serializable, so serialization
 * round-trips the values through a plain {@code double[]}.
 */
public class Weights implements Serializable {
    private static final long serialVersionUID = 1L;
    private int numClasses;
    private int numFeatures;
    /**
     * vector is not serializable
     */
    private transient Vector weightVector;
    /**
     * serialize this array instead
     */
    private double[] serializableWeights;

    /**
     * Creates the weight storage.
     *
     * @param random if true, every parameter is drawn uniformly from
     *               [-0.5, 0.5); otherwise all parameters start at zero
     */
    public Weights(int numClasses, int numFeatures, boolean random) {
        // Delegate field setup and allocation to the zero-initializing
        // constructor instead of duplicating the sizing logic in both branches.
        this(numClasses, numFeatures);
        if (random) {
            UniformRealDistribution uniform = new UniformRealDistribution(-0.5, 0.5);
            for (int i = 0; i < weightVector.size(); i++) {
                double p = uniform.sample();
                weightVector.set(i, p);
                serializableWeights[i] = p;
            }
        }
    }

    /** Creates zero-initialized weights of size (numFeatures + 1) * numClasses. */
    public Weights(int numClasses, int numFeatures) {
        this.numClasses = numClasses;
        this.numFeatures = numFeatures;
        this.weightVector = new DenseVector((numFeatures + 1) * numClasses);
        this.serializableWeights = new double[(numFeatures + 1) * numClasses];
    }

    /**
     * Wraps an existing weight vector (the vector is NOT copied).
     *
     * @throws IllegalArgumentException if the vector size does not match
     *         (numFeatures + 1) * numClasses
     */
    public Weights(int numClasses, int numFeatures, Vector weightVector) {
        this.numClasses = numClasses;
        this.numFeatures = numFeatures;
        if (weightVector.size() != (numFeatures + 1) * numClasses) {
            throw new IllegalArgumentException("weightVector.size()!=(numFeatures + 1)*numClasses");
        }
        this.weightVector = weightVector;
        this.serializableWeights = new double[(numFeatures + 1) * numClasses];
    }

    /**
     * Replaces the weight vector (the vector is NOT copied).
     *
     * @throws IllegalArgumentException if the vector size does not match
     *         (numFeatures + 1) * numClasses
     */
    public void setWeightVector(Vector weightVector) {
        if (weightVector.size() != (numFeatures + 1) * numClasses) {
            throw new IllegalArgumentException("weightVector.size()!=(numFeatures + 1)*numClasses");
        }
        this.weightVector = weightVector;
    }

    /**
     * Returns a deep copy of this object; the underlying vector is
     * duplicated so modifications of the copy do not affect this instance.
     */
    public Weights deepCopy() {
        Weights copy = new Weights(this.numClasses, numFeatures);
        copy.weightVector = new DenseVector(this.weightVector);
        return copy;
    }

    /** Maps a flat parameter index to the class it belongs to. */
    public int getClassIndex(int parameterIndex) {
        return parameterIndex / (numFeatures + 1);
    }

    /**
     * Maps a flat parameter index to the feature index within its class.
     *
     * @param parameterIndex flat index into the weight vector
     * @return feature index; -1 means the position is a bias term
     */
    public int getFeatureIndex(int parameterIndex) {
        return parameterIndex - getClassIndex(parameterIndex) * (numFeatures + 1) - 1;
    }

    /** Returns the flat positions of every per-class bias term. */
    public List<Integer> getAllBiasPositions() {
        List<Integer> list = new ArrayList<>();
        for (int k = 0; k < numClasses; k++) {
            list.add((this.numFeatures + 1) * k);
        }
        return list;
    }

    /**
     * @return weights for all classes, as one flat vector
     */
    public Vector getAllWeights() {
        return weightVector;
    }

    /** Total number of parameters, i.e. (numFeatures + 1) * numClasses. */
    public int totalSize() {
        return weightVector.size();
    }

    /**
     * @param k class index
     * @return weights for class k, including the bias at the beginning
     *         (a live view, not a copy)
     * @throws IllegalArgumentException if k is out of bounds
     */
    public Vector getWeightsForClass(int k) {
        if (k >= numClasses) {
            throw new IllegalArgumentException("out of bound");
        }
        int start = (this.numFeatures + 1) * k;
        int length = this.numFeatures + 1;
        return new VectorView(this.weightVector, start, length);
    }

    /**
     * @param k class index
     * @return weights for class k, excluding the bias (a live view, not a copy)
     * @throws IllegalArgumentException if k is out of bounds
     */
    public Vector getWeightsWithoutBiasForClass(int k) {
        if (k >= numClasses) {
            throw new IllegalArgumentException("out of bound");
        }
        int start = (this.numFeatures + 1) * k + 1;
        int length = this.numFeatures;
        return new VectorView(this.weightVector, start, length);
    }

    /**
     * @param k class index
     * @return the bias term for class k
     * @throws IllegalArgumentException if k is out of bounds
     */
    public double getBiasForClass(int k) {
        if (k >= numClasses) {
            throw new IllegalArgumentException("out of bound");
        }
        int start = (this.numFeatures + 1) * k;
        return this.weightVector.get(start);
    }

    // Snapshot the (transient) vector into the serializable array before writing.
    private void writeObject(java.io.ObjectOutputStream out)
            throws IOException {
        for (int i = 0; i < serializableWeights.length; i++) {
            serializableWeights[i] = weightVector.get(i);
        }
        out.writeInt(numClasses);
        out.writeInt(numFeatures);
        out.writeObject(serializableWeights);
    }

    // Rebuild the transient vector from the serialized array.
    private void readObject(java.io.ObjectInputStream in)
            throws IOException, ClassNotFoundException {
        numClasses = in.readInt();
        numFeatures = in.readInt();
        serializableWeights = (double[]) in.readObject();
        weightVector = new DenseVector((numFeatures + 1) * numClasses);
        for (int i = 0; i < serializableWeights.length; i++) {
            weightVector.set(i, serializableWeights[i]);
        }
    }

    /** Writes this object to the given file, creating parent directories as needed. */
    void serialize(File file) throws Exception {
        File parent = file.getParentFile();
        if (!parent.exists()) {
            parent.mkdirs();
        }
        try (
                FileOutputStream fileOutputStream = new FileOutputStream(file);
                BufferedOutputStream bufferedOutputStream = new BufferedOutputStream(fileOutputStream);
                ObjectOutputStream objectOutputStream = new ObjectOutputStream(bufferedOutputStream);
        ) {
            objectOutputStream.writeObject(this);
        }
    }

    /** Reads a previously serialized {@link Weights} from the given file. */
    public static Weights deserialize(File file) throws Exception {
        try (
                FileInputStream fileInputStream = new FileInputStream(file);
                BufferedInputStream bufferedInputStream = new BufferedInputStream(fileInputStream);
                ObjectInputStream objectInputStream = new ObjectInputStream(bufferedInputStream);
        ) {
            return (Weights) objectInputStream.readObject();
        }
    }

    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder("Weights{");
        for (int k = 0; k < numClasses; k++) {
            sb.append("for class ").append(k).append(":").append("\n");
            sb.append("bias = " + getBiasForClass(k)).append(",");
            sb.append("weights = " + getWeightsWithoutBiasForClass(k)).append("\n");
        }
        sb.append('}');
        return sb.toString();
    }
}
|
package edu.upc.caminstech.equipstic.client.dao;
import java.util.Arrays;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cache.annotation.CacheEvict;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.core.ParameterizedTypeReference;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Repository;
import org.springframework.util.Assert;
import edu.upc.caminstech.equipstic.Infraestructura;
import edu.upc.caminstech.equipstic.client.EquipsTicClientConfiguration;
import edu.upc.caminstech.equipstic.client.EquipsTicClientException;
import edu.upc.caminstech.equipstic.client.Response;
/**
 * REST-backed DAO for {@code Infraestructura} resources.
 *
 * <p>Read operations are cached; every write operation evicts all the
 * infrastructure caches so subsequent reads see fresh data.
 */
@Repository
public class InfraestructuraDaoImpl extends RestDao implements InfraestructuraDao {

    @Autowired
    public InfraestructuraDaoImpl(EquipsTicClientConfiguration config) {
        super(config);
    }

    /** {@inheritDoc} */
    @Override
    @Cacheable(CacheUtils.PREFIX + "getInfraestructuraByMarcaAndNumeroDeSerie")
    public Infraestructura getInfraestructuraByMarcaAndNumeroDeSerie(long idMarca, String sn, boolean ambDetalls) {
        Assert.notNull(sn, "El número de sèrie no pot ser null");
        Infraestructura i = get("/infraestructura/cerca/marca/{idMarca}/sn/{sn}",
                new ParameterizedTypeReference<Response<Infraestructura>>() {
                }, idMarca, sn);
        // The search endpoint returns a summary; re-fetch by id when the
        // caller asked for the detailed representation.
        if (i != null && ambDetalls) {
            return getInfraestructuraById(i.getIdentificador(), ambDetalls);
        }
        return i;
    }

    /** {@inheritDoc} */
    @Override
    @Cacheable(CacheUtils.PREFIX + "getInfraestructuraById")
    public Infraestructura getInfraestructuraById(long id, boolean ambDetalls) {
        String url = ambDetalls ? "/infraestructura/{id}/detall" : "/infraestructura/{id}";
        Infraestructura i = get(url, new ParameterizedTypeReference<Response<Infraestructura>>() {
        }, id);
        return i;
    }

    /** {@inheritDoc} */
    @Override
    @Cacheable(CacheUtils.PREFIX + "getInfraestructuresByUnitat")
    public List<Infraestructura> getInfraestructuresByUnitat(long idUnitat) {
        List<Infraestructura> result = get("/infraestructura/cerca/unitat/{idUnitat}",
                new ParameterizedTypeReference<Response<List<Infraestructura>>>() {
                }, idUnitat);
        return sorted(result);
    }

    /** {@inheritDoc} */
    @Override
    @CacheEvict(cacheNames = { CacheUtils.PREFIX + "getInfraestructuraByMarcaAndNumeroDeSerie",
            CacheUtils.PREFIX + "getInfraestructuraById", CacheUtils.PREFIX + "getInfraestructuresByUnitat" })
    public Infraestructura altaInfraestructura(Infraestructura infraestructura) {
        HttpEntity<Infraestructura> req = preparaRequest(infraestructura);

        ResponseEntity<Response<Infraestructura>> rp = getRestTemplate().exchange(getBaseUri() + "/infraestructura",
                HttpMethod.POST, req, new ParameterizedTypeReference<Response<Infraestructura>>() {
                });

        return dataOrThrow(rp.getBody(), "Error en crear la infraestructura: ");
    }

    /** {@inheritDoc} */
    @Override
    @CacheEvict(cacheNames = { CacheUtils.PREFIX + "getInfraestructuraByMarcaAndNumeroDeSerie",
            CacheUtils.PREFIX + "getInfraestructuraById", CacheUtils.PREFIX + "getInfraestructuresByUnitat" })
    public void baixaInfraestructura(long id) {
        ResponseEntity<Response<Object>> rp = getRestTemplate().exchange(getBaseUri() + "/infraestructura/{id}",
                HttpMethod.DELETE, null, new ParameterizedTypeReference<Response<Object>>() {
                }, id);

        Response<Object> response = rp.getBody();

        if (!response.isSuccess()) {
            throw new EquipsTicClientException(response,
                    "Error en esborrar la infraestructura: " + response.getMessage());
        }
    }

    /** {@inheritDoc} */
    @Override
    @CacheEvict(cacheNames = { CacheUtils.PREFIX + "getInfraestructuraByMarcaAndNumeroDeSerie",
            CacheUtils.PREFIX + "getInfraestructuraById", CacheUtils.PREFIX + "getInfraestructuresByUnitat" })
    public Infraestructura modificaInfraestructura(Infraestructura infraestructura) {
        HttpEntity<Infraestructura> req = preparaRequest(infraestructura);

        ResponseEntity<Response<Infraestructura>> rp = getRestTemplate().exchange(
                getBaseUri() + "/infraestructura/{id}", HttpMethod.PUT, req,
                new ParameterizedTypeReference<Response<Infraestructura>>() {
                }, infraestructura.getIdentificador());

        return dataOrThrow(rp.getBody(), "Error en modificar la infraestructura: ");
    }

    /**
     * Shared result handling for the write operations: returns the payload
     * on success, otherwise throws an {@link EquipsTicClientException} with
     * the given message prefix followed by the server message. Extracted
     * from the previously duplicated bodies of alta/modifica.
     */
    private Infraestructura dataOrThrow(Response<Infraestructura> response, String errorPrefix) {
        if (response.isSuccess()) {
            return response.getData();
        }
        throw new EquipsTicClientException(response, errorPrefix + response.getMessage());
    }

    /**
     * Builds a JSON request entity for the given infrastructure.
     *
     * @throws IllegalArgumentException if {@code infraestructura} is null
     */
    private HttpEntity<Infraestructura> preparaRequest(Infraestructura infraestructura) {
        if (infraestructura == null) {
            throw new IllegalArgumentException("La infraestructura no pot ser null");
        }
        HttpHeaders headers = new HttpHeaders();
        headers.setAccept(Arrays.asList(MediaType.APPLICATION_JSON_UTF8));
        headers.setContentType(MediaType.APPLICATION_JSON_UTF8);
        return new HttpEntity<>(infraestructura, headers);
    }
}
|
package info.ata4.minecraft.dragon.server.entity.ai;
import info.ata4.minecraft.dragon.server.entity.EntityTameableDragon;
import net.minecraft.entity.ai.EntityAIBase;
import net.minecraft.entity.player.EntityPlayer;
/**
 * AI task that makes a tamed dragon try to catch its falling owner.
 *
 * @author Nico Bergemann <barracuda415 at yahoo.de>
 */
public class EntityAICatchOwner extends EntityAIBase {

    protected final EntityTameableDragon dragon;
    protected EntityPlayer owner;

    public EntityAICatchOwner(EntityTameableDragon dragon) {
        this.dragon = dragon;
    }

    /**
     * Executes only when the dragon has an owner, the owner is not in
     * creative mode, the dragon is neither ridden nor leashed, and the
     * owner has fallen far enough.
     */
    @Override
    public boolean shouldExecute() {
        owner = (EntityPlayer) dragon.getOwner();

        boolean dragonCanCatch = owner != null          // don't catch if ownerless
                && !owner.capabilities.isCreativeMode   // no point in catching players in creative mode
                && dragon.riddenByEntity == null        // don't catch if already being ridden
                && !dragon.getLeashed();                // don't catch if leashed

        return dragonCanCatch && owner.fallDistance > 4;
    }
}
|
package io.vertx.codegen.generators.cheatsheet;
import io.vertx.codegen.Generator;
import io.vertx.codegen.DataObjectModel;
import io.vertx.codegen.doc.Doc;
import io.vertx.codegen.doc.Tag;
import io.vertx.codegen.doc.Token;
import io.vertx.codegen.type.ClassKind;
import io.vertx.codegen.type.TypeInfo;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.time.Instant;
import java.util.Collections;
import java.util.Map;
public class DataObjectCheatsheetGen extends Generator<DataObjectModel> {
    /** Registers this generator for data-object models; output is written incrementally. */
    public DataObjectCheatsheetGen() {
        name = "cheatsheet";
        kinds = Collections.singleton("dataObject");
        // All models append to the same output file (see filename()).
        incremental = true;
    }

    /** Every data object is rendered into a single asciidoc file, regardless of model. */
    @Override
    public String filename(DataObjectModel model) {
        return "asciidoc/dataobjects.adoc";
    }

    /**
     * Renders one data-object section; the document title is emitted only
     * for the first model of the batch (index == 0).
     */
    @Override
    public String render(DataObjectModel model, int index, int size, Map<String, Object> session) {
        StringWriter buffer = new StringWriter();
        PrintWriter html = new PrintWriter(buffer);
        if (index == 0) {
            html.append("= Cheatsheets\n");
            html.append("\n");
        }
        render(model, html);
        html.append("\n");
        return buffer.toString();
    }
private void render(DataObjectModel model, PrintWriter html) {
html.append("[[").append(model.getType().getSimpleName()).append("]]\n");
html.append("== ").append(model.getType().getSimpleName()).append("\n");
html.append("\n");
Doc doc = model.getDoc();
if (doc != null) {
html.append("++++\n");
Token.toHtml(doc.getTokens(), "", Tag::getName, "\n", html);
html.append("++++\n");
|
package com.digi.xbee.api;
import java.io.IOException;
import java.util.Arrays;
import com.digi.xbee.api.connection.DataReader;
import com.digi.xbee.api.connection.IConnectionInterface;
import com.digi.xbee.api.connection.serial.SerialPortParameters;
import com.digi.xbee.api.exceptions.InterfaceAlreadyOpenException;
import com.digi.xbee.api.exceptions.InterfaceNotOpenException;
import com.digi.xbee.api.exceptions.InvalidOperatingModeException;
import com.digi.xbee.api.exceptions.OperationNotSupportedException;
import com.digi.xbee.api.exceptions.TimeoutException;
import com.digi.xbee.api.exceptions.XBeeException;
import com.digi.xbee.api.listeners.IIOSampleReceiveListener;
import com.digi.xbee.api.listeners.IPacketReceiveListener;
import com.digi.xbee.api.listeners.ISerialDataReceiveListener;
import com.digi.xbee.api.models.ATCommand;
import com.digi.xbee.api.models.ATCommandResponse;
import com.digi.xbee.api.models.OperatingMode;
import com.digi.xbee.api.models.XBee16BitAddress;
import com.digi.xbee.api.models.XBee64BitAddress;
import com.digi.xbee.api.models.XBeeMessage;
import com.digi.xbee.api.models.XBeePacketsQueue;
import com.digi.xbee.api.models.XBeeTransmitOptions;
import com.digi.xbee.api.packet.APIFrameType;
import com.digi.xbee.api.packet.XBeeAPIPacket;
import com.digi.xbee.api.packet.XBeePacket;
import com.digi.xbee.api.packet.common.ReceivePacket;
import com.digi.xbee.api.packet.common.TransmitPacket;
import com.digi.xbee.api.packet.raw.RX16Packet;
import com.digi.xbee.api.packet.raw.RX64Packet;
import com.digi.xbee.api.packet.raw.TX64Packet;
import com.digi.xbee.api.utils.HexUtils;
public class XBeeDevice extends AbstractXBeeDevice {
	// Constants.
	// NOTE(review): these are mutable (non-final) statics but look intended
	// as constants — confirm nothing reassigns them before adding 'final'.
	private static int TIMEOUT_RESET = 5000;
	private static int TIMEOUT_READ_PACKET = 3000;
	
	private static String COMMAND_MODE_CHAR = "+";
	private static String COMMAND_MODE_OK = "OK\r";
	
	// Variables.
	// Lazily created by getNetwork().
	protected XBeeNetwork network;
	
	// Presumably used by the reset logic (not visible in this chunk) — verify.
	private Object resetLock = new Object();
	
	private boolean modemStatusReceived = false;
	/**
	 * Class constructor. Instantiates a new {@code XBeeDevice} object in the
	 * given serial port name and baud rate; delegates to the superclass.
	 */
	public XBeeDevice(String port, int baudRate) {
		super(port, baudRate);
	}
	
	/**
	 * Class constructor. Instantiates a new {@code XBeeDevice} object with
	 * explicit serial parameters; delegates to the superclass.
	 */
	public XBeeDevice(String port, int baudRate, int dataBits, int stopBits, int parity, int flowControl) {
		super(port, baudRate, dataBits, stopBits, parity, flowControl);
	}
	
	/**
	 * Class constructor. Instantiates a new {@code XBeeDevice} object in the
	 * given serial port name and parameters.
	 * 
	 * @param port Serial port name where XBee device is attached to.
	 * @param serialPortParameters Object containing the serial port parameters.
	 * 
	 * @throws NullPointerException if {@code port == null} or
	 *                              if {@code serialPortParameters == null}.
	 * 
	 * @see SerialPortParameters
	 */
	public XBeeDevice(String port, SerialPortParameters serialPortParameters) {
		super(port, serialPortParameters);
	}
	
	/**
	 * Class constructor. Instantiates a new {@code XBeeDevice} object with the
	 * given connection interface.
	 * 
	 * @param connectionInterface The connection interface with the physical
	 *                            XBee device.
	 * 
	 * @throws NullPointerException if {@code connectionInterface == null}.
	 * 
	 * @see IConnectionInterface
	 */
	public XBeeDevice(IConnectionInterface connectionInterface) {
		super(connectionInterface);
	}
/**
* Opens the connection interface associated with this XBee device.
*
* @throws XBeeException if there is any problem opening the device.
* @throws InterfaceAlreadyOpenException if the device is already open.
*
* @see #isOpen()
* @see #close()
*/
public void open() throws XBeeException {
logger.info(toString() + "Opening the connection interface...");
// First, verify that the connection is not already open.
if (connectionInterface.isOpen())
throw new InterfaceAlreadyOpenException();
// Connect the interface.
connectionInterface.open();
logger.info(toString() + "Connection interface open.");
// Initialize the data reader.
dataReader = new DataReader(connectionInterface, operatingMode, this);
dataReader.start();
// Wait 10 milliseconds until the dataReader thread is started.
// This is because when the connection is opened immediately after
// closing it, there is sometimes a concurrency problem and the
// dataReader thread never dies.
try {
Thread.sleep(10);
} catch (InterruptedException e) {}
// Determine the operating mode of the XBee device if it is unknown.
if (operatingMode == OperatingMode.UNKNOWN)
operatingMode = determineOperatingMode();
// Check if the operating mode is a valid and supported one.
if (operatingMode == OperatingMode.UNKNOWN) {
close();
throw new InvalidOperatingModeException("Could not determine operating mode.");
} else if (operatingMode == OperatingMode.AT) {
close();
throw new InvalidOperatingModeException(operatingMode);
}
// Read the device info (obtain its parameters and protocol).
readDeviceInfo();
}
	/**
	 * Closes the connection interface associated with this XBee device.
	 * 
	 * @see #isOpen()
	 * @see #open()
	 */
	public void close() {
		// Stop XBee reader.
		if (dataReader != null && dataReader.isRunning())
			dataReader.stopReader();
		// Close interface.
		// NOTE(review): called unconditionally — presumably safe when already
		// closed; depends on connectionInterface.close(), confirm.
		connectionInterface.close();
		logger.info(toString() + "Connection interface closed.");
	}
/**
* Retrieves whether or not the connection interface associated to the
* device is open.
*
* @return {@code true} if the interface is open, {@code false} otherwise.
*
* @see #open()
* @see #close()
*/
public boolean isOpen() {
if (connectionInterface != null)
return connectionInterface.isOpen();
return false;
}
	/*
	 * (non-Javadoc)
	 * @see com.digi.xbee.api.AbstractXBeeDevice#isRemote()
	 */
	@Override
	public boolean isRemote() {
		// A local device is, by definition, never remote.
		return false;
	}
	
	/**
	 * Returns the network associated with the device.
	 * 
	 * @return The XBee network of the device.
	 * 
	 * @throws InterfaceNotOpenException If the device is not open.
	 * 
	 * @see XBeeNetwork
	 */
	public XBeeNetwork getNetwork() {
		if (!isOpen())
			throw new InterfaceNotOpenException();
		
		// Lazily created on first access.
		if (network == null)
			network = new XBeeNetwork(this);
		return network;
	}
	
	/*
	 * (non-Javadoc)
	 * @see com.digi.xbee.api.AbstractXBeeDevice#getOperatingMode()
	 */
	@Override
	public OperatingMode getOperatingMode() {
		// Simple delegation to the superclass accessor.
		return super.getOperatingMode();
	}
	
	/*
	 * (non-Javadoc)
	 * @see com.digi.xbee.api.AbstractXBeeDevice#getNextFrameID()
	 */
	@Override
	protected int getNextFrameID() {
		// Simple delegation to the superclass accessor.
		return super.getNextFrameID();
	}
	/**
	 * Retrieves the configured timeout for receiving packets in synchronous 
	 * operations.
	 * 
	 * @return The current receive timeout in milliseconds.
	 * 
	 * @see #setReceiveTimeout(int)
	 */
	public int getReceiveTimeout() {
		return receiveTimeout;
	}
	
	/**
	 * Sets the timeout for receiving packets in synchronous operations.
	 * 
	 * @param receiveTimeout The new receive timeout in milliseconds.
	 * 
	 * @throws IllegalArgumentException if {@code receiveTimeout < 0}.
	 * 
	 * @see #getReceiveTimeout()
	 */
	public void setReceiveTimeout(int receiveTimeout) {
		if (receiveTimeout < 0)
			throw new IllegalArgumentException("Receive timeout cannot be less than 0.");
		
		this.receiveTimeout = receiveTimeout;
	}
/**
* Determines the operating mode of the XBee device.
*
* @return The operating mode of the XBee device.
*
* @throws OperationNotSupportedException if the packet is being sent from
* a remote device.
* @throws InterfaceNotOpenException if the device is not open.
*
* @see OperatingMode
*/
protected OperatingMode determineOperatingMode() throws OperationNotSupportedException {
try {
// Check if device is in API or API Escaped operating modes.
operatingMode = OperatingMode.API;
dataReader.setXBeeReaderMode(operatingMode);
ATCommandResponse response = sendATCommand(new ATCommand("AP"));
if (response.getResponse() != null && response.getResponse().length > 0) {
if (response.getResponse()[0] != OperatingMode.API.getID())
operatingMode = OperatingMode.API_ESCAPE;
logger.debug(toString() + "Using {}.", operatingMode.getName());
return operatingMode;
}
} catch (TimeoutException e) {
// Check if device is in AT operating mode.
operatingMode = OperatingMode.AT;
dataReader.setXBeeReaderMode(operatingMode);
try {
// It is necessary to wait at least 1 second to enter in command mode after
// sending any data to the device.
Thread.sleep(TIMEOUT_BEFORE_COMMAND_MODE);
// Try to enter in AT command mode, if so the module is in AT mode.
boolean success = enterATCommandMode();
if (success)
return OperatingMode.AT;
} catch (TimeoutException e1) {
logger.error(e1.getMessage(), e1);
} catch (InvalidOperatingModeException e1) {
logger.error(e1.getMessage(), e1);
} catch (InterruptedException e1) {
logger.error(e1.getMessage(), e1);
}
} catch (InvalidOperatingModeException e) {
logger.error("Invalid operating mode", e);
} catch (IOException e) {
logger.error(e.getMessage(), e);
}
return OperatingMode.UNKNOWN;
}
/**
* Attempts to put the device in AT Command mode. Only valid if device is
* working in AT mode.
*
* @return {@code true} if the device entered in AT command mode,
* {@code false} otherwise.
*
* @throws InvalidOperatingModeException if the operating mode cannot be
* determined or is not supported.
* @throws TimeoutException if the configured time expires.
* @throws InterfaceNotOpenException if the device is not open.
*/
private boolean enterATCommandMode() throws InvalidOperatingModeException, TimeoutException {
if (!connectionInterface.isOpen())
throw new InterfaceNotOpenException();
if (operatingMode != OperatingMode.AT)
throw new InvalidOperatingModeException("Invalid mode. Command mode can be only accessed while in AT mode.");
// Enter in AT command mode (send '+++'). The process waits 1,5 seconds for the 'OK\n'.
byte[] readData = new byte[256];
try {
// Send the command mode sequence.
connectionInterface.writeData(COMMAND_MODE_CHAR.getBytes());
connectionInterface.writeData(COMMAND_MODE_CHAR.getBytes());
connectionInterface.writeData(COMMAND_MODE_CHAR.getBytes());
// Wait some time to let the module generate a response.
Thread.sleep(TIMEOUT_ENTER_COMMAND_MODE);
// Read data from the device (it should answer with 'OK\r').
int readBytes = connectionInterface.readData(readData);
if (readBytes < COMMAND_MODE_OK.length())
throw new TimeoutException();
// Check if the read data is 'OK\r'.
String readString = new String(readData, 0, readBytes);
if (!readString.contains(COMMAND_MODE_OK))
return false;
// Read data was 'OK\r'.
return true;
} catch (IOException e) {
logger.error(e.getMessage(), e);
} catch (InterruptedException e) {
logger.error(e.getMessage(), e);
}
return false;
}
	// The following methods only widen access to the inherited listener
	// registration API; each delegates directly to the superclass.
	
	/*
	 * (non-Javadoc)
	 * @see com.digi.xbee.api.AbstractXBeeDevice#startListeningForPackets(com.digi.xbee.api.listeners.IPacketReceiveListener)
	 */
	@Override
	public void startListeningForPackets(IPacketReceiveListener listener) {
		super.startListeningForPackets(listener);
	}
	
	/*
	 * (non-Javadoc)
	 * @see com.digi.xbee.api.AbstractXBeeDevice#stopListeningForPackets(com.digi.xbee.api.listeners.IPacketReceiveListener)
	 */
	@Override
	public void stopListeningForPackets(IPacketReceiveListener listener) {
		super.stopListeningForPackets(listener);
	}
	
	/*
	 * (non-Javadoc)
	 * @see com.digi.xbee.api.AbstractXBeeDevice#startListeningForSerialData(com.digi.xbee.api.listeners.ISerialDataReceiveListener)
	 */
	@Override
	public void startListeningForSerialData(ISerialDataReceiveListener listener) {
		super.startListeningForSerialData(listener);
	}
	
	/*
	 * (non-Javadoc)
	 * @see com.digi.xbee.api.AbstractXBeeDevice#stopListeningForSerialData(com.digi.xbee.api.listeners.ISerialDataReceiveListener)
	 */
	@Override
	public void stopListeningForSerialData(ISerialDataReceiveListener listener) {
		super.stopListeningForSerialData(listener);
	}
	
	/*
	 * (non-Javadoc)
	 * @see com.digi.xbee.api.AbstractXBeeDevice#startListeningForIOSamples(com.digi.xbee.api.listeners.IIOSampleReceiveListener)
	 */
	@Override
	public void startListeningForIOSamples(IIOSampleReceiveListener listener) {
		super.startListeningForIOSamples(listener);
	}
	
	/*
	 * (non-Javadoc)
	 * @see com.digi.xbee.api.AbstractXBeeDevice#stopListeningForIOSamples(com.digi.xbee.api.listeners.IIOSampleReceiveListener)
	 */
	@Override
	public void stopListeningForIOSamples(IIOSampleReceiveListener listener) {
		super.stopListeningForIOSamples(listener);
	}
/**
* Sends the provided data to the XBee device of the network corresponding
* to the given 64-bit address asynchronously.
*
* <p>Asynchronous transmissions do not wait for answer from the remote
* device or for transmit status packet.</p>
*
* @param address The 64-bit address of the XBee that will receive the data.
* @param data Byte array containing data to be sent.
*
* @throws XBeeException if there is any XBee related exception.
* @throws InterfaceNotOpenException if the device is not open.
* @throws NullPointerException if {@code address == null} or
* if {@code data == null}.
*
* @see XBee64BitAddress
* @see #sendSerialDataAsync(XBee16BitAddress, byte[])
* @see #sendSerialDataAsync(AbstractXBeeDevice, byte[])
* @see #sendSerialData(XBee16BitAddress, byte[])
* @see #sendSerialData(XBee64BitAddress, byte[])
* @see #sendSerialData(AbstractXBeeDevice, byte[])
*/
protected void sendSerialDataAsync(XBee64BitAddress address, byte[] data) throws XBeeException {
	// Reject null arguments up front.
	if (address == null)
		throw new NullPointerException("Address cannot be null");
	if (data == null)
		throw new NullPointerException("Data cannot be null");
	// The connection must be open before anything can be transmitted.
	if (!connectionInterface.isOpen())
		throw new InterfaceNotOpenException();
	// Only local devices may originate transmissions.
	if (isRemote())
		throw new OperationNotSupportedException("Cannot send data to a remote device from a remote device.");
	logger.debug(toString() + "Sending serial data asynchronously to {} >> {}.", address, HexUtils.prettyHexString(data));
	// 802.15.4 modules require the legacy TX64 frame; every other protocol
	// uses the common Transmit Request frame.
	final XBeePacket packet;
	switch (getXBeeProtocol()) {
	case RAW_802_15_4:
		packet = new TX64Packet(getNextFrameID(), address, XBeeTransmitOptions.NONE, data);
		break;
	default:
		packet = new TransmitPacket(getNextFrameID(), address, XBee16BitAddress.UNKNOWN_ADDRESS, 0, XBeeTransmitOptions.NONE, data);
		break;
	}
	// Asynchronous send: do not wait for a transmit status answer.
	sendAndCheckXBeePacket(packet, true);
}
/**
* Sends the provided data to the XBee device of the network corresponding
* to the given 64-Bit/16-Bit address asynchronously.
*
* <p>Asynchronous transmissions do not wait for answer from the remote
* device or for transmit status packet.</p>
*
* @param address64Bit The 64-bit address of the XBee that will receive the
* data.
* @param address16bit The 16-bit address of the XBee that will receive the
* data. If it is unknown the
* {@code XBee16BitAddress.UNKNOWN_ADDRESS} must be used.
* @param data Byte array containing data to be sent.
*
* @throws XBeeException if a remote device is trying to send serial data or
* if there is any other XBee related exception.
* @throws InterfaceNotOpenException if the device is not open.
* @throws NullPointerException if {@code address64Bit == null} or
* if {@code address16bit == null} or
* if {@code data == null}.
*
* @see XBee64BitAddress
* @see XBee16BitAddress
* @see #getReceiveTimeout()
* @see #setReceiveTimeout(int)
* @see #sendSerialData(XBee64BitAddress, byte[])
* @see #sendSerialData(XBee16BitAddress, byte[])
* @see #sendSerialData(XBee64BitAddress, XBee16BitAddress, byte[])
* @see #sendSerialData(AbstractXBeeDevice, byte[])
* @see #sendSerialDataAsync(XBee64BitAddress, byte[])
* @see #sendSerialDataAsync(XBee16BitAddress, byte[])
* @see #sendSerialDataAsync(AbstractXBeeDevice, byte[])
*/
protected void sendSerialDataAsync(XBee64BitAddress address64Bit, XBee16BitAddress address16bit, byte[] data) throws XBeeException {
	// Reject null arguments up front.
	if (address64Bit == null)
		throw new NullPointerException("64-bit address cannot be null");
	if (address16bit == null)
		throw new NullPointerException("16-bit address cannot be null");
	if (data == null)
		throw new NullPointerException("Data cannot be null");
	// The connection must be open before anything can be transmitted.
	if (!connectionInterface.isOpen())
		throw new InterfaceNotOpenException();
	// Only local devices may originate transmissions.
	if (isRemote())
		throw new OperationNotSupportedException("Cannot send data to a remote device from a remote device.");
	logger.debug(toString() + "Sending serial data asynchronously to {}[{}] >> {}.",
			address64Bit, address16bit, HexUtils.prettyHexString(data));
	// Asynchronous send: do not wait for a transmit status answer.
	sendAndCheckXBeePacket(
			new TransmitPacket(getNextFrameID(), address64Bit, address16bit, 0, XBeeTransmitOptions.NONE, data),
			true);
}
/**
 * Sends the provided data to the provided XBee device asynchronously.
 *
 * <p>Asynchronous transmissions do not wait for answer from the remote
 * device or for transmit status packet.</p>
 *
 * @param xbeeDevice The XBee device of the network that will receive the data.
 * @param data Byte array containing data to be sent.
 *
 * @throws XBeeException if there is any XBee related exception.
 * @throws InterfaceNotOpenException if the device is not open.
 * @throws NullPointerException if {@code xbeeDevice == null} or
 *                              if {@code data == null}.
 *
 * @see #sendSerialDataAsync(XBee64BitAddress, byte[])
 * @see #sendSerialDataAsync(XBee16BitAddress, byte[])
 * @see #sendSerialData(XBee64BitAddress, byte[])
 * @see #sendSerialData(XBee16BitAddress, byte[])
 * @see #sendSerialData(AbstractXBeeDevice, byte[])
 */
public void sendSerialDataAsync(RemoteXBeeDevice xbeeDevice, byte[] data) throws XBeeException {
	if (xbeeDevice == null)
		throw new NullPointerException("Remote XBee device cannot be null");
	// Address the remote device by its 64-bit address; the overload performs
	// the remaining argument and connection checks.
	sendSerialDataAsync(xbeeDevice.get64BitAddress(), data);
}
/**
* Sends the provided data to the XBee device of the network corresponding
* to the given 64-bit address.
*
* <p>This method blocks till a success or error response arrives or the
* configured receive timeout expires.</p>
*
* <p>The received timeout is configured using the {@code setReceiveTimeout}
* method and can be consulted with {@code getReceiveTimeout} method.</p>
*
* <p>For non-blocking operations use the method
* {@link #sendSerialData(XBee64BitAddress, byte[])}.</p>
*
* @param address The 64-bit address of the XBee that will receive the data.
* @param data Byte array containing data to be sent.
*
* @throws TimeoutException if there is a timeout sending the serial data.
* @throws XBeeException if there is any other XBee related exception.
* @throws InterfaceNotOpenException if the device is not open.
* @throws NullPointerException if {@code address == null} or
* if {@code data == null}.
*
* @see XBee64BitAddress
* @see #getReceiveTimeout()
* @see #setReceiveTimeout(int)
* @see #sendSerialData(XBee16BitAddress, byte[])
* @see #sendSerialData(AbstractXBeeDevice, byte[])
* @see #sendSerialDataAsync(XBee64BitAddress, byte[])
* @see #sendSerialDataAsync(XBee16BitAddress, byte[])
* @see #sendSerialDataAsync(AbstractXBeeDevice, byte[])
*/
protected void sendSerialData(XBee64BitAddress address, byte[] data) throws TimeoutException, XBeeException {
	// Reject null arguments up front.
	if (address == null)
		throw new NullPointerException("Address cannot be null");
	if (data == null)
		throw new NullPointerException("Data cannot be null");
	// The connection must be open before anything can be transmitted.
	if (!connectionInterface.isOpen())
		throw new InterfaceNotOpenException();
	// Only local devices may originate transmissions.
	if (isRemote())
		throw new OperationNotSupportedException("Cannot send data to a remote device from a remote device.");
	logger.debug(toString() + "Sending serial data to {} >> {}.", address, HexUtils.prettyHexString(data));
	// 802.15.4 modules require the legacy TX64 frame; every other protocol
	// uses the common Transmit Request frame.
	final XBeePacket packet;
	switch (getXBeeProtocol()) {
	case RAW_802_15_4:
		packet = new TX64Packet(getNextFrameID(), address, XBeeTransmitOptions.NONE, data);
		break;
	default:
		packet = new TransmitPacket(getNextFrameID(), address, XBee16BitAddress.UNKNOWN_ADDRESS, 0, XBeeTransmitOptions.NONE, data);
		break;
	}
	// Synchronous send: block until the transmit status arrives or times out.
	sendAndCheckXBeePacket(packet, false);
}
/**
* Sends the provided data to the XBee device of the network corresponding
* to the given 64-Bit/16-Bit address.
*
* <p>This method blocks till a success or error response arrives or the
* configured receive timeout expires.</p>
*
* <p>The received timeout is configured using the {@code setReceiveTimeout}
* method and can be consulted with {@code getReceiveTimeout} method.</p>
*
* <p>For non-blocking operations use the method
* {@link #sendSerialData(XBee16BitAddress, byte[])}.</p>
*
* @param address64Bit The 64-bit address of the XBee that will receive the
* data.
* @param address16bit The 16-bit address of the XBee that will receive the
* data. If it is unknown the
* {@code XBee16BitAddress.UNKNOWN_ADDRESS} must be used.
* @param data Byte array containing data to be sent.
*
* @throws TimeoutException if there is a timeout sending the serial data.
* @throws XBeeException if a remote device is trying to send serial data or
* if there is any other XBee related exception.
* @throws InterfaceNotOpenException if the device is not open.
* @throws NullPointerException if {@code address64Bit == null} or
* if {@code address16bit == null} or
* if {@code data == null}.
*
* @see XBee64BitAddress
* @see XBee16BitAddress
* @see #getReceiveTimeout()
* @see #setReceiveTimeout(int)
* @see #sendSerialData(XBee64BitAddress, byte[])
* @see #sendSerialData(XBee16BitAddress, byte[])
* @see #sendSerialData(AbstractXBeeDevice, byte[])
* @see #sendSerialDataAsync(XBee64BitAddress, byte[])
* @see #sendSerialDataAsync(XBee16BitAddress, byte[])
* @see #sendSerialDataAsync(XBee64BitAddress, XBee16BitAddress, byte[])
* @see #sendSerialDataAsync(AbstractXBeeDevice, byte[])
*/
protected void sendSerialData(XBee64BitAddress address64Bit, XBee16BitAddress address16bit, byte[] data) throws TimeoutException, XBeeException {
	// Reject null arguments up front.
	if (address64Bit == null)
		throw new NullPointerException("64-bit address cannot be null");
	if (address16bit == null)
		throw new NullPointerException("16-bit address cannot be null");
	if (data == null)
		throw new NullPointerException("Data cannot be null");
	// The connection must be open before anything can be transmitted.
	if (!connectionInterface.isOpen())
		throw new InterfaceNotOpenException();
	// Only local devices may originate transmissions.
	if (isRemote())
		throw new OperationNotSupportedException("Cannot send data to a remote device from a remote device.");
	logger.debug(toString() + "Sending serial data to {}[{}] >> {}.",
			address64Bit, address16bit, HexUtils.prettyHexString(data));
	// Synchronous send: block until the transmit status arrives or times out.
	sendAndCheckXBeePacket(
			new TransmitPacket(getNextFrameID(), address64Bit, address16bit, 0, XBeeTransmitOptions.NONE, data),
			false);
}
/**
 * Sends the provided data to the given XBee device choosing the optimal send method
 * depending on the protocol of the local XBee device.
 *
 * <p>This method blocks till a success or error response arrives or the
 * configured receive timeout expires.</p>
 *
 * <p>The received timeout is configured using the {@code setReceiveTimeout}
 * method and can be consulted with {@code getReceiveTimeout} method.</p>
 *
 * <p>For non-blocking operations use the method
 * {@link #sendSerialDataAsync(AbstractXBeeDevice, byte[])}.</p>
 *
 * @param xbeeDevice The XBee device of the network that will receive the data.
 * @param data Byte array containing data to be sent.
 *
 * @throws TimeoutException if there is a timeout sending the serial data.
 * @throws XBeeException if there is any other XBee related exception.
 * @throws InterfaceNotOpenException if the device is not open.
 * @throws NullPointerException if {@code xbeeDevice == null} or
 *                              if {@code data == null}.
 *
 * @see #getReceiveTimeout()
 * @see #setReceiveTimeout(int)
 * @see #sendSerialData(XBee64BitAddress, byte[])
 * @see #sendSerialData(XBee16BitAddress, byte[])
 * @see #sendSerialDataAsync(XBee64BitAddress, byte[])
 * @see #sendSerialDataAsync(XBee16BitAddress, byte[])
 * @see #sendSerialDataAsync(AbstractXBeeDevice, byte[])
 */
public void sendSerialData(RemoteXBeeDevice xbeeDevice, byte[] data) throws TimeoutException, XBeeException {
	if (xbeeDevice == null)
		throw new NullPointerException("Remote XBee device cannot be null");
	switch (getXBeeProtocol()) {
	// ZigBee and DigiPoint: prefer the combined 64/16-bit addressing when
	// both addresses are known; otherwise fall back to 64-bit only.
	case ZIGBEE:
	case DIGI_POINT:
		if (xbeeDevice.get64BitAddress() != null && xbeeDevice.get16BitAddress() != null)
			sendSerialData(xbeeDevice.get64BitAddress(), xbeeDevice.get16BitAddress(), data);
		else
			sendSerialData(xbeeDevice.get64BitAddress(), data);
		break;
	// 802.15.4: a Raw802Device can address by 16-bit address when no 64-bit
	// address is available.
	case RAW_802_15_4:
		if (this instanceof Raw802Device) {
			if (xbeeDevice.get64BitAddress() != null)
				((Raw802Device)this).sendSerialData(xbeeDevice.get64BitAddress(), data);
			else
				((Raw802Device)this).sendSerialData(xbeeDevice.get16BitAddress(), data);
		} else
			sendSerialData(xbeeDevice.get64BitAddress(), data);
		break;
	// DigiMesh and everything else: 64-bit addressing only.
	case DIGI_MESH:
	default:
		sendSerialData(xbeeDevice.get64BitAddress(), data);
	}
}
/**
 * Sends the provided data to all the XBee nodes of the network (broadcast).
 *
 * <p>This method blocks till a success or error transmit status arrives or
 * the configured receive timeout expires.</p>
 *
 * <p>The received timeout is configured using the {@code setReceiveTimeout}
 * method and can be consulted with {@code getReceiveTimeout} method.</p>
 *
 * @param data Byte array containing data to be sent.
 *
 * @throws NullPointerException if {@code data == null}.
 * @throws InterfaceNotOpenException if the device is not open.
 * @throws TimeoutException if there is a timeout sending the serial data.
 * @throws XBeeException if there is any other XBee related exception.
 *
 * @see #getReceiveTimeout()
 * @see #setReceiveTimeout(int)
 */
public void sendBroadcastSerialData(byte[] data) throws TimeoutException, XBeeException {
	// Broadcasting is a normal send targeted at the reserved broadcast address.
	sendSerialData(XBee64BitAddress.BROADCAST_ADDRESS, data);
}
/**
 * Sends the given XBee packet and registers the given packet listener
 * (if not {@code null}) to wait for an answer.
 *
 * @param packet XBee packet to be sent.
 * @param packetReceiveListener Listener for the operation, {@code null}
 *                              not to be notified when the answer arrives.
 *
 * @throws InterfaceNotOpenException if the device is not open.
 * @throws NullPointerException if {@code packet == null}.
 * @throws XBeeException if there is any other XBee related exception.
 *
 * @see XBeePacket
 * @see IPacketReceiveListener
 * @see #sendXBeePacket(XBeePacket)
 * @see #sendXBeePacketAsync(XBeePacket)
 */
public void sendPacket(XBeePacket packet, IPacketReceiveListener packetReceiveListener) throws XBeeException {
	try {
		sendXBeePacket(packet, packetReceiveListener);
	} catch (IOException e) {
		// Translate low-level I/O failures into the public XBee exception type.
		throw new XBeeException("Error writing in the communication interface.", e);
	}
}
/**
 * Sends the given XBee packet asynchronously.
 *
 * <p>To be notified when the answer is received, use
 * {@link #sendXBeePacket(XBeePacket, IPacketReceiveListener)}.</p>
 *
 * @param packet XBee packet to be sent asynchronously.
 *
 * @throws InterfaceNotOpenException if the device is not open.
 * @throws NullPointerException if {@code packet == null}.
 * @throws XBeeException if there is any other XBee related exception.
 *
 * @see XBeePacket
 * @see #sendXBeePacket(XBeePacket)
 * @see #sendXBeePacket(XBeePacket, IPacketReceiveListener)
 */
public void sendPacketAsync(XBeePacket packet) throws XBeeException {
	try {
		// A null listener means no notification is wanted for the answer.
		super.sendXBeePacket(packet, null);
	} catch (IOException e) {
		// Translate low-level I/O failures into the public XBee exception type.
		throw new XBeeException("Error writing in the communication interface.", e);
	}
}
/**
 * Sends the given XBee packet synchronously and blocks until the response
 * is received or the configured receive timeout expires.
 *
 * <p>The received timeout is configured using the {@code setReceiveTimeout}
 * method and can be consulted with {@code getReceiveTimeout} method.</p>
 *
 * <p>Use {@link #sendXBeePacketAsync(XBeePacket)} for non-blocking
 * operations.</p>
 *
 * @param packet XBee packet to be sent.
 *
 * @return An {@code XBeePacket} object containing the response of the sent
 *         packet or {@code null} if there is no response.
 *
 * @throws InterfaceNotOpenException if the device is not open.
 * @throws NullPointerException if {@code packet == null}.
 * @throws TimeoutException if there is a timeout sending the XBee packet.
 * @throws XBeeException if there is any other XBee related exception.
 *
 * @see XBeePacket
 * @see #sendXBeePacket(XBeePacket, IPacketReceiveListener)
 * @see #sendXBeePacketAsync(XBeePacket)
 * @see #setReceiveTimeout(int)
 * @see #getReceiveTimeout()
 */
public XBeePacket sendPacket(XBeePacket packet) throws TimeoutException, XBeeException {
	try {
		return super.sendXBeePacket(packet);
	} catch (IOException e) {
		// Translate low-level I/O failures into the public XBee exception type.
		throw new XBeeException("Error writing in the communication interface.", e);
	}
}
/**
 * Waits until a Modem Status packet with status 0x00 (hardware reset) or
 * 0x01 (Watchdog timer reset) is received or the timeout is reached.
 *
 * @return {@code true} if the Modem Status packet is received,
 *         {@code false} otherwise.
 */
private boolean waitForModemStatusPacket() {
	modemStatusReceived = false;
	// modemStatusListener sets modemStatusReceived and notifies resetLock
	// when a matching Modem Status packet arrives.
	startListeningForPackets(modemStatusListener);
	synchronized (resetLock) {
		try {
			resetLock.wait(TIMEOUT_RESET);
		} catch (InterruptedException e) {
			// Restore the interrupt status instead of swallowing it, so
			// callers up the stack can still observe the interruption.
			Thread.currentThread().interrupt();
		}
	}
	stopListeningForPackets(modemStatusListener);
	return modemStatusReceived;
}
/**
 * Custom listener for Modem Status packets.
 *
 * <p>When a Modem Status packet is received with status 0x00 (hardware
 * reset) or 0x01 (Watchdog timer reset), it notifies the object that was
 * waiting for the reception.</p>
 */
private IPacketReceiveListener modemStatusListener = new IPacketReceiveListener() {
	/*
	 * (non-Javadoc)
	 * @see com.digi.xbee.api.listeners.IPacketReceiveListener#packetReceived(com.digi.xbee.api.packet.XBeePacket)
	 */
	public void packetReceived(XBeePacket receivedPacket) {
		// Discard non API packets.
		if (!(receivedPacket instanceof XBeeAPIPacket))
			return;
		// 0x8A is the Modem Status frame type; the second byte is the status.
		byte[] hardwareReset = new byte[] {(byte) 0x8A, 0x00};
		byte[] watchdogTimerReset = new byte[] {(byte) 0x8A, 0x01};
		if (Arrays.equals(receivedPacket.getPacketData(), hardwareReset) ||
				Arrays.equals(receivedPacket.getPacketData(), watchdogTimerReset)) {
			modemStatusReceived = true;
			// Continue execution by notifying the lock object.
			synchronized (resetLock) {
				resetLock.notify();
			}
		}
	}
};
/**
 * {@inheritDoc}
 *
 * <p>Sends the {@code FR} AT command and then waits for a Modem Status
 * packet that confirms the module restarted.</p>
 */
@Override
public void reset() throws TimeoutException, XBeeException {
	// Check connection.
	if (!connectionInterface.isOpen())
		throw new InterfaceNotOpenException();
	logger.info(toString() + "Resetting the local module...");
	ATCommandResponse response = null;
	try {
		// 'FR' (software reset) forces the module to restart.
		response = sendATCommand(new ATCommand("FR"));
	} catch (IOException e) {
		throw new XBeeException("Error writing in the communication interface.", e);
	}
	// Check if AT Command response is valid.
	checkATCommandResponseIsValid(response);
	// Wait for a Modem Status packet.
	if (!waitForModemStatusPacket())
		throw new TimeoutException("Timeout waiting for the Modem Status packet.");
	logger.info(toString() + "Module reset successfully.");
}
/**
 * Retrieves an XBee Message object received by the local XBee device and
 * containing the data and the source address of the node that sent the
 * data.
 *
 * <p>The method will try to read (receive) a data packet during the configured
 * receive timeout.</p>
 *
 * @return An XBee Message object containing the data and the source address
 *         of the node that sent the data. Null if the local device didn't
 *         receive a data packet during the configured receive timeout.
 *
 * @throws InterfaceNotOpenException if the device is not open.
 *
 * @see XBeeMessage
 * @see #getReceiveTimeout()
 * @see #setReceiveTimeout(int)
 */
public XBeeMessage readData() {
	// null remote device means "accept data from any remote device".
	return readDataPacket(null, TIMEOUT_READ_PACKET);
}
/**
 * Retrieves an XBee Message object received by the local XBee device,
 * waiting at most the provided timeout for a data packet to arrive.
 *
 * @param timeout The time to wait for a data packet in milliseconds.
 *
 * @return An XBee Message object containing the data and the source address
 *         of the node that sent the data. Null if no data packet arrived
 *         within the given timeout.
 *
 * @throws IllegalArgumentException if {@code timeout < 0}.
 * @throws InterfaceNotOpenException if the device is not open.
 *
 * @see XBeeMessage
 * @see #readData()
 */
public XBeeMessage readData(int timeout) {
	if (timeout < 0)
		throw new IllegalArgumentException("Read timeout must be 0 or greater.");
	return readDataPacket(null, timeout);
}
/**
 * Retrieves an XBee Message object received by the local XBee device that was
 * sent by the provided remote XBee device. The XBee Message contains the data
 * and the source address of the node that sent the data.
 *
 * <p>The method will try to read (receive) a data packet from the provided
 * remote device during the configured receive timeout.</p>
 *
 * @param remoteXBeeDevice The remote device to get a data packet from.
 * @return An XBee Message object containing the data and the source address
 *         of the node that sent the data. Null if the local device didn't
 *         receive a data packet from the remote XBee device during the
 *         configured receive timeout.
 *
 * @throws InterfaceNotOpenException if the device is not open.
 * @throws NullPointerException if {@code remoteXBeeDevice == null}.
 *
 * @see XBeeMessage
 * @see RemoteXBeeDevice
 * @see #getReceiveTimeout()
 * @see #setReceiveTimeout(int)
 */
public XBeeMessage readDataFrom(RemoteXBeeDevice remoteXBeeDevice) {
	if (remoteXBeeDevice == null)
		throw new NullPointerException("Remote XBee device cannot be null.");
	return readDataPacket(remoteXBeeDevice, TIMEOUT_READ_PACKET);
}
/**
 * Retrieves an XBee Message object received by the local XBee device that was
 * sent by the provided remote XBee device, waiting at most the provided
 * timeout for a data packet to arrive.
 *
 * @param remoteXBeeDevice The remote device to get a data packet from.
 * @param timeout The time to wait for a data packet in milliseconds.
 *
 * @return An XBee Message object containing the data and the source address
 *         of the node that sent the data. Null if no data packet from the
 *         remote XBee device arrived within the given timeout.
 *
 * @throws IllegalArgumentException if {@code timeout < 0}.
 * @throws InterfaceNotOpenException if the device is not open.
 * @throws NullPointerException if {@code remoteXBeeDevice == null}.
 *
 * @see XBeeMessage
 * @see RemoteXBeeDevice
 * @see #readDataFrom(RemoteXBeeDevice)
 */
public XBeeMessage readDataFrom(RemoteXBeeDevice remoteXBeeDevice, int timeout) {
	if (remoteXBeeDevice == null)
		throw new NullPointerException("Remote XBee device cannot be null.");
	if (timeout < 0)
		throw new IllegalArgumentException("Read timeout must be 0 or greater.");
	return readDataPacket(remoteXBeeDevice, timeout);
}
/**
 * Retrieves an XBee Message object received by the local XBee device. The
 * XBee Message contains the data and the source address of the node that
 * sent the data. Depending on if the provided remote XBee device is null
 * or not, the method will get the first data packet read from any remote
 * XBee device or from the provided one.
 *
 * <p>The method will try to read (receive) a data packet from the provided
 * remote device or any other device during the provided timeout.</p>
 *
 * @param remoteXBeeDevice The remote device to get a data packet from. Null to
 *                         read a data packet sent by any remote XBee device.
 * @param timeout The time to wait for a data packet in milliseconds.
 * @return An XBee Message object containing the data and the source address
 *         of the node that sent the data. Null if no matching packet arrived
 *         within the timeout or if its frame type is not a data frame.
 *
 * @throws InterfaceNotOpenException if the device is not open.
 *
 * @see XBeeMessage
 * @see RemoteXBeeDevice
 */
private XBeeMessage readDataPacket(RemoteXBeeDevice remoteXBeeDevice, int timeout) {
	// Check connection.
	if (!connectionInterface.isOpen())
		throw new InterfaceNotOpenException();
	XBeePacketsQueue xbeePacketsQueue = dataReader.getXBeePacketsQueue();
	XBeePacket xbeePacket = null;
	// Take the first matching data packet from the reader's queue.
	if (remoteXBeeDevice != null)
		xbeePacket = xbeePacketsQueue.getFirstDataPacketFrom(remoteXBeeDevice, timeout);
	else
		xbeePacket = xbeePacketsQueue.getFirstDataPacket(timeout);
	if (xbeePacket == null)
		return null;
	// Obtain the source address and data from the packet.
	RemoteXBeeDevice remoteDevice;
	byte[] data;
	APIFrameType packetType = ((XBeeAPIPacket)xbeePacket).getFrameType();
	// Each data frame type carries the source address in a different form.
	switch (packetType) {
	case RECEIVE_PACKET:
		remoteDevice = new RemoteXBeeDevice(this, ((ReceivePacket)xbeePacket).get64bitSourceAddress());
		data = ((ReceivePacket)xbeePacket).getRFData();
		break;
	case RX_16:
		remoteDevice = new RemoteRaw802Device(this, ((RX16Packet)xbeePacket).get16bitSourceAddress());
		data = ((RX16Packet)xbeePacket).getRFData();
		break;
	case RX_64:
		remoteDevice = new RemoteXBeeDevice(this, ((RX64Packet)xbeePacket).get64bitSourceAddress());
		data = ((RX64Packet)xbeePacket).getRFData();
		break;
	default:
		// Not a data frame; nothing to deliver.
		return null;
	}
	// TODO: The remote XBee device should be retrieved from the XBee Network (contained
	// in the xbeeDevice variable). If the network does not contain such remote device,
	// then it should be instantiated and added there.
	// Create and return the XBee message.
	return new XBeeMessage(remoteDevice, data, ((XBeeAPIPacket)xbeePacket).isBroadcast());
}
/**
 * {@inheritDoc}
 *
 * <p>Appends the 64-bit address and, when available, the node identifier to
 * the superclass representation.</p>
 */
@Override
public String toString() {
	// Fetch each piece once, normalizing "unknown" to the empty string.
	String nodeID = getNodeID();
	if (nodeID == null)
		nodeID = "";
	String address = get64BitAddress() == null ? "" : get64BitAddress().toString();
	// With nothing to add, fall back to the plain superclass text.
	if (nodeID.isEmpty() && address.isEmpty())
		return super.toString();
	// Format: "<super><addr64> (<nodeID>) - ", the ID part only when known.
	StringBuilder text = new StringBuilder(super.toString());
	text.append(address);
	if (!nodeID.isEmpty())
		text.append(" (").append(nodeID).append(")");
	return text.append(" - ").toString();
}
}
|
package me.lucaspickering.terraingen.world.generate;
import java.util.ArrayList;
import java.util.EnumMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import me.lucaspickering.terraingen.util.Direction;
import me.lucaspickering.terraingen.util.Funcs;
import me.lucaspickering.terraingen.util.IntRange;
import me.lucaspickering.terraingen.world.Biome;
import me.lucaspickering.terraingen.world.Tile;
import me.lucaspickering.terraingen.world.World;
import me.lucaspickering.terraingen.world.util.Cluster;
import me.lucaspickering.terraingen.world.util.TileMap;
import me.lucaspickering.terraingen.world.util.TileSet;
public class ContinentGenerator implements Generator {
// Range of the number of continents to generate per world
private static final IntRange CONTINENT_COUNT_RANGE = new IntRange(10, 20);
// The range that a continent's target size can be in. Note that continents may end up being
// smaller than the minimum of this range, if there aren't enough tiles to make them bigger.
private static final IntRange CONTINENT_SIZE_RANGE = new IntRange(100, 1000);
// Average size (in tiles) of each biome painted onto a continent
private static final int AVERAGE_BIOME_SIZE = 10;
// The biomes that we can paint in this routine, mapped to the relative
// weight (chance) that each one will be selected.
public static Map<Biome, Integer> BIOME_WEIGHTS;

// Initialize all the weights. No try/catch needed here: the previous
// catch-print-rethrow only duplicated the stack trace without changing behavior.
static {
    BIOME_WEIGHTS = new EnumMap<>(Biome.class);
    BIOME_WEIGHTS.put(Biome.PLAINS, 10);
    BIOME_WEIGHTS.put(Biome.FOREST, 10);
    BIOME_WEIGHTS.put(Biome.DESERT, 2);
}
private World world; // The world being operated on; set at the start of generate()
private Random random; // RNG supplied to generate(); used for all random choices
private TileSet unassignedTiles; // All tiles that aren't currently assigned to a continent
@Override
public void generate(World world, Random random) {
    // Cache the inputs in fields so the helper methods don't need them passed around.
    this.world = world;
    this.random = random;
    this.unassignedTiles = new TileSet(world.getTiles()); // Make a copy so we can modify it
    // Cluster tiles into continents
    generateContinents();
    // Adjust elevation to create oceans/coasts
    generateOceanFloor();
    // Paint biomes onto each continent
    world.getContinents().forEach(c -> paintContinent(c, random));
}
/**
 * Clusters together tiles to create a random number of continents. The generated continents
 * will be added to {@link #world}.
 */
private void generateContinents() {
    final int numToGenerate = CONTINENT_COUNT_RANGE.randomIn(random);
    // While we haven't hit our target number and there are enough tiles left,
    // generate a new continent
    while (world.getContinents().size() < numToGenerate
           && unassignedTiles.size() >= CONTINENT_SIZE_RANGE.min()) {
        final Cluster continent = generateContinent();
        // If the continent is null, that means that it was generated, but merged into
        // another continent that is already in the list.
        if (continent != null) {
            world.getContinents().add(continent);
        }
    }
    // Re-cluster the continents to join any continents that connected to each other.
    // NOTE(review): reclusterContinents() returns the merged continent list but the
    // result is discarded here, and it only updates the tile->continent map —
    // confirm that world.getContinents() is refreshed with the merged clusters elsewhere.
    reclusterContinents();
    cleanupContinents();
}
/**
 * Generates a single continent by growing a cluster outward from a random
 * seed tile taken from the pool of unassigned tiles.
 *
 * @return the generated continent
 */
private Cluster generateContinent() {
    final Cluster continent = Cluster.fromWorld(world.getTiles());
    // Each continent aims for a random size; it may fall short of the target
    // if it runs out of room to grow.
    final int targetSize = CONTINENT_SIZE_RANGE.randomIn(random);
    // Seed the continent with one random unassigned tile.
    addToContinent(Funcs.randomFromCollection(random, unassignedTiles), continent);
    // Grow outward until the target size is reached or no candidates remain.
    while (continent.size() < targetSize) {
        // Candidates are the unassigned tiles bordering the continent.
        final TileSet frontier = continent.allAdjacents();
        frontier.retainAll(unassignedTiles);
        if (frontier.isEmpty()) {
            break; // Nowhere left to grow.
        }
        // Annex a random border tile.
        addToContinent(Funcs.randomFromCollection(random, frontier), continent);
    }
    assert !continent.isEmpty(); // The seed tile guarantees at least one member
    return continent;
}
/**
 * Re-clusters the tiles of all current continents, so that continents which
 * have grown into each other become single clusters. The tile-to-continent
 * map in {@link World#getTilesToContinents()} is updated for every tile.
 *
 * <p>NOTE(review): only the tile map is updated here; the world's continent
 * list itself is not replaced by this method, so callers that need the
 * merged continents must use the returned list.</p>
 *
 * @return the newly clustered continents
 */
private List<Cluster> reclusterContinents() {
    // Gather every tile that currently belongs to some continent.
    final TileSet allTiles = new TileSet();
    for (Cluster continent : world.getContinents()) {
        allTiles.addAll(continent);
    }
    final List<Cluster> newContinents = allTiles.cluster();
    // Point each tile at its (possibly merged) continent.
    for (Cluster continent : newContinents) {
        for (Tile tile : continent) {
            world.getTilesToContinents().put(tile, continent);
        }
    }
    return newContinents;
}
/**
 * "Cleans up" all continents in the world. This fixes errors/imperfections such as unassigned
 * tiles inside of continents and long strings of land that look strange.
 */
private void cleanupContinents() {
    // Cluster the negative tiles (tiles not assigned to any continent)
    final List<Cluster> nonContinentClusters = unassignedTiles.cluster();
    // Fill in the "holes" in each continent, i.e. find all clusters that are entirely inside
    // one continent, and add them into that continent.
    for (Cluster nonContinentCluster : nonContinentClusters) {
        // If the cluster is small enough that it won't become an ocean, check if its inside
        // one continent. This is just an optimization, as extremely large clusters are
        // all but guaranteed to not be entirely inside one continent. Skipping them saves time.
        if (nonContinentCluster.size() < WaterPainter.MIN_OCEAN_SIZE) {
            // Copy the cluster so that it exists in the context of the entire world, then
            // check if it is entirely within one continent.
            final Cluster copiedCluster = Cluster.copyToWorld(world.getTiles(),
                                                              nonContinentCluster);
            final Cluster surroundingContinent = getSurroundingContinent(copiedCluster);
            if (surroundingContinent != null) {
                // Add the cluster to the continent that completely surrounds it
                for (Tile tile : nonContinentCluster) {
                    addToContinent(tile, surroundingContinent);
                }
            }
        }
    }
    // Smooth each continent
    for (Cluster continent : world.getContinents()) {
        smoothCoast(continent);
    }
}
/**
 * Determines whether the given cluster is entirely surrounded by a single
 * continent.
 *
 * @param cluster the cluster to examine
 * @return the continent that surrounds the cluster on all sides, or
 *         {@code null} if the cluster borders more than one continent
 */
private Cluster getSurroundingContinent(Cluster cluster) {
    // The continent shared by every tile bordering the cluster so far, if any.
    Cluster surrounding = null;
    for (Tile borderTile : cluster.allAdjacents()) {
        final Cluster continent = world.getTilesToContinents().get(borderTile);
        // Every adjacent tile must belong to some continent; otherwise that
        // tile would have been part of this (non-continent) cluster itself.
        if (continent == null) {
            throw new IllegalStateException("Continent tile is missing from the map");
        }
        if (surrounding == null) {
            // First continent seen; remember it for comparison (identity check is
            // correct because tiles share the exact same Cluster object).
            surrounding = continent;
        } else if (surrounding != continent) {
            // The cluster touches at least two different continents.
            return null;
        }
    }
    // Every bordering tile belongs to this one continent.
    return surrounding;
}
/**
* Smooth the coast of continents by removing thin bits of land that stick out.
*
* @param continent the continent to be smoothed
*/
private void smoothCoast(Cluster continent) {
for (Tile tile : continent) {
final Map<Direction, Tile> adjTiles = continent.getAdjacentTiles(tile);
// If the tile borders only 1 other tile in the continent (or none), mark it for
// removal
boolean remove = adjTiles.size() <= 1;
// If it isn't already marked for removal, check if it borders only two tiles
// that aren't adjacent to each other (i.e. check if this tile is a "bridge")
if (!remove && adjTiles.size() == 2) {
final List<Direction> dir = new ArrayList<>(adjTiles.keySet());
// Check that the two directions aren't adjacent to each otherK
if (!dir.get(0).isAdjacentTo(dir.get(1))) {
remove = true;
}
}
// If we decided to, remove the tile from the continent
if (remove) {
removeFromContinent(tile, continent);
// Let a recursive call handle the rest (we can't modify the continent then
// continue to iterate on it)
// Potential optimization? Maybe we can modify it with the iterator?
smoothCoast(continent);
return;
}
}
}
/**
* Adds the given tile to the given continent and also removes the tile from the collection of
* unassigned tiles. Assuming the tile is added to the continent, then
* {@link World#getTilesToContinents()} will be updated.
*
* @param tile the tile to be added to the continent
* @param continent the continent receiving the tile
*/
private void addToContinent(Tile tile, Cluster continent) {
final boolean added = continent.add(tile);
if (added) {
world.getTilesToContinents().put(tile, continent);
if (!unassignedTiles.remove(tile)) {
throw new IllegalStateException("Tile is not available to be added");
}
}
}
/**
* Removes the given tile from the given continent and also adds it back to the collection
* unassigned tiles. Assuming the tile is removed the continent, then
* {@link World#getTilesToContinents()} will be updated.
*
* @param tile the tile to be removed from the continent
* @param continent the continent to have its tile removed
*/
private void removeFromContinent(Tile tile, Cluster continent) {
final boolean removed = continent.remove(tile);
if (removed) {
world.getTilesToContinents().remove(tile);
unassignedTiles.add(tile);
}
}
private void generateOceanFloor() {
unassignedTiles.forEach(tile -> tile.setElevation(-20));
// Make all tiles adjacent to each continent shallow, so they become coast
for (Cluster continent : world.getContinents()) {
for (Tile tile : continent.allAdjacents()) {
tile.setElevation(-6);
}
}
}
/**
* "Paints" biomes onto the given continent.
*
* @param continent the continent to be painted
* @param random the {@link Random} instance to use
*/
private void paintContinent(Cluster continent, Random random) {
// Step 1 - calculate n
// Figure out how many biome biomes we want
// n = number of tiles / average size of blotch
// Step 2 - select seeds
// Pick n tiles to be "seed tiles", i.e. the first tiles of their respective biomes.
// The seeds have a minimum spacing from each other, which is enforced now.
// Step 3 - grow seeds
// Each blotch will be grown from its seed to be about average size.
// By the end of this step, every tile will have been assigned.
// Iterate over each blotch, and at each iteration, add one tile to that blotch that is
// adjacent to it. Then, move onto the next blotch. Rinse and repeat until there are no
// more tiles to assign.
// Step 4 - assign the biomes
// Iterate over each blotch and select the biome for that blotch, then assign the biome for
// each tile in that blotch.
// Step 1
final int numSeeds = continent.size() / AVERAGE_BIOME_SIZE;
// Step 2
final TileSet seeds = continent.selectTiles(random, numSeeds, 0);
final TileSet unselectedTiles = new TileSet(continent); // Make a copy so we can modify it
unselectedTiles.removeAll(seeds); // We've already selected the seeds, so remove them
// Each biome, keyed by its seed
final TileMap<Cluster> biomes = new TileMap<>();
final TileSet incompleteBiomes = new TileSet(); // Biomes with room to grow
for (Tile seed : seeds) {
// Pick a biome for this seed, then add it to the map
final Cluster blotch = Cluster.fromWorld(continent);
blotch.add(seed);
biomes.put(seed, blotch);
incompleteBiomes.add(seed);
}
// Step 3 (the hard part)
// While there are tiles left to assign...
while (!unselectedTiles.isEmpty() && !incompleteBiomes.isEmpty()) {
// Pick a seed that still has openings to work from
final Tile seed = Funcs.randomFromCollection(random, incompleteBiomes);
final Cluster biome = biomes.get(seed); // The biome grown from that seed
final TileSet adjTiles = biome.allAdjacents(); // All tiles adjacent to this biome
adjTiles.retainAll(unselectedTiles); // Remove tiles that are already in a biome
if (adjTiles.isEmpty()) {
// We've run out of ways to expand this biome, so consider it complete
incompleteBiomes.remove(seed);
continue;
}
// Pick one of those unassigned adjacent tiles, and add it to this biome
final Tile tile = Funcs.randomFromCollection(random, adjTiles);
biome.add(tile);
unselectedTiles.remove(tile);
}
// Step 4
// Pick a biome for each cluster, using weighted chance as defined in BIOME_WEIGHTS
for (Cluster blotch : biomes.values()) {
final Biome biome = Funcs.randomFromCollectionWeighted(random,
BIOME_WEIGHTS.keySet(),
BIOME_WEIGHTS::get);
blotch.forEach(tile -> tile.setBiome(biome)); // Set the biome for each tile
}
}
}
|
package net.ossrs.yasea;
import android.media.AudioRecord;
import android.media.audiofx.AcousticEchoCanceler;
import android.media.audiofx.AutomaticGainControl;
import com.github.faucamp.simplertmp.RtmpHandler;
import com.seu.magicfilter.utils.MagicFilterType;
import java.io.File;
/**
 * Publishes camera video and microphone audio as an RTMP live stream, with
 * optional simultaneous MP4 recording. Wires together a camera preview view
 * ({@link SrsCameraView}), an A/V encoder ({@link SrsEncoder}), an RTMP/FLV
 * muxer ({@link SrsFlvMuxer}) and an MP4 muxer ({@link SrsMp4Muxer}).
 *
 * <p>Typical call order: setEncodeHandler / setRtmpHandler / setRecordHandler,
 * then startCamera(), then startPublish(url).
 */
public class SrsPublisher {
    // NOTE(review): mic/aec/agc are static, so every SrsPublisher instance
    // shares one microphone session and its effects — presumably only one
    // publisher is active at a time; confirm before creating multiple instances.
    private static AudioRecord mic;
    private static AcousticEchoCanceler aec;
    private static AutomaticGainControl agc;
    // Reusable buffer for raw PCM read from the microphone (4 KiB per read)
    private byte[] mPcmBuffer = new byte[4096];
    // Audio capture loop flag (set false to stop) and its worker thread
    private boolean aloop = false;
    private Thread aworker;
    private SrsCameraView mCameraView;
    // When true, camera frames are dropped and only audio is encoded
    private boolean sendAudioOnly = false;
    // State for the sampled-FPS measurement (see calcSamplingFps)
    private int videoFrameCount;
    private long lastTimeMillis;
    private double mSamplingFps;
    private SrsFlvMuxer mFlvMuxer;
    private SrsMp4Muxer mMp4Muxer;
    private SrsEncoder mEncoder;
    /**
     * Creates a publisher bound to the given camera preview view and installs
     * the preview callback that feeds RGBA frames into the encoder.
     *
     * @param view the camera preview view supplying raw RGBA frames
     */
    public SrsPublisher(SrsCameraView view) {
        mCameraView = view;
        mCameraView.setPreviewCallback(new SrsCameraView.PreviewCallback() {
            @Override
            public void onGetRgbaFrame(byte[] data, int width, int height) {
                calcSamplingFps();
                // In audio-only mode, video frames are measured but not encoded
                if (!sendAudioOnly) {
                    mEncoder.onGetRgbaFrame(data, width, height);
                }
            }
        });
    }
    /**
     * Updates the measured preview frame rate. The rate is recomputed once
     * every {@code SrsEncoder.VGOP} frames from the elapsed wall-clock time.
     */
    private void calcSamplingFps() {
        // Calculate sampling FPS
        if (videoFrameCount == 0) {
            // First frame of a measurement window: record the start time
            lastTimeMillis = System.nanoTime() / 1000000;
            videoFrameCount++;
        } else {
            if (++videoFrameCount >= SrsEncoder.VGOP) {
                long diffTimeMillis = System.nanoTime() / 1000000 - lastTimeMillis;
                mSamplingFps = (double) videoFrameCount * 1000 / diffTimeMillis;
                videoFrameCount = 0; // restart the window on the next frame
            }
        }
    }
    /** Starts the camera preview. */
    public void startCamera() {
        mCameraView.startCamera();
    }
    /** Stops the camera preview. */
    public void stopCamera() {
        mCameraView.stopCamera();
    }
    /**
     * Starts the encoder, opens the microphone (with echo cancellation and
     * automatic gain control when the device supports them), enables video
     * encoding on the preview, and launches the audio capture thread.
     */
    public void startEncode() {
        if (!mEncoder.start()) {
            return;
        }
        mic = mEncoder.chooseAudioRecord();
        if (mic == null) {
            // NOTE(review): the encoder was already started above but is not
            // stopped on this failure path — confirm whether mEncoder.stop()
            // should be called here.
            return;
        }
        // Attach platform audio effects to the microphone session if available
        if (AcousticEchoCanceler.isAvailable()) {
            aec = AcousticEchoCanceler.create(mic.getAudioSessionId());
            if (aec != null) {
                aec.setEnabled(true);
            }
        }
        if (AutomaticGainControl.isAvailable()) {
            agc = AutomaticGainControl.create(mic.getAudioSessionId());
            if (agc != null) {
                agc.setEnabled(true);
            }
        }
        mCameraView.enableEncoding();
        // Audio is captured on a dedicated thread with elevated audio priority
        aworker = new Thread(new Runnable() {
            @Override
            public void run() {
                android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_AUDIO);
                startAudio();
            }
        });
        aloop = true;
        aworker.start();
    }
    /** Stops audio capture, the camera preview, and the encoder. */
    public void stopEncode() {
        stopAudio();
        stopCamera();
        mEncoder.stop();
    }
    /**
     * Starts publishing to the given RTMP URL. No-op until an RTMP handler has
     * been set (which creates the FLV muxer).
     *
     * @param rtmpUrl the RTMP endpoint to stream to
     */
    public void startPublish(String rtmpUrl) {
        if (mFlvMuxer != null) {
            mFlvMuxer.start(rtmpUrl);
            mFlvMuxer.setVideoResolution(mEncoder.getOutputWidth(), mEncoder.getOutputHeight());
            startEncode();
        }
    }
    /** Stops encoding and tears down the RTMP connection. */
    public void stopPublish() {
        if (mFlvMuxer != null) {
            stopEncode();
            mFlvMuxer.stop();
        }
    }
    /**
     * Starts recording the stream to an MP4 file.
     *
     * @param recPath destination file path
     * @return true if a record handler was set and recording started
     */
    public boolean startRecord(String recPath) {
        return mMp4Muxer != null && mMp4Muxer.record(new File(recPath));
    }
    /** Stops MP4 recording, if active. */
    public void stopRecord() {
        if (mMp4Muxer != null) {
            mMp4Muxer.stop();
        }
    }
    /** Pauses MP4 recording, if active. */
    public void pauseRecord() {
        if (mMp4Muxer != null) {
            mMp4Muxer.pause();
        }
    }
    /** Resumes a paused MP4 recording, if active. */
    public void resumeRecord() {
        if (mMp4Muxer != null) {
            mMp4Muxer.resume();
        }
    }
    /** Switches video encoding to the software (x264) encoder. */
    public void switchToSoftEncoder() {
        mEncoder.switchToSoftEncoder();
    }
    /** Switches video encoding to the hardware (MediaCodec) encoder. */
    public void switchToHardEncoder() {
        mEncoder.switchToHardEncoder();
    }
    /** @return true if the software encoder is currently selected */
    public boolean isSoftEncoder() {
        return mEncoder.isSoftEncoder();
    }
    /** @return the encoder's preview width in pixels */
    public int getPreviewWidth() {
        return mEncoder.getPreviewWidth();
    }
    /** @return the encoder's preview height in pixels */
    public int getPreviewHeight() {
        return mEncoder.getPreviewHeight();
    }
    /** @return the most recently measured preview frame rate (see calcSamplingFps) */
    public double getmSamplingFps() {
        return mSamplingFps;
    }
    // NOTE(review): method name has a typo ("Camra") but it is public API, so
    // renaming would break callers.
    /** @return the id of the camera currently used by the preview view */
    public int getCamraId() {
        return mCameraView.getCameraId();
    }
    /**
     * Requests a preview resolution; the camera may pick the nearest supported
     * size, which is then propagated to the encoder.
     *
     * @param width  requested preview width
     * @param height requested preview height
     */
    public void setPreviewResolution(int width, int height) {
        int resolution[] = mCameraView.setPreviewResolution(width, height);
        mEncoder.setPreviewResolution(resolution[0], resolution[1]);
    }
    /**
     * Sets the encoded output resolution, treating width &lt;= height as
     * portrait and width &gt; height as landscape.
     *
     * @param width  output width
     * @param height output height
     */
    public void setOutputResolution(int width, int height) {
        if (width <= height) {
            mEncoder.setPortraitResolution(width, height);
        } else {
            mEncoder.setLandscapeResolution(width, height);
        }
    }
    /**
     * Applies the screen orientation to both the preview and the encoder.
     *
     * @param orientation the screen orientation value
     */
    public void setScreenOrientation(int orientation) {
        mCameraView.setPreviewOrientation(orientation);
        mEncoder.setScreenOrientation(orientation);
    }
    /** Selects the encoder's high-definition quality preset. */
    public void setVideoHDMode() {
        mEncoder.setVideoHDMode();
    }
    /** Selects the encoder's smooth (lower-bitrate) quality preset. */
    public void setVideoSmoothMode() {
        mEncoder.setVideoSmoothMode();
    }
    /**
     * Enables or disables audio-only publishing (video frames are dropped).
     *
     * @param flag true to send audio only
     */
    public void setSendAudioOnly(boolean flag) {
        sendAudioOnly = flag;
    }
    /**
     * Applies a GPU filter to the camera preview.
     *
     * @param type the filter to apply
     * @return true if the filter was set
     */
    public boolean switchCameraFilter(MagicFilterType type) {
        return mCameraView.setFilter(type);
    }
    /**
     * Switches between front and back cameras while encoding: stops the
     * preview, retargets the camera id, informs the encoder of the new facing
     * (id 0 is treated as the back camera), then restarts the preview.
     *
     * @param id the camera id to switch to
     */
    public void switchCameraFace(int id) {
        if (mEncoder.isEnabled()) {
            mCameraView.stopCamera();
            mCameraView.setCameraId(id);
            if (id == 0) {
                mEncoder.setCameraBackFace();
            } else {
                mEncoder.setCameraFrontFace();
            }
            mCameraView.enableEncoding();
            mCameraView.startCamera();
        }
    }
    /**
     * Audio capture loop, run on the dedicated audio thread: reads PCM from
     * the microphone and feeds it to the encoder until the loop flag is
     * cleared, the thread is interrupted, or a read fails.
     */
    private void startAudio() {
        if (mic != null) {
            mic.startRecording();
            while (aloop && !Thread.interrupted()) {
                int size = mic.read(mPcmBuffer, 0, mPcmBuffer.length);
                if (size <= 0) {
                    // Read error or end of stream; stop capturing
                    break;
                }
                mEncoder.onGetPcmFrame(mPcmBuffer, size);
            }
        }
    }
    /**
     * Stops the audio capture thread and releases the microphone and its
     * attached audio effects.
     */
    private void stopAudio() {
        aloop = false;
        if (aworker != null) {
            aworker.interrupt();
            try {
                aworker.join();
            } catch (InterruptedException e) {
                // Re-signal and give up waiting; the worker is exiting anyway
                aworker.interrupt();
            }
            aworker = null;
        }
        if (mic != null) {
            mic.setRecordPositionUpdateListener(null);
            mic.stop();
            mic.release();
            mic = null;
        }
        if (aec != null) {
            aec.setEnabled(false);
            aec.release();
            aec = null;
        }
        if (agc != null) {
            agc.setEnabled(false);
            agc.release();
            agc = null;
        }
    }
    /**
     * Installs the RTMP callback handler; creates the FLV muxer and, if the
     * encoder already exists, wires the muxer into it.
     *
     * @param handler receiver of RTMP connection events
     */
    public void setRtmpHandler(RtmpHandler handler) {
        mFlvMuxer = new SrsFlvMuxer(handler);
        if (mEncoder != null) {
            mEncoder.setFlvMuxer(mFlvMuxer);
        }
    }
    /**
     * Installs the MP4 recording callback handler; creates the MP4 muxer and,
     * if the encoder already exists, wires the muxer into it.
     *
     * @param handler receiver of recording events
     */
    public void setRecordHandler(SrsRecordHandler handler) {
        mMp4Muxer = new SrsMp4Muxer(handler);
        if (mEncoder != null) {
            mEncoder.setMp4Muxer(mMp4Muxer);
        }
    }
    /**
     * Installs the encoder callback handler; creates the encoder and wires in
     * any muxers that were created before it.
     *
     * @param handler receiver of encoder events
     */
    public void setEncodeHandler(SrsEncodeHandler handler) {
        mEncoder = new SrsEncoder(handler);
        if (mFlvMuxer != null) {
            mEncoder.setFlvMuxer(mFlvMuxer);
        }
        if (mMp4Muxer != null) {
            mEncoder.setMp4Muxer(mMp4Muxer);
        }
    }
}
|
package microsoft.exchange.webservices.data.search.filter;
import microsoft.exchange.webservices.data.attribute.EditorBrowsable;
import microsoft.exchange.webservices.data.core.EwsServiceXmlReader;
import microsoft.exchange.webservices.data.core.EwsServiceXmlWriter;
import microsoft.exchange.webservices.data.core.XmlAttributeNames;
import microsoft.exchange.webservices.data.core.XmlElementNames;
import microsoft.exchange.webservices.data.core.enumeration.search.ComparisonMode;
import microsoft.exchange.webservices.data.core.enumeration.search.ContainmentMode;
import microsoft.exchange.webservices.data.core.enumeration.attribute.EditorBrowsableState;
import microsoft.exchange.webservices.data.core.enumeration.search.LogicalOperator;
import microsoft.exchange.webservices.data.core.enumeration.misc.XmlNamespace;
import microsoft.exchange.webservices.data.core.exception.service.local.ServiceValidationException;
import microsoft.exchange.webservices.data.core.exception.service.local.ServiceXmlDeserializationException;
import microsoft.exchange.webservices.data.core.exception.service.local.ServiceXmlSerializationException;
import microsoft.exchange.webservices.data.misc.OutParam;
import microsoft.exchange.webservices.data.property.complex.ComplexProperty;
import microsoft.exchange.webservices.data.property.complex.IComplexPropertyChangedDelegate;
import microsoft.exchange.webservices.data.property.complex.ISearchStringProvider;
import microsoft.exchange.webservices.data.property.definition.PropertyDefinitionBase;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import javax.xml.stream.XMLStreamException;
import java.util.ArrayList;
import java.util.Iterator;
/**
* Represents the base search filter class. Use descendant search filter classes
* such as SearchFilter.IsEqualTo, SearchFilter.ContainsSubstring and
* SearchFilter.SearchFilterCollection to define search filter.
*/
public abstract class SearchFilter extends ComplexProperty {
private static final Log LOG = LogFactory.getLog(SearchFilter.class);
  /**
   * Initializes a new instance of the SearchFilter class.
   * Protected: only the concrete filter subclasses defined below (and in this
   * package) are meant to instantiate it.
   */
  protected SearchFilter() {
  }
  // Leftover from an earlier revision: a static "search" field whose Javadoc
  // (reader/return/throws) no longer documents anything. Kept commented out
  // for reference.
  //static SearchFilter search;
/**
* Loads from XML.
*
* @param reader the reader
* @return SearchFilter
* @throws Exception the exception
*/
public static SearchFilter loadFromXml(EwsServiceXmlReader reader)
throws Exception {
reader.ensureCurrentNodeIsStartElement();
SearchFilter searchFilter = null;
if (reader.getLocalName().equalsIgnoreCase(XmlElementNames.Exists)) {
searchFilter = new Exists();
} else if (reader.getLocalName().equalsIgnoreCase(
XmlElementNames.Contains)) {
searchFilter = new ContainsSubstring();
} else if (reader.getLocalName().equalsIgnoreCase(
XmlElementNames.Excludes)) {
searchFilter = new ExcludesBitmask();
} else if (reader.getLocalName().equalsIgnoreCase(XmlElementNames.Not)) {
searchFilter = new Not();
} else if (reader.getLocalName().equalsIgnoreCase(XmlElementNames.And)) {
searchFilter = new SearchFilterCollection(
LogicalOperator.And);
} else if (reader.getLocalName().equalsIgnoreCase(XmlElementNames.Or)) {
searchFilter = new SearchFilterCollection(
LogicalOperator.Or);
} else if (reader.getLocalName().equalsIgnoreCase(
XmlElementNames.IsEqualTo)) {
searchFilter = new IsEqualTo();
} else if (reader.getLocalName().equalsIgnoreCase(
XmlElementNames.IsNotEqualTo)) {
searchFilter = new IsNotEqualTo();
} else if (reader.getLocalName().equalsIgnoreCase(
XmlElementNames.IsGreaterThan)) {
searchFilter = new IsGreaterThan();
} else if (reader.getLocalName().equalsIgnoreCase(
XmlElementNames.IsGreaterThanOrEqualTo)) {
searchFilter = new IsGreaterThanOrEqualTo();
} else if (reader.getLocalName().equalsIgnoreCase(
XmlElementNames.IsLessThan)) {
searchFilter = new IsLessThan();
} else if (reader.getLocalName().equalsIgnoreCase(
XmlElementNames.IsLessThanOrEqualTo)) {
searchFilter = new IsLessThanOrEqualTo();
} else {
searchFilter = null;
}
if (searchFilter != null) {
searchFilter.loadFromXml(reader, reader.getLocalName());
}
return searchFilter;
}
  /**
   * Gets the name of the XML element.
   * Each concrete filter supplies the EWS element name it serializes to; see
   * the overrides in the nested filter classes below.
   *
   * @return the xml element name
   */
  protected abstract String getXmlElementName();
  /**
   * Writes to XML.
   * Serializes this filter under the element name reported by
   * {@link #getXmlElementName()}.
   *
   * @param writer the writer
   * @throws Exception the exception
   */
  public void writeToXml(EwsServiceXmlWriter writer) throws Exception {
    super.writeToXml(writer, this.getXmlElementName());
  }
/**
* Represents a search filter that checks for the presence of a substring
* inside a text property. Applications can use ContainsSubstring to define
* conditions such as "Field CONTAINS Value" or
* "Field IS PREFIXED WITH Value".
*/
public static final class ContainsSubstring extends PropertyBasedFilter {
/**
* The containment mode.
*/
private ContainmentMode containmentMode = ContainmentMode.Substring;
/**
* The comparison mode.
*/
private ComparisonMode comparisonMode = ComparisonMode.IgnoreCase;
/**
* The value.
*/
private String value;
/**
* Initializes a new instance of the class.
*/
public ContainsSubstring() {
super();
}
/**
* Initializes a new instance of the class.
*
* @param propertyDefinition The definition of the property that is being compared.
* @param value The value to compare with.
*/
public ContainsSubstring(PropertyDefinitionBase propertyDefinition,
String value) {
super(propertyDefinition);
this.value = value;
}
/**
* Initializes a new instance of the class.
*
* @param propertyDefinition The definition of the property that is being compared.
* @param value The value to compare with.
* @param containmentMode The containment mode.
* @param comparisonMode The comparison mode.
*/
public ContainsSubstring(PropertyDefinitionBase propertyDefinition,
String value, ContainmentMode containmentMode,
ComparisonMode comparisonMode) {
this(propertyDefinition, value);
this.containmentMode = containmentMode;
this.comparisonMode = comparisonMode;
}
/**
* validates instance.
*
* @throws ServiceValidationException the service validation exception
*/
@Override
protected void internalValidate() throws ServiceValidationException {
super.internalValidate();
if ((this.value == null) || this.value.isEmpty()) {
throw new ServiceValidationException("The Value property must be set.");
}
}
/**
* Gets the name of the XML element.
*
* @return the xml element name
*/
@Override
protected String getXmlElementName() {
return XmlElementNames.Contains;
}
/**
* Tries to read element from XML.
*
* @param reader the reader
* @return True if element was read.
* @throws Exception the exception
*/
@Override
public boolean tryReadElementFromXml(EwsServiceXmlReader reader)
throws Exception {
boolean result = super.tryReadElementFromXml(reader);
if (!result) {
if (reader.getLocalName().equals(XmlElementNames.Constant)) {
this.value = reader
.readAttributeValue(XmlAttributeNames.Value);
result = true;
}
}
return result;
}
/**
* Reads the attribute of Xml.
*
* @param reader the reader
* @throws Exception the exception
*/
@Override
public void readAttributesFromXml(EwsServiceXmlReader reader)
throws Exception {
super.readAttributesFromXml(reader);
this.containmentMode = reader.readAttributeValue(
ContainmentMode.class, XmlAttributeNames.ContainmentMode);
try {
this.comparisonMode = reader.readAttributeValue(
ComparisonMode.class,
XmlAttributeNames.ContainmentComparison);
} catch (IllegalArgumentException ile) {
// This will happen if we receive a value that is defined in the
// EWS
// schema but that is not defined
// in the API. We map that
// value to IgnoreCaseAndNonSpacingCharacters.
this.comparisonMode = ComparisonMode.
IgnoreCaseAndNonSpacingCharacters;
}
}
/**
* Writes the attribute to XML.
*
* @param writer the writer
* @throws ServiceXmlSerializationException the service xml serialization exception
*/
@Override
public void writeAttributesToXml(EwsServiceXmlWriter writer)
throws ServiceXmlSerializationException {
super.writeAttributesToXml(writer);
writer.writeAttributeValue(XmlAttributeNames.ContainmentMode,
this.containmentMode);
writer.writeAttributeValue(XmlAttributeNames.ContainmentComparison,
this.comparisonMode);
}
/**
* Writes the elements to Xml.
*
* @param writer the writer
* @throws XMLStreamException the XML stream exception
* @throws ServiceXmlSerializationException the service xml serialization exception
*/
@Override
public void writeElementsToXml(EwsServiceXmlWriter writer)
throws XMLStreamException, ServiceXmlSerializationException {
super.writeElementsToXml(writer);
writer.writeStartElement(XmlNamespace.Types,
XmlElementNames.Constant);
writer.writeAttributeValue(XmlAttributeNames.Value, this.value);
writer.writeEndElement(); // Constant
}
/**
* Gets the containment mode.
*
* @return ContainmentMode
*/
public ContainmentMode getContainmentMode() {
return containmentMode;
}
/**
* sets the ContainmentMode.
*
* @param containmentMode the new containment mode
*/
public void setContainmentMode(ContainmentMode containmentMode) {
this.containmentMode = containmentMode;
}
/**
* Gets the comparison mode.
*
* @return ComparisonMode
*/
public ComparisonMode getComparisonMode() {
return comparisonMode;
}
/**
* sets the comparison mode.
*
* @param comparisonMode the new comparison mode
*/
public void setComparisonMode(ComparisonMode comparisonMode) {
this.comparisonMode = comparisonMode;
}
/**
* gets the value to compare the specified property with.
*
* @return String
*/
public String getValue() {
return value;
}
/**
* sets the value to compare the specified property with.
*
* @param value the new value
*/
public void setValue(String value) {
this.value = value;
}
}
/**
* Represents a bitmask exclusion search filter. Applications can use
* ExcludesBitExcludesBitmaskFilter to define conditions such as
* "(OrdinalField and 0x0010) != 0x0010"
*/
public static class ExcludesBitmask extends PropertyBasedFilter {
/**
* The bitmask.
*/
private int bitmask;
/**
* Initializes a new instance of the class.
*/
public ExcludesBitmask() {
super();
}
/**
* Initializes a new instance of the class.
*
* @param propertyDefinition the property definition
* @param bitmask the bitmask
*/
public ExcludesBitmask(PropertyDefinitionBase propertyDefinition,
int bitmask) {
super(propertyDefinition);
this.bitmask = bitmask;
}
/**
* Gets the name of the XML element.
*
* @return XML element name
*/
@Override
public String getXmlElementName() {
return XmlElementNames.Excludes;
}
/**
* Tries to read element from XML.
*
* @param reader the reader
* @return true if element was read
* @throws Exception the exception
*/
@Override
public boolean tryReadElementFromXml(EwsServiceXmlReader reader)
throws Exception {
boolean result = super.tryReadElementFromXml(reader);
if (!result) {
if (reader.getLocalName().equals(XmlElementNames.Bitmask)) {
// EWS always returns the Bitmask value in hexadecimal
this.bitmask = Integer.decode(reader
.readAttributeValue(XmlAttributeNames.Value));
}
}
return result;
}
/**
* Writes the elements to XML.
*
* @param writer the writer
* @throws javax.xml.stream.XMLStreamException , ServiceXmlSerializationException
* @throws ServiceXmlSerializationException the service xml serialization exception
*/
@Override
public void writeElementsToXml(EwsServiceXmlWriter writer)
throws XMLStreamException, ServiceXmlSerializationException {
super.writeElementsToXml(writer);
writer.writeStartElement(XmlNamespace.Types,
XmlElementNames.Bitmask);
writer.writeAttributeValue(XmlAttributeNames.Value, this.bitmask);
writer.writeEndElement(); // Bitmask
}
/**
* Gets the bitmask to compare the property with.
*
* @return bitmask
*/
public int getBitmask() {
return bitmask;
}
/**
* Sets the bitmask to compare the property with.
*
* @param bitmask the new bitmask
*/
public void setBitmask(int bitmask) {
this.bitmask = bitmask;
}
}
/**
* Represents a search filter checking if a field is set. Applications can
* use ExistsFilter to define conditions such as "Field IS SET".
*/
public static final class Exists extends PropertyBasedFilter {
/**
* Initializes a new instance of the class.
*/
public Exists() {
super();
}
/**
* Initializes a new instance of the class.
*
* @param propertyDefinition the property definition
*/
public Exists(PropertyDefinitionBase propertyDefinition) {
super(propertyDefinition);
}
/**
* Gets the name of the XML element.
*
* @return the xml element name
*/
@Override
protected String getXmlElementName() {
return XmlElementNames.Exists;
}
}
/**
* Represents a search filter that checks if a property is equal to a given
* value or other property.
*/
public static class IsEqualTo extends RelationalFilter {
/**
* Initializes a new instance of the class.
*/
public IsEqualTo() {
super();
}
/**
* Initializes a new instance of the class.
*
* @param propertyDefinition The definition of the property that is being compared.
* @param otherPropertyDefinition The definition of the property to compare with.
*/
public IsEqualTo(PropertyDefinitionBase propertyDefinition,
PropertyDefinitionBase otherPropertyDefinition) {
super(propertyDefinition, otherPropertyDefinition);
}
/**
* Initializes a new instance of the class.
*
* @param propertyDefinition The definition of the property that is being compared.
* @param value The value of the property to compare with.
*/
public IsEqualTo(PropertyDefinitionBase propertyDefinition,
Object value) {
super(propertyDefinition, value);
}
/**
* Gets the name of the XML element.
*
* @return the xml element name
*/
@Override
protected String getXmlElementName() {
return XmlElementNames.IsEqualTo;
}
}
/**
* Represents a search filter that checks if a property is greater than a
* given value or other property.
*/
public static class IsGreaterThan extends RelationalFilter {
/**
* Initializes a new instance of the class.
*/
public IsGreaterThan() {
super();
}
/**
* Initializes a new instance of the class.
*
* @param propertyDefinition The definition of the property that is being compared.
* @param otherPropertyDefinition The definition of the property to compare with.
*/
public IsGreaterThan(PropertyDefinitionBase propertyDefinition,
PropertyDefinitionBase otherPropertyDefinition) {
super(propertyDefinition, otherPropertyDefinition);
}
/**
* Initializes a new instance of the class.
*
* @param propertyDefinition The definition of the property that is being compared.
* @param value The value of the property to compare with.
*/
public IsGreaterThan(PropertyDefinitionBase propertyDefinition,
Object value) {
super(propertyDefinition, value);
}
/**
* Gets the name of the XML element.
*
* @return XML element name.
*/
@Override
protected String getXmlElementName() {
return XmlElementNames.IsGreaterThan;
}
}
/**
* Represents a search filter that checks if a property is greater than or
* equal to a given value or other property.
*/
public static class IsGreaterThanOrEqualTo extends RelationalFilter {
/**
* Initializes a new instance of the class.
*/
public IsGreaterThanOrEqualTo() {
super();
}
/**
* Initializes a new instance of the class.
*
* @param propertyDefinition The definition of the property that is being compared.
* @param otherPropertyDefinition The definition of the property to compare with.
*/
public IsGreaterThanOrEqualTo(
PropertyDefinitionBase propertyDefinition,
PropertyDefinitionBase otherPropertyDefinition) {
super(propertyDefinition, otherPropertyDefinition);
}
/**
* Initializes a new instance of the class.
*
* @param propertyDefinition The definition of the property that is being compared.
* @param value The value of the property to compare with.
*/
public IsGreaterThanOrEqualTo(
PropertyDefinitionBase propertyDefinition, Object value) {
super(propertyDefinition, value);
}
/**
* Gets the name of the XML element. XML element name.
*
* @return the xml element name
*/
@Override
protected String getXmlElementName() {
return XmlElementNames.IsGreaterThanOrEqualTo;
}
}
/**
* Represents a search filter that checks if a property is less than a given
* value or other property.
*/
public static class IsLessThan extends RelationalFilter {
/**
* Initializes a new instance of the class.
*/
public IsLessThan() {
super();
}
/**
* Initializes a new instance of the class.
*
* @param propertyDefinition The definition of the property that is being compared.
* @param otherPropertyDefinition The definition of the property to compare with.
*/
public IsLessThan(PropertyDefinitionBase propertyDefinition,
PropertyDefinitionBase otherPropertyDefinition) {
super(propertyDefinition, otherPropertyDefinition);
}
/**
* Initializes a new instance of the class.
*
* @param propertyDefinition The definition of the property that is being compared.
* @param value The value of the property to compare with.
*/
public IsLessThan(PropertyDefinitionBase propertyDefinition,
Object value) {
super(propertyDefinition, value);
}
/**
* Gets the name of the XML element. XML element name.
*
* @return the xml element name
*/
@Override
protected String getXmlElementName() {
return XmlElementNames.IsLessThan;
}
}
/**
* Represents a search filter that checks if a property is less than or
* equal to a given value or other property.
*/
public static class IsLessThanOrEqualTo extends RelationalFilter {
/**
* Initializes a new instance of the class.
*/
public IsLessThanOrEqualTo() {
super();
}
/**
* Initializes a new instance of the class.
*
* @param propertyDefinition The definition of the property that is being compared.
* @param otherPropertyDefinition The definition of the property to compare with.
*/
public IsLessThanOrEqualTo(PropertyDefinitionBase propertyDefinition,
PropertyDefinitionBase otherPropertyDefinition) {
super(propertyDefinition, otherPropertyDefinition);
}
/**
* Initializes a new instance of the class.
*
* @param propertyDefinition The definition of the property that is being compared.
* @param value The value of the property to compare with.
*/
public IsLessThanOrEqualTo(PropertyDefinitionBase propertyDefinition,
Object value) {
super(propertyDefinition, value);
}
/**
* Gets the name of the XML element. XML element name.
*
* @return the xml element name
*/
@Override
protected String getXmlElementName() {
return XmlElementNames.IsLessThanOrEqualTo;
}
}
/**
* Represents a search filter that checks if a property is not equal to a
* given value or other property.
*/
public static class IsNotEqualTo extends RelationalFilter {
/**
* Initializes a new instance of the class.
*/
public IsNotEqualTo() {
super();
}
/**
* Initializes a new instance of the class.
*
* @param propertyDefinition The definition of the property that is being compared.
* @param otherPropertyDefinition The definition of the property to compare with.
*/
public IsNotEqualTo(PropertyDefinitionBase propertyDefinition,
PropertyDefinitionBase otherPropertyDefinition) {
super(propertyDefinition, otherPropertyDefinition);
}
/**
* Initializes a new instance of the class.
*
* @param propertyDefinition The definition of the property that is being compared.
* @param value The value of the property to compare with.
*/
public IsNotEqualTo(PropertyDefinitionBase propertyDefinition,
Object value) {
super(propertyDefinition, value);
}
/**
* Gets the name of the XML element.
*
* @return XML element name.
*/
@Override
protected String getXmlElementName() {
return XmlElementNames.IsNotEqualTo;
}
}
/**
 * Represents a search filter that negates another. Applications can use
 * NotFilter to define conditions such as "NOT(other filter)".
 */
public static class Not extends SearchFilter implements IComplexPropertyChangedDelegate {

    /**
     * The search filter being negated.
     */
    private SearchFilter searchFilter;

    /**
     * Initializes a new instance of the class with no inner filter set.
     * The filter must be assigned via {@link #setSearchFilter} before use;
     * {@link #internalValidate} rejects an unset filter.
     */
    public Not() {
        super();
    }

    /**
     * Initializes a new instance of the class.
     *
     * NOTE(review): unlike {@link #setSearchFilter}, this constructor does
     * not register this instance as a change listener on the given filter —
     * confirm whether that asymmetry is intentional.
     *
     * @param searchFilter the search filter to negate
     */
    public Not(SearchFilter searchFilter) {
        super();
        this.searchFilter = searchFilter;
    }

    /**
     * Forwards a change of the inner filter as a change of this filter.
     *
     * @param complexProperty the complex property that changed
     */
    private void searchFilterChanged(ComplexProperty complexProperty) {
        this.changed();
    }

    /**
     * Validates the instance: the inner filter must be set.
     *
     * @throws ServiceValidationException if no inner filter was assigned
     */
    @Override
    protected void internalValidate() throws ServiceValidationException {
        if (this.searchFilter == null) {
            throw new ServiceValidationException("The SearchFilter property must be set.");
        }
    }

    /**
     * Gets the name of the XML element.
     *
     * @return the xml element name ({@code Not})
     */
    @Override
    protected String getXmlElementName() {
        return XmlElementNames.Not;
    }

    /**
     * Tries to read the negated filter from XML; always consumes the
     * current element as a search filter.
     *
     * @param reader the reader
     * @return true if the element was read
     * @throws Exception the exception
     */
    @Override
    public boolean tryReadElementFromXml(EwsServiceXmlReader reader)
            throws Exception {
        this.searchFilter = SearchFilter.loadFromXml(reader);
        return true;
    }

    /**
     * Writes the negated filter to XML.
     *
     * @param writer the writer
     * @throws Exception the exception
     */
    @Override
    public void writeElementsToXml(EwsServiceXmlWriter writer)
            throws Exception {
        this.searchFilter.writeToXml(writer);
    }

    /**
     * Gets the search filter to negate. Available search filter
     * classes include SearchFilter.IsEqualTo,
     * SearchFilter.ContainsSubstring and
     * SearchFilter.SearchFilterCollection.
     *
     * @return SearchFilter
     */
    public SearchFilter getSearchFilter() {
        return searchFilter;
    }

    /**
     * Sets the search filter to negate. Available search filter classes
     * include SearchFilter.IsEqualTo, SearchFilter.ContainsSubstring and
     * SearchFilter.SearchFilterCollection.
     *
     * The sequence matters: the old filter is unsubscribed first, the swap
     * only happens when canSetFieldValue allows it, and the listener is
     * re-registered on whatever filter is current afterwards (which may
     * still be the old one if the swap was refused).
     *
     * @param searchFilter the new search filter
     */
    public void setSearchFilter(SearchFilter searchFilter) {
        if (this.searchFilter != null) {
            this.searchFilter.removeChangeEvent(this);
        }
        if (this.canSetFieldValue(this.searchFilter, searchFilter)) {
            this.searchFilter = searchFilter;
            this.changed();
        }
        if (this.searchFilter != null) {
            this.searchFilter.addOnChangeEvent(this);
        }
    }

    /**
     * Change-listener callback; delegates to searchFilterChanged.
     *
     * @see IComplexPropertyChangedDelegate#complexPropertyChanged
     */
    @Override
    public void complexPropertyChanged(ComplexProperty complexProperty) {
        searchFilterChanged(complexProperty);
    }
}
/**
 * Represents a search filter where an item or folder property is involved.
 */
@EditorBrowsable(state = EditorBrowsableState.Never)
public static abstract class PropertyBasedFilter extends SearchFilter {

    /**
     * The definition of the property involved in the filter.
     */
    private PropertyDefinitionBase propertyDefinition;

    /**
     * Initializes a new instance of the class with no property definition;
     * {@link #internalValidate} rejects an unset definition.
     */
    PropertyBasedFilter() {
        super();
    }

    /**
     * Initializes a new instance of the class.
     *
     * @param propertyDefinition the property definition
     */
    PropertyBasedFilter(PropertyDefinitionBase propertyDefinition) {
        super();
        this.propertyDefinition = propertyDefinition;
    }

    /**
     * Validates the instance: the property definition must be set.
     *
     * @throws ServiceValidationException if no property definition was assigned
     */
    @Override
    protected void internalValidate() throws ServiceValidationException {
        if (this.propertyDefinition == null) {
            throw new ServiceValidationException("The PropertyDefinition property must be set.");
        }
    }

    /**
     * Tries to read the property definition element from XML.
     *
     * NOTE(review): the definition loaded by tryLoadFromXml is left inside
     * outParam and is never copied back into this.propertyDefinition —
     * confirm whether OutParam aliases the field somehow or whether the
     * parsed value is silently dropped here.
     *
     * @param reader the reader
     * @return true if element was read
     * @throws Exception the exception
     */
    @Override
    public boolean tryReadElementFromXml(EwsServiceXmlReader reader)
            throws Exception {
        OutParam<PropertyDefinitionBase> outParam =
                new OutParam<PropertyDefinitionBase>();
        outParam.setParam(this.propertyDefinition);
        return PropertyDefinitionBase.tryLoadFromXml(reader, outParam);
    }

    /**
     * Writes the property definition to XML.
     *
     * @param writer the writer
     * @throws XMLStreamException the XML stream exception
     * @throws ServiceXmlSerializationException the service xml serialization exception
     */
    @Override
    public void writeElementsToXml(EwsServiceXmlWriter writer)
            throws XMLStreamException, ServiceXmlSerializationException {
        this.propertyDefinition.writeToXml(writer);
    }

    /**
     * Gets the definition of the property that is involved in the search
     * filter.
     *
     * @return propertyDefinition
     */
    public PropertyDefinitionBase getPropertyDefinition() {
        return this.propertyDefinition;
    }

    /**
     * Sets the definition of the property that is involved in the search
     * filter.
     *
     * @param propertyDefinition the new property definition
     */
    public void setPropertyDefinition(
            PropertyDefinitionBase propertyDefinition) {
        this.propertyDefinition = propertyDefinition;
    }
}
/**
 * Represents the base class for relational filters (for example, IsEqualTo,
 * IsGreaterThan or IsLessThanOrEqualTo). A relational filter compares a
 * property either with a constant value or with another property —
 * exactly one of {@code value} / {@code otherPropertyDefinition} is used;
 * the setters below keep them mutually exclusive.
 */
@EditorBrowsable(state = EditorBrowsableState.Never)
public abstract static class RelationalFilter extends PropertyBasedFilter {

    /**
     * The definition of the property to compare with (null when comparing
     * against a constant value).
     */
    private PropertyDefinitionBase otherPropertyDefinition;

    /**
     * The constant value to compare with (null when comparing against
     * another property).
     */
    private Object value;

    /**
     * Initializes a new instance of the class.
     */
    RelationalFilter() {
        super();
    }

    /**
     * Initializes a new instance of the class for a property-to-property
     * comparison.
     *
     * @param propertyDefinition The definition of the property that is being compared.
     * @param otherPropertyDefinition The definition of the property to compare with
     */
    RelationalFilter(PropertyDefinitionBase propertyDefinition,
            PropertyDefinitionBase otherPropertyDefinition) {
        super(propertyDefinition);
        this.otherPropertyDefinition = otherPropertyDefinition;
    }

    /**
     * Initializes a new instance of the class for a property-to-constant
     * comparison.
     *
     * @param propertyDefinition The definition of the property that is being compared.
     * @param value The value to compare with.
     */
    RelationalFilter(PropertyDefinitionBase propertyDefinition,
            Object value) {
        super(propertyDefinition);
        this.value = value;
    }

    /**
     * Validates the instance: either a comparison value or a comparison
     * property must be set (in addition to the base-class checks).
     *
     * @throws ServiceValidationException the service validation exception
     */
    @Override
    protected void internalValidate() throws ServiceValidationException {
        super.internalValidate();
        if (this.otherPropertyDefinition == null && this.value == null) {
            throw new ServiceValidationException(
                    "Either the OtherPropertyDefinition or the Value property must be set.");
        }
    }

    /**
     * Tries to read element from XML. After the base class has had a go,
     * a FieldURIOrConstant element is parsed either as a Constant value
     * or as the other property's definition.
     *
     * NOTE(review): as in PropertyBasedFilter.tryReadElementFromXml, the
     * definition loaded into outParam is never copied back into
     * this.otherPropertyDefinition — confirm OutParam semantics.
     *
     * @param reader the reader
     * @return true if element was read
     * @throws Exception the exception
     */
    @Override
    public boolean tryReadElementFromXml(EwsServiceXmlReader reader)
            throws Exception {
        boolean result = super.tryReadElementFromXml(reader);
        if (!result) {
            if (reader.getLocalName().equals(
                    XmlElementNames.FieldURIOrConstant)) {
                try {
                    reader.read();
                    reader.ensureCurrentNodeIsStartElement();
                } catch (ServiceXmlDeserializationException e) {
                    LOG.error(e);
                } catch (XMLStreamException e) {
                    LOG.error(e);
                }
                if (reader.isStartElement(XmlNamespace.Types,
                        XmlElementNames.Constant)) {
                    this.value = reader
                            .readAttributeValue(XmlAttributeNames.Value);
                    result = true;
                } else {
                    OutParam<PropertyDefinitionBase> outParam =
                            new OutParam<PropertyDefinitionBase>();
                    outParam.setParam(this.otherPropertyDefinition);
                    result = PropertyDefinitionBase.tryLoadFromXml(reader,
                            outParam);
                }
            }
        }
        return result;
    }

    /**
     * Writes the elements to XML: the compared property (via the base
     * class), then a FieldURIOrConstant wrapping either the constant value
     * or the other property's definition.
     *
     * @param writer the writer
     * @throws javax.xml.stream.XMLStreamException , ServiceXmlSerializationException
     * @throws ServiceXmlSerializationException the service xml serialization exception
     */
    @Override
    public void writeElementsToXml(EwsServiceXmlWriter writer)
            throws XMLStreamException, ServiceXmlSerializationException {
        super.writeElementsToXml(writer);
        writer.writeStartElement(XmlNamespace.Types,
                XmlElementNames.FieldURIOrConstant);
        if (this.value != null) {
            writer.writeStartElement(XmlNamespace.Types,
                    XmlElementNames.Constant);
            writer.writeAttributeValue(XmlAttributeNames.Value,
                    true /* alwaysWriteEmptyString */, this.value);
            writer.writeEndElement(); // Constant
        } else {
            this.otherPropertyDefinition.writeToXml(writer);
        }
        writer.writeEndElement(); // FieldURIOrConstant
    }

    /**
     * Gets the definition of the property to compare with.
     *
     * @return otherPropertyDefinition
     */
    public PropertyDefinitionBase getOtherPropertyDefinition() {
        return this.otherPropertyDefinition;
    }

    /**
     * Sets the definition of the property to compare with; clears any
     * previously set constant value (the two are mutually exclusive).
     *
     * @param OtherPropertyDefinition the new other property definition
     */
    public void setOtherPropertyDefinition(
            PropertyDefinitionBase OtherPropertyDefinition) {
        this.otherPropertyDefinition = OtherPropertyDefinition;
        this.value = null;
    }

    /**
     * Gets the value of the property to compare with.
     *
     * @return the value
     */
    public Object getValue() {
        return value;
    }

    /**
     * Sets the value of the property to compare with; clears any
     * previously set property definition (the two are mutually exclusive).
     *
     * @param value the new value
     */
    public void setValue(Object value) {
        this.value = value;
        this.otherPropertyDefinition = null;
    }

    /**
     * Gets the XML element name; null here — concrete relational filters
     * (IsEqualTo, IsGreaterThan, …) override this with their element name.
     *
     * @return the xml element name
     */
    @Override
    protected String getXmlElementName() {
        return null;
    }
}
/**
 * Represents a collection of search filters linked by a logical operator.
 * Applications can use SearchFilterCollection to define complex search
 * filters such as "Condition1 AND Condition2".
 */
public static class SearchFilterCollection extends SearchFilter implements
        Iterable<SearchFilter>, IComplexPropertyChangedDelegate {

    /**
     * The logical operator linking the filters; defaults to And.
     */
    private LogicalOperator logicalOperator = LogicalOperator.And;

    /**
     * The search filters contained in this collection.
     */
    private ArrayList<SearchFilter> searchFilters =
            new ArrayList<SearchFilter>();

    /**
     * Initializes a new instance of the class using the And operator.
     */
    public SearchFilterCollection() {
        super();
    }

    /**
     * Initializes a new instance of the class.
     *
     * @param logicalOperator The logical operator used to initialize the collection.
     */
    public SearchFilterCollection(LogicalOperator logicalOperator) {
        this.logicalOperator = logicalOperator;
    }

    /**
     * Initializes a new instance of the class.
     *
     * @param logicalOperator The logical operator used to initialize the collection.
     * @param searchFilters The search filters to add to the collection.
     */
    public SearchFilterCollection(LogicalOperator logicalOperator,
            SearchFilter... searchFilters) {
        this(logicalOperator);
        // Add all filters in one pass; the previous implementation wrapped
        // each filter in a singleton list and called addRange per element.
        this.addRange(java.util.Arrays.asList(searchFilters));
    }

    /**
     * Initializes a new instance of the class.
     *
     * @param logicalOperator The logical operator used to initialize the collection.
     * @param searchFilters The search filters to add to the collection.
     */
    public SearchFilterCollection(LogicalOperator logicalOperator,
            Iterable<SearchFilter> searchFilters) {
        this(logicalOperator);
        this.addRange(searchFilters);
    }

    /**
     * Validates the instance by validating each contained filter,
     * wrapping any failure with the offending index.
     *
     * @throws Exception if a contained filter is invalid
     */
    @Override
    protected void internalValidate() throws Exception {
        for (int i = 0; i < this.getCount(); i++) {
            try {
                this.searchFilters.get(i).internalValidate();
            } catch (ServiceValidationException e) {
                throw new ServiceValidationException(String.format("The search filter at index %d is invalid.", i),
                        e);
            }
        }
    }

    /**
     * A contained search filter has changed; propagate as a change of
     * this collection.
     *
     * @param complexProperty The complex property
     */
    private void searchFilterChanged(ComplexProperty complexProperty) {
        this.changed();
    }

    /**
     * Gets the name of the XML element: the logical operator's name
     * (e.g. "And", "Or") doubles as the element name.
     *
     * @return xml element name
     */
    @Override
    protected String getXmlElementName() {
        return this.logicalOperator.toString();
    }

    /**
     * Tries to read element from XML; each child element is loaded as a
     * search filter and appended to the collection.
     *
     * @param reader the reader
     * @return true, if successful
     * @throws Exception the exception
     */
    @Override
    public boolean tryReadElementFromXml(EwsServiceXmlReader reader)
            throws Exception {
        this.add(SearchFilter.loadFromXml(reader));
        return true;
    }

    /**
     * Writes all contained filters to XML.
     *
     * @param writer the writer
     * @throws Exception the exception
     */
    @Override
    public void writeElementsToXml(EwsServiceXmlWriter writer)
            throws Exception {
        for (SearchFilter searchFilter : this.searchFilters) {
            searchFilter.writeToXml(writer);
        }
    }

    /**
     * Writes to XML.
     *
     * @param writer the writer
     * @throws Exception the exception
     */
    @Override public void writeToXml(EwsServiceXmlWriter writer) throws Exception {
        // If there is only one filter in the collection, which developers
        // tend to do, we need to not emit the collection and instead only
        // emit the one filter within the collection. This is to work around
        // the fact that EWS does not allow filter collections that have
        // less than two elements.
        if (this.getCount() == 1) {
            this.searchFilters.get(0).writeToXml(writer);
        } else {
            super.writeToXml(writer);
        }
    }

    /**
     * Adds a search filter of any type to the collection.
     *
     * @param searchFilter The search filter to add. Available search filter classes
     *                     include SearchFilter.IsEqualTo,
     *                     SearchFilter.ContainsSubstring and
     *                     SearchFilter.SearchFilterCollection.
     */
    public void add(SearchFilter searchFilter) {
        if (searchFilter == null) {
            throw new IllegalArgumentException("searchFilter");
        }
        searchFilter.addOnChangeEvent(this);
        this.searchFilters.add(searchFilter);
        this.changed();
    }

    /**
     * Adds multiple search filters to the collection.
     *
     * @param searchFilters The search filters to add. Available search filter classes
     *                      include SearchFilter.IsEqualTo,
     *                      SearchFilter.ContainsSubstring and
     *                      SearchFilter.SearchFilterCollection
     */
    public void addRange(Iterable<SearchFilter> searchFilters) {
        if (searchFilters == null) {
            throw new IllegalArgumentException("searchFilters");
        }
        for (SearchFilter searchFilter : searchFilters) {
            searchFilter.addOnChangeEvent(this);
            this.searchFilters.add(searchFilter);
        }
        this.changed();
    }

    /**
     * Clears the collection, unsubscribing from every contained filter.
     */
    public void clear() {
        if (this.getCount() > 0) {
            for (SearchFilter searchFilter : this.searchFilters) {
                searchFilter.removeChangeEvent(this);
            }
            this.searchFilters.clear();
            this.changed();
        }
    }

    /**
     * Determines whether a specific search filter is in the collection.
     *
     * @param searchFilter The search filter to locate in the collection.
     * @return True if the search filter was found in the collection, false
     *         otherwise.
     */
    public boolean contains(SearchFilter searchFilter) {
        return this.searchFilters.contains(searchFilter);
    }

    /**
     * Removes a search filter from the collection.
     *
     * @param searchFilter The search filter to remove
     */
    public void remove(SearchFilter searchFilter) {
        if (searchFilter == null) {
            throw new IllegalArgumentException("searchFilter");
        }
        if (this.contains(searchFilter)) {
            searchFilter.removeChangeEvent(this);
            this.searchFilters.remove(searchFilter);
            this.changed();
        }
    }

    /**
     * Removes the search filter at the specified index from the collection.
     *
     * @param index The zero-based index of the search filter to remove.
     */
    public void removeAt(int index) {
        if (index < 0 || index >= this.getCount()) {
            throw new IllegalArgumentException(
                    String.format("index %d is out of range [0..%d[.", index, this.getCount()));
        }
        this.searchFilters.get(index).removeChangeEvent(this);
        this.searchFilters.remove(index);
        this.changed();
    }

    /**
     * Gets the total number of search filters in the collection.
     *
     * @return the count
     */
    public int getCount() {
        return this.searchFilters.size();
    }

    /**
     * Gets the search filter at the specified index.
     *
     * @param index the index
     * @return The search filter at the specified index.
     */
    public SearchFilter getSearchFilter(int index) {
        if (index < 0 || index >= this.getCount()) {
            throw new IllegalArgumentException(
                    String.format("index %d is out of range [0..%d[.", index, this.getCount())
            );
        }
        return this.searchFilters.get(index);
    }

    /**
     * Replaces the search filter at the specified index.
     *
     * <p>Fix: the previous implementation called
     * {@code searchFilters.add(index, searchFilter)}, which INSERTED the
     * new filter and shifted the existing ones right instead of replacing
     * the element, and it never unsubscribed from the replaced filter.
     * This now performs a true replacement with change-event wiring
     * consistent with {@link #add} and {@link #remove}.
     *
     * @param index the index
     * @param searchFilter the search filter
     */
    public void setSearchFilter(int index, SearchFilter searchFilter) {
        if (searchFilter == null) {
            throw new IllegalArgumentException("searchFilter");
        }
        if (index < 0 || index >= this.getCount()) {
            throw new IllegalArgumentException(
                    String.format("index %d is out of range [0..%d[.", index, this.getCount())
            );
        }
        this.searchFilters.get(index).removeChangeEvent(this);
        searchFilter.addOnChangeEvent(this);
        this.searchFilters.set(index, searchFilter);
        this.changed();
    }

    /**
     * Gets the logical operator that links the search filters in this
     * collection.
     *
     * @return LogicalOperator
     */
    public LogicalOperator getLogicalOperator() {
        return logicalOperator;
    }

    /**
     * Sets the logical operator that links the search filters in this
     * collection.
     *
     * @param logicalOperator the new logical operator
     */
    public void setLogicalOperator(LogicalOperator logicalOperator) {
        this.logicalOperator = logicalOperator;
    }

    /**
     * Change-listener callback; delegates to searchFilterChanged.
     *
     * @see IComplexPropertyChangedDelegate#complexPropertyChanged
     */
    @Override
    public void complexPropertyChanged(ComplexProperty complexProperty) {
        searchFilterChanged(complexProperty);
    }

    /*
     * (non-Javadoc)
     *
     * @see java.lang.Iterable#iterator()
     */
    @Override
    public Iterator<SearchFilter> iterator() {
        return this.searchFilters.iterator();
    }
}
}
|
package net.mcft.copy.betterstorage.inventory;
import java.util.Arrays;
import net.mcft.copy.betterstorage.api.crafting.BetterStorageCrafting;
import net.mcft.copy.betterstorage.api.crafting.CraftingSourceStation;
import net.mcft.copy.betterstorage.api.crafting.IRecipeInput;
import net.mcft.copy.betterstorage.api.crafting.IStationRecipe;
import net.mcft.copy.betterstorage.config.GlobalConfig;
import net.mcft.copy.betterstorage.item.recipe.VanillaStationRecipe;
import net.mcft.copy.betterstorage.tile.entity.TileEntityCraftingStation;
import net.mcft.copy.betterstorage.utils.StackUtils;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.ItemStack;
import net.minecraftforge.common.MinecraftForge;
import net.minecraftforge.event.entity.player.PlayerDestroyItemEvent;
public class InventoryCraftingStation extends InventoryBetterStorage {

    /** Backing tile entity; remains null when built via the name-only constructor. */
    public TileEntityCraftingStation entity = null;

    /** 3x3 crafting grid (9 slots). */
    public final ItemStack[] crafting;
    /** Output slots (9); may hold "ghost" preview items, see outputIsReal. */
    public final ItemStack[] output;
    /** Internal storage (18 slots) items are pulled from for auto-crafting. */
    public final ItemStack[] contents;
    /** Snapshot of output used to detect changes between inventory updates. */
    public final ItemStack[] lastOutput;

    /** Recipe currently matched against the crafting grid, or null. */
    public IStationRecipe currentRecipe = null;
    /** True when output holds actually-crafted items; false when it holds a ghost preview. */
    public boolean outputIsReal = false;
    /** Ticks elapsed toward craftingTime / autocraft delay. */
    public int progress = 0;
    /** Ticks required by the current recipe. */
    public int craftingTime = 0;
    /** Experience shown for the current recipe. */
    public int experience = 0;

    /** Per-slot input requirements reported by the current recipe. */
    private IRecipeInput[] requiredInput = new IRecipeInput[9];
    /** Cached result of pullRequired(..., false); see hasRequirements(). */
    private boolean hasRequirements = false;
    /** Marks the hasRequirements cache dirty. */
    private boolean checkHasRequirements = true;

    /** Wraps an existing tile entity's slot arrays. */
    public InventoryCraftingStation(TileEntityCraftingStation entity) {
        this("", entity.crafting, entity.output, entity.contents);
        this.entity = entity;
    }

    /** Standalone inventory with freshly allocated slot arrays (no tile entity). */
    public InventoryCraftingStation(String name) {
        this(name, new ItemStack[9], new ItemStack[9], new ItemStack[18]);
    }

    private InventoryCraftingStation(String name, ItemStack[] crafting, ItemStack[] output, ItemStack[] contents) {
        super(name);
        this.crafting = crafting;
        this.output = output;
        this.contents = contents;
        lastOutput = new ItemStack[output.length];
        updateLastOutput();
        onInventoryChanged();
    }

    /** Tick handler: advances progress while a recipe is selected and the output
     *  is still a ghost preview, until both the recipe's crafting time and the
     *  configured autocraft delay have elapsed. */
    public void update() {
        if (!outputIsReal && (currentRecipe != null) &&
                ((progress < craftingTime ||
                        (progress < GlobalConfig.stationAutocraftDelaySetting.getValue())))) progress++;
    }

    /** Checks if the recipe changed and updates everything accordingly. */
    public void checkRecipe() {
        IStationRecipe previous = currentRecipe;
        if ((currentRecipe == null) || !currentRecipe.matches(crafting)) {
            // Station recipes take priority; fall back to vanilla crafting recipes.
            currentRecipe = BetterStorageCrafting.findMatchingStationRecipe(crafting);
            if (currentRecipe == null)
                currentRecipe = VanillaStationRecipe.findVanillaRecipe(this);
        }
        if ((previous != currentRecipe) || !recipeOutputMatches()) {
            // Recipe (or its output) changed: reset progress and refresh timings,
            // and clear ghost output (real output is left untouched).
            progress = 0;
            craftingTime = ((currentRecipe != null) ? currentRecipe.getCraftingTime(crafting) : 0);
            experience = ((currentRecipe != null) ? currentRecipe.getExperienceDisplay(crafting) : 0);
            if (!outputIsReal)
                for (int i = 0; i < output.length; i++)
                    output[i] = null;
        }
        Arrays.fill(requiredInput, null);
        if (currentRecipe != null)
            currentRecipe.getCraftRequirements(crafting, requiredInput);
        updateLastOutput();
    }

    /** Returns whether the current output slots exactly match the current
     *  recipe's output (slot by slot). */
    private boolean recipeOutputMatches() {
        if (currentRecipe == null) return false;
        ItemStack[] recipeOutput = currentRecipe.getOutput(crafting);
        for (int i = 0; i < output.length; i++)
            if (!ItemStack.areItemStacksEqual(((i < recipeOutput.length) ? recipeOutput[i] : null), output[i]))
                return false;
        return true;
    }

    /** Called when an item is removed from the output
     * slot while it doesn't store any real items. */
    public void craft(EntityPlayer player) {
        currentRecipe.craft(crafting, new CraftingSourceStation(entity, player));
        // Clean up the grid: remove depleted stacks and fire the destroy event
        // for damageable items that broke during crafting.
        for (int i = 0; i < crafting.length; i++) {
            ItemStack stack = crafting[i];
            if (stack == null) continue;
            if (stack.stackSize <= 0) {
                // Item stack is depleted.
            } else if (stack.getItem().isDamageable() && (stack.getItemDamage() > stack.getMaxDamage())) {
                // Item stack is destroyed.
                if (player != null)
                    MinecraftForge.EVENT_BUS.post(new PlayerDestroyItemEvent(player, stack));
            } else continue;
            crafting[i] = null;
        }
        // Refill the grid from internal storage for the next craft.
        pullRequired(requiredInput, true);
        outputIsReal = !outputEmpty();
        progress = 0;
        checkRecipe();
        checkHasRequirements = true;
    }

    /** Pull items required for the recipe from the internal inventory.
     * Returns if successful. If doPull is false, only checks but doesn't move items. */
    public boolean pullRequired(IRecipeInput[] requiredInput, boolean doPull) {
        // In check-only mode operate on clones so nothing is actually moved.
        ItemStack[] contents = (doPull ? this.contents : this.contents.clone());
        ItemStack[] crafting = (doPull ? this.crafting : this.crafting.clone());
        boolean success = true;
        craftingLoop:
        for (int i = 0; i < crafting.length; i++) {
            ItemStack stack = crafting[i];
            IRecipeInput required = requiredInput[i];
            if (required != null) {
                int currentAmount = 0;
                // NOTE(review): in check-only mode this pre-subtracts the required
                // amount, appearing to simulate the consumption a real craft would
                // perform first — confirm StackUtils.copyStack semantics for
                // non-positive sizes.
                if ((stack != null) && !doPull)
                    stack = StackUtils.copyStack(stack, stack.stackSize - required.getAmount());
                if (stack != null) {
                    if (!required.matches(stack)) return false;
                    currentAmount = stack.stackSize;
                }
                int requiredAmount = (required.getAmount() - currentAmount);
                if (requiredAmount <= 0) continue;
                // Scan internal storage for matching stacks and move items over.
                for (int j = 0; j < contents.length; j++) {
                    ItemStack contentsStack = contents[j];
                    if (contentsStack == null) continue;
                    if ((stack == null) ? required.matches(contentsStack)
                            : StackUtils.matches(stack, contentsStack)) {
                        int amount = Math.min(contentsStack.stackSize, requiredAmount);
                        crafting[i] = stack = StackUtils.copyStack(contentsStack, (currentAmount += amount));
                        contents[j] = StackUtils.copyStack(contentsStack, contentsStack.stackSize - amount);
                        if ((requiredAmount -= amount) <= 0)
                            continue craftingLoop;
                    }
                }
                success = false;
                if (!doPull) break;
            } else if (stack != null) {
                // Slot holds an item the recipe doesn't ask for.
                success = false;
                if (!doPull) break;
            }
        }
        return success;
    }

    /** Returns if items can be taken out of the output slots. */
    public boolean canTake(EntityPlayer player) {
        // Real output is always takeable; ghost output requires a craftable
        // recipe with elapsed crafting time, and — for automated extraction
        // (player == null) — the autocraft delay plus restockable inputs.
        return (outputIsReal || ((currentRecipe != null) &&
                (currentRecipe.canCraft(crafting, new CraftingSourceStation(entity, player))) &&
                (progress >= craftingTime) &&
                ((player != null) ||
                        ((progress >= GlobalConfig.stationAutocraftDelaySetting.getValue()) &&
                                hasRequirements()))));
    }

    /** Returns if the crafting station has the items
     * required in its inventory to craft the recipe again. */
    private boolean hasRequirements() {
        // Lazily re-evaluated; invalidated by craft() and onInventoryChanged().
        if (checkHasRequirements) {
            hasRequirements = pullRequired(requiredInput, false);
            checkHasRequirements = false;
        }
        return hasRequirements;
    }

    // IInventory implementation
    // Slot layout: [0..8] crafting, [9..17] output, [18..35] contents.

    @Override
    public int getSizeInventory() { return (crafting.length + output.length + contents.length); }

    @Override
    public ItemStack getStackInSlot(int slot) {
        if (slot < crafting.length) return crafting[slot];
        else if (slot < crafting.length + output.length)
            return output[slot - crafting.length];
        else return contents[slot - (crafting.length + output.length)];
    }

    @Override
    public void setInventorySlotContents(int slot, ItemStack stack) {
        if (slot < crafting.length) crafting[slot] = stack;
        else if (slot < crafting.length + output.length)
            output[slot - crafting.length] = stack;
        else contents[slot - (crafting.length + output.length)] = stack;
    }

    @Override
    public boolean isUseableByPlayer(EntityPlayer player) { return true; }

    @Override
    public void openChest() { }

    @Override
    public void closeChest() { }

    @Override
    public void onInventoryChanged() {
        boolean updateLastOutput = false;
        // See if items were taken out from the output.
        if (outputChanged()) {
            // If the output is ghost items, there is a recipe and we're
            // on the server, craft the items and decrement the input.
            if (!outputIsReal && (currentRecipe != null) && (entity != null))
                craft(null);
            updateLastOutput = true;
        }
        if (outputEmpty()) {
            // Otherwise set the output to not be real.
            outputIsReal = false;
            if (currentRecipe != null) {
                // Fill it with ghost output from the recipe.
                ItemStack[] output = currentRecipe.getOutput(crafting);
                for (int i = 0; i < output.length; i++)
                    this.output[i] = ItemStack.copyItemStack(output[i]);
            }
            updateLastOutput = true;
        }
        checkHasRequirements = true;
        if (updateLastOutput)
            updateLastOutput();
    }

    // Utility functions

    /** Re-snapshots the output slots into lastOutput. */
    private void updateLastOutput() {
        for (int i = 0; i < output.length; i++)
            lastOutput[i] = ItemStack.copyItemStack(output[i]);
    }

    /** True if any output slot differs from the last snapshot. */
    private boolean outputChanged() {
        for (int i = 0; i < output.length; i++)
            if (!ItemStack.areItemStacksEqual(output[i], lastOutput[i]))
                return true;
        return false;
    }

    /** True if every output slot is empty. */
    private boolean outputEmpty() {
        for (int i = 0; i < output.length; i++)
            if (output[i] != null) return false;
        return true;
    }
}
|
package jolie.lang.parse;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.logging.Logger;
import jolie.lang.Constants.OperandType;
import jolie.lang.parse.ast.AndConditionNode;
import jolie.lang.parse.ast.AssignStatement;
import jolie.lang.parse.ast.DocumentationComment;
import jolie.lang.parse.ast.CompareConditionNode;
import jolie.lang.parse.ast.CompensateStatement;
import jolie.lang.parse.ast.ConstantIntegerExpression;
import jolie.lang.parse.ast.ConstantRealExpression;
import jolie.lang.parse.ast.ConstantStringExpression;
import jolie.lang.parse.ast.CorrelationSetInfo;
import jolie.lang.parse.ast.CurrentHandlerStatement;
import jolie.lang.parse.ast.DeepCopyStatement;
import jolie.lang.parse.ast.EmbeddedServiceNode;
import jolie.lang.parse.ast.ExecutionInfo;
import jolie.lang.parse.ast.ExitStatement;
import jolie.lang.parse.ast.ExpressionConditionNode;
import jolie.lang.parse.ast.ForEachStatement;
import jolie.lang.parse.ast.ForStatement;
import jolie.lang.parse.ast.IfStatement;
import jolie.lang.parse.ast.InstallFixedVariableExpressionNode;
import jolie.lang.parse.ast.InstallStatement;
import jolie.lang.parse.ast.IsTypeExpressionNode;
import jolie.lang.parse.ast.LinkInStatement;
import jolie.lang.parse.ast.LinkOutStatement;
import jolie.lang.parse.ast.NDChoiceStatement;
import jolie.lang.parse.ast.NotConditionNode;
import jolie.lang.parse.ast.NotificationOperationStatement;
import jolie.lang.parse.ast.NullProcessStatement;
import jolie.lang.parse.ast.OLSyntaxNode;
import jolie.lang.parse.ast.OneWayOperationDeclaration;
import jolie.lang.parse.ast.OneWayOperationStatement;
import jolie.lang.parse.ast.OperationDeclaration;
import jolie.lang.parse.ast.OrConditionNode;
import jolie.lang.parse.ast.OutputPortInfo;
import jolie.lang.parse.ast.ParallelStatement;
import jolie.lang.parse.ast.PointerStatement;
import jolie.lang.parse.ast.PostDecrementStatement;
import jolie.lang.parse.ast.PostIncrementStatement;
import jolie.lang.parse.ast.PreDecrementStatement;
import jolie.lang.parse.ast.PreIncrementStatement;
import jolie.lang.parse.ast.ProductExpressionNode;
import jolie.lang.parse.ast.Program;
import jolie.lang.parse.ast.RequestResponseOperationDeclaration;
import jolie.lang.parse.ast.RequestResponseOperationStatement;
import jolie.lang.parse.ast.RunStatement;
import jolie.lang.parse.ast.Scope;
import jolie.lang.parse.ast.SequenceStatement;
import jolie.lang.parse.ast.InputPortInfo;
import jolie.lang.parse.ast.SolicitResponseOperationStatement;
import jolie.lang.parse.ast.DefinitionCallStatement;
import jolie.lang.parse.ast.DefinitionNode;
import jolie.lang.parse.ast.InterfaceDefinition;
import jolie.lang.parse.ast.SpawnStatement;
import jolie.lang.parse.ast.SumExpressionNode;
import jolie.lang.parse.ast.SynchronizedStatement;
import jolie.lang.parse.ast.ThrowStatement;
import jolie.lang.parse.ast.TypeCastExpressionNode;
import jolie.lang.parse.ast.UndefStatement;
import jolie.lang.parse.ast.ValueVectorSizeExpressionNode;
import jolie.lang.parse.ast.VariableExpressionNode;
import jolie.lang.parse.ast.VariablePathNode;
import jolie.lang.parse.ast.WhileStatement;
import jolie.lang.parse.ast.types.TypeDefinition;
import jolie.lang.parse.ast.types.TypeDefinitionLink;
import jolie.lang.parse.ast.types.TypeInlineDefinition;
import jolie.util.Pair;
/**
* Checks the well-formedness and validity of a JOLIE program.
* @see Program
* @author Fabrizio Montesi
*/
public class SemanticVerifier implements OLVisitor
{
// The program under verification.
private final Program program;
// Set to false by error(); final verdict returned by validate().
private boolean valid = true;
// Input/output ports by id, for duplicate detection and aggregation checks.
private final Map< String, InputPortInfo > inputPorts = new HashMap< String, InputPortInfo >();
private final Map< String, OutputPortInfo > outputPorts = new HashMap< String, OutputPortInfo >();
// Names of defined procedures (definitions).
private final Set< String > subroutineNames = new HashSet< String > ();
// Declared operations by name, split by kind.
private final Map< String, OneWayOperationDeclaration > oneWayOperations =
        new HashMap< String, OneWayOperationDeclaration >();
private final Map< String, RequestResponseOperationDeclaration > requestResponseOperations =
        new HashMap< String, RequestResponseOperationDeclaration >();
// Deferred structural-equality checks between same-named type declarations.
private final Map< TypeDefinition, List< TypeDefinition > > typesToBeEqual = new HashMap< TypeDefinition, List< TypeDefinition > >();
// True while visiting the contents of an input port declaration.
private boolean insideInputPort = false;
// Set once a main {} definition has been seen.
private boolean mainDefined = false;
private final Logger logger = Logger.getLogger( "JOLIE" );
// All defined types by name, plus the type links to resolve lazily.
private final Map< String, TypeDefinition > definedTypes = OLParser.createTypeDeclarationMap();
private final List< TypeDefinitionLink > definedTypeLinks = new LinkedList< TypeDefinitionLink >();
//private TypeDefinition rootType; // the type representing the whole session state
// varName -> true while assigned at most once (constness analysis).
private final Map< String, Boolean > isConstantMap = new HashMap< String, Boolean >();
private final Set< String > correlationSet = new HashSet< String >();
/**
 * Creates a verifier for the given program. Verification is performed by
 * calling validate().
 *
 * @param program the program to verify
 */
public SemanticVerifier( Program program )
{
    this.program = program;
    /*rootType = new TypeInlineDefinition(
        new ParsingContext(),
        "#RootType",
        NativeType.VOID,
        jolie.lang.Constants.RANGE_ONE_TO_ONE
    );*/
}
/**
 * Records an assignment to the given variable name for constness analysis.
 * The first assignment marks the name as constant (true); any subsequent
 * assignment demotes it to non-constant (false).
 *
 * @param varName the name of the assigned variable
 */
private void encounteredAssignment( String varName )
{
    // First sighting -> true (constant so far); repeat sighting -> false.
    isConstantMap.put( varName, !isConstantMap.containsKey( varName ) );
}
/**
 * Schedules a structural-equality check between an already defined type
 * (key) and a later declaration with the same name; the pending checks are
 * evaluated by checkToBeEqualTypes() after the program has been visited.
 *
 * @param key  the previously defined type
 * @param type the re-declaration that must be structurally equal to it
 */
private void addTypeEqualnessCheck( TypeDefinition key, TypeDefinition type )
{
    List< TypeDefinition > pending = typesToBeEqual.get( key );
    if ( pending == null ) {
        pending = new LinkedList< TypeDefinition >();
        typesToBeEqual.put( key, pending );
    }
    pending.add( type );
}
/**
 * Records an assignment made through a variable path, keyed by the path's
 * root element name.
 *
 * NOTE(review): assumes the root key of a VariablePathNode is always a
 * ConstantStringExpression — the cast fails otherwise; confirm against the
 * parser's invariants.
 *
 * @param path the assigned variable path
 */
private void encounteredAssignment( VariablePathNode path )
{
    encounteredAssignment( ((ConstantStringExpression)path.path().get( 0 ).key()).value() );
}
/**
 * Returns the constness analysis results: variable name mapped to true
 * while it has been assigned at most once (exposes the live internal map).
 *
 * @return the variable-name-to-constness map
 */
public Map< String, Boolean > isConstantMap()
{
    return isConstantMap;
}
/**
 * Logs a warning, prefixed with the node's source location when available.
 * Warnings do not affect the verification verdict.
 *
 * @param node    the offending node, or null for a program-wide warning
 * @param message the warning message
 */
private void warning( OLSyntaxNode node, String message )
{
    if ( node != null ) {
        logger.warning( node.context().sourceName() + ":" + node.context().line() + ": " + message );
    } else {
        logger.warning( message );
    }
}
/**
 * Logs a semantic error, prefixed with the node's source location when
 * available, and marks the program as invalid.
 *
 * @param node    the offending node, or null for a program-wide error
 * @param message the error message
 */
private void error( OLSyntaxNode node, String message )
{
    valid = false;
    if ( node == null ) {
        logger.severe( message );
    } else {
        ParsingContext context = node.context();
        logger.severe( context.sourceName() + ":" + context.line() + ": " + message );
    }
}
/**
 * Resolves every type definition link collected during the visit, binding
 * each link to the concrete type it names. Reports an error for links
 * pointing to a type that was never defined.
 */
private void resolveLazyLinks()
{
    for( TypeDefinitionLink l : definedTypeLinks ) {
        l.setLinkedType( definedTypes.get( l.linkedTypeName() ) );
        if ( l.linkedType() == null ) {
            // Fixed message: a space was missing between the type name and
            // "points", producing output like "type Foopoints to ...".
            error( l, "type " + l.id() + " points to an undefined type (" + l.linkedTypeName() + ")" );
        }
    }
}
/**
 * Evaluates the structural-equality checks queued by addTypeEqualnessCheck:
 * every re-declaration of a type name must be structurally equal to the
 * original definition; otherwise an error is reported.
 */
private void checkToBeEqualTypes()
{
    for( Entry< TypeDefinition, List< TypeDefinition > > entry : typesToBeEqual.entrySet() ) {
        TypeDefinition original = entry.getKey();
        for( TypeDefinition redefinition : entry.getValue() ) {
            if ( !original.equals( redefinition ) ) {
                error( redefinition, "type " + redefinition.id() + " has already been defined with a different structure" );
            }
        }
    }
}
/**
 * Runs the whole verification: visits the program, resolves lazy type
 * links, checks queued type-equality constraints, and requires a main
 * procedure to be defined.
 *
 * @return true if the program is semantically valid, false otherwise
 */
public boolean validate()
{
    program.accept( this );
    resolveLazyLinks();
    checkToBeEqualTypes();
    if ( !mainDefined ) {
        error( null, "Main procedure not defined" );
    }
    if ( !valid ) {
        logger.severe( "Aborting: input file semantically invalid." );
    }
    return valid;
}
// Tracks whether the current type node is a top-level declaration (true)
// or a subtype nested inside another declaration (false).
private boolean isTopLevelType = true;

/**
 * Verifies an inline type definition: checks cardinality, queues an
 * equality check if the name was already defined, recurses into subtypes
 * (with isTopLevelType temporarily cleared), and finally registers the
 * type when it is a top-level declaration.
 */
public void visit( TypeInlineDefinition n )
{
    checkCardinality( n );
    boolean backupRootType = isTopLevelType;
    if ( isTopLevelType ) {
        // Check if the type has already been defined with a different structure
        TypeDefinition type = definedTypes.get( n.id() );
        if ( type != null ) {
            addTypeEqualnessCheck( type, n );
        }
    }
    isTopLevelType = false;
    if ( n.hasSubTypes() ) {
        for( Entry< String, TypeDefinition > entry : n.subTypes() ) {
            entry.getValue().accept( this );
        }
    }
    isTopLevelType = backupRootType;
    if ( isTopLevelType ) {
        definedTypes.put( n.id(), n );
    }
}
/**
 * Verifies a type definition link: checks cardinality, queues an equality
 * check and registers the name for top-level declarations, and records the
 * link for lazy resolution in resolveLazyLinks().
 */
public void visit( TypeDefinitionLink n )
{
    checkCardinality( n );
    if ( isTopLevelType ) {
        // Check if the type has already been defined with a different structure
        TypeDefinition type = definedTypes.get( n.id() );
        if ( type != null ) {
            addTypeEqualnessCheck( type, n );
        }
        definedTypes.put( n.id(), n );
    }
    definedTypeLinks.add( n );
}
// A type's occurrence range [min, max] is valid only when both bounds are
// non-negative; each offending bound is reported separately.
private void checkCardinality( TypeDefinition t )
{
	if ( t.cardinality().min() < 0 ) {
		error( t, "type " + t.id() + " specifies an invalid minimum range value (must be positive)" );
	}
	if ( t.cardinality().max() < 0 ) {
		error( t, "type " + t.id() + " specifies an invalid maximum range value (must be positive)" );
	}
}
// Verifies the body of a spawn statement.
public void visit( SpawnStatement n )
{
n.body().accept( this );
}
// Documentation comments carry no semantics to check.
public void visit (DocumentationComment n)
{}
// Entry point of the traversal: visits every top-level node of the program.
public void visit( Program n )
{
for( OLSyntaxNode node : n.children() ) {
node.accept( this );
}
}
// Variable paths have nothing to verify on their own.
public void visit( VariablePathNode n )
{}
// Checks an input port declaration: the port name must be unique, its
// operations must not be declared twice, and every aggregated output port
// must exist. insideInputPort gates the input-specific checks performed by
// the operation-declaration visits.
public void visit( InputPortInfo n )
{
if ( inputPorts.get( n.id() ) != null ) {
error( n, "input port " + n.id() + " has been already defined" );
}
inputPorts.put( n.id(), n );
insideInputPort = true;
// Tracks operation names already declared by this port.
Set< String > opSet = new HashSet< String >();
for( OperationDeclaration op : n.operations() ) {
if ( opSet.contains( op.id() ) ) {
error( n, "input port " + n.id() + " declares operation " + op.id() + " multiple times" );
} else {
opSet.add( op.id() );
op.accept( this );
}
}
// Every aggregated port must be a previously declared output port.
OutputPortInfo outputPort;
for( String portName : n.aggregationList() ) {
outputPort = outputPorts.get( portName );
if ( outputPort == null ) {
error( n, "input port " + n.id() + " aggregates an undefined output port (" + portName + ")" );
}/* else {
for( OperationDeclaration op : outputPort.operations() ) {
if ( opSet.contains( op.id() ) ) {
error( n, "input port " + n.id() + " declares duplicate operation " + op.id() + " from aggregated output port " + outputPort.id() );
} else {
opSet.add( op.id() );
}
}
}*/
}
insideInputPort = false;
}
// Checks an output port declaration. A duplicate name is an error, but the
// port is still registered so later references to it can be resolved.
public void visit( OutputPortInfo n )
{
	if ( outputPorts.containsKey( n.id() ) ) {
		error( n, "output port " + n.id() + " has been already defined" );
	}
	outputPorts.put( n.id(), n );
	// The port name behaves like an assigned variable for the verifier.
	encounteredAssignment( n.id() );
	for( OperationDeclaration op : n.operations() ) {
		op.accept( this );
	}
}
// Checks a one-way operation declaration: its request type must be defined,
// and when declared inside an input port, a previously declared operation
// with the same name must use the same request type.
public void visit( OneWayOperationDeclaration n )
{
if ( definedTypes.get( n.requestType().id() ) == null ) {
error( n, "unknown type: " + n.requestType().id() + " for operation " + n.id() );
}
if ( insideInputPort ) { // Input operation
if ( oneWayOperations.containsKey( n.id() ) ) {
OneWayOperationDeclaration other = oneWayOperations.get( n.id() );
if ( n.requestType().equals( other.requestType() ) == false ) {
error( n, "input operations sharing the same name cannot declare different types (One-Way operation " + n.id() + ")" );
}
} else {
oneWayOperations.put( n.id(), n );
}
}
}
// Checks a request-response operation declaration: request, response and
// every fault type must be defined; inside an input port, a same-named
// earlier declaration must be structurally identical (see checkEqualness).
public void visit( RequestResponseOperationDeclaration n )
{
if ( definedTypes.get( n.requestType().id() ) == null ) {
error( n, "unknown type: " + n.requestType().id() + " for operation " + n.id() );
}
if ( definedTypes.get( n.responseType().id() ) == null ) {
error( n, "unknown type: " + n.responseType().id() + " for operation " + n.id() );
}
for( Entry< String, TypeDefinition > fault : n.faults().entrySet() ) {
if ( definedTypes.containsKey( fault.getValue().id() ) == false ) {
error( n, "unknown type for fault " + fault.getKey() );
}
}
if ( insideInputPort ) { // Input operation
if ( requestResponseOperations.containsKey( n.id() ) ) {
RequestResponseOperationDeclaration other = requestResponseOperations.get( n.id() );
checkEqualness( n, other );
} else {
requestResponseOperations.put( n.id(), n );
}
}
}
// Verifies that two same-named request-response declarations agree on their
// request type, response type and fault map; any mismatch is reported.
private void checkEqualness( RequestResponseOperationDeclaration n, RequestResponseOperationDeclaration other )
{
	if ( n.requestType().equals( other.requestType() ) == false ) {
		error( n, "input operations sharing the same name cannot declare different request types (Request-Response operation " + n.id() + ")" );
	}
	if ( n.responseType().equals( other.responseType() ) == false ) {
		error( n, "input operations sharing the same name cannot declare different response types (Request-Response operation " + n.id() + ")" );
	}
	if ( n.faults().size() != other.faults().size() ) {
		// Fix: grammar ("cannot declared" -> "cannot declare") and the
		// message previously lacked its closing parenthesis.
		error( n, "input operations sharing the same name cannot declare different fault types (Request-Response operation " + n.id() + ")" );
	}
	for( Entry< String, TypeDefinition > fault : n.faults().entrySet() ) {
		if ( fault.getValue() != null ) {
			if ( !other.faults().containsKey( fault.getKey() ) || !other.faults().get( fault.getKey() ).equals( fault.getValue() ) ) {
				// Fix: same grammar and missing-parenthesis defects as above.
				error( n, "input operations sharing the same name cannot declare different fault types (Request-Response operation " + n.id() + ")" );
			}
		}
	}
}
// Checks a procedure definition: its identifier must be unique, and seeing
// a definition named "main" satisfies the mandatory-main check.
public void visit( DefinitionNode n )
{
	if ( !subroutineNames.contains( n.id() ) ) {
		subroutineNames.add( n.id() );
	} else {
		error( n, "Procedure " + n.id() + " uses an already defined identifier" );
	}
	if ( "main".equals( n.id() ) ) {
		mainDefined = true;
	}
	n.body().accept( this );
}
// Verifies every branch of a parallel composition.
public void visit( ParallelStatement stm )
{
for( OLSyntaxNode node : stm.children() ) {
node.accept( this );
}
}
// Verifies every statement of a sequential composition.
public void visit( SequenceStatement stm )
{
for( OLSyntaxNode node : stm.children() ) {
node.accept( this );
}
}
// Verifies each guard/body pair of a non-deterministic choice.
public void visit( NDChoiceStatement stm )
{
for( Pair< OLSyntaxNode, OLSyntaxNode > pair : stm.children() ) {
pair.key().accept( this );
pair.value().accept( this );
}
}
// Checks a notification invocation: the target output port must exist and
// must declare n.id() as a one-way operation.
public void visit( NotificationOperationStatement n )
{
	OutputPortInfo port = outputPorts.get( n.outputPortId() );
	if ( port == null ) {
		error( n, n.outputPortId() + " is not a valid output port" );
		return;
	}
	OperationDeclaration decl = port.operationsMap().get( n.id() );
	if ( decl == null ) {
		error( n, "Operation " + n.id() + " has not been declared in output port type " + port.id() );
	} else if ( !( decl instanceof OneWayOperationDeclaration ) ) {
		error( n, "Operation " + n.id() + " is not a valid one-way operation in output port " + port.id() );
	}
}
// Checks a solicit-response invocation: the output port must exist and
// declare n.id() as a request-response operation, and the response must not
// be received into a correlation variable.
public void visit( SolicitResponseOperationStatement n )
{
	if ( n.inputVarPath() != null ) {
		encounteredAssignment( n.inputVarPath() );
	}
	OutputPortInfo port = outputPorts.get( n.outputPortId() );
	if ( port == null ) {
		error( n, n.outputPortId() + " is not a valid output port" );
	} else {
		OperationDeclaration decl = port.operationsMap().get( n.id() );
		if ( decl == null ) {
			error( n, "Operation " + n.id() + " has not been declared in output port " + port.id() );
		} else if ( !( decl instanceof RequestResponseOperationDeclaration ) ) {
			error( n, "Operation " + n.id() + " is not a valid request-response operation in output port " + port.id() );
		}
	}
	if ( isInCorrelationSet( n.inputVarPath() ) ) {
		error( n, "Receiving a message in a correlation variable is forbidden" );
	}
}
// Verifies the (optional) expression carried by a throw.
public void visit( ThrowStatement n )
{
verify( n.expression() );
}
// Nothing to verify for compensate.
public void visit( CompensateStatement n ) {}
// Verifies every handler body installed by an install statement.
public void visit( InstallStatement n )
{
for( Pair< String, OLSyntaxNode > pair : n.handlersFunction().pairs() ) {
pair.value().accept( this );
}
}
// Verifies the body of a scope.
public void visit( Scope n )
{
n.body().accept( this );
}
// Checks a one-way input: the receive path must be valid, must not be a
// correlation variable, and counts as an assignment target.
public void visit( OneWayOperationStatement n )
{
verify( n.inputVarPath() );
if ( isInCorrelationSet( n.inputVarPath() ) ) {
error( n, "Receiving a message in a correlation variable is forbidden" );
}
if ( n.inputVarPath() != null ) {
encounteredAssignment( n.inputVarPath() );
}
}
// Checks a request-response input: verifies the receive path and the body,
// records the assignment, and forbids receiving into a correlation variable.
public void visit( RequestResponseOperationStatement n )
{
verify( n.inputVarPath() );
verify( n.process() );
if ( n.inputVarPath() != null ) {
encounteredAssignment( n.inputVarPath() );
}
if ( isInCorrelationSet( n.inputVarPath() ) ) {
error( n, "Receiving a message in a correlation variable is forbidden" );
}
}
// Internal link statements carry nothing to verify.
public void visit( LinkInStatement n ) {}
public void visit( LinkOutStatement n ) {}
// Verifies the body of a synchronized block.
public void visit( SynchronizedStatement n )
{
n.body().accept( this );
}
// Checks an assignment: records the target path, then verifies both sides.
public void visit( AssignStatement n )
{
encounteredAssignment( n.variablePath() );
n.variablePath().accept( this );
n.expression().accept( this );
}
// Null-safe helper: visits the node only if it is present.
private void verify( OLSyntaxNode n )
{
if ( n != null ) {
n.accept( this );
}
}
// Checks an aliasing (pointer) statement; aliasing a correlation variable
// is forbidden.
public void visit( PointerStatement n )
{
encounteredAssignment( n.leftPath() );
encounteredAssignment( n.rightPath() );
n.leftPath().accept( this );
n.rightPath().accept( this );
if ( isInCorrelationSet( n.rightPath() ) ) {
error( n, "Making an alias to a correlation variable is forbidden" );
}
}
// Checks a deep-copy statement; deep-copying onto a correlation variable
// is forbidden.
public void visit( DeepCopyStatement n )
{
encounteredAssignment( n.leftPath() );
n.leftPath().accept( this );
n.rightPath().accept( this );
if ( isInCorrelationSet( n.leftPath() ) ) {
error( n, "Deep copy on a correlation variable is forbidden" );
}
}
// Verifies each condition/branch pair of an if, plus the optional else.
public void visit( IfStatement n )
{
for( Pair< OLSyntaxNode, OLSyntaxNode > choice : n.children() ) {
verify( choice.key() );
verify( choice.value() );
}
verify( n.elseProcess() );
}
// Procedure calls are resolved elsewhere; nothing to verify here.
public void visit( DefinitionCallStatement n ) {}
// Verifies the condition and body of a while loop.
public void visit( WhileStatement n )
{
n.condition().accept( this );
n.body().accept( this );
}
// Verifies every operand of a boolean OR.
public void visit( OrConditionNode n )
{
for( OLSyntaxNode node : n.children() ) {
node.accept( this );
}
}
// Verifies every operand of a boolean AND.
public void visit( AndConditionNode n )
{
for( OLSyntaxNode node : n.children() ) {
node.accept( this );
}
}
// Verifies the negated condition.
public void visit( NotConditionNode n )
{
n.condition().accept( this );
}
// Verifies both sides of a comparison.
public void visit( CompareConditionNode n )
{
n.leftExpression().accept( this );
n.rightExpression().accept( this );
}
// Verifies an expression used as a condition.
public void visit( ExpressionConditionNode n )
{
n.expression().accept( this );
}
// Constant literals need no verification.
public void visit( ConstantIntegerExpression n ) {}
public void visit( ConstantRealExpression n ) {}
public void visit( ConstantStringExpression n ) {}
// Verifies every operand of a product expression.
public void visit( ProductExpressionNode n )
{
for( Pair< OperandType, OLSyntaxNode > pair : n.operands() ) {
pair.value().accept( this );
}
}
// Verifies every operand of a sum expression.
public void visit( SumExpressionNode n )
{
for( Pair< OperandType, OLSyntaxNode > pair : n.operands() ) {
pair.value().accept( this );
}
}
// Verifies the variable path referenced by the expression.
public void visit( VariableExpressionNode n )
{
n.variablePath().accept( this );
}
// Verifies the variable path captured by an install-fixed expression.
public void visit( InstallFixedVariableExpressionNode n )
{
n.variablePath().accept( this );
}
// These statements carry no semantics to verify.
public void visit( NullProcessStatement n ) {}
public void visit( ExitStatement n ) {}
public void visit( ExecutionInfo n ) {}
// Validates a correlation set declaration: each correlation variable must be
// local (not global) and a single-level path without an array index. Valid
// variable names are recorded in correlationSet for later checks.
public void visit( CorrelationSetInfo n )
{
VariablePathNode varPath;
List< Pair< OLSyntaxNode, OLSyntaxNode > > path;
for( List< VariablePathNode > list : n.cset() ) {
varPath = list.get( 0 );
if ( varPath.isGlobal() ) {
error( list.get( 0 ), "Correlation variables can not be global" );
}
path = varPath.path();
if ( path.size() > 1 ) {
error( varPath, "Correlation variables can not be nested paths" );
} else if ( path.get( 0 ).value() != null ) {
// a non-null value() here means an array index was used
error( varPath, "Correlation variables can not use arrays" );
} else {
correlationSet.add( ((ConstantStringExpression)path.get( 0 ).key()).value() );
}
}
}
// Run is accepted but flagged as experimental.
public void visit( RunStatement n )
{
warning( n, "Run statement is not a stable feature yet." );
}
// Verifies the path whose vector size is being read.
public void visit( ValueVectorSizeExpressionNode n )
{
n.variablePath().accept( this );
}
// The four increment/decrement forms all mutate their path: record the
// assignment, then verify the path itself.
public void visit( PreIncrementStatement n )
{
encounteredAssignment( n.variablePath() );
n.variablePath().accept( this );
}
public void visit( PostIncrementStatement n )
{
encounteredAssignment( n.variablePath() );
n.variablePath().accept( this );
}
public void visit( PreDecrementStatement n )
{
encounteredAssignment( n.variablePath() );
n.variablePath().accept( this );
}
public void visit( PostDecrementStatement n )
{
encounteredAssignment( n.variablePath() );
n.variablePath().accept( this );
}
// Returns true when the given path refers to a declared correlation
// variable: only non-null, non-global paths whose root name appears in
// correlationSet qualify.
private boolean isInCorrelationSet( VariablePathNode n )
{
	if ( n == null || n.isGlobal() ) {
		return false;
	}
	String rootName = ((ConstantStringExpression)n.path().get( 0 ).key()).value();
	return correlationSet.contains( rootName );
}
// Checks an undef: records the assignment, verifies the path, and forbids
// undefining a correlation variable.
public void visit( UndefStatement n )
{
encounteredAssignment( n.variablePath() );
n.variablePath().accept( this );
if ( isInCorrelationSet( n.variablePath() ) ) {
error( n, "Undefining a correlation variable is forbidden" );
}
}
// Verifies all four parts of a for loop.
public void visit( ForStatement n )
{
n.init().accept( this );
n.condition().accept( this );
n.post().accept( this );
n.body().accept( this );
}
// Verifies the key path, target path and body of a foreach loop.
public void visit( ForEachStatement n )
{
n.keyPath().accept( this );
n.targetPath().accept( this );
n.body().accept( this );
}
// Verifies the path whose type is being tested.
public void visit( IsTypeExpressionNode n )
{
n.variablePath().accept( this );
}
// Verifies the expression being cast.
public void visit( TypeCastExpressionNode n )
{
n.expression().accept( this );
}
// Embedded services need no semantic verification here.
public void visit( EmbeddedServiceNode n )
{}
/**
* @todo Must check if it's inside an install function
*/
public void visit( CurrentHandlerStatement n )
{}
// Interface definitions are checked through their operation declarations.
public void visit( InterfaceDefinition n )
{}
}
|
package org.libjtodotxt;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;

/**
 * Manages a todo.txt / done.txt file pair: parses the todo.txt file on
 * construction and keeps the in-memory task, project and context lists in
 * sync with the files as tasks are added, removed or archived.
 */
public class TodoTxtHandler {

	private final String newLine;
	private File todoFile;
	private File doneFile;
	private List<Task> tasks;
	// Distinct contexts/projects seen across all tasks (no duplicates).
	private List<String> contexts;
	private List<String> projects;

	/**
	 * Constructs a handler initialized to the contents of the specified
	 * todo.txt file.
	 *
	 * @param todoFile
	 *            the todo.txt file.
	 * @param doneFile
	 *            the done.txt file.
	 * @param newLine
	 *            the line separator written after each task.
	 * @throws IOException
	 *             if reading the todo.txt file failed
	 * @throws ParseException
	 *             if parsing a task line failed
	 */
	public TodoTxtHandler(File todoFile, File doneFile, String newLine)
			throws IOException, ParseException {
		Utils.checkForNullArgument(todoFile, "todoFile");
		Utils.checkForNullArgument(doneFile, "doneFile");
		Utils.checkForNullArgument(newLine, "newLine");
		this.todoFile = todoFile;
		this.doneFile = doneFile;
		this.newLine = newLine;
		tasks = new ArrayList<Task>();
		contexts = new ArrayList<String>();
		projects = new ArrayList<String>();
		parseTasks();
	}

	// Reads every line of the todo file and populates the task, context and
	// project lists.
	private void parseTasks() throws IOException, ParseException {
		List<String> lines = Utils.readFileContent(todoFile);
		for (String line : lines) {
			Task task = new Task(line);
			tasks.add(task);
			parseContexts(task);
			parseProjects(task);
		}
	}

	/**
	 * Adds the specified task to the todo.txt file.
	 *
	 * @param newTask
	 *            the task to be added.
	 * @throws IOException
	 *             if writing the task to file failed
	 */
	public void addTask(Task newTask) throws IOException {
		Utils.checkForNullArgument(newTask, "newTask");
		tasks.add(newTask);
		addTaskToFile(newTask, todoFile);
		parseContexts(newTask);
		parseProjects(newTask);
	}

	// Merges the task's projects into the distinct project list.
	private void parseProjects(Task task) {
		for (String project : task.getProjects()) {
			if (!projects.contains(project)) {
				projects.add(project);
			}
		}
	}

	// Merges the task's contexts into the distinct context list.
	private void parseContexts(Task task) {
		for (String context : task.getContexts()) {
			if (!contexts.contains(context)) {
				contexts.add(context);
			}
		}
	}

	// Appends the task's line (plus the configured separator) to the file.
	private void addTaskToFile(Task task, File file) throws IOException {
		BufferedWriter writer = null;
		try {
			writer = new BufferedWriter(new FileWriter(file, true));
			writer.write(task.getLine());
			writer.write(newLine);
		} finally {
			if (writer != null) {
				writer.close();
			}
		}
	}

	/**
	 * Removes the specified task from the todo.txt file.
	 *
	 * @param taskToRemove
	 *            the task be to removed
	 * @throws IOException
	 *             if the removal of task from the file failed
	 */
	public void removeTask(Task taskToRemove) throws IOException {
		Utils.checkForNullArgument(taskToRemove, "task");
		checkForExistingTask(taskToRemove);
		tasks.remove(taskToRemove);
		removeTaskFromFile(taskToRemove, todoFile);
	}

	// Fails fast when the task is not part of the in-memory task list.
	private void checkForExistingTask(Task task) {
		if (!tasks.contains(task)) {
			throw new IllegalArgumentException("Task \"" + task.getLine()
					+ "\" does not exist!");
		}
	}

	// Rewrites the file, keeping every line except those matching the task.
	private void removeTaskFromFile(Task task, File file) throws IOException {
		List<String> lines = Utils.readFileContent(file);
		BufferedWriter writer = null;
		try {
			// Bug fix: the file must be truncated and rewritten; the original
			// opened the writer in append mode, duplicating its content.
			writer = new BufferedWriter(new FileWriter(file, false));
			for (String line : lines) {
				if (!line.trim().equals(task.getLine())) {
					// Bug fix: the original wrote task.getLine() here, which
					// replaced every kept line with the removed task's text.
					writer.write(line);
					writer.write(newLine);
				}
			}
		} finally {
			if (writer != null) {
				writer.close();
			}
		}
	}

	/**
	 * Archives the specified task by moving it from the todo.txt to the
	 * done.txt file.
	 *
	 * @param taskToArchive
	 *            the task to be archived
	 * @throws IOException
	 *             if the moving of the task failed
	 */
	public void archiveTask(Task taskToArchive) throws IOException {
		Utils.checkForNullArgument(taskToArchive, "task");
		checkForExistingTask(taskToArchive);
		tasks.remove(taskToArchive);
		removeTaskFromFile(taskToArchive, todoFile);
		addTaskToFile(taskToArchive, doneFile);
	}

	/**
	 * Returns the list of projects used by tasks in the todo.txt file.
	 *
	 * @return the list of projects
	 */
	public List<String> getProjects() {
		return projects;
	}

	/**
	 * Returns the list of contexts used by tasks in the todo.txt file.
	 *
	 * @return the list of contexts
	 */
	public List<String> getContexts() {
		return contexts;
	}

	/**
	 * Returns the lines of tasks which have the specified project.
	 *
	 * @param project
	 *            the project to get tasks for
	 * @return the list of task lines
	 */
	public List<String> getTasksForProject(String project) {
		List<String> foundTasks = new LinkedList<String>();
		for (Task task : tasks) {
			if (task.getProjects().contains(project)) {
				foundTasks.add(task.getLine());
			}
		}
		return foundTasks;
	}

	/**
	 * Returns the lines of tasks which have the specified context.
	 *
	 * @param context
	 *            the context to get tasks for
	 * @return the list of task lines
	 */
	public List<String> getTasksForContext(String context) {
		List<String> foundTasks = new LinkedList<String>();
		for (Task task : tasks) {
			if (task.getContexts().contains(context)) {
				foundTasks.add(task.getLine());
			}
		}
		return foundTasks;
	}

	/**
	 * Returns a defensive deep copy of the tasks in the todo.txt file, so
	 * callers cannot mutate the handler's internal state.
	 *
	 * @return the list of tasks
	 */
	public List<Task> getTasks() {
		List<Task> clone = new LinkedList<Task>();
		for (Task task : tasks) {
			try {
				clone.add(new Task(task.getLine()));
			} catch (ParseException shouldNeverHappenException) {
				// Each line already parsed successfully once; re-parsing it
				// can only fail on a programming error.
				throw new RuntimeException(shouldNeverHappenException);
			}
		}
		return clone;
	}
}
|
package net.sf.jabb.seqtx.azure;
import java.io.Serializable;
import java.nio.ByteBuffer;
import java.time.Duration;
import java.time.Instant;
import java.util.Base64;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import net.sf.jabb.azure.AzureStorageUtility;
import net.sf.jabb.seqtx.ReadOnlySequentialTransaction;
import net.sf.jabb.seqtx.SequentialTransaction;
import net.sf.jabb.seqtx.SequentialTransactionState;
import net.sf.jabb.seqtx.SequentialTransactionsCoordinator;
import net.sf.jabb.seqtx.SimpleSequentialTransaction;
import net.sf.jabb.seqtx.ex.DuplicatedTransactionIdException;
import net.sf.jabb.seqtx.ex.IllegalEndPositionException;
import net.sf.jabb.seqtx.ex.IllegalTransactionStateException;
import net.sf.jabb.seqtx.ex.TransactionStorageInfrastructureException;
import net.sf.jabb.seqtx.ex.NoSuchTransactionException;
import net.sf.jabb.seqtx.ex.NotOwningTransactionException;
import net.sf.jabb.util.attempt.AttemptStrategy;
import net.sf.jabb.util.attempt.StopStrategies;
import net.sf.jabb.util.ex.ExceptionUncheckUtility;
import net.sf.jabb.util.ex.ExceptionUncheckUtility.ConsumerThrowsExceptions;
import net.sf.jabb.util.ex.ExceptionUncheckUtility.PredicateThrowsExceptions;
import net.sf.jabb.util.parallel.BackoffStrategies;
import net.sf.jabb.util.parallel.WaitStrategies;
import net.sf.jabb.util.text.DurationFormatter;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.microsoft.azure.storage.CloudStorageAccount;
import com.microsoft.azure.storage.StorageErrorCodeStrings;
import com.microsoft.azure.storage.StorageException;
import com.microsoft.azure.storage.table.CloudTable;
import com.microsoft.azure.storage.table.CloudTableClient;
import com.microsoft.azure.storage.table.DynamicTableEntity;
import com.microsoft.azure.storage.table.TableBatchOperation;
import com.microsoft.azure.storage.table.TableOperation;
import com.microsoft.azure.storage.table.TableQuery;
import com.microsoft.azure.storage.table.TableQuery.QueryComparisons;
import com.microsoft.azure.storage.table.TableRequestOptions;
/**
* The implementation of SequentialTransactionsCoordinator that is backed by Microsoft Azure table storage.
* The existence of the underlying table is checked and ensured only once during the life time of the instance of this class.
* @author James Hu
*
*/
public class AzureSequentialTransactionsCoordinator implements SequentialTransactionsCoordinator {
static private final Logger logger = LoggerFactory.getLogger(AzureSequentialTransactionsCoordinator.class);
// URL-safe base64 encoder without padding, used to render 16-byte UUIDs as
// 22-character transaction IDs (see newUniqueTransactionId()).
static protected final Base64.Encoder base64 = Base64.getUrlEncoder().withoutPadding();
/**
* The default attempt strategy for Azure operations, with maximum 90 seconds allowed in total, and 0.5 to 10 seconds backoff interval
* according to fibonacci series.
*/
static public final AttemptStrategy DEFAULT_ATTEMPT_STRATEGY = new AttemptStrategy()
.withWaitStrategy(WaitStrategies.threadSleepStrategy())
.withStopStrategy(StopStrategies.stopAfterTotalDuration(Duration.ofSeconds(90)))
.withBackoffStrategy(BackoffStrategies.fibonacciBackoff(500L, 1000L * 10));
public static final String DEFAULT_TABLE_NAME = "SequentialTransactionsCoordinator";
// Sentinel row inserted as the first "transaction" of a series, used only
// for optimistic-concurrency control when creating the real first one.
public static final String DUMMY_FIRST_TRANSACTION_ID = "DUMMY_FIRST_TRANSACTION_ID" + "|||||||";
protected String tableName = DEFAULT_TABLE_NAME;
protected CloudTableClient tableClient;
// Cache of the last known succeeded transaction (volatile for visibility).
protected volatile SimpleSequentialTransaction lastSucceededTransactionCached;
// Set once the underlying table's existence has been ensured.
protected volatile boolean tableExists = false;
protected AttemptStrategy attemptStrategy = DEFAULT_ATTEMPT_STRATEGY;
// Predicates used to classify storage exceptions during retries.
protected static final Predicate<Exception> ENTITY_HAS_BEEN_MODIFIED_BY_OTHERS = AzureStorageUtility::isUpdateConditionNotSatisfied;
protected static final Predicate<Exception> ENTITY_HAS_BEEN_DELETED_OR_MODIFIED_BY_OTHERS = AzureStorageUtility::isNotFoundOrUpdateConditionNotSatisfied;
// No-arg constructor; table client must be provided via a setter before use.
public AzureSequentialTransactionsCoordinator(){
}
// Full constructor: null tableName/attemptStrategy keep the defaults; the
// optional configurer can customize the table client's default options.
public AzureSequentialTransactionsCoordinator(CloudStorageAccount storageAccount, String tableName, AttemptStrategy attemptStrategy, Consumer<TableRequestOptions> defaultOptionsConfigurer){
this();
if (tableName != null){
this.tableName = tableName;
}
if (attemptStrategy != null){
this.attemptStrategy = attemptStrategy;
}
tableClient = storageAccount.createCloudTableClient();
if (defaultOptionsConfigurer != null){
defaultOptionsConfigurer.accept(tableClient.getDefaultRequestOptions());
}
}
// Convenience overloads delegating to the full storage-account constructor.
public AzureSequentialTransactionsCoordinator(CloudStorageAccount storageAccount, String tableName, AttemptStrategy attemptStrategy){
this(storageAccount, tableName, attemptStrategy, null);
}
public AzureSequentialTransactionsCoordinator(CloudStorageAccount storageAccount, String tableName){
this(storageAccount, tableName, null, null);
}
public AzureSequentialTransactionsCoordinator(CloudStorageAccount storageAccount, Consumer<TableRequestOptions> defaultOptionsConfigurer){
this(storageAccount, null, null, defaultOptionsConfigurer);
}
public AzureSequentialTransactionsCoordinator(CloudStorageAccount storageAccount){
this(storageAccount, null, null, null);
}
// Constructors accepting a pre-built table client instead of an account.
public AzureSequentialTransactionsCoordinator(CloudTableClient tableClient, String tableName, AttemptStrategy attemptStrategy){
this();
if (tableName != null){
this.tableName = tableName;
}
this.tableClient = tableClient;
if (attemptStrategy != null){
this.attemptStrategy = attemptStrategy;
}
}
public AzureSequentialTransactionsCoordinator(CloudTableClient tableClient, AttemptStrategy attemptStrategy){
this(tableClient, null, attemptStrategy);
}
public AzureSequentialTransactionsCoordinator(CloudTableClient tableClient){
this(tableClient, null, null);
}
// Sets the name of the Azure table backing this coordinator.
public void setTableName(String tableName) {
	this.tableName = tableName;
}

// Sets the Azure table client used for all storage operations.
public void setTableClient(CloudTableClient tableClient) {
	this.tableClient = tableClient;
}

/**
 * Sets the attempt (retry) strategy used for Azure table operations.
 * @param attemptStrategy the strategy to use
 */
public void setAttemptStrategy(AttemptStrategy attemptStrategy) {
	this.attemptStrategy = attemptStrategy;
}

/**
 * Misleadingly named overload kept for backward compatibility: despite its
 * name it sets the attempt strategy, not the table client.
 * @param attemptStrategy the strategy to use
 * @deprecated use {@link #setAttemptStrategy(AttemptStrategy)} instead
 */
@Deprecated
public void setTableClient(AttemptStrategy attemptStrategy) {
	setAttemptStrategy(attemptStrategy);
}
/**
 * Generate a 22-character presented random UUID
 * @return base64 URL safe encoded UUID
 */
protected String newUniqueTransactionId(){
	// Pack the 128 random bits of a UUID into 16 bytes, then encode with the
	// padding-free URL-safe base64 encoder, yielding exactly 22 characters.
	UUID uuid = UUID.randomUUID();
	byte[] raw = ByteBuffer.allocate(16)
			.putLong(uuid.getMostSignificantBits())
			.putLong(uuid.getLeastSignificantBits())
			.array();
	return base64.encodeToString(raw);
}
// Starts (or restarts) a transaction in the given series. Order of attempts:
// 1) retry a previously failed transaction if any; 2) if the caller proposed
// no start position, propose the next transaction after the current last;
// 3) otherwise try to create the caller-specified transaction after the
// expected previous one, falling back to a fresh proposal if the series
// advanced concurrently. Returns null when no more transactions are allowed.
@Override
public SequentialTransaction startTransaction(String seriesId,
String previousTransactionId, String previousTransactionEndPosition,
ReadOnlySequentialTransaction transaction,
int maxInProgressTransacions, int maxRetryingTransactions)
throws TransactionStorageInfrastructureException,
DuplicatedTransactionIdException {
// Argument validation.
Validate.notNull(seriesId, "Series ID cannot be null");
Validate.notNull(transaction.getProcessorId(), "Processor ID cannot be null");
Validate.notNull(transaction.getTimeout(), "Transaction time out cannot be null");
if (transaction.getStartPosition() == null){ // startPosition is not null when restarting a specific transaction
Validate.isTrue(null == transaction.getEndPosition(), "End position must be null when start position is null");
}
if (previousTransactionId != null){
Validate.notNull(previousTransactionEndPosition, "previousTransactionEndPosition cannot be null when previousTransactionId is not null: " + previousTransactionId);
}
Validate.isTrue(maxInProgressTransacions > 0, "Maximum number of in-progress transactions must be greater than zero: %d", maxInProgressTransacions);
Validate.isTrue(maxRetryingTransactions > 0, "Maximum number of retrying transactions must be greater than zero: %d", maxRetryingTransactions);
Validate.isTrue(maxInProgressTransacions >= maxRetryingTransactions, "Maximum number of in-progress transactions must not be less than the maximum number of retrying transactions: %d, %d", maxInProgressTransacions, maxRetryingTransactions);
// Timestamps below are only used for duration logging.
long startTime = System.currentTimeMillis();
// try to pick up a failed one for retrying
SequentialTransaction retryingTransaction = startAnyFailedTransaction(seriesId, transaction.getProcessorId(), transaction.getTimeout(), maxInProgressTransacions, maxRetryingTransactions);
if (retryingTransaction != null){
return retryingTransaction;
}
long finishStartAnyFailedTime = System.currentTimeMillis();
// Snapshot the recent transactions (including the dummy first sentinel).
List<? extends ReadOnlySequentialTransaction> transactions = getRecentTransactionsIncludingDummy(seriesId);
if (transactions.size() > 0 && StringUtils.isNotEmpty(previousTransactionEndPosition)){
Validate.notNull(previousTransactionId, "previousTransactionId cannot be null when previousTransactionEndPosition has a value");
}
TransactionCounts counts = SequentialTransactionsCoordinator.getTransactionCounts(transactions);
ReadOnlySequentialTransaction last = transactions.size() > 0 ? transactions.get(transactions.size() - 1) : null;
long finishGetRecentTime = System.currentTimeMillis();
if (counts.getInProgress() >= maxInProgressTransacions){ // no more transaction allowed
return null;
}
if (counts.getInProgress() > 0 && last.getEndPosition() == null && last.isInProgress()){ // the last one is in-progress and is open
return null;
}
if (transaction.getStartPosition() == null){ // the client has nothing in mind, so propose a new one
return newNextTransaction(last, transaction.getProcessorId(), transaction.getTimeout());
}else{ // try to start the transaction requested by the client
// The request is honored only if the expected previous transaction is
// still the last of the series (the dummy sentinel counts as "no previous").
if (last == null || last.getTransactionId().equals(previousTransactionId)
|| previousTransactionId == null && DUMMY_FIRST_TRANSACTION_ID.equals(last.getTransactionId()) ){
// start the requested one
SimpleSequentialTransaction newTrans = SimpleSequentialTransaction.copyOf(transaction);
newTrans.setAttempts(1);
newTrans.setStartTime(Instant.now());
newTrans.setFinishTime(null);
newTrans.setState(SequentialTransactionState.IN_PROGRESS);
// Generate an ID if the caller did not supply one; otherwise reject
// blank or duplicated IDs.
String transactionId = newTrans.getTransactionId();
if (transactionId == null){
newTrans.setTransactionId(newUniqueTransactionId());
}else{
Validate.notBlank(transactionId, "Transaction ID cannot be blank: %s", transactionId);
if (transactions.stream().anyMatch(t->t.getTransactionId().equals(transactionId))){
throw new DuplicatedTransactionIdException("Transaction ID '" + transactionId + "' is duplicated");
}
}
try {
SequentialTransactionEntity createdEntity = createNewTransaction(seriesId, previousTransactionId, previousTransactionEndPosition, newTrans);
logger.debug("Created new transaction '{}' after '{}'. tryStartAnyFailed: {}, getRecent: {}, createNew(succeeded): {}",
createdEntity.keysToString(), previousTransactionId,
DurationFormatter.format(finishStartAnyFailedTime-startTime),
DurationFormatter.format(finishGetRecentTime-finishStartAnyFailedTime),
DurationFormatter.formatSince(finishGetRecentTime));
return createdEntity.toSequentialTransaction();
} catch (IllegalStateException e) { // the last one is no longer the last
logger.debug("Transaction '{}/{}' is no longer the last. tryStartAnyFailed: {}, getRecent: {}, createNew(failed): {}",
seriesId, previousTransactionId,
DurationFormatter.format(finishStartAnyFailedTime-startTime),
DurationFormatter.format(finishGetRecentTime-finishStartAnyFailedTime),
DurationFormatter.formatSince(finishGetRecentTime));
// The series advanced concurrently: propose the next transaction
// based on the freshly fetched last entity, or null if impossible.
SequentialTransactionEntity latestLast = null;
try{
latestLast = fetchLastTransactionEntity(seriesId);
return newNextTransactionOrNull(latestLast, transaction.getProcessorId(), transaction.getTimeout());
}catch(Exception e1){
throw new TransactionStorageInfrastructureException("Failed to fetch latest last transaction in series '" + seriesId + "'", e1);
}
} catch (StorageException e){
throw new TransactionStorageInfrastructureException("Failed to create after the last one with ID '" + previousTransactionId + "' a new transaction: " + newTrans, e);
}
}else{
// propose a new one
return newNextTransaction(last, transaction.getProcessorId(), transaction.getTimeout());
}
}
}
/**
 * Create an instance of SequentialTransaction that is the next to a specified one, or return null if it is not possible
 * @param previous the previous transaction which should be the last in the series
 * @param processorId ID of the processor
 * @param timeout time out of the to be created transaction
 * @return a new transaction, or null if previous is null or has a null end position
 */
protected SequentialTransaction newNextTransactionOrNull(ReadOnlySequentialTransaction previous, String processorId, Instant timeout){
	if (previous == null || previous.getEndPosition() == null){
		// cannot chain after a missing or still-open transaction
		return null;
	}
	// The dummy sentinel means the series is effectively empty: the new
	// transaction has no predecessor and no fixed start position.
	boolean afterDummy = DUMMY_FIRST_TRANSACTION_ID.equals(previous.getTransactionId());
	String previousId = afterDummy ? null : previous.getTransactionId();
	String startPosition = afterDummy ? null : previous.getEndPosition();
	return new SimpleSequentialTransaction(previousId, processorId, startPosition, timeout);
}
/**
 * Create an instance of SequentialTransaction that is the next to a specified one
 * @param previous the previous transaction which should be the last in the series
 * @param processorId ID of the processor
 * @param timeout time out of the to be created transaction
 * @return a new transaction
 */
protected SequentialTransaction newNextTransaction(ReadOnlySequentialTransaction previous, String processorId, Instant timeout){
	// Start fresh (no predecessor, no start position) when there is no usable
	// previous transaction: none at all, one without an end position, or the
	// dummy first-transaction sentinel.
	boolean startFresh = previous == null
			|| previous.getEndPosition() == null
			|| DUMMY_FIRST_TRANSACTION_ID.equals(previous.getTransactionId());
	if (startFresh){
		return new SimpleSequentialTransaction(null, processorId, null, timeout);
	}
	return new SimpleSequentialTransaction(previous.getTransactionId(), processorId, previous.getEndPosition(), timeout);
}
/**
 * Atomically append a new transaction entity to the end of a series.
 * The current last entity is updated (to point at the new one) and the new entity is
 * inserted in a single batch operation, so a concurrent append surfaces as a batch
 * failure and is reported as IllegalStateException.
 * For an empty series, a finished dummy "first" entity is inserted first as a
 * concurrency-control anchor.
 * @param seriesId ID of the series the new transaction belongs to
 * @param lastTransactionId ID of the transaction the caller believes is currently the last
 *        in the series, or null if the caller believes the series is empty
 * @param previousTransactionEndPosition end position of that last transaction as seen by
 *        the caller; used to detect that it has not changed concurrently
 * @param newTrans the transaction to persist
 * @return the newly created entity, now the last in the series
 * @throws IllegalStateException if the referenced transaction is no longer the last one,
 *         its end position changed, or another client appended to the empty series first
 * @throws StorageException if an underlying table operation failed for another reason
 * @throws TransactionStorageInfrastructureException if the table reference could not be obtained
 */
protected SequentialTransactionEntity createNewTransaction(String seriesId, String lastTransactionId, String previousTransactionEndPosition, SimpleSequentialTransaction newTrans) throws IllegalStateException, StorageException, TransactionStorageInfrastructureException{
	CloudTable table = getTableReference();
	SequentialTransactionEntity last = null;
	if (lastTransactionId == null){ // the first one
		last = fetchEntity(seriesId, DUMMY_FIRST_TRANSACTION_ID);
		if (last == null){ // the actual first
			// we must create a dummy last one for concurrency control
			last = new SequentialTransactionEntity();
			last.setSeriesId(seriesId);
			last.setTransactionId(DUMMY_FIRST_TRANSACTION_ID);
			last.setFirstTransaction();
			last.setLastTransaction();
			last.setState(SequentialTransactionState.FINISHED);
			last.setStartTime(Instant.ofEpochMilli(0));
			last.setFinishTime(Instant.ofEpochMilli(0));
			try{
				table.execute(TableOperation.insert(last));
			}catch(StorageException e){
				// 409/EntityAlreadyExists means another client created the dummy first
				if (e.getHttpStatusCode() == 409 && StorageErrorCodeStrings.ENTITY_ALREADY_EXISTS.equals(e.getErrorCode())){ // someone is faster
					throw new IllegalStateException("A new transaction is now the last one");
				}else{
					throw e;
				}
			}
		}else{ // previously a first transaction aborted, left the dummy first one there
			if (!last.isLastTransaction()){
				throw new IllegalStateException("The transaction in series '" + seriesId + "' is no longer the last one: " + lastTransactionId);
			}
		}
	}else{
		last = fetchEntity(seriesId, lastTransactionId);
		if (last == null || !last.isLastTransaction()){
			throw new IllegalStateException("The transaction in series '" + seriesId + "' is no longer the last one: " + lastTransactionId);
		}
		// the end position must not have been changed by anyone else since the caller read it
		if (!StringUtils.equals(previousTransactionEndPosition, last.getEndPosition())){
			throw new IllegalStateException("The transaction in series '" + seriesId + "' has changed its end position from '"
					+ previousTransactionEndPosition + "' to '" + last.getEndPosition() + "': " + lastTransactionId);
		}
	}
	// link the new entity after the current last one
	SequentialTransactionEntity next = SequentialTransactionEntity.fromSequentialTransaction(seriesId, newTrans, last.getTransactionId(), null);
	last.setNextTransactionId(next.getTransactionId());
	next.setPreviousTransactionId(last.getTransactionId());
	next.setLastTransaction();
	// do in a transaction: update the last, and insert the new one
	TableBatchOperation batchOperation = new TableBatchOperation();
	batchOperation.add(TableOperation.merge(last));
	batchOperation.add(TableOperation.insert(next));
	try{
		table.execute(batchOperation);
	}catch(StorageException e){
		// the merge uses the ETag read above, so any concurrent change makes the batch fail
		if (ENTITY_HAS_BEEN_DELETED_OR_MODIFIED_BY_OTHERS.test(e)){
			throw new IllegalStateException("The transaction is no longer the last one: " + last.keysToString());
		}else{
			throw e;
		}
	}
	return next;
	// TODO: check the duplicated keys exception
}
/**
 * Try to pick up one previously failed transaction for retrying.
 * Wraps {@link #doStartAnyFailedTransaction(String, String, Instant, int, int)} in a
 * no-back-off retry loop that retries on transient transaction-state races.
 * @param seriesId ID of the series, cannot be null
 * @param processorId ID of the processor, cannot be null
 * @param timeout time out of the restarted transaction, cannot be null
 * @param maxInProgressTransacions maximum number of in-progress transactions allowed, must be positive
 * @param maxRetryingTransactions maximum number of retrying transactions allowed, must be positive
 *        and not greater than maxInProgressTransacions
 * @return the transaction picked up for retrying, or null if none could be started
 * @throws TransactionStorageInfrastructureException if the attempt failed for infrastructure reasons
 */
@Override
public SequentialTransaction startAnyFailedTransaction(String seriesId, String processorId, Instant timeout, int maxInProgressTransacions, int maxRetryingTransactions)
		throws TransactionStorageInfrastructureException{
	Validate.notNull(seriesId, "Series ID cannot be null");
	Validate.notNull(processorId, "Processor ID cannot be null");
	Validate.notNull(timeout, "Transaction time out cannot be null");
	Validate.isTrue(maxInProgressTransacions > 0, "Maximum number of in-progress transactions must be greater than zero: %d", maxInProgressTransacions);
	Validate.isTrue(maxRetryingTransactions > 0, "Maximum number of retrying transactions must be greater than zero: %d", maxRetryingTransactions);
	Validate.isTrue(maxInProgressTransacions >= maxRetryingTransactions, "Maximum number of in-progress transactions must not be less than the maximum number of retrying transactions: %d, %d", maxInProgressTransacions, maxRetryingTransactions);
	try{
		// IllegalTransactionStateException/NoSuchTransactionException indicate a race with
		// another processor and are worth an immediate retry
		AttemptStrategy strategy = new AttemptStrategy(attemptStrategy)
				.overrideBackoffStrategy(BackoffStrategies.noBackoff())
				.retryIfException(IllegalTransactionStateException.class)
				.retryIfException(NoSuchTransactionException.class);
		return strategy.callThrowingSuppressed(
				() -> doStartAnyFailedTransaction(seriesId, processorId, timeout, maxInProgressTransacions, maxRetryingTransactions));
	}catch(Exception e){
		throw new TransactionStorageInfrastructureException("Failed to start failed transaction for retrying: " + seriesId, e);
	}
}
/**
 * Single attempt to pick up a failed transaction for retrying.
 * Scans recent transactions; if the in-progress limit is not reached and the retrying
 * limit allows, tries each failed transaction in turn, claiming it for this processor
 * via an optimistic update. Transactions claimed by others in the meantime are skipped.
 * @param seriesId ID of the series
 * @param processorId ID of the processor that will own the restarted transaction
 * @param timeout time out of the restarted transaction
 * @param maxInProgressTransacions maximum number of in-progress transactions allowed
 * @param maxRetryingTransactions maximum number of retrying transactions allowed
 * @return the restarted transaction, or null if no failed transaction could be started
 * @throws TransactionStorageInfrastructureException if an infrastructure-level failure happened
 * @throws IllegalTransactionStateException not thrown directly here; declared so the retrying caller can react
 * @throws NoSuchTransactionException not thrown directly here; declared so the retrying caller can react
 * @throws Exception propagated from the underlying attempt strategy
 */
protected SequentialTransaction doStartAnyFailedTransaction(String seriesId, String processorId, Instant timeout, int maxInProgressTransacions, int maxRetryingTransactions)
		throws TransactionStorageInfrastructureException, IllegalTransactionStateException, NoSuchTransactionException, Exception{
	List<? extends ReadOnlySequentialTransaction> transactions = getRecentTransactionsIncludingDummy(seriesId);
	TransactionCounts counts = SequentialTransactionsCoordinator.getTransactionCounts(transactions);
	if (counts.getInProgress() >= maxInProgressTransacions){ // no more transaction allowed
		return null;
	}
	if (counts.getRetrying() < maxRetryingTransactions && counts.getFailed() > 0){ // always first try to pick up a failed to retry
		List<? extends ReadOnlySequentialTransaction> allFailed = transactions.stream().filter(tx->tx.isFailed()).collect(Collectors.toList());
		for (ReadOnlySequentialTransaction failed: allFailed){
			ReadOnlySequentialTransaction tx = failed;
			String transactionKey = AzureStorageUtility.keysToString(seriesId, tx.getTransactionId());
			AtomicReference<SequentialTransaction> startedTx = new AtomicReference<>(null);
			try {
				// claim the failed transaction: the attempts-count check inside the state
				// checker guards against someone else having already retried it
				new AttemptStrategy(attemptStrategy)
					.overrideBackoffStrategy(BackoffStrategies.noBackoff())
					.retryIfException(ENTITY_HAS_BEEN_DELETED_OR_MODIFIED_BY_OTHERS)
					.runThrowingSuppressed(()->modifyTransaction(seriesId, tx.getProcessorId(), tx.getTransactionId(),
						entity->entity.getAttempts() == tx.getAttempts() && entity.retry(processorId, timeout), entity->{
							CloudTable table = getTableReference();
							table.execute(TableOperation.replace(entity));
							startedTx.set(entity.toSequentialTransaction());
						}));
				return startedTx.get();
			} catch (TransactionStorageInfrastructureException e){
				throw e;
			} catch (IllegalTransactionStateException | NoSuchTransactionException | NotOwningTransactionException e){ // picked up by someone else
				continue; // try next one
			} catch (Exception e){ // only possible: StorageException|RuntimeException
				throw new TransactionStorageInfrastructureException("Failed to update transaction entity for retry: " + transactionKey, e);
			}
		}
	}
	return null; // no transaction can be started for retrying
}
/**
 * Mark a transaction owned by the specified processor as finished, optionally updating
 * its end position first.
 * End-position rules enforced here: only the last transaction in the series may have its
 * end position changed; a transaction cannot be finished while its end position is null.
 * Retries without back-off when the entity was concurrently modified by others.
 * @param seriesId ID of the series, or null to locate the transaction by its ID only
 * @param processorId ID of the processor that must own the transaction, cannot be null
 * @param transactionId ID of the transaction, cannot be null
 * @param endPosition the end position to set, or null to keep the current one
 * @throws NotOwningTransactionException if the transaction is owned by another processor
 * @throws TransactionStorageInfrastructureException if the underlying storage failed
 * @throws IllegalTransactionStateException if the transaction cannot transit to FINISHED,
 *         or its entity was deleted between the state check and the update
 * @throws NoSuchTransactionException if the transaction cannot be found
 * @throws IllegalEndPositionException if the end position rules above are violated
 */
@Override
public void finishTransaction(String seriesId, String processorId,
		String transactionId, String endPosition) throws NotOwningTransactionException,
		TransactionStorageInfrastructureException, IllegalTransactionStateException,
		NoSuchTransactionException, IllegalEndPositionException {
	//Validate.notNull(seriesId, "Series ID cannot be null");
	Validate.notNull(processorId, "Processor ID cannot be null");
	Validate.notNull(transactionId, "Transaction ID cannot be null");
	try {
		// carries the end position decided by the state checker into the updater
		AtomicReference<String> updatedEndPosition = new AtomicReference<>(null);
		new AttemptStrategy(attemptStrategy)
			.overrideBackoffStrategy(BackoffStrategies.noBackoff())
			.retryIfException(ENTITY_HAS_BEEN_DELETED_OR_MODIFIED_BY_OTHERS)
			.runThrowingSuppressed(()->modifyTransaction(seriesId, processorId, transactionId,
				entity->{
					updatedEndPosition.set(entity.getEndPosition());
					if (endPosition != null){
						if (entity.isLastTransaction()){
							updatedEndPosition.set(endPosition);
						}else{
							if (!endPosition.equals(entity.getEndPosition())){
								// can't change the end position of a non-last transaction
								String transactionKey = AzureStorageUtility.keysToString(entity);
								throw new IllegalEndPositionException("Cannot change transaction end position from '" + entity.getEndPosition() + "' to '" + endPosition + "' because it is not the last transaction: " + transactionKey);
							}
						}
					}
					if (updatedEndPosition.get() == null){
						// cannot finish an open transaction
						String transactionKey = AzureStorageUtility.keysToString(entity);
						throw new IllegalEndPositionException("Cannot finish transaction with a null end position: " + transactionKey);
					}
					return entity.finish();
				}, entity->{
					entity.setEndPosition(updatedEndPosition.get());
					CloudTable table = getTableReference();
					try{
						table.execute(TableOperation.replace(entity));
					}catch(StorageException e){
						if (e.getHttpStatusCode() == 404){
							throw new IllegalTransactionStateException("Transaction may already have been timed out or finished and then have been deleted: " + entity.keysToString());
						}else{
							throw e;
						}
					}
				}));
	} catch (NotOwningTransactionException | TransactionStorageInfrastructureException | IllegalTransactionStateException | IllegalEndPositionException | NoSuchTransactionException e){
		throw e;
	} catch (Exception e){ // only possible: StorageException|RuntimeException
		throw new TransactionStorageInfrastructureException("Failed to update transaction entity state to " + SequentialTransactionState.FINISHED + ": " + AzureStorageUtility.keysToString(seriesId, transactionId), e);
	}
}
/**
 * Mark a transaction owned by the specified processor as aborted.
 * Retries without back-off when the entity was concurrently modified by others.
 * @param seriesId ID of the series, or null to locate the transaction by its ID only
 * @param processorId ID of the processor that must own the transaction, cannot be null
 * @param transactionId ID of the transaction, cannot be null
 * @throws NotOwningTransactionException if the transaction is owned by another processor
 * @throws TransactionStorageInfrastructureException if the underlying storage failed
 * @throws IllegalTransactionStateException if the transaction cannot transit to ABORTED
 * @throws NoSuchTransactionException if the transaction cannot be found
 */
@Override
public void abortTransaction(String seriesId, String processorId,
		String transactionId) throws NotOwningTransactionException,
		TransactionStorageInfrastructureException, IllegalTransactionStateException,
		NoSuchTransactionException {
	//Validate.notNull(seriesId, "Series ID cannot be null");
	Validate.notNull(processorId, "Processor ID cannot be null");
	// BUG FIX: the validation message previously read "Transaction time out cannot be null",
	// copied from a time-out validation; it validates the transaction ID
	Validate.notNull(transactionId, "Transaction ID cannot be null");
	try {
		new AttemptStrategy(attemptStrategy)
			.overrideBackoffStrategy(BackoffStrategies.noBackoff())
			.retryIfException(ENTITY_HAS_BEEN_DELETED_OR_MODIFIED_BY_OTHERS)
			.runThrowingSuppressed(()->modifyTransaction(seriesId, processorId, transactionId,
				entity->entity.abort(), entity->{
					CloudTable table = getTableReference();
					table.execute(TableOperation.replace(entity));
				}));
	} catch (NotOwningTransactionException | TransactionStorageInfrastructureException | IllegalTransactionStateException | NoSuchTransactionException e){
		throw e;
	} catch (Exception e){ // only possible: StorageException|RuntimeException
		throw new TransactionStorageInfrastructureException("Failed to update transaction entity state to " + SequentialTransactionState.ABORTED + ": " + AzureStorageUtility.keysToString(seriesId, transactionId), e);
	}
}
/**
 * Update the end position, time out, and/or detail of a transaction owned by the
 * specified processor. Only in-progress transactions may be updated, and only the
 * last transaction in the series may have its end position changed.
 * Retries without back-off when the entity was concurrently modified by others.
 * @param seriesId ID of the series, or null to locate the transaction by its ID only
 * @param processorId ID of the processor that must own the transaction, cannot be null
 * @param transactionId ID of the transaction, cannot be null
 * @param endPosition the new end position, or null to leave it unchanged
 * @param timeout the new time out, or null to leave it unchanged
 * @param detail the new detail, or null to leave it unchanged
 * @throws NotOwningTransactionException if the transaction is owned by another processor
 * @throws TransactionStorageInfrastructureException if the underlying storage failed
 * @throws IllegalTransactionStateException if the transaction is not in progress
 * @throws NoSuchTransactionException if the transaction cannot be found
 * @throws IllegalEndPositionException if changing the end position of a non-last transaction
 */
@Override
public void updateTransaction(String seriesId, String processorId,
		String transactionId, String endPosition, Instant timeout, Serializable detail)
		throws NotOwningTransactionException, TransactionStorageInfrastructureException,
		IllegalTransactionStateException, NoSuchTransactionException, IllegalEndPositionException {
	//Validate.notNull(seriesId, "Series ID cannot be null");
	Validate.notNull(processorId, "Processor ID cannot be null");
	// added for consistency with finishTransaction/abortTransaction which validate the ID
	Validate.notNull(transactionId, "Transaction ID cannot be null");
	Validate.isTrue(endPosition != null || timeout != null || detail != null, "End position, time out, and detail cannot all be null");
	try {
		new AttemptStrategy(attemptStrategy)
			.overrideBackoffStrategy(BackoffStrategies.noBackoff())
			.retryIfException(ENTITY_HAS_BEEN_DELETED_OR_MODIFIED_BY_OTHERS)
			.runThrowingSuppressed(()->modifyTransaction(seriesId, processorId, transactionId,
				entity->entity.isInProgress(),
				entity->{
					if (endPosition != null){
						if (endPosition.equals(entity.getEndPosition())){
							// do nothing
						}else if (entity.isLastTransaction()){
							entity.setEndPosition(endPosition);
						}else{
							String transactionKey = AzureStorageUtility.keysToString(entity);
							throw new IllegalEndPositionException("Cannot change transaction end position from '" + entity.getEndPosition() + "' to '" + endPosition + "' because it is not the last transaction: " + transactionKey);
						}
					}
					if (timeout != null){
						entity.setTimeout(timeout);
					}
					if (detail != null){
						entity.setDetail(detail);
					}
					CloudTable table = getTableReference();
					table.execute(TableOperation.replace(entity));
				}));
	} catch (NotOwningTransactionException | TransactionStorageInfrastructureException | IllegalTransactionStateException | NoSuchTransactionException | IllegalEndPositionException e){
		throw e;
	} catch (Exception e){ // only possible: StorageException|RuntimeException
		throw new TransactionStorageInfrastructureException("Failed to update transaction entity with keys: " + AzureStorageUtility.keysToString(seriesId, transactionId), e);
	}
}
/**
 * Fetch a transaction entity, verify ownership and state, then apply an update to it.
 * @param seriesId ID of the series, or null to look the entity up by transaction ID only
 * @param processorId ID of the processor that must own the transaction, or null to skip the ownership check
 * @param transactionId ID of the transaction
 * @param stateChecker predicate deciding (and typically performing the in-memory state transition)
 *        whether the update may proceed
 * @param updater consumer that persists the updated entity
 * @throws NotOwningTransactionException if the transaction is owned by another processor
 * @throws TransactionStorageInfrastructureException if fetching the entity failed
 * @throws IllegalTransactionStateException if the state checker rejected the current state
 * @throws NoSuchTransactionException if the transaction cannot be found
 * @throws StorageException if thrown by the state checker or updater
 */
protected void modifyTransaction(String seriesId, String processorId, String transactionId,
		PredicateThrowsExceptions<SequentialTransactionEntity> stateChecker, ConsumerThrowsExceptions<SequentialTransactionEntity> updater)
		throws NotOwningTransactionException, TransactionStorageInfrastructureException,
		IllegalTransactionStateException, NoSuchTransactionException, StorageException {
	// update a single transaction
	String transactionKey = AzureStorageUtility.keysToString(seriesId, transactionId);
	SequentialTransactionEntity entity = null;
	try {
		if (seriesId == null){
			entity = fetchEntity(transactionId);
			// BUG FIX: fetchEntity(transactionId) may return null (entity not found);
			// previously entity.getPartitionKey() was called unconditionally, producing a
			// NullPointerException instead of the NoSuchTransactionException thrown below
			if (entity != null){
				seriesId = entity.getPartitionKey();
				transactionKey = AzureStorageUtility.keysToString(seriesId, transactionId);
			}
		}else{
			entity = fetchEntity(seriesId, transactionId);
		}
	} catch (StorageException e) {
		throw new TransactionStorageInfrastructureException("Failed to fetch transaction entity with keys: " + transactionKey, e);
	}
	if (entity == null){
		throw new NoSuchTransactionException("Transaction either does not exist or have succeeded and later been purged: " + transactionKey);
	}
	if (processorId != null && !processorId.equals(entity.getProcessorId())){
		throw new NotOwningTransactionException("Transaction is currently owned by processor '" + entity.getProcessorId() + "', not '" + processorId + "': " + transactionKey);
	}
	if (ExceptionUncheckUtility.testThrowingUnchecked(stateChecker, entity)){
		ExceptionUncheckUtility.acceptThrowingUnchecked(updater, entity);
	}else{
		// also fixed a missing space after "state:" in the message below
		throw new IllegalTransactionStateException("Transaction is currently in " + entity.getState() + " state: " + transactionKey);
	}
}
/**
 * Check whether a transaction has finished successfully.
 * A transaction whose entity can no longer be found is considered successful, because
 * succeeded transactions are eventually purged from the table.
 * @param seriesId ID of the series, or null to locate the transaction by its ID only
 * @param transactionId ID of the transaction, cannot be null
 * @return true if the transaction finished successfully or can no longer be found
 * @throws TransactionStorageInfrastructureException if fetching the entity failed
 */
@Override
public boolean isTransactionSuccessful(String seriesId, String transactionId)
		throws TransactionStorageInfrastructureException {
	Validate.notNull(transactionId, "Transaction id cannot be null");
	try {
		SequentialTransactionEntity entity = (seriesId == null)
				? fetchEntity(transactionId)
				: fetchEntity(seriesId, transactionId);
		if (entity == null){
			// already purged - must be an old succeeded transaction
			return true;
		}
		return entity.isFinished();
	} catch (StorageException e) {
		throw new TransactionStorageInfrastructureException("Failed to fetch transaction entity: " + AzureStorageUtility.keysToString(seriesId, transactionId), e);
	}
}
/**
 * Get a consistent snapshot of the recent transactions in a series, including the
 * internal dummy first transaction if present.
 * The fetch-link-compact cycle is retried (without back-off) until {@link #compact(LinkedList)}
 * reports a consistent view; the result may still be slightly out of date.
 * @param seriesId ID of the series
 * @return the recent transactions, oldest first
 * @throws TransactionStorageInfrastructureException if fetching or compacting failed
 */
protected List<? extends ReadOnlySequentialTransaction> getRecentTransactionsIncludingDummy(
		String seriesId) throws TransactionStorageInfrastructureException {
	LinkedList<SequentialTransactionWrapper> transactionEntities = new LinkedList<>();
	//AtomicInteger attempts = new AtomicInteger(0);
	try{
		new AttemptStrategy(attemptStrategy)
			.overrideBackoffStrategy(BackoffStrategies.noBackoff())
			.retryIfResultEquals(Boolean.FALSE)	// retry until consistent but may be not up to date
			.callThrowingAll(()->{
				//attempts.incrementAndGet();
				// get entities by seriesId
				Map<String, SequentialTransactionWrapper> wrappedTransactionEntities = fetchEntities(seriesId, true);
				transactionEntities.clear();
				transactionEntities.addAll(toList(wrappedTransactionEntities));
				// compact the list
				return compact(transactionEntities);
			});
	}catch(TransactionStorageInfrastructureException e){
		throw e;
	}catch(Exception e){
		throw new TransactionStorageInfrastructureException("Failed to fetch recent transactions for series '" + seriesId + "'", e);
	}
	//logger.debug("Attempted {} times for getting a consistent snapshot of recent transactions in series: {}", attempts.get(), seriesId);
	return transactionEntities.stream().map(SequentialTransactionWrapper::getTransactionNotNull).collect(Collectors.toList());
}
/**
 * Get the recent transactions in a series, excluding the internal dummy first transaction.
 * @param seriesId ID of the series, cannot be null
 * @return the recent transactions, oldest first
 * @throws TransactionStorageInfrastructureException if fetching failed
 */
@Override
public List<? extends ReadOnlySequentialTransaction> getRecentTransactions(
		String seriesId) throws TransactionStorageInfrastructureException {
	Validate.notNull(seriesId, "Series ID cannot be null");
	List<? extends ReadOnlySequentialTransaction> transactions = getRecentTransactionsIncludingDummy(seriesId);
	// strip the internal dummy place-holder if it sits at the head
	boolean startsWithDummy = !transactions.isEmpty()
			&& DUMMY_FIRST_TRANSACTION_ID.equals(transactions.get(0).getTransactionId());
	if (startsWithDummy){
		transactions.remove(0);
	}
	return transactions;
}
/**
 * Delete all transaction entities belonging to one series.
 * @param seriesId ID of the series to clear, cannot be null
 * @throws TransactionStorageInfrastructureException if deletion failed
 */
@Override
public void clear(String seriesId) throws TransactionStorageInfrastructureException {
	Validate.notNull(seriesId, "Series ID cannot be null");
	try{
		CloudTable table = getTableReference();
		// entities of a series share the partition key, so one partition filter covers them all
		String partitionFilter = TableQuery.generateFilterCondition(
				AzureStorageUtility.PARTITION_KEY, QueryComparisons.EQUAL, seriesId);
		AzureStorageUtility.deleteEntitiesIfExistsInBatches(table, partitionFilter);
		logger.debug("Deleted all transactions in series '{}' in table: {}", seriesId, table.getName());
	}catch(Exception e){
		throw new TransactionStorageInfrastructureException("Failed to delete entities belonging to series '" + seriesId + "' in table: " + tableName, e);
	}
}
/**
 * Delete every transaction entity in every series.
 * @throws TransactionStorageInfrastructureException if deletion failed
 */
@Override
public void clearAll() throws TransactionStorageInfrastructureException {
	try{
		CloudTable tableRef = getTableReference();
		// a null filter matches all entities in the table
		AzureStorageUtility.deleteEntitiesIfExistsInBatches(tableRef, (String)null);
		logger.debug("Deleted all transactions in all series in table: {}", tableRef.getName());
	}catch(Exception e){
		throw new TransactionStorageInfrastructureException("Failed to delete all entities in table: " + tableName, e);
	}
}
/**
 * Get a reference to the underlying table, lazily ensuring the table exists.
 * Once the table is known to exist, subsequent calls skip the existence check.
 * @return the table reference
 * @throws TransactionStorageInfrastructureException if the reference could not be obtained
 *         or the table's existence could not be ensured
 */
protected CloudTable getTableReference() throws TransactionStorageInfrastructureException{
	CloudTable tableRef;
	try {
		tableRef = tableClient.getTableReference(tableName);
	} catch (Exception e) {
		throw new TransactionStorageInfrastructureException("Failed to get reference for table: '" + tableName + "'", e);
	}
	if (tableExists){
		return tableRef;
	}
	try {
		if (AzureStorageUtility.createIfNotExists(tableClient, tableName)){
			logger.debug("Created table: {}", tableName);
		}
	} catch (Exception e) {
		throw new TransactionStorageInfrastructureException("Failed to ensure the existence of table: '" + tableName + "'", e);
	}
	tableExists = true;
	return tableRef;
}
/**
* Remove succeeded from the head (but due to concurrency, there may still be some left),
* transit those timed out to TIMED_OUT state,
* and remove the last transaction if it is a failed one with a null end position.
* @return true if data is consistent, false if data needs to be reloaded due to concurrency
* @param transactionEntities The list of transaction entities. The list may be changed inside this method.
* @throws TransactionStorageInfrastructureException if failed to update entities during the compact process
*/
protected boolean compact(LinkedList<SequentialTransactionWrapper> transactionEntities) throws TransactionStorageInfrastructureException{
// remove finished historical transactions and leave only one of them
int finished = 0; // 0 - no successful; 1 - one successful; 2 - two successful; ...
Iterator<SequentialTransactionWrapper> iterator = transactionEntities.iterator();
if (iterator.hasNext()){
SequentialTransactionWrapper wrapper;
wrapper = iterator.next();
wrapper.updateFromEntity();
if (wrapper.getTransaction().isFinished()){
finished ++;
while(iterator.hasNext()){
wrapper = iterator.next();
wrapper.updateFromEntity();
if (wrapper.getTransaction().isFinished()){
finished ++;
}else{
break;
}
}
}
}
// update lastSucceededTransactionCached
if (finished > 0){
this.lastSucceededTransactionCached = SimpleSequentialTransaction.copyOf(
transactionEntities.get(finished - 1).getTransaction());
}
// purge historical finished
CloudTable table = getTableReference();
while (finished
SequentialTransactionWrapper first = transactionEntities.getFirst();
SequentialTransactionWrapper second = first.next;
// do in a transaction: remove the first one, and update the second one
TableBatchOperation batchOperation = new TableBatchOperation();
batchOperation.add(TableOperation.delete(first.getEntity()));
second.setFirstTransaction();
batchOperation.add(TableOperation.replace(second.getEntity()));
try{
table.execute(batchOperation);
}catch(StorageException e){
if (e.getHttpStatusCode() == 404){ // the first or the second had been deleted by others
// safe to keep them in memory for now
transactionEntities.removeFirst();
break;
}
throw new TransactionStorageInfrastructureException("Failed to remove succeeded transaction entity with keys '" + first.entityKeysToString()
+ "' and make the next entity with keys '" + second.entityKeysToString()
+ "' the new first one.", e);
}
transactionEntities.removeFirst();
}
// handle time out
List<SequentialTransactionWrapper> alreadyDeleted = new LinkedList<>(); // those time out and already been deleted by others
for (SequentialTransactionWrapper wrapper: transactionEntities){
try{
if (!applyTimeout(wrapper)){
alreadyDeleted.add(wrapper);
}
}catch(StorageException e){
throw new TransactionStorageInfrastructureException("Failed to update timed out transaction entity with keys '" + wrapper.entityKeysToString()
+ "', probably it has been modified by another client.", e);
}
}
// try to remove those already deleted, but don't leave holes
while(transactionEntities.size() > 0 && alreadyDeleted.remove(transactionEntities.getFirst())){
transactionEntities.removeFirst();
}
while(transactionEntities.size() > 0 && alreadyDeleted.remove(transactionEntities.getLast())){
transactionEntities.removeLast();
}
if (alreadyDeleted.size() > 0){
return false; // needs a full reload
}
// if the last transaction is failed and is open, remove it
if (transactionEntities.size() > 0){
SequentialTransactionWrapper wrapper = transactionEntities.getLast();
SimpleSequentialTransaction tx = wrapper.getTransactionNotNull();
if (tx.isFailed() && tx.getEndPosition() == null){
// do in a batch: remove the last one, make the previous one the last
TableBatchOperation batchOperation = new TableBatchOperation();
batchOperation.add(TableOperation.delete(wrapper.getEntity()));
SequentialTransactionWrapper previousWrapper = wrapper.getPrevious();
if (previousWrapper != null){
previousWrapper.setLastTransaction();
batchOperation.add(TableOperation.replace(previousWrapper.getEntity()));
}
try {
table.execute(batchOperation);
} catch (StorageException e) {
if (e.getHttpStatusCode() != 404){ // ignore if someone already did the job
throw new TransactionStorageInfrastructureException("Failed to delete failed open range transaction entity with keys '" + wrapper.entityKeysToString()
+ "', probably it has been modified by another client.", e);
}
}
transactionEntities.removeLast();
}
}
return true;
}
/**
 * Transit an in-progress transaction to TIMED_OUT state if its time out has passed.
 * When a concurrent modification is detected, the entity is reloaded and the check is
 * retried (without back-off) per the configured attempt strategy.
 * @param wrapper the wrapper around the transaction entity; may be refreshed in place
 * @return false if the entity no longer exists in the table (deleted by others), true otherwise
 * @throws StorageException if an underlying table operation failed
 * @throws IllegalStateException if the transaction is not in a state that can transit to TIMED_OUT
 * @throws TransactionStorageInfrastructureException if the table reference could not be obtained
 */
protected boolean applyTimeout(SequentialTransactionWrapper wrapper) throws StorageException, IllegalStateException, TransactionStorageInfrastructureException{
	CloudTable table = getTableReference();
	// set when a retry is triggered by a concurrent modification, so the next attempt reloads the entity
	AtomicBoolean needsReload = new AtomicBoolean(false);
	return ExceptionUncheckUtility.getThrowingUnchecked(()->{
		return new AttemptStrategy(attemptStrategy)
			.overrideBackoffStrategy(BackoffStrategies.noBackoff())
			.retryIfException(StorageException.class, e-> {
				if (ENTITY_HAS_BEEN_MODIFIED_BY_OTHERS.test(e)){
					needsReload.set(true);
					return true;
				}else{
					return false;
				}
			})
			.callThrowingSuppressed(()->{
				if (needsReload.get()){
					DynamicTableEntity reloaded = fetchDynamicEntity(wrapper.getSeriesId(), wrapper.getEntityTransactionId());
					if (reloaded == null){
						// deleted by others while we were retrying
						return false;
					}
					wrapper.setEntity(reloaded);
					wrapper.updateFromEntity();
					needsReload.set(false);
				}
				SimpleSequentialTransaction tx = wrapper.getTransactionNotNull();
				if (tx.isInProgress() && tx.getTimeout().isBefore(Instant.now())){
					if (tx.timeout()){
						wrapper.updateToEntity();
						try{
							table.execute(TableOperation.replace(wrapper.getEntity()));
							logger.debug("Marked transaction '{}/{}' owned by process '{}' as already timed out at {}",
									wrapper.getSeriesId(), tx.getTransactionId(), tx.getProcessorId(), tx.getTimeout());
						}catch(StorageException e){
							if (e.getHttpStatusCode() == 404){
								// entity already deleted by others
								return false;
							}else{
								throw e;
							}
						}
					}else{
						throw new IllegalStateException("Transaction '" + tx.getTransactionId() + "' in series '" + wrapper.getSeriesId()
								+ "' is currently in " + tx.getState() + " state and cannot be changed to TIMED_OUT state");
					}
				}
				return true;
			});
	});
}
/**
 * Fetch the transaction entity by seriesId and transactionId.
 * @param seriesId the ID of the series that the transaction belongs to
 * @param transactionId the ID of the transaction
 * @return the transaction entity or null if not found
 * @throws TransactionStorageInfrastructureException if failed to get table reference
 * @throws StorageException if other underlying error happened
 */
protected SequentialTransactionEntity fetchEntity(String seriesId, String transactionId) throws TransactionStorageInfrastructureException, StorageException{
	CloudTable table = getTableReference();
	try{
		TableOperation retrieve = TableOperation.retrieve(seriesId, transactionId, SequentialTransactionEntity.class);
		return table.execute(retrieve).getResultAsType();
	}catch(StorageException e){
		// 404 simply means the entity does not exist
		if (e.getHttpStatusCode() == 404){
			return null;
		}
		throw e;
	}
}
/**
 * Fetch the transaction entity by transactionId only (the partition/series is unknown).
 * @param transactionId the ID of the transaction
 * @return the transaction entity or null if not found
 * @throws TransactionStorageInfrastructureException if failed to get table reference
 */
protected SequentialTransactionEntity fetchEntity(String transactionId) throws TransactionStorageInfrastructureException{
	// the series is unknown, so look up across partitions by row key
	return AzureStorageUtility.retrieveByRowKey(getTableReference(), transactionId, SequentialTransactionEntity.class);
}
/**
 * Fetch the transaction entity as DynamicTableEntity by seriesId and transactionId.
 * @param seriesId the ID of the series that the transaction belongs to
 * @param transactionId the ID of the transaction
 * @return the transaction entity as DynamicTableEntity or null if not found
 * @throws TransactionStorageInfrastructureException if failed to get table reference
 * @throws StorageException if other underlying error happened
 */
protected DynamicTableEntity fetchDynamicEntity(String seriesId, String transactionId) throws TransactionStorageInfrastructureException, StorageException{
	CloudTable table = getTableReference();
	try{
		TableOperation retrieve = TableOperation.retrieve(seriesId, transactionId, DynamicTableEntity.class);
		return table.execute(retrieve).getResultAsType();
	}catch(StorageException e){
		// 404 simply means the entity does not exist
		if (e.getHttpStatusCode() == 404){
			return null;
		}
		throw e;
	}
}
/**
 * Fetch the transaction entity as DynamicTableEntity by transactionId only
 * (the partition/series is unknown).
 * @param transactionId the ID of the transaction
 * @return the transaction entity as DynamicTableEntity or null if not found
 * @throws TransactionStorageInfrastructureException if failed to get table reference
 */
protected DynamicTableEntity fetchDynamicEntity(String transactionId) throws TransactionStorageInfrastructureException{
	// the series is unknown, so look up across partitions by row key
	return AzureStorageUtility.retrieveByRowKey(getTableReference(), transactionId);
}
/**
 * Fetch the last transaction entity of a series, identified by an empty "Next" column.
 * @param seriesId the ID of the series
 * @return the last transaction entity, or null if the series has none
 * @throws TransactionStorageInfrastructureException if the series has more than one
 *         entity claiming to be the last (corrupted data), or the table reference
 *         could not be obtained
 * @throws StorageException if an underlying table operation failed
 */
protected SequentialTransactionEntity fetchLastTransactionEntity(String seriesId) throws TransactionStorageInfrastructureException, StorageException{
	CloudTable table = getTableReference();
	String seriesFilter = TableQuery.generateFilterCondition(
			AzureStorageUtility.PARTITION_KEY, QueryComparisons.EQUAL, seriesId);
	String isLastFilter = TableQuery.generateFilterCondition(
			"Next", QueryComparisons.EQUAL, "");
	TableQuery<SequentialTransactionEntity> query = TableQuery.from(SequentialTransactionEntity.class)
			.where(TableQuery.combineFilters(seriesFilter, TableQuery.Operators.AND, isLastFilter));
	SequentialTransactionEntity last = null;
	for (SequentialTransactionEntity candidate: table.execute(query)){
		// there must be at most one last transaction per series
		if (last != null){
			throw new TransactionStorageInfrastructureException("Corrupted data for series '" + seriesId + "' in table " + tableName
					+ ", there are at least two last transactions: " + last.keysToString() + ", " + candidate.keysToString());
		}
		last = candidate;
	}
	return last;
}
/**
 * Fetch all entities belonging to a series into a map of SequentialTransactionWrapper indexed by transaction ID.
 * After fetching, the method performs a sanity check (exactly one first and one last
 * transaction, or none at all) and links each wrapper to its previous/next neighbour.
 * @param seriesId the ID of the series
 * @param putAdditionalFirstTransactionEntry When true, one additional entry will be put into the result map. The entry will have a key of null,
 * 		and the value will be the wrapper of the first transaction.
 * @return a map of SequentialTransactionWrapper indexed by transaction ID, if putAdditionalFirstTransactionEntry argument is true
 * 		there will be one more additional entry for the first transaction.
 * @throws TransactionStorageInfrastructureException if failed to fetch entities, or the
 * 		fetched data is corrupted (inconsistent first/last flags or broken links)
 */
protected Map<String, SequentialTransactionWrapper> fetchEntities(String seriesId, boolean putAdditionalFirstTransactionEntry) throws TransactionStorageInfrastructureException{
	// fetch entities by seriesId
	Map<String, SequentialTransactionWrapper> map = new HashMap<>();
	try{
		CloudTable table = getTableReference();
		TableQuery<DynamicTableEntity> query = TableQuery.from(DynamicTableEntity.class).
				where(TableQuery.generateFilterCondition(
						AzureStorageUtility.PARTITION_KEY,
						QueryComparisons.EQUAL,
						seriesId));
		for (DynamicTableEntity entity: table.execute(query)){
			SequentialTransactionWrapper wrapper = new SequentialTransactionWrapper(entity);
			/*
			if (wrapper.isFirstTransaction()){
				wrapper.setFirstTransaction();
			}
			if (wrapper.isLastTransaction()){
				wrapper.setLastTransaction();
			}*/
			map.put(wrapper.getEntity().getRowKey(), wrapper);		// indexed by transaction id
		}
	}catch(Exception e){
		throw new TransactionStorageInfrastructureException("Failed to fetch entities belonging to series '" + seriesId + "' in table " + tableName, e);
	}
	// sanity check: an empty chain has no first/last, a non-empty chain has exactly one of each
	int numFirst = 0;
	int numLast = 0;
	for (SequentialTransactionWrapper wrapper: map.values()){
		if (wrapper.isFirstTransaction()){
			numFirst ++;
		}
		if (wrapper.isLastTransaction()){
			numLast ++;
		}
	}
	if (!(numFirst == 0 && numLast == 0 || numFirst == 1 && numLast == 1)){
		String errorMsg = "Corrupted data for series '" + seriesId + "' in table " + tableName
				+ ", number of first transaction(s): " + numFirst + ", number of last transaction(s): " + numLast;
		logger.error(errorMsg + ", transactions: {}", toList(map));
		throw new TransactionStorageInfrastructureException(errorMsg);
	}
	// link them up, with sanity check
	SequentialTransactionWrapper first = null;
	for (SequentialTransactionWrapper wrapper: map.values()){
		if (wrapper.isFirstTransaction()){
			first = wrapper;
		}else{
			String previousId = wrapper.getPreviousTransactionId();
			SequentialTransactionWrapper previous = map.get(previousId);
			if (previous == null){
				throw new TransactionStorageInfrastructureException("Corrupted data for series '" + seriesId + "' in table " + tableName
						+ ", previous transaction ID '" + previousId + "' of transaction '" + wrapper.getEntity().getRowKey() + "' cannot be found");
			}
			wrapper.setPrevious(previous);
		}
		if (!wrapper.isLastTransaction()){
			String nextId = wrapper.getNextTransactionId();
			SequentialTransactionWrapper next = map.get(nextId);
			if (next == null){
				throw new TransactionStorageInfrastructureException("Corrupted data for series '" + seriesId + "' in table " + tableName
						+ ", next transaction ID '" + nextId + "' of transaction '" + wrapper.getEntity().getRowKey() + "' cannot be found");
			}
			wrapper.setNext(next);
		}
	}
	if (putAdditionalFirstTransactionEntry && first != null){
		map.put(null, first);
	}
	return map;
}
/**
 * Convert the map of SequentialTransactionWrapper into an ordered list by walking the
 * next-links starting from the first transaction.
 * @param map the map returned by {@link #fetchEntities(String, boolean)}
 * @return The list, with oldest transaction as the first element and latest transaction as the last element
 */
protected LinkedList<SequentialTransactionWrapper> toList(Map<String, SequentialTransactionWrapper> map){
	LinkedList<SequentialTransactionWrapper> result = new LinkedList<>();
	// the null key, when present, points directly at the first transaction
	SequentialTransactionWrapper head = map.get(null);
	if (head == null){
		head = map.values().stream()
				.filter(SequentialTransactionWrapper::isFirstTransaction)
				.findFirst()
				.orElse(null);
	}
	for (SequentialTransactionWrapper current = head; current != null; current = current.getNext()){
		result.add(current);
	}
	return result;
}
}
|
package nl.wiegman.homecontrol.services.service;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import net.fortuna.ical4j.data.CalendarBuilder;
import net.fortuna.ical4j.data.ParserException;
import net.fortuna.ical4j.model.Calendar;
import net.fortuna.ical4j.model.component.CalendarComponent;
import net.fortuna.ical4j.model.component.VEvent;
import nl.wiegman.homecontrol.services.apimodel.AfvalInzameling;
import org.apache.commons.lang3.builder.ReflectionToStringBuilder;
import org.apache.commons.lang3.time.DateUtils;
import org.apache.http.Consts;
import org.apache.http.NameValuePair;
import org.apache.http.client.CookieStore;
import org.apache.http.client.config.CookieSpecs;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.protocol.HttpClientContext;
import org.apache.http.impl.client.BasicCookieStore;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.util.EntityUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.util.*;
@Api(value=AfvalInzamelingService.SERVICE_PATH, description="Geeft informatie over de afval inzameling")
@Component
@Path(AfvalInzamelingService.SERVICE_PATH)
public class AfvalInzamelingService {

    public static final String SERVICE_PATH = "afvalinzameling";

    /** Maps the calendar event descriptions (as they appear in the ICS feed) to the service's waste types. */
    public static final Map<String, AfvalInzameling.AfvalType> CALENDAR_TO_SERVICE_TYPE_MAP = new HashMap<>();

    /** Raw ICS bytes of the downloaded calendar; cached for the lifetime of the JVM once a download succeeds. */
    private static byte[] cachedCalendar = null;

    private final Logger logger = LoggerFactory.getLogger(AfvalInzamelingService.class);

    static {
        CALENDAR_TO_SERVICE_TYPE_MAP.put("Restafval wordt opgehaald", AfvalInzameling.AfvalType.REST);
        CALENDAR_TO_SERVICE_TYPE_MAP.put("GFT wordt opgehaald", AfvalInzameling.AfvalType.GFT);
        CALENDAR_TO_SERVICE_TYPE_MAP.put("Plastic verpakkingen of PMD wordt opgehaald", AfvalInzameling.AfvalType.PLASTIC);
        CALENDAR_TO_SERVICE_TYPE_MAP.put("Inzameling Sallcon wordt opgehaald", AfvalInzameling.AfvalType.SALLCON);
    }

    /**
     * Returns the next waste collection(s). Collections scheduled on the current date are included,
     * as promised by the API description.
     *
     * @return an AfvalInzameling with the date and waste types of the next collection day;
     *         the fields stay unset when no calendar is available or no upcoming events exist
     * @throws IOException declared for API compatibility
     * @throws ParserException declared for API compatibility
     */
    @ApiOperation(value = "Geeft de eerstvolgende afval inzameling(en) terug. Als er op de huidige datum inzameling(en) gepland zijn, dan worden deze terug gegeven.")
    @GET
    @Path("volgende")
    @Produces(MediaType.APPLICATION_JSON)
    public AfvalInzameling ophalenVolgendeAfvalInzameling() throws IOException, ParserException {
        logger.info("start ophalenVolgendeAfvalInzameling()");

        AfvalInzameling volgendeInzameling = new AfvalInzameling();

        Calendar calendar = getCalendar();
        if (calendar != null) {
            List<VEvent> firstEventsFromNow = getNextEvents(calendar);
            volgendeInzameling.setAfvalTypes(new ArrayList<>());
            for (VEvent event : firstEventsFromNow) {
                // All events in the returned list share the same day, so the date may be set repeatedly.
                volgendeInzameling.setDatum(event.getStartDate().getDate().getTime());
                volgendeInzameling.getAfvalTypes().add(CALENDAR_TO_SERVICE_TYPE_MAP.get(event.getDescription().getValue()));
            }
        }
        logger.info("end ophalenVolgendeAfvalInzameling() result=" + ReflectionToStringBuilder.toString(volgendeInzameling));
        return volgendeInzameling;
    }

    /**
     * Returns the parsed calendar, downloading it first when not yet cached.
     * @return the parsed calendar, or null when downloading or parsing failed
     */
    private Calendar getCalendar() {
        Calendar result = null;

        if (cachedCalendar == null) {
            try {
                cachedCalendar = downloadLatestCalendar();
            } catch (IOException e) {
                logger.error("Unable to download the latest afvalkalendar", e);
            }
        }
        if (cachedCalendar != null) {
            CalendarBuilder builder = new CalendarBuilder();
            try {
                result = builder.build(new ByteArrayInputStream(cachedCalendar));
            } catch (IOException | ParserException e) {
                logger.error("Unable to parse the latest afvalkalendar");
            }
        } else {
            logger.error("Unable to download the latest afvalkalendar");
        }
        return result;
    }

    /**
     * Returns all events on the first day (today or later) that has at least one event.
     * @return the events on that day, or an empty list when there are no upcoming events
     */
    private List<VEvent> getNextEvents(Calendar calendar) {
        Date firstDayFromTodayWithAtLeastOneEvent = findFirstDayFromTodayWithAtLeastOneEvent(calendar);
        // Bug fix: previously a NullPointerException was thrown when the calendar
        // contained no upcoming events; return an empty list instead.
        if (firstDayFromTodayWithAtLeastOneEvent == null) {
            return new ArrayList<>();
        }
        return getAllEventsOnDay(firstDayFromTodayWithAtLeastOneEvent, calendar);
    }

    /** Returns every VEvent in the calendar whose start date falls on the given day. */
    private List<VEvent> getAllEventsOnDay(Date day, Calendar calendar) {
        List<VEvent> eventsOnDay = new ArrayList<>();
        for (CalendarComponent calComp : calendar.getComponents()) {
            if (calComp instanceof VEvent) {
                VEvent event = (VEvent) calComp;
                net.fortuna.ical4j.model.Date eventDate = event.getStartDate().getDate();
                if (DateUtils.isSameDay(eventDate, day)) {
                    eventsOnDay.add(event);
                }
            }
        }
        return eventsOnDay;
    }

    /**
     * Finds the earliest event start date that is today or later.
     * @return that date, or null when the calendar has no event on or after today
     */
    private Date findFirstDayFromTodayWithAtLeastOneEvent(Calendar calendar) {
        // Truncate to midnight so "today" comparisons are done on whole days.
        Date today = DateUtils.truncate(new Date(), java.util.Calendar.DATE);

        VEvent firstEventFromNow = null;
        for (CalendarComponent calComp : calendar.getComponents()) {
            if (calComp instanceof VEvent) {
                VEvent event = (VEvent) calComp;
                net.fortuna.ical4j.model.Date eventDate = event.getStartDate().getDate();
                // Bug fix: use !before(today) instead of after(today) so events on the
                // current date are included, matching the documented API behaviour.
                if (!eventDate.before(today)
                        && (firstEventFromNow == null || eventDate.before(firstEventFromNow.getStartDate().getDate()))) {
                    firstEventFromNow = event;
                }
            }
        }
        // Bug fix: guard against dereferencing a null event when nothing was found.
        return firstEventFromNow == null ? null : firstEventFromNow.getStartDate().getDate();
    }

    /**
     * Downloads the latest ICS calendar: logs in with a fixed postcode/huisnummer
     * (a 302 redirect signals success) and then fetches the ICS file.
     *
     * @return the raw ICS bytes
     * @throws IOException when a request fails or returns an unexpected status code
     */
    private byte[] downloadLatestCalendar() throws IOException {
        byte[] result = null;

        // Use custom cookie store if necessary.
        CookieStore cookieStore = new BasicCookieStore();

        // Create global request configuration
        RequestConfig defaultRequestConfig = RequestConfig.custom()
                .setCookieSpec(CookieSpecs.DEFAULT)
                .setExpectContinueEnabled(true)
                .build();

        // Create an HttpClient with the given custom dependencies and configuration.
        CloseableHttpClient httpclient = HttpClients.custom()
                .setDefaultCookieStore(cookieStore)
                .setDefaultRequestConfig(defaultRequestConfig)
                .build();
        try {
            // Execution context can be customized locally; contextual attributes set at
            // the local context level take precedence over those set at the client level.
            HttpClientContext context = HttpClientContext.create();
            context.setCookieStore(cookieStore);

            HttpPost login = new HttpPost("http://kalender.afvalvrij.nl/Afvalkalender/login.php");
            List<NameValuePair> nvps = new ArrayList<NameValuePair>();
            nvps.add(new BasicNameValuePair("postcode", "7425 RH"));
            nvps.add(new BasicNameValuePair("huisnummer", "71"));
            nvps.add(new BasicNameValuePair("toon", ""));
            login.setEntity(new UrlEncodedFormEntity(nvps, Consts.UTF_8));

            CloseableHttpResponse loginResponse = httpclient.execute(login, context);
            try {
                int statusCode = loginResponse.getStatusLine().getStatusCode();
                if (statusCode != 302) {
                    throw new IOException("Invalid statuscode (expected 302): " + statusCode);
                }
            } finally {
                // Bug fix: the login response was previously never closed, leaking the connection.
                loginResponse.close();
            }

            HttpGet downloadIcs = new HttpGet("http://kalender.afvalvrij.nl/Afvalkalender/download_ical.php?p=7425%20RH&h=71&t=");
            CloseableHttpResponse response = httpclient.execute(downloadIcs, context);
            try {
                int statusCode = response.getStatusLine().getStatusCode();
                if (statusCode != 200) {
                    throw new IOException("Invalid statuscode (expected 200): " + statusCode);
                }
                result = EntityUtils.toByteArray(response.getEntity());
            } finally {
                response.close();
            }
        } finally {
            httpclient.close();
        }
        return result;
    }
}
|
package io.miti.beetle.processor;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import io.miti.beetle.cache.DBTypeCache;
import io.miti.beetle.cache.UserDBCache;
import io.miti.beetle.dbutil.ConnManager;
import io.miti.beetle.dbutil.Database;
import io.miti.beetle.exporters.CsvDBFileWriter;
import io.miti.beetle.exporters.DBFileWriter;
import io.miti.beetle.exporters.JsonDBFileWriter;
import io.miti.beetle.exporters.MarkdownDBFileWriter;
import io.miti.beetle.exporters.SQLDBFileWriter;
import io.miti.beetle.exporters.TabDBFileWriter;
import io.miti.beetle.exporters.TomlDBFileWriter;
import io.miti.beetle.exporters.XmlDBFileWriter;
import io.miti.beetle.exporters.YamlDBFileWriter;
import io.miti.beetle.model.ContentType;
import io.miti.beetle.model.DbType;
import io.miti.beetle.model.Session;
import io.miti.beetle.model.UserDb;
import io.miti.beetle.util.FakeSpecParser;
import io.miti.beetle.util.Logger;
public final class DataProcessor
{
private Session session = null;
private int runCount = 1;
public DataProcessor() {
super();
}
public DataProcessor(final Session pSession) {
session = pSession;
}
public void setRunCount(final int nRunCount) {
runCount = nRunCount;
}
public void run() {
// Check the session
if (session == null) {
Logger.error("Error: The session is null or invalid");
return;
}
// Check for SQL imports
if (session.getSourceTypeId() == ContentType.SQL.getId()) {
importSQL();
} else if (session.getSourceTypeId() == ContentType.FAKE.getId()) {
saveFakeData();
} else {
Logger.error("Error: Only SQL imports are supported for now; type = " + session.getSourceTypeId());
}
}
public void saveFakeData() {
final ContentType cType = ContentType.getById(session.getTargetTypeId());
if ((cType != ContentType.JSON) && (cType != ContentType.CSV) &&
(cType != ContentType.YAML) && (cType != ContentType.TOML) &&
(cType != ContentType.XML) && (cType != ContentType.SQL_FILE) &&
(cType != ContentType.MARKDOWN) && (cType != ContentType.TSV) &&
(cType != ContentType.JAVA)) {
Logger.error("Only supported export formats: CSV, JSON, YAML, TOML, XML, SQL, TSV, Markdown, Java");
return;
}
// Parse the specification in the source name.
// It should store the list of column names, their class type, and a pointer to
// the function call to generate the fake data for that column
final FakeSpecParser spec = new FakeSpecParser();
if (!spec.parse(session.getSourceName())) {
Logger.error("Invalid specification for fake data");
return;
}
// Configure the data target
final DBFileWriter writer = getFileWriter(cType,
session.getTargetName(), session.getTargetData(), spec);
// Write the header
writer.writeHeader();
// Iterate over the data for exporting
for (int i = 0; i < runCount; ++i) {
// Write out the data
writer.writeObject(spec);
}
// Write the footer
writer.writeFooter();
// Force out any pending data
writer.writeString(true);
}
public void importSQL() {
final ContentType cType = ContentType.getById(session.getTargetTypeId());
if ((cType != ContentType.JSON) && (cType != ContentType.CSV) &&
(cType != ContentType.YAML) && (cType != ContentType.TOML) &&
(cType != ContentType.XML) && (cType != ContentType.SQL_FILE) &&
(cType != ContentType.MARKDOWN) && (cType != ContentType.TSV) &&
(cType != ContentType.JAVA)) {
Logger.error("Only supported export formats: CSV, JSON, YAML, TOML, XML, SQL, TSV, Markdown, Java");
return;
}
// Find the user DB with the specified ID
final UserDb userDb = UserDBCache.get().find(session.getSourceId());
if (userDb == null) {
Logger.error("Error: Invalid database ID in the session");
return;
}
// Make sure the JDBC DB's driver class is loaded
final DbType dbType = DBTypeCache.get().find(userDb.getDbTypeId());
ConnManager.get().addDriverClass(dbType);
// Open a connection to the database
Logger.debug("Initializing the database " + userDb.getUrl());
ConnManager.get().init(userDb.getUrl(), userDb.getUserId(), userDb.getUserPw());
if (!ConnManager.get().create()) {
Logger.error("Unable to connect to database " + userDb.getUrl());
return;
}
// Get the metadata
PreparedStatement stmt = null;
try {
// Prepare the statement
stmt = ConnManager.get().getConn().prepareStatement(session.getSourceName());
// Verify it's not null
if (stmt != null) {
// Execute the statement and check the result
final boolean result = stmt.execute();
if (!result) {
Logger.error("The statement did not execute correctly");
} else {
// Get the result set of executing the query
ResultSet rs = stmt.getResultSet();
// Verify the result set is not null
if (rs != null) {
// Get the metadata
ResultSetMetaData rsmd = rs.getMetaData();
// Configure the data target
final DBFileWriter writer = getFileWriter(cType,
session.getTargetName(), session.getTargetData(), rsmd);
// Write the header
writer.writeHeader();
// Iterate over the data for exporting
Database.executeSelect(rs, writer);
// Write the footer
writer.writeFooter();
// Force out any pending data
writer.writeString(true);
// Close the result set
rs.close();
rs = null;
} else {
Logger.error("The database result set is null");
}
}
// Close the statement
stmt.close();
stmt = null;
} else {
Logger.error("The database statement is null");
}
} catch (SQLException e) {
Logger.error("SQL Exception: " + e.getMessage());
e.printStackTrace();
}
// Close the connection
ConnManager.get().close();
}
private static DBFileWriter getFileWriter(final ContentType cType,
final String outName,
final String outData,
final ResultSetMetaData rsmd) {
// Create the appropriate file writer object
if (cType == ContentType.JSON) {
return new JsonDBFileWriter(outName, outData, rsmd);
} else if (cType == ContentType.CSV) {
return new CsvDBFileWriter(outName, outData, rsmd);
} else if (cType == ContentType.TSV) {
return new TabDBFileWriter(outName, outData, rsmd);
} else if (cType == ContentType.YAML) {
return new YamlDBFileWriter(outName, outData, rsmd);
} else if (cType == ContentType.TOML) {
return new TomlDBFileWriter(outName, outData, rsmd);
} else if (cType == ContentType.SQL_FILE) {
return new SQLDBFileWriter(outName, outData, rsmd);
} else if (cType == ContentType.MARKDOWN) {
return new MarkdownDBFileWriter(outName, outData, rsmd);
} else if (cType == ContentType.XML) {
return new XmlDBFileWriter(outName, outData, rsmd);
} else {
return null;
}
}
private static DBFileWriter getFileWriter(final ContentType cType,
final String outName,
final String outData,
final FakeSpecParser spec) {
// Create the appropriate file writer object
if (cType == ContentType.JSON) {
return new JsonDBFileWriter(outName, outData, spec);
} else if (cType == ContentType.CSV) {
return new CsvDBFileWriter(outName, outData, spec);
} else if (cType == ContentType.TSV) {
return new TabDBFileWriter(outName, outData, spec);
} else if (cType == ContentType.YAML) {
return new YamlDBFileWriter(outName, outData, spec);
} else if (cType == ContentType.TOML) {
return new TomlDBFileWriter(outName, outData, spec);
} else if (cType == ContentType.SQL_FILE) {
return new SQLDBFileWriter(outName, outData, spec);
} else if (cType == ContentType.MARKDOWN) {
return new MarkdownDBFileWriter(outName, outData, spec);
} else if (cType == ContentType.XML) {
return new XmlDBFileWriter(outName, outData, spec);
} else {
return null;
}
}
}
|
package azkaban.executor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* Execution options for submitted flows and scheduled flows
*/
/**
 * Execution options for submitted flows and scheduled flows.
 * Holds notification settings, concurrency behaviour, pipeline settings,
 * flow parameters and the initially disabled jobs, and supports
 * (de)serialization via {@link #toObject()} / {@link #createFromObject(Object)}.
 */
public class ExecutionOptions {
	public static final String CONCURRENT_OPTION_SKIP="skip";
	public static final String CONCURRENT_OPTION_PIPELINE="pipeline";
	public static final String CONCURRENT_OPTION_IGNORE="ignore";

	private boolean notifyOnFirstFailure = true;
	private boolean notifyOnLastFailure = false;
	private boolean failureEmailsOverride = false;
	private boolean successEmailsOverride = false;
	private ArrayList<String> failureEmails = new ArrayList<String>();
	private ArrayList<String> successEmails = new ArrayList<String>();

	private Integer pipelineLevel = null;
	private Integer pipelineExecId = null;
	private Integer queueLevel = 0;
	private String concurrentOption = CONCURRENT_OPTION_IGNORE;
	private Map<String, String> flowParameters = new HashMap<String, String>();

	/** What to do with the rest of the flow when a job fails. */
	public enum FailureAction {
		FINISH_CURRENTLY_RUNNING,
		CANCEL_ALL,
		FINISH_ALL_POSSIBLE
	}

	private FailureAction failureAction = FailureAction.FINISH_CURRENTLY_RUNNING;
	private Set<String> initiallyDisabledJobs = new HashSet<String>();

	/**
	 * Adds the given parameters to this flow's parameter map.
	 * @param flowParam the parameters to add
	 */
	public void setFlowParameters(Map<String,String> flowParam) {
		// Bug fix: this previously called flowParameters.get(flowParam), which
		// silently discarded the supplied parameters instead of storing them.
		flowParameters.putAll(flowParam);
	}

	public Map<String,String> getFlowParameters() {
		return flowParameters;
	}

	/** Adds the given addresses to the failure notification email list. */
	public void setFailureEmails(Collection<String> emails) {
		failureEmails.addAll(emails);
	}

	public boolean isFailureEmailsOverridden() {
		return this.failureEmailsOverride;
	}

	public boolean isSuccessEmailsOverridden() {
		return this.successEmailsOverride;
	}

	public void setSuccessEmailsOverridden(boolean override) {
		this.successEmailsOverride = override;
	}

	public void setFailureEmailsOverridden(boolean override) {
		this.failureEmailsOverride = override;
	}

	public List<String> getFailureEmails() {
		return failureEmails;
	}

	/** Adds the given addresses to the success notification email list. */
	public void setSuccessEmails(Collection<String> emails) {
		successEmails.addAll(emails);
	}

	public List<String> getSuccessEmails() {
		return successEmails;
	}

	public boolean getNotifyOnFirstFailure() {
		return notifyOnFirstFailure;
	}

	public boolean getNotifyOnLastFailure() {
		return notifyOnLastFailure;
	}

	public void setNotifyOnFirstFailure(boolean notify) {
		this.notifyOnFirstFailure = notify;
	}

	public void setNotifyOnLastFailure(boolean notify) {
		this.notifyOnLastFailure = notify;
	}

	public FailureAction getFailureAction() {
		return failureAction;
	}

	public void setFailureAction(FailureAction action) {
		failureAction = action;
	}

	public void setConcurrentOption(String concurrentOption) {
		this.concurrentOption = concurrentOption;
	}

	public String getConcurrentOption() {
		return concurrentOption;
	}

	public Integer getPipelineLevel() {
		return pipelineLevel;
	}

	public Integer getPipelineExecutionId() {
		return pipelineExecId;
	}

	public void setPipelineLevel(Integer level) {
		pipelineLevel = level;
	}

	public void setPipelineExecutionId(Integer id) {
		this.pipelineExecId = id;
	}

	public Integer getQueueLevel() {
		return queueLevel;
	}

	/** Returns a copy of the initially disabled job names. */
	public List<String> getDisabledJobs() {
		return new ArrayList<String>(initiallyDisabledJobs);
	}

	/** Replaces the set of initially disabled job names. */
	public void setDisabledJobs(List<String> disabledJobs) {
		initiallyDisabledJobs = new HashSet<String>(disabledJobs);
	}

	/**
	 * Serializes these options into a map suitable for JSON encoding.
	 * @return the map representation; the inverse of {@link #createFromObject(Object)}
	 */
	public Map<String,Object> toObject() {
		HashMap<String,Object> flowOptionObj = new HashMap<String,Object>();

		flowOptionObj.put("flowParameters", this.flowParameters);
		flowOptionObj.put("notifyOnFirstFailure", this.notifyOnFirstFailure);
		flowOptionObj.put("notifyOnLastFailure", this.notifyOnLastFailure);
		flowOptionObj.put("successEmails", successEmails);
		flowOptionObj.put("failureEmails", failureEmails);
		flowOptionObj.put("failureAction", failureAction.toString());
		flowOptionObj.put("pipelineLevel", pipelineLevel);
		flowOptionObj.put("pipelineExecId", pipelineExecId);
		flowOptionObj.put("queueLevel", queueLevel);
		flowOptionObj.put("concurrentOption", concurrentOption);
		flowOptionObj.put("disabled", initiallyDisabledJobs);
		flowOptionObj.put("failureEmailsOverride", failureEmailsOverride);
		flowOptionObj.put("successEmailsOverride", successEmailsOverride);
		return flowOptionObj;
	}

	/**
	 * Deserializes options from the map produced by {@link #toObject()}.
	 * Missing keys keep their default values.
	 * @param obj the map representation
	 * @return the options, or null when obj is not a Map
	 */
	@SuppressWarnings("unchecked")
	public static ExecutionOptions createFromObject(Object obj) {
		if (!(obj instanceof Map)) {
			return null;
		}

		Map<String,Object> optionsMap = (Map<String,Object>)obj;

		ExecutionOptions options = new ExecutionOptions();
		if (optionsMap.containsKey("flowParameters")) {
			options.flowParameters = new HashMap<String, String>((Map<String,String>)optionsMap.get("flowParameters"));
		}
		// Failure notification
		if (optionsMap.containsKey("notifyOnFirstFailure")) {
			options.notifyOnFirstFailure = (Boolean)optionsMap.get("notifyOnFirstFailure");
		}
		if (optionsMap.containsKey("notifyOnLastFailure")) {
			options.notifyOnLastFailure = (Boolean)optionsMap.get("notifyOnLastFailure");
		}
		if (optionsMap.containsKey("concurrentOption")) {
			options.concurrentOption = (String)optionsMap.get("concurrentOption");
		}
		if (optionsMap.containsKey("disabled")) {
			// Bug fix: toObject() stores a Set under "disabled", so casting to
			// List<String> here threw a ClassCastException on round-trip.
			// Cast to Collection<String>, which covers both shapes.
			options.initiallyDisabledJobs = new HashSet<String>((Collection<String>)optionsMap.get("disabled"));
		}

		// Failure action
		if (optionsMap.containsKey("failureAction")) {
			options.failureAction = FailureAction.valueOf((String)optionsMap.get("failureAction"));
		}
		options.pipelineLevel = (Integer)optionsMap.get("pipelineLevel");
		options.pipelineExecId = (Integer)optionsMap.get("pipelineExecId");
		options.queueLevel = (Integer)optionsMap.get("queueLevel");

		// Success emails
		if (optionsMap.containsKey("successEmails")) {
			options.setSuccessEmails((Collection<String>)optionsMap.get("successEmails"));
		}
		// Failure emails
		if (optionsMap.containsKey("failureEmails")) {
			options.setFailureEmails((Collection<String>)optionsMap.get("failureEmails"));
		}

		if (optionsMap.containsKey("successEmailsOverride")) {
			options.setSuccessEmailsOverridden((Boolean)optionsMap.get("successEmailsOverride"));
		}
		if (optionsMap.containsKey("failureEmailsOverride")) {
			options.setFailureEmailsOverridden((Boolean)optionsMap.get("failureEmailsOverride"));
		}

		return options;
	}
}
|
package org.broadinstitute.hellbender.utils.variant;
import htsjdk.variant.vcf.VCFFilterHeaderLine;
import htsjdk.variant.vcf.VCFFormatHeaderLine;
import htsjdk.variant.vcf.VCFHeaderLine;
import htsjdk.variant.vcf.VCFHeaderLineCount;
import htsjdk.variant.vcf.VCFHeaderLineType;
import htsjdk.variant.vcf.VCFInfoHeaderLine;
import htsjdk.variant.vcf.VCFStandardHeaderLines;
import org.broadinstitute.hellbender.utils.Utils;
import java.util.LinkedHashMap;
import java.util.Map;
import static org.broadinstitute.hellbender.utils.variant.GATKVCFConstants.*;
/**
* This class contains the {@link VCFHeaderLine} definitions for the annotation keys in {@link GATKVCFConstants}.
* VCF-standard header lines are in {@link VCFStandardHeaderLines}, in htsjdk
*/
public class GATKVCFHeaderLines {
/** Returns the INFO header line registered for {@code id}, or null if the key is unknown. */
public static VCFInfoHeaderLine getInfoLine(final String id) { return infoLines.get(id); }
/** Returns the FORMAT header line registered for {@code id}, or null if the key is unknown. */
public static VCFFormatHeaderLine getFormatLine(final String id) { return formatLines.get(id); }
/** Returns the FILTER header line registered for {@code id}, or null if the key is unknown. */
public static VCFFilterHeaderLine getFilterLine(final String id) { return filterLines.get(id); }
// Registries keyed by header-line ID, populated once in the static initializer.
// Initial capacities are sized to the expected number of entries; LinkedHashMap
// preserves registration order.
private static final Map<String, VCFInfoHeaderLine> infoLines = new LinkedHashMap<>(60);
private static final Map<String, VCFFormatHeaderLine> formatLines = new LinkedHashMap<>(25);
private static final Map<String, VCFFilterHeaderLine> filterLines = new LinkedHashMap<>(2);
/** Registers a non-null FORMAT line under its ID; a later line with the same ID replaces an earlier one. */
private static void addFormatLine(final VCFFormatHeaderLine line) {
Utils.nonNull(line);
formatLines.put(line.getID(), line);
}
/** Registers a non-null INFO line under its ID; a later line with the same ID replaces an earlier one. */
private static void addInfoLine(final VCFInfoHeaderLine line) {
Utils.nonNull(line);
infoLines.put(line.getID(), line);
}
/** Registers a non-null FILTER line under its ID; a later line with the same ID replaces an earlier one. */
private static void addFilterLine(final VCFFilterHeaderLine line) {
Utils.nonNull(line);
filterLines.put(line.getID(), line);
}
static {
addFilterLine(new VCFFilterHeaderLine(LOW_QUAL_FILTER_NAME, "Low quality"));
// M2-related filters
addFilterLine(new VCFFilterHeaderLine(CLUSTERED_EVENTS_FILTER_NAME, "Clustered events observed in the tumor"));
addFilterLine(new VCFFilterHeaderLine(GERMLINE_RISK_FILTER_NAME, "Evidence indicates this site is germline, not somatic"));
addFilterLine(new VCFFilterHeaderLine(PON_FILTER_NAME, "Blacklisted site in panel of normals"));
addFilterLine(new VCFFilterHeaderLine(TUMOR_LOD_FILTER_NAME, "Tumor does not meet likelihood threshold"));
addFilterLine(new VCFFilterHeaderLine(STR_CONTRACTION_FILTER_NAME, "Site filtered due to contraction of short tandem repeat region"));
addFilterLine(new VCFFilterHeaderLine(MULTIALLELIC_FILTER_NAME, "Site filtered because too many alt alleles pass tumor LOD"));
addFilterLine(new VCFFilterHeaderLine(STRAND_ARTIFACT_FILTER_NAME, "Evidence for alt allele comes from one read direction only"));
addFilterLine(new VCFFilterHeaderLine(ARTIFACT_IN_NORMAL_FILTER_NAME, "artifact_in_normal"));
addFilterLine(new VCFFilterHeaderLine(MEDIAN_BASE_QUALITY_DIFFERENCE_FILTER_NAME, "ref - alt median base quality"));
addFilterLine(new VCFFilterHeaderLine(MEDIAN_MAPPING_QUALITY_DIFFERENCE_FILTER_NAME, "ref - alt median mapping quality"));
addFilterLine(new VCFFilterHeaderLine(MEDIAN_CLIPPING_DIFFERENCE_FILTER_NAME, "ref - alt median clipping"));
addFilterLine(new VCFFilterHeaderLine(MEDIAN_FRAGMENT_LENGTH_DIFFERENCE_FILTER_NAME, "abs(ref - alt) median fragment length"));
addFilterLine(new VCFFilterHeaderLine(READ_POSITION_FILTER_NAME, "median distance of alt variants from end of reads"));
addFilterLine(new VCFFilterHeaderLine(CONTAMINATION_FILTER_NAME, "contamination"));
addFilterLine(new VCFFilterHeaderLine(DUPLICATED_EVIDENCE_FILTER_NAME, "evidence for alt allele is overrepresented by apparent duplicates"));
addFormatLine(new VCFFormatHeaderLine(ALLELE_BALANCE_KEY, 1, VCFHeaderLineType.Float, "Allele balance for each het genotype"));
addFormatLine(new VCFFormatHeaderLine(MAPPING_QUALITY_ZERO_BY_SAMPLE_KEY, 1, VCFHeaderLineType.Integer, "Number of Mapping Quality Zero Reads per sample"));
addFormatLine(new VCFFormatHeaderLine(MLE_PER_SAMPLE_ALLELE_COUNT_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.Integer, "Maximum likelihood expectation (MLE) for the alternate allele count, in the same order as listed, for each individual sample"));
addFormatLine(new VCFFormatHeaderLine(MLE_PER_SAMPLE_ALLELE_FRACTION_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.Float, "Maximum likelihood expectation (MLE) for the alternate allele fraction, in the same order as listed, for each individual sample"));
addFormatLine(new VCFFormatHeaderLine(STRAND_COUNT_BY_SAMPLE_KEY, VCFHeaderLineCount.UNBOUNDED, VCFHeaderLineType.Integer, "Number of reads on the forward and reverse strand supporting each allele (including reference)"));
addFormatLine(new VCFFormatHeaderLine(STRAND_BIAS_BY_SAMPLE_KEY, 4, VCFHeaderLineType.Integer, "Per-sample component statistics which comprise the Fisher's Exact Test to detect strand bias."));
addFormatLine(new VCFFormatHeaderLine(MLE_PER_SAMPLE_ALLELE_COUNT_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.Integer, "Maximum likelihood expectation (MLE) for the alternate allele count, in the same order as listed, for each individual sample"));
addFormatLine(new VCFFormatHeaderLine(MLE_PER_SAMPLE_ALLELE_FRACTION_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.Float, "Maximum likelihood expectation (MLE) for the alternate allele fraction, in the same order as listed, for each individual sample"));
addFormatLine(new VCFFormatHeaderLine(PL_FOR_ALL_SNP_ALLELES_KEY, 10, VCFHeaderLineType.Integer, "Phred-scaled genotype likelihoods for all 4 possible bases regardless of whether there is statistical evidence for them. Ordering is always PL for AA AC CC GA GC GG TA TC TG TT."));
addFormatLine(new VCFFormatHeaderLine(HAPLOTYPE_CALLER_PHASING_ID_KEY, 1, VCFHeaderLineType.String, "Physical phasing ID information, where each unique ID within a given sample (but not across samples) connects records within a phasing group"));
addFormatLine(new VCFFormatHeaderLine(HAPLOTYPE_CALLER_PHASING_GT_KEY, 1, VCFHeaderLineType.String, "Physical phasing haplotype information, describing how the alternate alleles are phased in relation to one another"));
addFormatLine(new VCFFormatHeaderLine(MIN_DP_FORMAT_KEY, 1, VCFHeaderLineType.Integer, "Minimum DP observed within the GVCF block"));
addFormatLine(new VCFFormatHeaderLine(REFERENCE_GENOTYPE_QUALITY, 1, VCFHeaderLineType.Integer, "Unconditional reference genotype confidence, encoded as a phred quality -10*log10 p(genotype call is wrong)"));
addFormatLine(new VCFFormatHeaderLine(TRANSMISSION_PROBABILITY_KEY, 1, VCFHeaderLineType.Integer, "Phred score of the genotype combination and phase given that the genotypes are correct"));
addFormatLine(new VCFFormatHeaderLine(RBP_HAPLOTYPE_KEY, VCFHeaderLineCount.UNBOUNDED, VCFHeaderLineType.String, "Read-backed phasing haplotype identifiers"));
addFormatLine(new VCFFormatHeaderLine(AVG_INTERVAL_DP_BY_SAMPLE_KEY, 1, VCFHeaderLineType.Float, "Average sample depth across the interval. Sum of the sample specific depth in all loci divided by interval size."));
addFormatLine(new VCFFormatHeaderLine(LOW_COVERAGE_LOCI, 1, VCFHeaderLineType.Integer, "Number of loci for this sample, in this interval with low coverage (below the minimum coverage) but not zero."));
addFormatLine(new VCFFormatHeaderLine(ZERO_COVERAGE_LOCI, 1, VCFHeaderLineType.Integer, "Number of loci for this sample, in this interval with zero coverage."));
addFormatLine(new VCFFormatHeaderLine(PHRED_SCALED_POSTERIORS_KEY, VCFHeaderLineCount.G, VCFHeaderLineType.Integer, "Phred-scaled Posterior Genotype Probabilities"));
addFormatLine(new VCFFormatHeaderLine(JOINT_LIKELIHOOD_TAG_NAME, 1, VCFHeaderLineType.Integer, "Phred-scaled joint likelihood of the genotype combination (before applying family priors)"));
addFormatLine(new VCFFormatHeaderLine(JOINT_POSTERIOR_TAG_NAME, 1, VCFHeaderLineType.Integer, "Phred-scaled joint posterior probability of the genotype combination (after applying family priors)"));
// M2-related format lines
addFormatLine(new VCFFormatHeaderLine(ALLELE_FRACTION_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.Float, "Allele fractions of alternate alleles in the tumor"));
addFormatLine(new VCFFormatHeaderLine(OXOG_ALT_F1R2_KEY, 1, VCFHeaderLineType.Integer, "Count of reads in F1R2 pair orientation supporting the alternate allele"));
addFormatLine(new VCFFormatHeaderLine(OXOG_ALT_F2R1_KEY, 1, VCFHeaderLineType.Integer, "Count of reads in F2R1 pair orientation supporting the alternate allele"));
addFormatLine(new VCFFormatHeaderLine(OXOG_REF_F1R2_KEY, 1, VCFHeaderLineType.Integer, "Count of reads in F1R2 pair orientation supporting the reference allele"));
addFormatLine(new VCFFormatHeaderLine(OXOG_REF_F2R1_KEY, 1, VCFHeaderLineType.Integer, "Count of reads in F2R1 pair orientation supporting the reference allele"));
addFormatLine(new VCFFormatHeaderLine(OXOG_FRACTION_KEY, 1, VCFHeaderLineType.Float, "Fraction of alt reads indicating OxoG error"));
addInfoLine(new VCFInfoHeaderLine(MLE_ALLELE_COUNT_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.Integer, "Maximum likelihood expectation (MLE) for the allele counts (not necessarily the same as the AC), for each ALT allele, in the same order as listed"));
addInfoLine(new VCFInfoHeaderLine(MLE_ALLELE_FREQUENCY_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.Float, "Maximum likelihood expectation (MLE) for the allele frequency (not necessarily the same as the AF), for each ALT allele, in the same order as listed"));
addInfoLine(new VCFInfoHeaderLine(DOWNSAMPLED_KEY, 0, VCFHeaderLineType.Flag, "Were any of the samples downsampled?"));
addInfoLine(new VCFInfoHeaderLine(ALLELE_BALANCE_HET_KEY, 1, VCFHeaderLineType.Float, "Allele Balance for heterozygous calls (ref/(ref+alt))"));
addInfoLine(new VCFInfoHeaderLine(ALLELE_BALANCE_HOM_KEY, 1, VCFHeaderLineType.Float, "Allele Balance for homozygous calls (A/(A+O)) where A is the allele (ref or alt) and O is anything other"));
addInfoLine(new VCFInfoHeaderLine(NON_DIPLOID_RATIO_KEY, 1, VCFHeaderLineType.Float, "Overall non-diploid ratio (alleles/(alleles+non-alleles))"));
addInfoLine(new VCFInfoHeaderLine(BASE_COUNTS_KEY, 4, VCFHeaderLineType.Integer, "Counts of each base"));
addInfoLine(new VCFInfoHeaderLine(LOW_MQ_KEY, 3, VCFHeaderLineType.Float, "3-tuple: <fraction of reads with MQ=0>,<fraction of reads with MQ<=10>,<total number of reads>"));
addInfoLine(new VCFInfoHeaderLine(N_BASE_COUNT_KEY, 1, VCFHeaderLineType.Float, "Percentage of N bases in the pileup"));
addInfoLine(new VCFInfoHeaderLine(BASE_QUAL_RANK_SUM_KEY, 1, VCFHeaderLineType.Float, "Z-score from Wilcoxon rank sum test of Alt Vs. Ref base qualities"));
addInfoLine(new VCFInfoHeaderLine(AS_BASE_QUAL_RANK_SUM_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.Float, "allele specific Z-score from Wilcoxon rank sum test of each Alt Vs. Ref base qualities"));
addInfoLine(new VCFInfoHeaderLine(AS_RAW_BASE_QUAL_RANK_SUM_KEY, 1, VCFHeaderLineType.String, "raw data for allele specific rank sum test of base qualities"));
addInfoLine(new VCFInfoHeaderLine(CLIPPING_RANK_SUM_KEY, 1, VCFHeaderLineType.Float, "Z-score From Wilcoxon rank sum test of Alt vs. Ref number of hard clipped bases"));
addInfoLine(new VCFInfoHeaderLine(FISHER_STRAND_KEY, 1, VCFHeaderLineType.Float, "Phred-scaled p-value using Fisher's exact test to detect strand bias"));
addInfoLine(new VCFInfoHeaderLine(AS_FISHER_STRAND_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.Float, "allele specific phred-scaled p-value using Fisher's exact test to detect strand bias of each alt allele"));
addInfoLine(new VCFInfoHeaderLine(AS_SB_TABLE_KEY, 1, VCFHeaderLineType.String, "Allele-specific forward/reverse read counts for strand bias tests"));
addInfoLine(new VCFInfoHeaderLine(GC_CONTENT_KEY, 1, VCFHeaderLineType.Float, "GC content around the variant (see docs for window size details)"));
addInfoLine(new VCFInfoHeaderLine(NOCALL_CHROM_KEY, 1, VCFHeaderLineType.Integer, "Number of no-called samples"));
addInfoLine(new VCFInfoHeaderLine(GQ_MEAN_KEY, 1, VCFHeaderLineType.Float, "Mean of all GQ values"));
addInfoLine(new VCFInfoHeaderLine(GQ_STDEV_KEY, 1, VCFHeaderLineType.Float, "Standard deviation of all GQ values"));
addInfoLine(new VCFInfoHeaderLine(HAPLOTYPE_SCORE_KEY, 1, VCFHeaderLineType.Float, "Consistency of the site with at most two segregating haplotypes"));
addInfoLine(new VCFInfoHeaderLine(HARDY_WEINBERG_KEY, 1, VCFHeaderLineType.Float, "Phred-scaled p-value for Hardy-Weinberg violation"));
addInfoLine(new VCFInfoHeaderLine(HOMOPOLYMER_RUN_KEY, 1, VCFHeaderLineType.Integer, "Largest Contiguous Homopolymer Run of Variant Allele In Either Direction"));
addInfoLine(new VCFInfoHeaderLine(INBREEDING_COEFFICIENT_KEY, 1, VCFHeaderLineType.Float, "Inbreeding coefficient as estimated from the genotype likelihoods per-sample when compared against the Hardy-Weinberg expectation"));
addInfoLine(new VCFInfoHeaderLine(AS_INBREEDING_COEFFICIENT_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.Float, "Allele-specific inbreeding coefficient as estimated from the genotype likelihoods per-sample when compared against the Hardy-Weinberg expectation"));
addInfoLine(new VCFInfoHeaderLine(EXCESS_HET_KEY, 1, VCFHeaderLineType.Float, "Phred-scaled p-value for exact test of excess heterozygosity"));
addInfoLine(new VCFInfoHeaderLine(AS_HETEROZYGOSITY_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.Float, "allele specific heterozygosity as estimated from the genotype likelihoods per-sample when compared against the Hardy-Weinberg expectation; relate to inbreeding coefficient"));
addInfoLine(new VCFInfoHeaderLine(LIKELIHOOD_RANK_SUM_KEY, 1, VCFHeaderLineType.Float, "Z-score from Wilcoxon rank sum test of Alt Vs. Ref haplotype likelihoods"));
addInfoLine(new VCFInfoHeaderLine(MAP_QUAL_RANK_SUM_KEY, 1, VCFHeaderLineType.Float, "Z-score From Wilcoxon rank sum test of Alt vs. Ref read mapping qualities"));
addInfoLine(new VCFInfoHeaderLine(AS_MAP_QUAL_RANK_SUM_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.Float, "allele specific Z-score From Wilcoxon rank sum test of each Alt vs. Ref read mapping qualities"));
addInfoLine(new VCFInfoHeaderLine(RAW_RMS_MAPPING_QUALITY_KEY, 1, VCFHeaderLineType.Float, "Raw data for RMS Mapping Quality"));
addInfoLine(new VCFInfoHeaderLine(AS_RAW_RMS_MAPPING_QUALITY_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.Float, "Allele-specfic raw data for RMS Mapping Quality"));
addInfoLine(new VCFInfoHeaderLine(AS_RMS_MAPPING_QUALITY_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.Float, "Allele-specific RMS Mapping Quality"));
addInfoLine(new VCFInfoHeaderLine(AS_RAW_MAP_QUAL_RANK_SUM_KEY, 1, VCFHeaderLineType.String, "Allele-specfic raw data for Mapping Quality Rank Sum"));
addInfoLine(new VCFInfoHeaderLine(AS_MAP_QUAL_RANK_SUM_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.Float, "Allele-specific Mapping Quality Rank Sum"));
addInfoLine(new VCFInfoHeaderLine(AS_FILTER_STATUS_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.String, "Filter status for each allele, as assessed by ApplyRecalibration. Note that the VCF filter field will reflect the most lenient/sensitive status across all alleles."));
addInfoLine(new VCFInfoHeaderLine(AS_CULPRIT_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.String, "For each alt allele, the annotation which was the worst performing in the Gaussian mixture model, likely the reason why the variant was filtered out"));
addInfoLine(new VCFInfoHeaderLine(AS_VQS_LOD_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.String, "For each alt allele, the log odds of being a true variant versus being false under the trained gaussian mixture model"));
addInfoLine(new VCFInfoHeaderLine(MENDEL_VIOLATION_LR_KEY, 1, VCFHeaderLineType.Float, "Mendelian violation likelihood ratio: L[MV] - L[No MV]"));
addInfoLine(new VCFInfoHeaderLine(HI_CONF_DENOVO_KEY, 1, VCFHeaderLineType.String, "High confidence possible de novo mutation (GQ >= 20 for all trio members)=[comma-delimited list of child samples]"));
addInfoLine(new VCFInfoHeaderLine(LO_CONF_DENOVO_KEY, 1, VCFHeaderLineType.String, "Low confidence possible de novo mutation (GQ >= 10 for child, GQ > 0 for parents)=[comma-delimited list of child samples]"));
addInfoLine(new VCFInfoHeaderLine(QUAL_BY_DEPTH_KEY, 1, VCFHeaderLineType.Float, "Variant Confidence/Quality by Depth"));
addInfoLine(new VCFInfoHeaderLine(AS_QUAL_BY_DEPTH_KEY, 1, VCFHeaderLineType.Float, "Allele-specific Variant Confidence/Quality by Depth"));
addInfoLine(new VCFInfoHeaderLine(AS_QUAL_KEY, 1, VCFHeaderLineType.Float, "Allele-specific Variant Qual Score"));
addInfoLine(new VCFInfoHeaderLine(READ_POS_RANK_SUM_KEY, 1, VCFHeaderLineType.Float, "Z-score from Wilcoxon rank sum test of Alt vs. Ref read position bias"));
addInfoLine(new VCFInfoHeaderLine(AS_READ_POS_RANK_SUM_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.Float, "allele specific Z-score from Wilcoxon rank sum test of each Alt vs. Ref read position bias"));
addInfoLine(new VCFInfoHeaderLine(AS_RAW_READ_POS_RANK_SUM_KEY, 1, VCFHeaderLineType.String, "allele specific raw data for rank sum test of read position bias"));
addInfoLine(new VCFInfoHeaderLine(SAMPLE_LIST_KEY, VCFHeaderLineCount.UNBOUNDED, VCFHeaderLineType.String, "List of polymorphic samples"));
addInfoLine(new VCFInfoHeaderLine(SPANNING_DELETIONS_KEY, 1, VCFHeaderLineType.Float, "Fraction of Reads Containing Spanning Deletions"));
addInfoLine(new VCFInfoHeaderLine(STRAND_ODDS_RATIO_KEY, 1, VCFHeaderLineType.Float, "Symmetric Odds Ratio of 2x2 contingency table to detect strand bias"));
addInfoLine(new VCFInfoHeaderLine(AS_STRAND_ODDS_RATIO_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.Float, "Allele specific strand Odds Ratio of 2x|Alts| contingency table to detect allele specific strand bias"));
addInfoLine(new VCFInfoHeaderLine(STR_PRESENT_KEY, 0, VCFHeaderLineType.Flag, "Variant is a short tandem repeat"));
addInfoLine(new VCFInfoHeaderLine(REPEAT_UNIT_KEY, 1, VCFHeaderLineType.String, "Tandem repeat unit (bases)"));
addInfoLine(new VCFInfoHeaderLine(REPEATS_PER_ALLELE_KEY, VCFHeaderLineCount.UNBOUNDED, VCFHeaderLineType.Integer, "Number of times tandem repeat unit is repeated, for each allele (including reference)"));
addInfoLine(new VCFInfoHeaderLine(TRANSMISSION_DISEQUILIBRIUM_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.Float, "Test statistic from Wittkowski transmission disequilibrium test."));
addInfoLine(new VCFInfoHeaderLine(VARIANT_TYPE_KEY, 1, VCFHeaderLineType.String, "Variant type description"));
addInfoLine(new VCFInfoHeaderLine(NUMBER_OF_DISCOVERED_ALLELES_KEY, 1, VCFHeaderLineType.Integer, "Number of alternate alleles discovered (but not necessarily genotyped) at this site"));
addInfoLine(new VCFInfoHeaderLine(REFSAMPLE_DEPTH_KEY, 1, VCFHeaderLineType.Integer, "Total reference sample depth"));
addInfoLine(new VCFInfoHeaderLine(ORIGINAL_AC_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.Integer, "Original AC"));
addInfoLine(new VCFInfoHeaderLine(ORIGINAL_AF_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.Float, "Original AF"));
addInfoLine(new VCFInfoHeaderLine(ORIGINAL_AN_KEY, 1, VCFHeaderLineType.Integer, "Original AN"));
addInfoLine(new VCFInfoHeaderLine(ORIGINAL_DP_KEY, 1, VCFHeaderLineType.Integer, "Original DP"));
addInfoLine(new VCFInfoHeaderLine(ORIGINAL_CONTIG_KEY, 1, VCFHeaderLineType.String, "Original contig name for the record"));
addInfoLine(new VCFInfoHeaderLine(ORIGINAL_START_KEY, 1, VCFHeaderLineType.Integer, "Original start position for the record"));
addInfoLine(new VCFInfoHeaderLine(VQS_LOD_KEY, 1, VCFHeaderLineType.Float, "Log odds of being a true variant versus being false under the trained gaussian mixture model"));
addInfoLine(new VCFInfoHeaderLine(CULPRIT_KEY, 1, VCFHeaderLineType.String, "The annotation which was the worst performing in the Gaussian mixture model, likely the reason why the variant was filtered out"));
addInfoLine(new VCFInfoHeaderLine(POSITIVE_LABEL_KEY, 1, VCFHeaderLineType.Flag, "This variant was used to build the positive training set of good variants"));
addInfoLine(new VCFInfoHeaderLine(NEGATIVE_LABEL_KEY, 1, VCFHeaderLineType.Flag, "This variant was used to build the negative training set of bad variants"));
addInfoLine(new VCFInfoHeaderLine(RBP_INCONSISTENT_KEY, 0, VCFHeaderLineType.Flag, "Are the reads significantly haplotype-inconsistent?"));
addInfoLine(new VCFInfoHeaderLine(GENOTYPE_AND_VALIDATE_STATUS_KEY, 1, VCFHeaderLineType.String, "Value from the validation VCF"));
addInfoLine(new VCFInfoHeaderLine(AVG_INTERVAL_DP_KEY, 1, VCFHeaderLineType.Float, "Average depth across the interval. Sum of the depth in a loci divided by interval size."));
addInfoLine(new VCFInfoHeaderLine(INTERVAL_GC_CONTENT_KEY, 1, VCFHeaderLineType.Float, "GC Content of the interval"));
addInfoLine(new VCFInfoHeaderLine(GENOTYPE_PRIOR_KEY, VCFHeaderLineCount.G, VCFHeaderLineType.Integer, "Genotype Likelihood Prior"));
// M2-related info lines
addInfoLine(new VCFInfoHeaderLine(EVENT_COUNT_IN_HAPLOTYPE_KEY, 1, VCFHeaderLineType.Integer, "Number of events in this haplotype"));
addInfoLine(new VCFInfoHeaderLine(NORMAL_LOD_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.Float, "Normal LOD score"));
addInfoLine(new VCFInfoHeaderLine(TUMOR_LOD_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.Float, "Tumor LOD score"));
addInfoLine(new VCFInfoHeaderLine(IN_PON_VCF_ATTRIBUTE, 0, VCFHeaderLineType.Flag, "site found in panel of normals"));
addInfoLine(new VCFInfoHeaderLine(POPULATION_AF_VCF_ATTRIBUTE, VCFHeaderLineCount.A, VCFHeaderLineType.Float, "population allele frequencies of alt alleles"));
addInfoLine(new VCFInfoHeaderLine(GERMLINE_POSTERIORS_VCF_ATTRIBUTE, VCFHeaderLineCount.A, VCFHeaderLineType.Float, "Posterior probability for alt allele to be germline variants"));
addInfoLine(new VCFInfoHeaderLine(NORMAL_ARTIFACT_LOD_ATTRIBUTE, VCFHeaderLineCount.A, VCFHeaderLineType.Float, "log odds of artifact in normal with same allele fraction as tumor"));
}
}
|
// samskivert library - useful routines for java programs
// This library is free software; you can redistribute it and/or modify it
// (at your option) any later version.
// This library is distributed in the hope that it will be useful,
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// You should have received a copy of the GNU Lesser General Public
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
package com.samskivert.jdbc;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import java.sql.SQLException;
import com.samskivert.jdbc.ColumnDefinition;
import com.samskivert.util.ArrayUtil;
/**
* Handles liaison for HSQLDB.
*/
/**
 * Handles liaison for HSQLDB: identifier quoting, identity-column management
 * and the HSQL-specific quirks of table creation.
 */
public class HsqldbLiaison extends BaseLiaison
{
    // from DatabaseLiaison
    public boolean matchesURL (String url)
    {
        // HSQLDB JDBC URLs take the form "jdbc:hsqldb:<database spec>"
        return url.startsWith("jdbc:hsqldb");
    }

    // from DatabaseLiaison
    public String columnSQL (String column)
    {
        // HSQL uses standard SQL double-quoting for identifiers
        return "\"" + column + "\"";
    }

    // from DatabaseLiaison
    public String tableSQL (String table)
    {
        return "\"" + table + "\"";
    }

    // from DatabaseLiaison
    public String indexSQL (String index)
    {
        return "\"" + index + "\"";
    }

    // from DatabaseLiaison
    public void createGenerator (Connection conn, String tableName,
                                 String columnName, int initValue)
        throws SQLException
    {
        // HSQL's IDENTITY() does not create any database entities
    }

    // from DatabaseLiaison
    public void deleteGenerator (Connection conn, String tableName, String columnName)
        throws SQLException
    {
        // HSQL's IDENTITY() does not create any database entities that we need to delete
    }

    // from DatabaseLiaison
    public int lastInsertedId (Connection conn, String table, String column) throws SQLException
    {
        // HSQL does not keep track of per-table-and-column insertion data, so we are pretty much
        // going on blind faith here that we're fetching the right ID. In the overwhelming number
        // of cases that will be so, but it's still not pretty.
        Statement stmt = null;
        try {
            stmt = conn.createStatement();
            // the ResultSet is implicitly closed when its Statement is closed below
            ResultSet rs = stmt.executeQuery("call IDENTITY()");
            return rs.next() ? rs.getInt(1) : -1;
        } finally {
            JDBCUtil.close(stmt);
        }
    }

    // from DatabaseLiaison
    public boolean isTransientException (SQLException sqe)
    {
        return false; // no known transient exceptions for HSQLDB
    }

    // from DatabaseLiaison
    public boolean isDuplicateRowException (SQLException sqe)
    {
        // HSQL reports duplicates with messages like:
        // Violation of unique constraint SYS_PK_51: duplicate value(s) for column(s) FOO
        String msg = sqe.getMessage();
        return (msg != null && msg.contains("duplicate value(s)"));
    }

    // BaseLiaison's implementation of table creation accepts unique constraints both as
    // part of the column definition and as a separate argument, and merrily passes this
    // duality onto the database. Postgres and MySQL both handle this fine but HSQL seems
    // to simply not allow uniqueness in the column definitions. So, for HSQL, we transfer
    // uniqueness from the ColumnDefinitions to the uniqueConstraintColumns before we pass
    // it in to the super implementation.
    // TODO: Consider making this the general MO instead of a subclass override. In fact
    // it may be that uniqueness should be removed from ColumnDefinition.
    @Override // from DatabaseLiaison
    public boolean createTableIfMissing (
        Connection conn, String table, String[] columns, ColumnDefinition[] definitions,
        String[][] uniqueConstraintColumns, String[] primaryKeyColumns)
        throws SQLException
    {
        if (columns.length != definitions.length) {
            throw new IllegalArgumentException("Column name and definition number mismatch");
        }

        // make a set of unique constraints already provided
        Set<List<String>> uColSet = new HashSet<List<String>>();
        if (uniqueConstraintColumns != null) {
            for (String[] uCols : uniqueConstraintColumns) {
                uColSet.add(Arrays.asList(uCols));
            }
        }

        // go through the columns and find any that are unique; these we replace with a
        // non-unique variant, and instead add a new entry to the table unique constraint
        ColumnDefinition[] newDefinitions = new ColumnDefinition[definitions.length];
        for (int ii = 0; ii < definitions.length; ii++) {
            ColumnDefinition def = definitions[ii];
            if (def.unique) {
                // let's be nice and not mutate the caller's object
                newDefinitions[ii] = new ColumnDefinition(
                    def.type, def.nullable, false, def.defaultValue);
                // if a uniqueness constraint for this column was not in the
                // uniqueConstraintColumns parameter, add such an entry
                if (!uColSet.contains(Collections.singletonList(columns[ii]))) {
                    String[] newConstraint = new String[] { columns[ii] };
                    uniqueConstraintColumns = (uniqueConstraintColumns == null) ?
                        new String[][] { newConstraint } :
                        ArrayUtil.append(uniqueConstraintColumns, newConstraint);
                }
            } else {
                newDefinitions[ii] = def;
            }
        }

        // now call the real implementation with our modified data
        return super.createTableIfMissing(
            conn, table, columns, newDefinitions, uniqueConstraintColumns, primaryKeyColumns);
    }

    @Override // from DatabaseLiaison
    protected String expandDefinition (
        String type, boolean nullable, boolean unique, String defaultValue)
    {
        StringBuilder builder = new StringBuilder(type);

        // append the default value if one was specified
        if (defaultValue != null) {
            if ("IDENTITY".equals(defaultValue)) {
                // this is a blatant hack, we need this method to join Depot's SQLBuilder
                builder.append(" GENERATED BY DEFAULT AS IDENTITY (START WITH 1)");
            } else {
                builder.append(" DEFAULT ").append(defaultValue);
            }
        }

        if (!nullable) {
            builder.append(" NOT NULL");
        }
        // HSQL cannot express per-column uniqueness in a column definition; callers
        // must express uniqueness via table-level constraints instead (which is what
        // this class's createTableIfMissing override arranges)
        if (unique) {
            throw new IllegalArgumentException("HSQL can't deal with column uniqueness here");
        }

        return builder.toString();
    }
}
|
package com.google.refine.importers;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.poi.common.usermodel.Hyperlink;
import org.apache.poi.hssf.usermodel.HSSFDateUtil;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.ss.usermodel.Workbook;
import org.apache.poi.xssf.usermodel.XSSFWorkbook;
import org.json.JSONArray;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.refine.ProjectMetadata;
import com.google.refine.importing.ImportingJob;
import com.google.refine.importing.ImportingUtilities;
import com.google.refine.model.Cell;
import com.google.refine.model.Project;
import com.google.refine.model.Recon;
import com.google.refine.model.Recon.Judgment;
import com.google.refine.model.ReconCandidate;
import com.google.refine.util.JSONUtilities;
public class ExcelImporter extends TabularImportingParserBase {
static final Logger logger = LoggerFactory.getLogger(ExcelImporter.class);
public ExcelImporter() {
super(true);
}
@Override
public JSONObject createParserUIInitializationData(
ImportingJob job, List<JSONObject> fileRecords, String format) {
JSONObject options = super.createParserUIInitializationData(job, fileRecords, format);
boolean xmlBased = "text/xml/xlsx".equals(format);
JSONUtilities.safePut(options, "xmlBased", xmlBased);
JSONArray sheetRecords = new JSONArray();
JSONUtilities.safePut(options, "sheetRecords", sheetRecords);
try {
if (fileRecords.size() > 0) {
JSONObject firstFileRecord = fileRecords.get(0);
File file = ImportingUtilities.getFile(job, firstFileRecord);
InputStream is = new FileInputStream(file);
try {
Workbook wb = xmlBased ?
new XSSFWorkbook(is) :
new HSSFWorkbook(new POIFSFileSystem(is));
int sheetCount = wb.getNumberOfSheets();
boolean hasData = false;
for (int i = 0; i < sheetCount; i++) {
Sheet sheet = wb.getSheetAt(i);
int rows = sheet.getLastRowNum() - sheet.getFirstRowNum() + 1;
JSONObject sheetRecord = new JSONObject();
JSONUtilities.safePut(sheetRecord, "name", sheet.getSheetName());
JSONUtilities.safePut(sheetRecord, "rows", rows);
if (hasData) {
JSONUtilities.safePut(sheetRecord, "selected", false);
} else if (rows > 1) {
JSONUtilities.safePut(sheetRecord, "selected", true);
hasData = true;
}
JSONUtilities.append(sheetRecords, sheetRecord);
}
} finally {
is.close();
}
}
} catch (IOException e) {
logger.error("Error generating parser UI initialization data for Excel file", e);
}
return options;
}
@Override
public void parseOneFile(
Project project,
ProjectMetadata metadata,
ImportingJob job,
String fileSource,
InputStream inputStream,
int limit,
JSONObject options,
List<Exception> exceptions
) {
boolean xmlBased = JSONUtilities.getBoolean(options, "xmlBased", false);
Workbook wb = null;
try {
wb = xmlBased ?
new XSSFWorkbook(inputStream) :
new HSSFWorkbook(new POIFSFileSystem(inputStream));
} catch (IOException e) {
exceptions.add(new ImportException(
"Attempted to parse as an Excel file but failed. " +
"Try to use Excel to re-save the file as a different Excel version or as TSV and upload again.",
e
));
return;
} catch (ArrayIndexOutOfBoundsException e){
exceptions.add(new ImportException(
"Attempted to parse file as an Excel file but failed. " +
"This is probably caused by a corrupt excel file, or due to the file having previously been created or saved by a non-Microsoft application. " +
"Please try opening the file in Microsoft Excel and resaving it, then try re-uploading the file. " +
"See https://issues.apache.org/bugzilla/show_bug.cgi?id=48261 for further details",
e
));
return;
}
int[] sheets = JSONUtilities.getIntArray(options, "sheets");
for (int sheetIndex : sheets) {
final Sheet sheet = wb.getSheetAt(sheetIndex);
final int lastRow = sheet.getLastRowNum();
TableDataReader dataReader = new TableDataReader() {
int nextRow = 0;
Map<String, Recon> reconMap = new HashMap<String, Recon>();
@Override
public List<Object> getNextRowOfCells() throws IOException {
if (nextRow > lastRow) {
return null;
}
List<Object> cells = new ArrayList<Object>();
org.apache.poi.ss.usermodel.Row row = sheet.getRow(nextRow++);
if (row != null) {
short lastCell = row.getLastCellNum();
for (short cellIndex = 0; cellIndex < lastCell; cellIndex++) {
Cell cell = null;
org.apache.poi.ss.usermodel.Cell sourceCell = row.getCell(cellIndex);
if (sourceCell != null) {
cell = extractCell(sourceCell, reconMap);
}
cells.add(cell);
}
}
return cells;
}
};
TabularImportingParserBase.readTable(
project,
metadata,
job,
dataReader,
fileSource + "#" + sheet.getSheetName(),
limit,
options,
exceptions
);
}
}
static protected Serializable extractCell(org.apache.poi.ss.usermodel.Cell cell) {
int cellType = cell.getCellType();
if (cellType == org.apache.poi.ss.usermodel.Cell.CELL_TYPE_FORMULA) {
cellType = cell.getCachedFormulaResultType();
}
if (cellType == org.apache.poi.ss.usermodel.Cell.CELL_TYPE_ERROR ||
cellType == org.apache.poi.ss.usermodel.Cell.CELL_TYPE_BLANK) {
return null;
}
Serializable value = null;
if (cellType == org.apache.poi.ss.usermodel.Cell.CELL_TYPE_BOOLEAN) {
value = cell.getBooleanCellValue();
} else if (cellType == org.apache.poi.ss.usermodel.Cell.CELL_TYPE_NUMERIC) {
double d = cell.getNumericCellValue();
if (HSSFDateUtil.isCellDateFormatted(cell)) {
value = HSSFDateUtil.getJavaDate(d);
// TODO: If we had a time datatype, we could use something like the following
// to distinguish times from dates (although Excel doesn't really make the distinction)
// Another alternative would be to look for values < 0.60
// String format = cell.getCellStyle().getDataFormatString();
// if (!format.contains("d") && !format.contains("m") && !format.contains("y") ) {
// // It's just a time
} else {
value = d;
}
} else {
String text = cell.getStringCellValue();
if (text.length() > 0) {
value = text;
}
}
return value;
}
static protected Cell extractCell(org.apache.poi.ss.usermodel.Cell cell, Map<String, Recon> reconMap) {
Serializable value = extractCell(cell);
if (value != null) {
Recon recon = null;
Hyperlink hyperlink = cell.getHyperlink();
if (hyperlink != null) {
String url = hyperlink.getAddress();
if (url != null && (url.startsWith("http:
url.startsWith("https:
final String sig = "freebase.com/view";
int i = url.indexOf(sig);
if (i > 0) {
String id = url.substring(i + sig.length());
int q = id.indexOf('?');
if (q > 0) {
id = id.substring(0, q);
}
int h = id.indexOf('
if (h > 0) {
id = id.substring(0, h);
}
if (reconMap.containsKey(id)) {
recon = reconMap.get(id);
recon.judgmentBatchSize++;
} else {
recon = new Recon(0, null, null);
recon.service = "import";
recon.match = new ReconCandidate(id, value.toString(), new String[0], 100);
recon.matchRank = 0;
recon.judgment = Judgment.Matched;
recon.judgmentAction = "auto";
recon.judgmentBatchSize = 1;
recon.addCandidate(recon.match);
reconMap.put(id, recon);
}
}
}
}
return new Cell(value, recon);
} else {
return null;
}
}
}
|
package org.codelibs.elasticsearch.web.transformer;
import java.io.IOException;
import java.lang.reflect.Method;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.lang.StringUtils;
import org.codelibs.elasticsearch.web.WebRiverConstants;
import org.codelibs.elasticsearch.web.config.RiverConfig;
import org.codelibs.elasticsearch.web.util.ParameterUtil;
import org.elasticsearch.client.Client;
import org.elasticsearch.index.query.QueryBuilders;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import org.seasar.framework.beans.BeanDesc;
import org.seasar.framework.beans.factory.BeanDescFactory;
import org.seasar.framework.beans.util.Beans;
import org.seasar.framework.container.SingletonS2Container;
import org.seasar.framework.container.annotation.tiger.InitMethod;
import org.seasar.framework.util.MethodUtil;
import org.seasar.framework.util.StringUtil;
import org.seasar.robot.RobotCrawlAccessException;
import org.seasar.robot.entity.AccessResultData;
import org.seasar.robot.entity.ResponseData;
import org.seasar.robot.entity.ResultData;
import org.seasar.robot.transformer.impl.XpathTransformer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class ScrapingTransformer extends
org.seasar.robot.transformer.impl.HtmlTransformer {
private static final String ARRAY_PROPERTY = "[]";
private static final Logger logger = LoggerFactory
.getLogger(XpathTransformer.class);
private static final String[] queryTypes = new String[] { "className",
"data", "html", "id", "ownText", "tagName", "text", "val" };
public String[] copiedResonseDataFields = new String[] { "url",
"parentUrl", "httpStatusCode", "method", "charSet",
"contentLength", "mimeType", "executionTime", "lastModified" };
protected RiverConfig riverConfig;
@InitMethod
public void init() {
// Container lifecycle hook: resolve the shared RiverConfig component from
// the Seasar singleton container once, when this transformer is initialized.
riverConfig = SingletonS2Container.getComponent(RiverConfig.class);
}
@Override
// Applies the scraping rules configured for this response's URL pattern to the
// fetched HTML and stores the extracted values as an index document.
// For each rule entry: a fixed "value" and/or the first non-blank jsoup query
// among the supported query types is evaluated, results are optionally
// whitespace-normalized, and stored either as a list (isArray) or joined with
// spaces. Does nothing when no rule matches the response.
protected void storeData(final ResponseData responseData,
final ResultData resultData) {
final Map<String, Map<String, Object>> scrapingRuleMap = riverConfig
.getPropertyMapping(responseData);
// no scraping rules configured for this URL: nothing to store
if (scrapingRuleMap == null) {
return;
}
org.jsoup.nodes.Document document = null;
String charsetName = responseData.getCharSet();
// fall back to UTF-8 when the crawler could not detect a charset
if (charsetName == null) {
charsetName = "UTF-8";
}
try {
document = Jsoup.parse(responseData.getResponseBody(), charsetName,
responseData.getUrl());
} catch (final IOException e) {
// surface parse failures as crawl-access errors so the robot records them
throw new RobotCrawlAccessException("Could not parse "
+ responseData.getUrl(), e);
}
// LinkedHashMap keeps field insertion order in the stored document
final Map<String, Object> dataMap = new LinkedHashMap<String, Object>();
final SimpleDateFormat sdf = new SimpleDateFormat(
WebRiverConstants.DATE_TIME_FORMAT);
dataMap.put("timestamp", sdf.format(new Date()));
// copy selected response metadata (url, status, mime type, ...) into the
// document, formatting lastModified and dropping null/blank values
Beans.copy(responseData, dataMap)
.includes(copiedResonseDataFields)
.dateConverter(WebRiverConstants.DATE_TIME_FORMAT,
"lastModified").excludesNull().excludesWhitespace()
.execute();
for (final Map.Entry<String, Map<String, Object>> entry : scrapingRuleMap
.entrySet()) {
final Map<String, Object> params = entry.getValue();
final boolean isTrimSpaces = ParameterUtil.getValue(params,
"trimSpaces", Boolean.FALSE).booleanValue();
final boolean isArray = ParameterUtil.getValue(params, "isArray",
Boolean.FALSE).booleanValue();
final List<String> strList = new ArrayList<String>();
final BeanDesc elementDesc = BeanDescFactory
.getBeanDesc(Element.class);
// a literal "value" parameter contributes a fixed string, if present
final String value = ParameterUtil.getValue(params, "value", null);
if (StringUtil.isNotBlank(value)) {
strList.add(trimSpaces(value, isTrimSpaces));
}
// evaluate the first query type (className/data/html/...) the rule
// defines; the matching Element accessor of the same name is invoked
// reflectively on each selected element
for (final String queryType : queryTypes) {
final String query = ParameterUtil.getValue(params, queryType,
null);
if (StringUtil.isNotBlank(query)) {
final Element[] elements = getElements(
new Element[] { document }, query);
for (final Element element : elements) {
final Method queryMethod = elementDesc
.getMethod(queryType);
strList.add(trimSpaces((String) MethodUtil.invoke(
queryMethod, element, new Object[0]),
isTrimSpaces));
}
// only one query type per rule is honored
break;
}
}
addPropertyData(dataMap, entry.getKey(), isArray ? strList
: StringUtils.join(strList, " "));
}
storeIndex(responseData, dataMap);
}
/**
 * Resolves a selector query against the given root elements, adding support
 * for the positional pseudo selectors :eq(n), :lt(n) and :gt(n) by applying
 * them manually between plain jsoup select() calls.
 *
 * @param elements the root elements to search from.
 * @param query the selector, possibly containing :eq/:lt/:gt parts.
 * @return the matched elements; may be empty, never null.
 */
protected Element[] getElements(final Element[] elements, final String query) {
    Element[] targets = elements;
    final Pattern pattern = Pattern
            .compile(":eq\\(([0-9]+)\\)|:lt\\(([0-9]+)\\)|:gt\\(([0-9]+)\\)");
    final Matcher matcher = pattern.matcher(query);
    final StringBuffer buf = new StringBuffer();
    while (matcher.find()) {
        final String value = matcher.group();
        matcher.appendReplacement(buf, "");
        // Only apply the positional filter when it is attached directly to a
        // preceding selector (no whitespace in between). BUGFIX: guard
        // against an empty buffer -- the old code called
        // buf.charAt(buf.length() - 1) and threw
        // StringIndexOutOfBoundsException when the query started with a
        // positional pseudo selector.
        if (buf.length() > 0 && buf.charAt(buf.length() - 1) != ' ') {
            try {
                // BUGFIX: the regex alternation captures the digits of :eq,
                // :lt and :gt into groups 1, 2 and 3 respectively. The old
                // code always read group(1), so for :lt/:gt it passed null
                // to parseInt, always hit the NumberFormatException branch,
                // and those selectors never worked.
                String number = matcher.group(1);
                if (number == null) {
                    number = matcher.group(2);
                }
                if (number == null) {
                    number = matcher.group(3);
                }
                final int index = Integer.parseInt(number);
                final List<Element> elementList = new ArrayList<Element>();
                final String childQuery = buf.toString();
                for (final Element element : targets) {
                    final Elements childElements = element
                            .select(childQuery);
                    if (value.startsWith(":eq")) {
                        // exactly the match at position 'index'
                        if (index < childElements.size()) {
                            elementList.add(childElements.get(index));
                        }
                    } else if (value.startsWith(":lt")) {
                        // matches whose position is less than 'index'
                        for (int i = 0; i < childElements.size()
                                && i < index; i++) {
                            elementList.add(childElements.get(i));
                        }
                    } else if (value.startsWith(":gt")) {
                        // matches whose position is greater than 'index'
                        for (int i = index + 1; i < childElements.size(); i++) {
                            elementList.add(childElements.get(i));
                        }
                    }
                }
                targets = elementList.toArray(new Element[elementList
                        .size()]);
                buf.setLength(0);
            } catch (final NumberFormatException e) {
                // Defensive: should no longer occur now that the correct
                // capture group is used, but keep the original fallback.
                logger.warn("Invalid number: " + query, e);
                buf.append(value);
            }
        } else {
            // Whitespace (or nothing) precedes the pseudo selector; leave it
            // in the buffer to be handled as part of the remaining query.
            buf.append(value);
        }
    }
    matcher.appendTail(buf);
    final String lastQuery = buf.toString();
    if (StringUtil.isNotBlank(lastQuery)) {
        final List<Element> elementList = new ArrayList<Element>();
        for (final Element element : targets) {
            final Elements childElements = element.select(lastQuery);
            for (int i = 0; i < childElements.size(); i++) {
                elementList.add(childElements.get(i));
            }
        }
        targets = elementList.toArray(new Element[elementList.size()]);
    }
    return targets;
}
/**
 * Optionally normalizes whitespace in the supplied value: collapses every
 * whitespace run to a single space and strips leading/trailing spaces.
 *
 * @param value the string to normalize; may be null.
 * @param trimSpaces whether normalization should be applied.
 * @return the (possibly normalized) value, or null if the input was null.
 */
protected String trimSpaces(final String value, final boolean trimSpaces) {
    if (value == null) {
        return null;
    }
    return trimSpaces ? value.replaceAll("\\s+", " ").trim() : value;
}
/**
 * Stores a value into a nested map structure using a dot-separated key path
 * (e.g. "a.b.c" creates map "a" containing map "b" containing entry "c"),
 * creating intermediate LinkedHashMaps as needed.
 *
 * @param dataMap the root map to store into.
 * @param key the dot-separated property path.
 * @param value the value to put at the final path segment.
 */
protected void addPropertyData(final Map<String, Object> dataMap,
        final String key, final Object value) {
    final String[] keys = key.split("\\.");
    final int lastIndex = keys.length - 1;
    Map<String, Object> target = dataMap;
    // Walk (and lazily create) the intermediate maps for all but the last key.
    for (int i = 0; i < lastIndex; i++) {
        @SuppressWarnings("unchecked")
        Map<String, Object> child = (Map<String, Object>) target
                .get(keys[i]);
        if (child == null) {
            child = new LinkedHashMap<String, Object>();
            target.put(keys[i], child);
        }
        target = child;
    }
    target.put(keys[lastIndex], value);
}
/**
 * Writes the scraped data for one crawled page into the configured index.
 * When the riverConfig marks the session as "overwrite", existing documents
 * with the same URL are deleted first. If the data map contains an array
 * property, one document is emitted per array position; otherwise a single
 * document is stored.
 *
 * @param responseData the crawl response (provides session id and URL).
 * @param dataMap the scraped property values to index.
 */
protected void storeIndex(final ResponseData responseData,
        final Map<String, Object> dataMap) {
    final String sessionId = responseData.getSessionId();
    final String indexName = riverConfig.getIndexName(sessionId);
    final String typeName = riverConfig.getTypeName(sessionId);
    final boolean overwrite = riverConfig.isOverwrite(sessionId);
    final Client client = riverConfig.getClient();
    if (logger.isDebugEnabled()) {
        logger.debug("Index: " + indexName + ", sessionId: " + sessionId
                + ", Data: " + dataMap);
    }
    if (overwrite) {
        // Remove any previously indexed documents for this URL, then refresh
        // so the deletion is visible before the new documents are written.
        client.prepareDeleteByQuery(indexName)
                .setQuery(
                        QueryBuilders.termQuery("url",
                                responseData.getUrl())).execute()
                .actionGet();
        client.admin().indices().prepareRefresh(indexName).execute()
                .actionGet();
    }
    // Array properties are pulled out and expanded into one document per
    // array position, each also carrying the scalar (non-array) properties.
    @SuppressWarnings("unchecked")
    Map<String, Object> arrayDataMap = (Map<String, Object>) dataMap
            .remove(ARRAY_PROPERTY);
    if (arrayDataMap != null) {
        Map<String, Object> flatArrayDataMap = new LinkedHashMap<String, Object>();
        convertFlatMap("", arrayDataMap, flatArrayDataMap);
        // Determine how many documents to emit: the longest list wins.
        int maxSize = 0;
        for (Map.Entry<String, Object> entry : flatArrayDataMap.entrySet()) {
            Object value = entry.getValue();
            if (value instanceof List) {
                @SuppressWarnings("rawtypes")
                int size = ((List) value).size();
                if (size > maxSize) {
                    maxSize = size;
                }
            }
        }
        for (int i = 0; i < maxSize; i++) {
            Map<String, Object> newDataMap = new LinkedHashMap<String, Object>();
            newDataMap.put("position", i);
            deepCopy(dataMap, newDataMap);
            // NOTE(review): this putAll replaces the freshly copied nested
            // maps with references to the ORIGINAL nested maps in dataMap,
            // largely undoing the deepCopy above; later addPropertyData
            // calls may then mutate maps shared across iterations. Confirm
            // whether the putAll is intentional.
            newDataMap.putAll(dataMap);
            for (Map.Entry<String, Object> entry : flatArrayDataMap
                    .entrySet()) {
                Object value = entry.getValue();
                if (value instanceof List) {
                    @SuppressWarnings("unchecked")
                    List<Object> list = (List<Object>) value;
                    // Lists shorter than maxSize simply omit the property
                    // for positions beyond their end.
                    if (i < list.size()) {
                        addPropertyData(newDataMap, entry.getKey(),
                                list.get(i));
                    }
                } else if (i == 0) {
                    // Non-list values under the array property are emitted
                    // only on the first document.
                    addPropertyData(newDataMap, entry.getKey(), value);
                }
            }
            storeIndex(client, indexName, typeName, newDataMap);
        }
    } else {
        storeIndex(client, indexName, typeName, dataMap);
    }
}
/**
 * Serializes the data map to JSON and indexes it as a single document,
 * refreshing the index immediately. Failures are logged and swallowed so a
 * single bad document does not abort the crawl.
 *
 * @param client the Elasticsearch client to index through.
 * @param indexName the target index.
 * @param typeName the target mapping type.
 * @param dataMap the document source.
 */
protected void storeIndex(final Client client, final String indexName,
        final String typeName, final Map<String, Object> dataMap) {
    try {
        final String content;
        content = riverConfig.getObjectMapper().writeValueAsString(dataMap);
        client.prepareIndex(indexName, typeName)
                .setRefresh(true)
                .setSource(content)
                .execute()
                .actionGet();
    } catch (final Exception e) {
        // Best effort: log and continue with the next document.
        logger.warn("Could not write a content into index.", e);
    }
}
/**
 * Copies the nested structure of oldMap into newMap by flattening it to
 * dot-separated keys and rebuilding the nesting with fresh maps, so the
 * copy shares no intermediate map instances with the original.
 *
 * @param oldMap the (possibly nested) source map.
 * @param newMap the destination map to populate.
 */
protected void deepCopy(Map<String, Object> oldMap,
        Map<String, Object> newMap) {
    final Map<String, Object> flattened = new LinkedHashMap<String, Object>();
    convertFlatMap("", oldMap, flattened);
    for (final Map.Entry<String, Object> entry : flattened.entrySet()) {
        addPropertyData(newMap, entry.getKey(), entry.getValue());
    }
}
/**
 * Recursively flattens a nested map into newMap, joining nested keys with
 * dots (e.g. {"a": {"b": 1}} becomes {"a.b": 1}). Non-map values are copied
 * through unchanged.
 *
 * @param prefix the key prefix accumulated so far (empty at the top level).
 * @param oldMap the nested source map.
 * @param newMap the flat destination map.
 */
@SuppressWarnings("unchecked")
protected void convertFlatMap(String prefix, Map<String, Object> oldMap,
        Map<String, Object> newMap) {
    for (final Map.Entry<String, Object> entry : oldMap.entrySet()) {
        final String path = prefix + entry.getKey();
        final Object value = entry.getValue();
        if (value instanceof Map) {
            // Descend into nested maps, extending the key path.
            convertFlatMap(path + ".", (Map<String, Object>) value, newMap);
        } else {
            newMap.put(path, value);
        }
    }
}
/**
 * Always returns null: this transformer writes the scraped data directly
 * into the index (see storeIndex) rather than exposing it through the
 * access-result data API.
 *
 * @param accessResultData the access result (ignored).
 * @return always null.
 */
@Override
public Object getData(final AccessResultData accessResultData) {
    return null;
}
}
|
package com.threerings.gwt.ui;
import com.google.gwt.user.client.ui.FlexTable;
import com.google.gwt.user.client.ui.HasAlignment;
import com.google.gwt.user.client.ui.Widget;
/**
* Extends {@link FlexTable} and provides a fluent interface for adjusting the styles of cells.
*/
public class FluentTable extends FlexTable
{
/** Used to format cells. Returned by all methods that configure cells. */
public static class Cell
{
/** The row we're formatting. */
public final int row;
/** The column we're formatting. */
public final int column;
/** Sets the text of this cell to the string value of the supplied object. */
public Cell setText (Object text, String... styles) {
_table.setText(row, column, String.valueOf(text));
return setStyles(styles);
}
/** Sets the HTML in this cell to the supplied value. Be careful! */
public Cell setHTML (String text, String... styles) {
_table.setHTML(row, column, String.valueOf(text));
return setStyles(styles);
}
/** Sets the contents of this cell to the specified widget. */
public Cell setWidget (Widget widget, String... styles) {
_table.setWidget(row, column, widget);
return setStyles(styles);
}
/** Sets the contents of this cell to a FlowPanel that contains the specified widgets. */
public Cell setWidgets (Widget... widgets) {
_table.setWidget(row, column, Widgets.newFlowPanel(widgets));
return this;
}
/** Makes the cell we're formatting align top. */
public Cell alignTop () {
_table.getFlexCellFormatter().setVerticalAlignment(
row, column, HasAlignment.ALIGN_TOP);
return this;
}
/** Makes the cell we're formatting align bottom. */
public Cell alignBottom () {
_table.getFlexCellFormatter().setVerticalAlignment(
row, column, HasAlignment.ALIGN_BOTTOM);
return this;
}
/** Makes the cell we're formatting align middle. */
public Cell alignMiddle () {
_table.getFlexCellFormatter().setVerticalAlignment(
row, column, HasAlignment.ALIGN_MIDDLE);
return this;
}
/** Makes the cell we're formatting align left. */
public Cell alignLeft () {
_table.getFlexCellFormatter().setHorizontalAlignment(
row, column, HasAlignment.ALIGN_LEFT);
return this;
}
/** Makes the cell we're formatting align right. */
public Cell alignRight () {
_table.getFlexCellFormatter().setHorizontalAlignment(
row, column, HasAlignment.ALIGN_RIGHT);
return this;
}
/** Makes the cell we're formatting align center. */
public Cell alignCenter () {
_table.getFlexCellFormatter().setHorizontalAlignment(
row, column, HasAlignment.ALIGN_CENTER);
return this;
}
/** Sets the rowspan of the cell we're formatting. */
public Cell setRowSpan (int rowSpan) {
_table.getFlexCellFormatter().setRowSpan(row, column, rowSpan);
return this;
}
/** Sets the colspan of the cell we're formatting. */
public Cell setColSpan (int colSpan) {
_table.getFlexCellFormatter().setColSpan(row, column, colSpan);
return this;
}
/** Configures the specified style names on our cell. The first style is set as the primary
* style and additional styles are added onto that. */
public Cell setStyles (String... styles)
{
int idx = 0;
for (String style : styles) {
if (idx++ == 0) {
_table.getFlexCellFormatter().setStyleName(row, column, style);
} else {
_table.getFlexCellFormatter().addStyleName(row, column, style);
}
}
return this;
}
protected Cell (FluentTable table, int row, int column)
{
_table = table;
this.row = row;
this.column = column;
}
protected FluentTable _table;
}
/**
* Creates an empty table with no styles and the default cell padding and spacing.
*/
public FluentTable ()
{
}
/**
* Creates a table with the specified styles and the default cell padding and spacing.
*/
public FluentTable (String... styles)
{
Widgets.setStyleNames(this, styles);
}
/**
* Creates a table with the specified cell pading and spacing and no styles.
*/
public FluentTable (int cellPadding, int cellSpacing)
{
setCellPadding(cellPadding);
setCellSpacing(cellSpacing);
}
/**
* Creates a table with the specified styles and cell padding and spacing.
*/
public FluentTable (int cellPadding, int cellSpacing, String... styles)
{
this(styles);
setCellPadding(cellPadding);
setCellSpacing(cellSpacing);
}
/**
* Returns the specified cell.
*/
public Cell at (int row, int column)
{
return new Cell(this, row, column);
}
/**
* Returns a {@link Cell} at the current row count and column zero (effectively adding a row to
* the table).
*/
public Cell add ()
{
return new Cell(this, getRowCount(), 0);
}
}
|
package org.cyclops.integrateddynamics.client.render.model;
import net.minecraft.block.state.IBlockState;
import net.minecraft.client.renderer.block.model.BakedQuad;
import net.minecraft.client.renderer.block.model.IBakedModel;
import net.minecraft.client.renderer.texture.TextureAtlasSprite;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.init.Blocks;
import net.minecraft.item.ItemStack;
import net.minecraft.util.EnumFacing;
import net.minecraft.world.World;
import org.cyclops.cyclopscore.client.model.DelegatingChildDynamicItemAndBlockModel;
import org.cyclops.cyclopscore.helper.RenderHelpers;
import org.cyclops.integrateddynamics.item.ItemFacade;
import java.util.List;
/**
 * Dynamic model for facade items.
 * @author rubensworks
 */
public class FacadeModel extends DelegatingChildDynamicItemAndBlockModel {

    // Fallback model used when the facade's target block has no usable model.
    // NOTE(review): plain mutable static with no null guard below -- presumably
    // assigned during client initialization before any rendering; confirm.
    public static IBakedModel emptyModel;

    public FacadeModel() {
        super(null);
    }

    public FacadeModel(IBakedModel baseModel) {
        super(baseModel);
    }

    public FacadeModel(IBakedModel baseModel, IBlockState blockState, EnumFacing facing, long rand) {
        super(baseModel, blockState, facing, rand);
    }

    public FacadeModel(IBakedModel baseModel, ItemStack itemStack, World world, EntityLivingBase entity) {
        super(baseModel, itemStack, world, entity);
    }

    @SuppressWarnings("unchecked")
    @Override
    public List<BakedQuad> getGeneralQuads() {
        try {
            return baseModel.getQuads(this.blockState, getRenderingSide(), this.rand);
        } catch (Exception e) {
            // Best-effort fallback: if the delegate model throws while producing
            // quads, render the empty model's quads instead of crashing.
            // NOTE(review): throws NPE here if emptyModel was never assigned.
            return emptyModel.getQuads(this.blockState, getRenderingSide(), this.rand);
        }
    }

    @Override
    public IBakedModel handleBlockState(IBlockState state, EnumFacing side, long rand) {
        // Facades are only rendered as items; block-state handling is unsupported.
        return null;
    }

    @Override
    public IBakedModel handleItemState(ItemStack itemStack, World world, EntityLivingBase entity) {
        // Resolve the block this facade mimics; fall back to the empty model
        // when the stack carries no facade block.
        IBlockState blockState = ItemFacade.getInstance().getFacadeBlock(itemStack);
        if(blockState == null) {
            return new FacadeModel(emptyModel, itemStack, world, entity);
        }
        IBakedModel bakedModel = RenderHelpers.getBakedModel(blockState);
        // Let the mimicked block's item overrides customize its model first.
        bakedModel = bakedModel.getOverrides().handleItemState(bakedModel,
                ItemFacade.getInstance().getFacadeBlockItem(itemStack), world, entity);
        return new FacadeModel(bakedModel, itemStack, world, entity);
    }

    @Override
    public TextureAtlasSprite getParticleTexture() {
        // Facades break with stone particles regardless of the mimicked block.
        return RenderHelpers.getBakedModel(Blocks.STONE.getDefaultState()).getParticleTexture();
    }
}
|
// Narya library - tools for developing networked games
//
// This library is free software; you can redistribute it and/or modify it
// under the terms of the GNU Lesser General Public License as published
// by the Free Software Foundation; either version 2.1 of the License, or
// (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
package com.threerings.util;
import java.text.MessageFormat;
import java.util.MissingResourceException;
import java.util.ResourceBundle;
import com.samskivert.text.MessageUtil;
import com.samskivert.util.StringUtil;
import static com.threerings.NaryaLog.log;
/**
 * A message bundle provides an easy mechanism by which to obtain
 * translated message strings from a resource bundle. It uses the {@link
 * MessageFormat} class to substitute arguments into the translation
 * strings. Message bundles would generally be obtained via the {@link
 * MessageManager}, but could be constructed individually if so desired.
 */
public class MessageBundle
{
    /**
     * Call this to "taint" any string that has been entered by an entity
     * outside the application so that the translation code knows not to
     * attempt to translate this string when doing recursive translations
     * (see {@link #xlate}).
     */
    public static String taint (Object text)
    {
        return MessageUtil.taint(text);
    }

    /**
     * Composes a message key with an array of arguments. The message can
     * subsequently be translated in a single call using {@link #xlate}.
     */
    public static String compose (String key, Object[] args)
    {
        return MessageUtil.compose(key, args);
    }

    /**
     * A convenience method for calling {@link #compose(String,Object[])}
     * with a single argument.
     */
    public static String compose (String key, Object arg)
    {
        return compose(key, new Object[] { arg });
    }

    /**
     * A convenience method for calling {@link #compose(String,Object[])}
     * with two arguments.
     */
    public static String compose (String key, Object arg1, Object arg2)
    {
        return compose(key, new Object[] { arg1, arg2 });
    }

    /**
     * A convenience method for calling {@link #compose(String,Object[])}
     * with three arguments.
     */
    public static String compose (
        String key, Object arg1, Object arg2, Object arg3)
    {
        return compose(key, new Object[] { arg1, arg2, arg3 });
    }

    /**
     * A convenience method for calling {@link #compose(String,Object[])}
     * with a single argument that will be automatically tainted (see
     * {@link #taint}).
     */
    public static String tcompose (String key, Object arg)
    {
        return compose(key, new Object[] { taint(arg) });
    }

    /**
     * A convenience method for calling {@link #compose(String,Object[])}
     * with two arguments that will be automatically tainted (see {@link
     * #taint}).
     */
    public static String tcompose (String key, Object arg1, Object arg2)
    {
        return compose(key, new Object[] { taint(arg1), taint(arg2) });
    }

    /**
     * A convenience method for calling {@link #compose(String,Object[])}
     * with three arguments that will be automatically tainted (see {@link
     * #taint}).
     */
    public static String tcompose (
        String key, Object arg1, Object arg2, Object arg3)
    {
        return compose(key, new Object[] {
            taint(arg1), taint(arg2), taint(arg3) });
    }

    /**
     * A convenience method for calling {@link #compose(String,Object[])}
     * with an array of arguments that will be automatically tainted (see
     * {@link #taint}).
     */
    public static String tcompose (String key, Object[] args)
    {
        return MessageUtil.tcompose(key, args);
    }

    /**
     * Returns a fully qualified message key which, when translated by
     * some other bundle, will know to resolve and utilize the supplied
     * bundle to translate this particular key.
     */
    public static String qualify (String bundle, String key)
    {
        return MessageUtil.qualify(bundle, key);
    }

    /**
     * Returns the bundle name from a fully qualified message key.
     *
     * @see #qualify
     */
    public static String getBundle (String qualifiedKey)
    {
        return MessageUtil.getBundle(qualifiedKey);
    }

    /**
     * Returns the unqualified portion of the key from a fully qualified
     * message key.
     *
     * @see #qualify
     */
    public static String getUnqualifiedKey (String qualifiedKey)
    {
        return MessageUtil.getUnqualifiedKey(qualifiedKey);
    }

    /**
     * Initializes the message bundle which will obtain localized messages
     * from the supplied resource bundle. The path is provided purely for
     * reporting purposes.
     */
    public void init (MessageManager msgmgr, String path,
                      ResourceBundle bundle, MessageBundle parent)
    {
        _msgmgr = msgmgr;
        _path = path;
        _bundle = bundle;
        _parent = parent;
    }

    /**
     * Obtains the translation for the specified message key. No arguments
     * are substituted into the translated string. If a translation
     * message does not exist for the specified key, an error is logged
     * and the key itself is returned so that the caller need not worry
     * about handling a null response.
     */
    public String get (String key)
    {
        // if this string is tainted, we don't translate it, instead we
        // simply remove the taint character and return it to the caller
        if (key.startsWith(MessageUtil.TAINT_CHAR)) {
            return key.substring(1);
        }
        String msg = getResourceString(key);
        return (msg != null) ? msg : key;
    }

    /**
     * Returns true if we have a translation mapping for the supplied key,
     * false if not.
     */
    public boolean exists (String key)
    {
        return getResourceString(key, false) != null;
    }

    /**
     * Get a String from the resource bundle, or null if there was an error.
     */
    protected String getResourceString (String key)
    {
        return getResourceString(key, true);
    }

    /**
     * Get a String from the resource bundle, or null if there was an
     * error.
     *
     * @param key the resource key.
     * @param reportMissing whether or not the method should log an error
     * if the resource didn't exist.
     */
    protected String getResourceString (String key, boolean reportMissing)
    {
        try {
            if (_bundle != null) {
                return _bundle.getString(key);
            }
        } catch (MissingResourceException mre) {
            // fall through and try the parent
        }

        // if we have a parent, try getting the string from them
        if (_parent != null) {
            // suppress the parent's missing-key logging; we report (or not)
            // at this level once the whole chain has been consulted
            String value = _parent.getResourceString(key, false);
            if (value != null) {
                return value;
            }
            // if we didn't find it in our parent, we want to fall
            // through and report missing appropriately
        }

        if (reportMissing) {
            log.warning("Missing translation message " +
                        "[bundle=" + _path + ", key=" + key + "].");
            // NOTE(review): dumps the stack to stderr on every missing key;
            // noisy but presumably intentional for locating bad callers.
            Thread.dumpStack();
        }
        return null;
    }

    /**
     * Obtains the translation for the specified message key. The
     * specified argument is substituted into the translated string.
     *
     * <p> See {@link #get(String,Object[])} for notes on handling
     * plurals.
     *
     * <p> See {@link MessageFormat} for more information on how the
     * substitution is performed. If a translation message does not exist
     * for the specified key, an error is logged and the key itself (plus
     * the argument) is returned so that the caller need not worry about
     * handling a null response.
     */
    public String get (String key, Object arg1)
    {
        return get(key, new Object[] { arg1 });
    }

    /**
     * Obtains the translation for the specified message key. The
     * specified arguments are substituted into the translated string.
     *
     * <p> See {@link #get(String,Object[])} for notes on handling
     * plurals.
     *
     * <p> See {@link MessageFormat} for more information on how the
     * substitution is performed. If a translation message does not exist
     * for the specified key, an error is logged and the key itself (plus
     * the arguments) is returned so that the caller need not worry about
     * handling a null response.
     */
    public String get (String key, Object arg1, Object arg2)
    {
        return get(key, new Object[] { arg1, arg2 });
    }

    /**
     * Obtains the translation for the specified message key. The
     * specified arguments are substituted into the translated string.
     *
     * <p> See {@link #get(String,Object[])} for notes on handling
     * plurals.
     *
     * <p> See {@link MessageFormat} for more information on how the
     * substitution is performed. If a translation message does not exist
     * for the specified key, an error is logged and the key itself (plus
     * the arguments) is returned so that the caller need not worry about
     * handling a null response.
     */
    public String get (String key, Object arg1, Object arg2, Object arg3)
    {
        return get(key, new Object[] { arg1, arg2, arg3 });
    }

    /**
     * Obtains the translation for the specified message key. The
     * specified arguments are substituted into the translated string.
     *
     * <p> If the first argument in the array is an {@link Integer}
     * object, a translation will be selected accounting for plurality in
     * the following manner. Assume a message key of
     * <code>m.widgets</code>, the following translations should be
     * defined:
     * <pre>
     * m.widgets.0 = no widgets.
     * m.widgets.1 = {0} widget.
     * m.widgets.n = {0} widgets.
     * </pre>
     *
     * The specified argument is substituted into the translated string as
     * appropriate. Consider using:
     *
     * <pre>
     * m.widgets.n = {0,number,integer} widgets.
     * </pre>
     *
     * to obtain proper insertion of commas and dots as appropriate for
     * the locale.
     *
     * <p> See {@link MessageFormat} for more information on how the
     * substitution is performed. If a translation message does not exist
     * for the specified key, an error is logged and the key itself (plus
     * the arguments) is returned so that the caller need not worry about
     * handling a null response.
     */
    public String get (String key, Object[] args)
    {
        // if this is a qualified key, we need to pass the buck to the
        // appropriate message bundle
        if (key.startsWith(MessageUtil.QUAL_PREFIX)) {
            MessageBundle qbundle = _msgmgr.getBundle(getBundle(key));
            return qbundle.get(getUnqualifiedKey(key), args);
        }

        // look up our message string, selecting the proper plurality
        // string if our first argument is an Integer
        String msg = getResourceString(key + getSuffix(args), false);

        // if the base key is not found, look to see if we should try to
        // convert our first argument to an Integer and try again
        if (msg == null) {
            if (getResourceString(key + ".n", false) != null) {
                try {
                    // args could be a String[], we need to turn it into
                    // an Object[]
                    Object[] newargs = new Object[args.length];
                    System.arraycopy(args, 1, newargs, 1, args.length - 1);
                    // parse the first argument as an integer so that
                    // getSuffix can select .0/.1/.n appropriately
                    newargs[0] = new Integer(args[0].toString());
                    args = newargs;
                    msg = getResourceString(key + getSuffix(args));
                } catch (Exception e) {
                    log.warning("Failure doing automatic plural handling " +
                                "[bundle=" + _path + ", key=" + key +
                                ", args=" + StringUtil.toString(args) +
                                ", error=" + e + "].");
                }
            } else {
                log.warning("Missing translation message " +
                            "[bundle=" + _path + ", key=" + key + "].");
                Thread.dumpStack();
            }
        }

        return (msg != null) ?
            MessageFormat.format(MessageUtil.escape(msg), args)
            : (key + StringUtil.toString(args));
    }

    /**
     * A helper function for {@link #get(String,Object[])} that allows us
     * to automatically perform plurality processing if our first argument
     * is an {@link Integer}.
     */
    protected String getSuffix (Object[] args)
    {
        // NOTE(review): assumes args is non-null and non-empty; all callers
        // in this class satisfy that, but confirm before exposing further.
        if (args[0] instanceof Integer) {
            switch (((Integer)args[0]).intValue()) {
            case 0: return ".0";
            case 1: return ".1";
            default: return ".n";
            }
        }
        return "";
    }

    /**
     * Obtains the translation for the specified compound message key. A
     * compound key contains the message key followed by a tab separated
     * list of message arguments which will be subsituted into the
     * translation string.
     *
     * <p> See {@link MessageFormat} for more information on how the
     * substitution is performed. If a translation message does not exist
     * for the specified key, an error is logged and the key itself (plus
     * the arguments) is returned so that the caller need not worry about
     * handling a null response.
     */
    public String xlate (String compoundKey)
    {
        // if this is a qualified key, we need to pass the buck to the
        // appropriate message bundle; we have to do it here because we
        // want the compound arguments of this key to be translated in the
        // context of the containing message bundle qualification
        if (compoundKey.startsWith(MessageUtil.QUAL_PREFIX)) {
            MessageBundle qbundle = _msgmgr.getBundle(getBundle(compoundKey));
            return qbundle.xlate(getUnqualifiedKey(compoundKey));
        }

        // to be more efficient about creating unnecessary objects, we
        // do some checking before splitting
        int tidx = compoundKey.indexOf('|');
        if (tidx == -1) {
            return get(compoundKey);

        } else {
            String key = compoundKey.substring(0, tidx);
            String argstr = compoundKey.substring(tidx+1);
            String[] args = StringUtil.split(argstr, "|");
            // unescape and translate the arguments
            for (int i = 0; i < args.length; i++) {
                // if the argument is tainted, do no further translation
                // (it might contain |s or other fun stuff)
                if (args[i].startsWith(MessageUtil.TAINT_CHAR)) {
                    args[i] = MessageUtil.unescape(args[i].substring(1));
                } else {
                    // recursively translate untainted arguments before
                    // substituting them into the final message
                    args[i] = xlate(MessageUtil.unescape(args[i]));
                }
            }
            return get(key, args);
        }
    }

    /**
     * Returns a string representation of this instance.
     */
    public String toString ()
    {
        return "[bundle=" + _bundle + ", path=" + _path + "]";
    }

    /** The message manager via whom we'll resolve fully qualified
     * translation strings. */
    protected MessageManager _msgmgr;

    /** The path that identifies the resource bundle we are using to
     * obtain our messages. */
    protected String _path;

    /** The resource bundle from which we obtain our messages. */
    protected ResourceBundle _bundle;

    /** Our parent bundle if we're not the global bundle. */
    protected MessageBundle _parent;
}
|
package com.googlecode.networklog;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.res.Resources;
import android.graphics.drawable.Drawable;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.CheckedTextView;
import android.widget.ImageView;
import android.widget.ListView;
import android.util.Log;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.FileReader;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
/**
 * Presents a dialog in which the user selects installed applications whose
 * toast notifications should be blocked, and persists the selection to a
 * simple one-package-name-per-line file in the app's private data dir.
 */
public class SelectToastApps
{
    Context context;
    ArrayList<AppItem> appData;
    CustomAdapter adapter;
    HashMap<String, String> apps;
    AlertDialog dialog;

    /** Value holder describing one installed application row. */
    class AppItem {
        Drawable icon;
        String name;
        String packageName;
        boolean enabled;
    }

    /** Returns the file in which the blocked-app list is persisted. */
    public static File getSaveFile(Context context) {
        return new File(context.getDir("data", Context.MODE_PRIVATE), "blockedtoasts.txt");
    }

    /**
     * Loads the blocked-app list (one package name per line). Returns an
     * empty map when the save file does not exist; on read errors, shows an
     * error and returns whatever was read so far.
     */
    public static HashMap<String, String> loadBlockedApps(Context context) {
        File file = getSaveFile(context);
        HashMap<String, String> map = new HashMap<String, String>();

        if(!file.exists()) {
            return map;
        }

        BufferedReader br = null;
        try {
            br = new BufferedReader(new FileReader(file));
            String line;
            while ((line = br.readLine()) != null) {
                map.put(line, line);
            }
        } catch(Exception e) {
            Log.w("NetworkLog", "Exception loading toast apps: " + e);
            SysUtils.showError(context, "Error loading blocked notifications", e.getMessage());
            return map;
        } finally {
            // BUGFIX: the old code only closed the reader on the success
            // path, leaking the file descriptor when readLine() threw.
            if(br != null) {
                try {
                    br.close();
                } catch(Exception ignored) {
                    // best effort -- nothing useful to do if close fails
                }
            }
        }
        return map;
    }

    /** Persists the blocked-app map, one package name per line. */
    public static void saveBlockedApps(Context context, HashMap<String, String> map) {
        File file = getSaveFile(context);
        PrintWriter writer = null;
        try {
            writer = new PrintWriter(new BufferedWriter(new FileWriter(file)));
            for(String key : map.keySet()) {
                writer.println(key);
            }
        } catch(Exception e) {
            Log.w("NetworkLog", "Exception saving toast apps: " + e);
            SysUtils.showError(context, "Error saving blocked notifications", e.getMessage());
        } finally {
            // BUGFIX: always release the writer; the old code leaked the
            // underlying stream when println (or the constructor chain)
            // threw. PrintWriter.close() does not throw.
            if(writer != null) {
                writer.close();
            }
        }
    }

    /** Orders app entries case-insensitively by display name. */
    protected static class SortAppsByName implements Comparator<AppItem> {
        public int compare(AppItem o1, AppItem o2) {
            return o1.name.compareToIgnoreCase(o2.name);
        }
    }

    /** Shows the selection dialog, building the app list from scratch. */
    public void showDialog(final Context context) {
        showDialog(context, null);
    }

    /**
     * Shows the selection dialog.
     *
     * @param context the context to build the dialog in.
     * @param data a previously built app list to reuse, or null to build one
     * from the installed-apps tracker and the persisted blocked set.
     */
    public void showDialog(final Context context, ArrayList<AppItem> data)
    {
        this.context = context;
        LayoutInflater inflater = (LayoutInflater) context.getSystemService(Activity.LAYOUT_INFLATER_SERVICE);
        View view = inflater.inflate(R.layout.select_apps, null);

        if(data == null) {
            // Build a fresh, name-sorted list of installed apps and mark the
            // ones already persisted as blocked.
            appData = new ArrayList<AppItem>();
            for(ApplicationsTracker.AppEntry app : ApplicationsTracker.installedApps) {
                AppItem item = new AppItem();
                item.name = app.name;
                item.packageName = app.packageName;
                appData.add(item);
            }
            Collections.sort(appData, new SortAppsByName());

            apps = loadBlockedApps(context);
            if(apps != null) {
                for(AppItem item : appData) {
                    if(apps.get(item.packageName) != null) {
                        item.enabled = true;
                    }
                }
            }
        } else {
            appData = data;
        }

        ListView listView = (ListView) view.findViewById(R.id.select_apps);
        adapter = new CustomAdapter(context, R.layout.select_apps_item, appData);
        listView.setAdapter(adapter);
        listView.setOnItemClickListener(new CustomOnItemClickListener());
        listView.setFastScrollEnabled(true);

        ((Button) view.findViewById(R.id.select_all)).setOnClickListener(new View.OnClickListener() {
            public void onClick(View v) {
                for(AppItem item : appData) {
                    item.enabled = true;
                }
                adapter.notifyDataSetChanged();
            }
        });

        ((Button) view.findViewById(R.id.select_none)).setOnClickListener(new View.OnClickListener() {
            public void onClick(View v) {
                for(AppItem item : appData) {
                    item.enabled = false;
                }
                adapter.notifyDataSetChanged();
            }
        });

        Resources res = context.getResources();
        AlertDialog.Builder builder = new AlertDialog.Builder(context);
        builder.setTitle(res.getString(R.string.pref_toast_block_apps))
            .setView(view)
            .setCancelable(true)
            .setNegativeButton(res.getString(R.string.cancel), new DialogInterface.OnClickListener() {
                public void onClick(DialogInterface dialog, int id) {
                    dialog.dismiss();
                    NetworkLog.selectToastApps = null;
                }
            })
            .setPositiveButton(res.getString(R.string.done), new DialogInterface.OnClickListener() {
                public void onClick(DialogInterface dialog, int id) {
                    // Persist the selection and hand it to the running service.
                    apps = new HashMap<String, String>();
                    for(AppItem item : appData) {
                        if(item.enabled == true) {
                            apps.put(item.packageName, item.packageName);
                        }
                    }
                    saveBlockedApps(context, apps);
                    NetworkLogService.toastBlockedApps = apps;
                    dialog.dismiss();
                    NetworkLog.selectToastApps = null;
                }
            });

        dialog = builder.create();
        dialog.show();
    }

    /** Toggles an app's blocked state when its row is tapped. */
    private class CustomOnItemClickListener implements OnItemClickListener {
        @Override
        public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
            AppItem item = appData.get(position);
            item.enabled = !item.enabled;
            CheckedTextView ctv = (CheckedTextView) view.findViewById(R.id.select_apps_name);
            ctv.setChecked(item.enabled);
        }
    }

    /** List adapter rendering one checkable row per app, with view recycling. */
    private class CustomAdapter extends ArrayAdapter<AppItem> {
        LayoutInflater inflater = (LayoutInflater) context.getSystemService(Activity.LAYOUT_INFLATER_SERVICE);

        public CustomAdapter(Context context, int resource, List<AppItem> objects) {
            super(context, resource, objects);
        }

        @Override
        public View getView(int position, View convertView, ViewGroup parent) {
            ViewHolder holder;
            ImageView icon;
            CheckedTextView name;
            AppItem item = getItem(position);

            if(convertView == null) {
                convertView = inflater.inflate(R.layout.select_apps_item, null);
                holder = new ViewHolder(convertView);
                convertView.setTag(holder);
            } else {
                holder = (ViewHolder) convertView.getTag();
            }

            icon = holder.getIcon();
            // Tag with the package name so async icon loads can verify the
            // view still belongs to this item when they complete.
            icon.setTag(item.packageName);
            icon.setImageDrawable(ApplicationsTracker.loadIcon(context, icon, item.packageName));

            name = holder.getName();
            name.setText(item.name);
            name.setChecked(item.enabled);
            return convertView;
        }
    }

    /** Caches row sub-view lookups to avoid repeated findViewById calls. */
    private class ViewHolder {
        private View view;
        private ImageView icon;
        private CheckedTextView name;

        public ViewHolder(View view) {
            this.view = view;
        }

        public ImageView getIcon() {
            if(icon == null) {
                icon = (ImageView) view.findViewById(R.id.select_apps_icon);
            }
            return icon;
        }

        public CheckedTextView getName() {
            if(name == null) {
                name = (CheckedTextView) view.findViewById(R.id.select_apps_name);
            }
            return name;
        }
    }
}
|
// Narya library - tools for developing networked games
// This library is free software; you can redistribute it and/or modify it
// (at your option) any later version.
// This library is distributed in the hope that it will be useful,
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// You should have received a copy of the GNU Lesser General Public
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
package com.threerings.util;
import java.text.MessageFormat;
import java.util.Collection;
import java.util.Enumeration;
import java.util.MissingResourceException;
import java.util.ResourceBundle;
import com.samskivert.text.MessageUtil;
import com.samskivert.util.StringUtil;
import static com.threerings.NaryaLog.log;
/**
* A message bundle provides an easy mechanism by which to obtain translated message strings from
* a resource bundle. It uses the {@link MessageFormat} class to substitute arguments into the
* translation strings. Message bundles would generally be obtained via the {@link MessageManager},
* but could be constructed individually if so desired.
*/
public class MessageBundle
{
    /**
     * Call this to "taint" any string that has been entered by an entity outside the application
     * so that the translation code knows not to attempt to translate this string when doing
     * recursive translations (see {@link #xlate}).
     */
    public static String taint (Object text)
    {
        return MessageUtil.taint(text);
    }

    /**
     * Composes a message key with a single argument. The message can subsequently be translated
     * in a single call using {@link #xlate}.
     */
    public static String compose (String key, Object arg)
    {
        return MessageUtil.compose(key, new Object[] { arg });
    }

    /**
     * Composes a message key with an array of arguments. The message can subsequently be
     * translated in a single call using {@link #xlate}.
     */
    public static String compose (String key, Object... args)
    {
        return MessageUtil.compose(key, args);
    }

    /**
     * Composes a message key with an array of arguments. The message can subsequently be
     * translated in a single call using {@link #xlate}.
     */
    public static String compose (String key, String... args)
    {
        return MessageUtil.compose(key, args);
    }

    /**
     * A convenience method for calling {@link #compose(String,Object[])} with an array of
     * arguments that will be automatically tainted (see {@link #taint}).
     */
    public static String tcompose (String key, Object... args)
    {
        return MessageUtil.tcompose(key, args);
    }

    /**
     * Required for backwards compatibility. Alas.
     */
    public static String tcompose (String key, Object arg)
    {
        return MessageUtil.tcompose(key, new Object[] { arg });
    }

    /**
     * Required for backwards compatibility. Alas.
     */
    public static String tcompose (String key, Object arg1, Object arg2)
    {
        return MessageUtil.tcompose(key, new Object[] { arg1, arg2 });
    }

    /**
     * A convenience method for calling {@link #compose(String,String[])} with an array of
     * arguments that will be automatically tainted (see {@link #taint}).
     */
    public static String tcompose (String key, String... args)
    {
        return MessageUtil.tcompose(key, args);
    }

    /**
     * Returns a fully qualified message key which, when translated by some other bundle, will
     * know to resolve and utilize the supplied bundle to translate this particular key.
     */
    public static String qualify (String bundle, String key)
    {
        return MessageUtil.qualify(bundle, key);
    }

    /**
     * Returns the bundle name from a fully qualified message key.
     *
     * @see #qualify
     */
    public static String getBundle (String qualifiedKey)
    {
        return MessageUtil.getBundle(qualifiedKey);
    }

    /**
     * Returns the unqualified portion of the key from a fully qualified message key.
     *
     * @see #qualify
     */
    public static String getUnqualifiedKey (String qualifiedKey)
    {
        return MessageUtil.getUnqualifiedKey(qualifiedKey);
    }

    /**
     * Initializes the message bundle which will obtain localized messages from the supplied
     * resource bundle. The path is provided purely for reporting purposes.
     */
    public void init (MessageManager msgmgr, String path,
                      ResourceBundle bundle, MessageBundle parent)
    {
        _msgmgr = msgmgr;
        _path = path;
        _bundle = bundle;
        _parent = parent;
    }

    /**
     * Obtains the translation for the specified message key. No arguments are substituted into
     * the translated string. If a translation message does not exist for the specified key, an
     * error is logged and the key itself is returned so that the caller need not worry about
     * handling a null response.
     */
    public String get (String key)
    {
        // if this string is tainted, we don't translate it, instead we
        // simply remove the taint character and return it to the caller
        if (MessageUtil.isTainted(key)) {
            return MessageUtil.untaint(key);
        }
        String msg = getResourceString(key);
        return (msg != null) ? msg : key;
    }

    /**
     * Adds all messages whose key starts with the specified prefix to the supplied collection.
     *
     * @param includeParent if true, messages from our parent bundle (and its parent bundle, all
     * the way up the chain) will be included.
     */
    public void getAll (String prefix, Collection<String> messages, boolean includeParent)
    {
        // fix: guard against a null bundle — getResourceString() explicitly
        // tolerates _bundle being null, so enumeration must tolerate it too
        if (_bundle != null) {
            Enumeration<String> iter = _bundle.getKeys();
            while (iter.hasMoreElements()) {
                String key = iter.nextElement();
                if (key.startsWith(prefix)) {
                    messages.add(get(key));
                }
            }
        }
        if (includeParent && _parent != null) {
            _parent.getAll(prefix, messages, includeParent);
        }
    }

    /**
     * Adds all keys for messages whose key starts with the specified prefix to the supplied
     * collection.
     *
     * @param includeParent if true, messages from our parent bundle (and its parent bundle, all
     * the way up the chain) will be included.
     */
    public void getAllKeys (String prefix, Collection<String> keys, boolean includeParent)
    {
        // fix: same null-bundle guard as getAll()
        if (_bundle != null) {
            Enumeration<String> iter = _bundle.getKeys();
            while (iter.hasMoreElements()) {
                String key = iter.nextElement();
                if (key.startsWith(prefix)) {
                    keys.add(key);
                }
            }
        }
        if (includeParent && _parent != null) {
            _parent.getAllKeys(prefix, keys, includeParent);
        }
    }

    /**
     * Returns true if we have a translation mapping for the supplied key, false if not.
     */
    public boolean exists (String key)
    {
        return getResourceString(key, false) != null;
    }

    /**
     * Get a String from the resource bundle, or null if there was an error.
     */
    public String getResourceString (String key)
    {
        return getResourceString(key, true);
    }

    /**
     * Get a String from the resource bundle, or null if there was an error.
     *
     * @param key the resource key.
     * @param reportMissing whether or not the method should log an error if the resource didn't
     * exist.
     */
    public String getResourceString (String key, boolean reportMissing)
    {
        try {
            if (_bundle != null) {
                return _bundle.getString(key);
            }
        } catch (MissingResourceException mre) {
            // fall through and try the parent
        }
        // if we have a parent, try getting the string from them
        if (_parent != null) {
            String value = _parent.getResourceString(key, false);
            if (value != null) {
                return value;
            }
            // if we didn't find it in our parent, we want to fall
            // through and report missing appropriately
        }
        if (reportMissing) {
            log.warning("Missing translation message", "bundle", _path, "key", key, new Exception());
        }
        return null;
    }

    /**
     * Obtains the translation for the specified message key. The specified arguments are
     * substituted into the translated string.
     *
     * <p> If the first argument in the array is an {@link Integer} object, a translation will be
     * selected accounting for plurality in the following manner. Assume a message key of
     * <code>m.widgets</code>, the following translations should be defined: <pre> m.widgets.0 =
     * no widgets. m.widgets.1 = {0} widget. m.widgets.n = {0} widgets. </pre>
     *
     * The specified argument is substituted into the translated string as appropriate. Consider
     * using:
     *
     * <pre> m.widgets.n = {0,number,integer} widgets. </pre>
     *
     * to obtain proper insertion of commas and dots as appropriate for the locale.
     *
     * <p> See {@link MessageFormat} for more information on how the substitution is performed. If
     * a translation message does not exist for the specified key, an error is logged and the key
     * itself (plus the arguments) is returned so that the caller need not worry about handling a
     * null response.
     */
    public String get (String key, Object... args)
    {
        // if this is a qualified key, we need to pass the buck to the
        // appropriate message bundle
        if (key.startsWith(MessageUtil.QUAL_PREFIX)) {
            MessageBundle qbundle = _msgmgr.getBundle(getBundle(key));
            return qbundle.get(getUnqualifiedKey(key), args);
        }
        // Select the proper suffix if our first argument can be coaxed into an integer
        String suffix = getSuffix(args);
        String msg = getResourceString(key + suffix, false);
        if (msg == null) {
            // fix: use a real emptiness check instead of the fragile reference
            // comparison (suffix != "") the original relied on, which only
            // worked because getSuffix() returned the interned "" literal
            if (suffix.length() > 0) {
                // Try the original key without the plurality suffix
                msg = getResourceString(key, false);
            }
            if (msg == null) {
                log.warning("Missing translation message", "bundle", _path, "key", key,
                            new Exception());
                // return something bogus
                return (key + StringUtil.toString(args));
            }
        }
        return MessageFormat.format(MessageUtil.escape(msg), args);
    }

    /**
     * Obtains the translation for the specified message key. The specified arguments are
     * substituted into the translated string.
     */
    public String get (String key, String... args)
    {
        return get(key, (Object[]) args);
    }

    /**
     * A helper function for {@link #get(String,Object[])} that allows us to automatically perform
     * plurality processing if our first argument can be coaxed to an {@link Integer}.
     */
    public String getSuffix (Object[] args)
    {
        if (args.length > 0 && args[0] != null) {
            try {
                int count = (args[0] instanceof Integer) ? (Integer)args[0] :
                    Integer.parseInt(args[0].toString());
                switch (count) {
                case 0: return ".0";
                case 1: return ".1";
                default: return ".n";
                }
            } catch (NumberFormatException e) {
                // Fall out
            }
        }
        return "";
    }

    /**
     * Obtains the translation for the specified compound message key. A compound key contains the
     * message key followed by a tab separated list of message arguments which will be substituted
     * into the translation string.
     *
     * <p> See {@link MessageFormat} for more information on how the substitution is performed. If
     * a translation message does not exist for the specified key, an error is logged and the key
     * itself (plus the arguments) is returned so that the caller need not worry about handling a
     * null response.
     */
    public String xlate (String compoundKey)
    {
        // if this is a qualified key, we need to pass the buck to the appropriate message bundle;
        // we have to do it here because we want the compound arguments of this key to be
        // translated in the context of the containing message bundle qualification
        if (compoundKey.startsWith(MessageUtil.QUAL_PREFIX)) {
            MessageBundle qbundle = _msgmgr.getBundle(getBundle(compoundKey));
            return qbundle.xlate(getUnqualifiedKey(compoundKey));
        }
        // to be more efficient about creating unnecessary objects, we
        // do some checking before splitting
        int tidx = compoundKey.indexOf('|');
        if (tidx == -1) {
            return get(compoundKey);
        } else {
            String key = compoundKey.substring(0, tidx);
            String argstr = compoundKey.substring(tidx+1);
            String[] args = StringUtil.split(argstr, "|");
            // unescape and translate the arguments
            for (int i = 0; i < args.length; i++) {
                // if the argument is tainted, do no further translation
                // (it might contain |s or other fun stuff)
                if (MessageUtil.isTainted(args[i])) {
                    args[i] = MessageUtil.unescape(MessageUtil.untaint(args[i]));
                } else {
                    args[i] = xlate(MessageUtil.unescape(args[i]));
                }
            }
            return get(key, (Object[]) args);
        }
    }

    @Override
    public String toString ()
    {
        return "[bundle=" + _bundle + ", path=" + _path + "]";
    }

    /** The message manager via whom we'll resolve fully qualified translation strings. */
    protected MessageManager _msgmgr;

    /** The path that identifies the resource bundle we are using to obtain our messages. */
    protected String _path;

    /** The resource bundle from which we obtain our messages. */
    protected ResourceBundle _bundle;

    /** Our parent bundle if we're not the global bundle. */
    protected MessageBundle _parent;
}
|
package com.gvalidate.validate;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.Properties;
import javax.servlet.http.HttpServletRequest;
import com.gvalidate.utils.PropertyUtils;
public class ParameterValidator {
    private static final Logger logger = Logger.getLogger(ParameterValidator.class
            .getName());

    /**
     * Validates every entry of the supplied map.
     *
     * @return true iff every value matches its key's configured pattern.
     *         An empty map is vacuously valid (the original returned false
     *         for empty input, which rejected requests with no parameters).
     * @throws ValidationException if any value fails its pattern.
     */
    public static boolean validate(Map<String, String> m) {
        for (Entry<String, String> entry : m.entrySet()) {
            if (!validate(entry.getKey(), entry.getValue())) {
                return false;
            }
        }
        return true;
    }

    /**
     * Validates a single value against the regex configured for {@code key}
     * ({@code <key>.regex} in the properties), falling back to the "Default"
     * pattern when none is configured.
     *
     * @return true when the value matches.
     * @throws ValidationException when the value does not match.
     */
    public static boolean validate(String key, String value) {
        // Consolidated: the original duplicated the match/log/throw sequence
        // in both branches; only the pattern source differs.
        String checkForPresence = PropertyUtils.getProperty(key + ".regex");
        ParameterType param;
        if (checkForPresence != null) {
            param = new ParameterType(key);
            logger.log(Level.INFO, "Validating " + key + " against " + param.regex);
        } else {
            // No key-specific pattern configured; validate against the default.
            param = new ParameterType("Default");
            logger.log(Level.INFO, "Validating " + key + " against Default value=" + param.regex);
        }
        if (!value.matches(param.regex)) {
            // Failed Case
            logger.log(Level.INFO, "Validation failed for " + key + " against " + param.regex);
            throw new ValidationException(param);
        }
        return true;
    }

    /**
     * Validates every parameter of the servlet request.
     *
     * @return true iff all parameter values pass; a request with no
     *         parameters is vacuously valid.
     * @throws ValidationException if any value fails its pattern.
     */
    public static boolean validate(HttpServletRequest request) {
        Map<String, String[]> requestParameterMap = new HashMap<String, String[]>(request.getParameterMap());
        for (Entry<String, String[]> entry : requestParameterMap.entrySet()) {
            if (!validate(entry.getKey(), entry.getValue())) {
                return false;
            }
        }
        return true;
    }

    /**
     * Validates all values submitted for one parameter name. A request may
     * legally carry the same parameter several times, so every value is
     * checked (the original only checked value[0], per its own TODO, and
     * threw ArrayIndexOutOfBoundsException on an empty array).
     */
    private static boolean validate(String key, String[] values) {
        for (String value : values) {
            if (!validate(key, value)) {
                return false;
            }
        }
        return true;
    }
}
|
package com.jcw.andriod.fileListView;
/*
* Author - Woodruff
* todo -- add up option as top list item
*/
import android.content.Context;
import android.os.Environment;
import android.util.AttributeSet;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ListAdapter;
import android.widget.ListView;
import java.io.File;
import android.graphics.*;
import android.util.*;
public class FileListView extends ListView {
    // Directory whose contents are currently displayed.
    // Feel free to change this to any existing directory.
    public File baseDirectory = Environment.getExternalStorageDirectory();

    // Extensions that may be shown; length 0 by default to allow everything.
    public String[] fileExtensions = new String[0];

    // If this string is non-empty, only files containing the text in this
    // string will be displayed. Empty by default.
    public String searchText = "";

    // Ordering applied to the listing; alphabetical by default.
    public SortingMode sortingMode = SortingMode.Alphabetical;

    // When true, only files that are actually directories will be shown.
    public boolean directoriesOnly = false;

    private FileSelectListener listener;

    public FileListView(Context context) {
        super(context);
        init();
    }

    public FileListView(Context context, AttributeSet attrs) {
        super(context, attrs);
        init();
    }

    public FileListView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        init();
    }

    /**
     * Wires up the adapter and the row-click handler. Row 0 is the synthetic
     * "up" entry: tapping it ascends to the parent directory when one exists.
     * Tapping a directory descends into it; tapping a plain file notifies the
     * registered listener and keeps browsing in the containing directory.
     */
    private void init() {
        setAdapter(getCurrentAdapter());
        this.setCacheColorHint(Color.TRANSPARENT);
        this.setOnItemClickListener(new OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> adapterView, View view, int index, long id) {
                if (index == 0) { // this is the up item
                    File up = baseDirectory.getParentFile();
                    // up.listFiles() == null also covers an unreadable parent
                    if (up == null || up.listFiles() == null) { // already top level directory
                        return;
                    }
                    baseDirectory = up;
                    setAdapter(getCurrentAdapter());
                    return;
                }
                // idiom fix: File(parent, child) instead of string concatenation
                baseDirectory = new File(baseDirectory, ((FileListItemView) view).getRepresentedDir());
                if (!baseDirectory.isDirectory()) {
                    if (listener != null) {
                        listener.fileSelected(baseDirectory);
                    }
                    // this is so the user can continue browsing with the same
                    // instance even after they selected a file
                    baseDirectory = new File(baseDirectory.getParent());
                } else {
                    // reset the adapter with a new directory
                    setAdapter(getCurrentAdapter());
                }
            }
        });
    }

    /*
     * Returns a list adapter for the current directory: a synthetic "up"
     * placeholder first, then the filtered/sorted sub-directories, then the
     * filtered/sorted files.
     */
    private ListAdapter getCurrentAdapter() {
        // gets the lists of files and directories
        File[] files = FileUtils.listFiles(baseDirectory);
        File[] directories = FileUtils.listDirectories(baseDirectory);
        // filters out unwanted entries
        File[] filteredFiles = filterSort(files);
        File[] filteredDirectories = filterSort(directories);
        // prepend the "up" placeholder before the directories
        File[] upIncluded = new File[filteredDirectories.length + 1];
        upIncluded[0] = new File("...");
        System.arraycopy(filteredDirectories, 0, upIncluded, 1, filteredDirectories.length);
        File[] joined = ListUtils.join(upIncluded, filteredFiles);
        return new FileListAdapter(getContext(), joined);
    }

    /** Applies the directory/search/extension filters, then the current sort. */
    private File[] filterSort(File[] files) {
        File[] filteredFiles = ListUtils.directoriesOnly(
                // then anything that doesn't contain the search term
                ListUtils.search(
                        // finally anything that doesn't match the specified extensions
                        ListUtils.filterExtensions(files, fileExtensions),
                        searchText), directoriesOnly);
        // sorts the remaining files according to the current mode
        return this.sortingMode.sort(filteredFiles);
    }

    /**
     * Moves baseDirectory to its parent, if any.
     * NOTE(review): this does not rebuild the adapter — callers appear to be
     * expected to invoke refresh() afterwards; confirm against call sites.
     */
    public void loadLastDir() {
        File newFile = baseDirectory.getParentFile();
        if (newFile == null) { // this is the top directory -- no parent
            return;
        }
        baseDirectory = newFile;
    }

    public void setExtensions(String[] extensions) {
        this.fileExtensions = extensions;
    }

    public void searchWith(String text) {
        this.searchText = text;
    }

    public void setDirectoriesOnly(boolean directoriesOnly) {
        this.directoriesOnly = directoriesOnly;
    }

    public void setSortingMode(SortingMode newMode) {
        this.sortingMode = newMode;
    }

    /** Rebuilds the listing for the current baseDirectory and filters. */
    public void refresh() {
        setAdapter(getCurrentAdapter());
    }

    public void setFileSelectedListener(FileSelectListener listener) {
        this.listener = listener;
    }

    /** Callback invoked when the user taps a non-directory entry. */
    public interface FileSelectListener {
        public void fileSelected(File selected);
    }

    public enum SortingMode {
        Alphabetical,
        OldestNewest,
        NewestOldest;

        /** Returns {@code list} ordered according to this mode. */
        public File[] sort(File[] list) {
            switch (this) {
            case Alphabetical:
                return ListUtils.sortByName(list);
            case OldestNewest:
                return ListUtils.sortByDate(list);
            case NewestOldest:
                return ListUtils.sortNewestOldest(list);
            default:
                throw new Error("If you add a new type to sorting mode, you need to update the" +
                        " SortingMode.sort method to handle this new option");
            }
        }
    }
}
|
package com.namelessmc.NamelessAPI;
import java.net.URL;
import java.util.Date;
import java.util.UUID;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import com.namelessmc.NamelessAPI.utils.NamelessRequestUtil;
import com.namelessmc.NamelessAPI.utils.NamelessRequestUtil.Request;
public final class NamelessPlayer {
    private String userName;
    private String displayName;
    private UUID uuid;
    private int groupID;
    private int reputation;
    private Date registeredDate;
    private boolean exists;
    private boolean validated;
    private boolean banned;

    private URL baseUrl;
    private JsonParser parser;

    /**
     * Creates a new NamelessPlayer object. This constructor should not be called in the main server thread.
     * @param uuid Minecraft UUID of the player to look up
     * @param baseUrl Base API URL: <i>http(s)://yoursite.com/api/v1/API_KEY<i>
     * @see #NamelessPlayer(String, URL)
     */
    public NamelessPlayer(UUID uuid, URL baseUrl) {
        parser = new JsonParser();
        this.baseUrl = baseUrl;
        Request request = NamelessRequestUtil.sendPostRequest(baseUrl, "get", "uuid=" + NamelessAPI.urlEncodeString(uuid.toString()));
        init(request);
    }

    /**
     * Creates a new NamelessPlayer object. This constructor should not be called in the main server thread.
     * @param username website username of the player to look up
     * @param baseUrl Base API URL
     * @see #NamelessPlayer(UUID, URL)
     */
    public NamelessPlayer(String username, URL baseUrl) {
        this.parser = new JsonParser();
        this.baseUrl = baseUrl;
        Request request = NamelessRequestUtil.sendPostRequest(baseUrl, "get", "username=" + NamelessAPI.urlEncodeString(username));
        init(request);
    }

    /** Populates the fields from a "get" response; marks the player as absent on failure. */
    private void init(Request request) {
        if (!request.hasSucceeded()) {
            exists = false;
            return;
        }

        // No errors, parse response
        JsonObject response = request.getResponse();
        JsonObject message = parser.parse(response.get("message").getAsString()).getAsJsonObject();
        exists = true;

        // Convert UNIX timestamp to date (strip surrounding quotes first)
        Date registered = new Date(Long.parseLong(message.get("registered").toString().replaceAll("^\"|\"$", "")) * 1000);

        userName = message.get("username").getAsString();
        displayName = message.get("displayname").getAsString();
        uuid = UUID.fromString(addDashesToUUID(message.get("uuid").getAsString()));
        groupID = message.get("group_id").getAsInt();
        registeredDate = registered;
        reputation = message.get("reputation").getAsInt();
        validated = message.get("validated").getAsString().equals("1");
        banned = message.get("banned").getAsString().equals("1");
    }

    /**
     * Inserts the four dashes into a 32-character undashed UUID string
     * (8-4-4-4-12 grouping) so {@link UUID#fromString(String)} can parse it.
     */
    public static String addDashesToUUID(String uuid) {
        // Insert from the highest raw offset first so earlier insertions do
        // not shift the later positions; equivalent to the original
        // ascending inserts at 8, 13, 18, 23 but with a single builder.
        StringBuilder sb = new StringBuilder(uuid);
        sb.insert(20, "-");
        sb.insert(16, "-");
        sb.insert(12, "-");
        sb.insert(8, "-");
        return sb.toString();
    }

    /**
     * @return The Minecraft username associated with the provided UUID. This is not always the name displayed on the website.
     * @see #getDisplayName()
     */
    public String getUsername() {
        if (!exists) {
            throw new UnsupportedOperationException("This player does not exist.");
        }
        return userName;
    }

    /**
     * @return The name this player uses on the website. This is not always the same as their Minecraft username.
     * @see #getUsername()
     */
    public String getDisplayName() {
        if (!exists) {
            throw new UnsupportedOperationException("This player does not exist.");
        }
        return displayName;
    }

    /**
     * @return Minecraft UUID of this player.
     * NOTE(review): unlike the other getters this does not check {@code exists};
     * for a player looked up by username that did not exist, this returns null.
     * @see #getUsername()
     */
    public UUID getUniqueId() {
        return uuid;
    }

    /**
     * @return A numerical group id.
     */
    public int getGroupID() {
        if (!exists) {
            throw new UnsupportedOperationException("This player does not exist.");
        }
        return groupID;
    }

    /**
     * @return The user's site reputation.
     */
    public int getReputations() {
        if (!exists) {
            throw new UnsupportedOperationException("This player does not exist.");
        }
        return reputation;
    }

    /**
     * @return The date the user registered on the website.
     */
    public Date getRegisteredDate() {
        if (!exists) {
            throw new UnsupportedOperationException("This player does not exist.");
        }
        return registeredDate;
    }

    /**
     * @return Whether an account associated with the UUID exists.
     */
    public boolean exists() {
        return exists;
    }

    /**
     * @return Whether this account has been validated. An account is validated when a password is set.
     */
    public boolean isValidated() {
        if (!exists) {
            throw new UnsupportedOperationException("This player does not exist.");
        }
        return validated;
    }

    /**
     * @return Whether this account is banned from the website.
     */
    public boolean isBanned() {
        if (!exists) {
            throw new UnsupportedOperationException("This player does not exist.");
        }
        return banned;
    }

    /**
     * @return Number of alerts
     * @see #getMessageCount()
     * @throws NamelessException
     */
    public int getAlertCount() throws NamelessException {
        String postString = "uuid=" + NamelessAPI.urlEncodeString(uuid.toString());
        Request request = NamelessRequestUtil.sendPostRequest(baseUrl, "getNotifications", postString);

        if (!request.hasSucceeded()) {
            throw new NamelessException(request.getException());
        }

        JsonObject response = request.getResponse();
        JsonObject message = parser.parse(response.get("message").getAsString()).getAsJsonObject();
        return message.get("alerts").getAsInt();
    }

    /**
     * @return Number of unread private messages
     * @see #getAlertCount()
     * @throws NamelessException
     */
    public int getMessageCount() throws NamelessException {
        String postString = "uuid=" + NamelessAPI.urlEncodeString(uuid.toString());
        Request request = NamelessRequestUtil.sendPostRequest(baseUrl, "getNotifications", postString);

        if (!request.hasSucceeded()) {
            throw new NamelessException(request.getException());
        }

        JsonObject response = request.getResponse();
        JsonObject message = parser.parse(response.get("message").getAsString()).getAsJsonObject();
        return message.get("messages").getAsInt();
    }

    /**
     * Sets the player's group.
     * @param groupId Numerical ID associated with a group
     * @throws NamelessException
     */
    public void setGroup(int groupId) throws NamelessException {
        // Bug fix: the original never appended the groupId value and used '?'
        // as a POST-body separator; POST parameters are joined with '&'.
        String postString = "uuid=" + NamelessAPI.urlEncodeString(uuid.toString())
                + "&group_id=" + groupId;
        Request request = NamelessRequestUtil.sendPostRequest(baseUrl, "setGroup", postString);
        if (!request.hasSucceeded()) {
            throw new NamelessException(request.getException());
        }
    }

    /**
     * Changes the player's username on the website. You should check if another account with the name <i>newUserName</i> exists before calling this method.
     * @param newUserName the new website username
     * @throws NamelessException
     */
    public void updateUsername(String newUserName) throws NamelessException {
        String encodedUuid = NamelessAPI.urlEncodeString(uuid.toString());
        String encodedName = NamelessAPI.urlEncodeString(newUserName);
        // Bug fix: POST parameters are joined with '&', not '?'.
        String postString = "id=" + encodedUuid + "&new_username=" + encodedName;
        Request request = NamelessRequestUtil.sendPostRequest(baseUrl, "updateUsername", postString);
        if (!request.hasSucceeded()) {
            throw new NamelessException(request.getException());
        }
    }

    /**
     * Registers a new account. The player will be sent an email to set a password.
     * @param minecraftName In-game name for this player
     * @param email Email address
     * @throws NamelessException
     */
    public void register(String minecraftName, String email) throws NamelessException {
        String encodedUuid = NamelessAPI.urlEncodeString(uuid.toString());
        String encodedName = NamelessAPI.urlEncodeString(minecraftName);
        String encodedEmail = NamelessAPI.urlEncodeString(email);
        // Bug fix: the original passed (uuid, name, email) into the format
        // string "username=%s&uuid=%s&email=%s", sending the uuid as the
        // username and vice versa.
        String postString = String.format("username=%s&uuid=%s&email=%s", encodedName, encodedUuid, encodedEmail);
        Request request = NamelessRequestUtil.sendPostRequest(baseUrl, "register", postString);
        if (!request.hasSucceeded()) {
            String errorMessage = request.getException().getMessage();
            if (errorMessage.contains("Username") || errorMessage.contains("UUID") || errorMessage.contains("Email")) {
                throw new IllegalArgumentException(errorMessage);
            } else {
                throw new NamelessException(request.getException());
            }
        }
    }

    /**
     * Reports a player.
     * @param reportedUuid UUID of the reported player
     * @param reportedUsername In-game name of the reported player
     * @param reason Reason why this player has been reported
     * @throws NamelessException
     */
    public void reportPlayer(UUID reportedUuid, String reportedUsername, String reason) throws NamelessException {
        String encodedReporterUuid = NamelessAPI.urlEncodeString(uuid.toString());
        String encodedReportedUuid = NamelessAPI.urlEncodeString(reportedUuid.toString());
        String encodedName = NamelessAPI.urlEncodeString(reportedUsername);
        String encodedReason = NamelessAPI.urlEncodeString(reason);
        // Bug fix: POST parameters are joined with '&', not '?'.
        String postString = String.format("reporter_uuid=%s&reported_uuid=%s&reported_username=%s&content=%s",
                encodedReporterUuid, encodedReportedUuid, encodedName, encodedReason);
        Request request = NamelessRequestUtil.sendPostRequest(baseUrl, "createReport", postString);
        if (!request.hasSucceeded()) {
            throw new NamelessException(request.getException());
        }
    }
}
|
package com.phantommentalists.Twenty14;
import edu.wpi.first.wpilibj.CANJaguar;
import edu.wpi.first.wpilibj.can.CANTimeoutException;
import edu.wpi.first.wpilibj.Solenoid;
/**
*
* @author jcurtiss001
*/
public class DriveMotor {
    public CANJaguar motor;
    protected Solenoid shifter;

    /**
     * Configures one drive motor on the CAN bus (percent-Vbus control,
     * capped output voltage, brake on neutral) with its shifter solenoid,
     * starting in high gear.
     */
    public DriveMotor(int driveCANID, int Solenoidchannel) throws CANTimeoutException {
        motor = new CANJaguar(driveCANID, CANJaguar.ControlMode.kPercentVbus);
        motor.configMaxOutputVoltage(Parameters.maxMotorVoltage);
        motor.configNeutralMode(CANJaguar.NeutralMode.kBrake);
        shifter = new Solenoid(Solenoidchannel);
        setGear(Gear.kHigh);
    }

    /** Commands the motor output (percent Vbus). */
    public void set(double setpoint) throws CANTimeoutException {
        motor.setX(setpoint);
    }

    /**
     * setGear
     *
     * Shifts to the requested gear: the solenoid is released (false) for
     * low gear and energized (true) for high gear.
     *
     * @param gear the gear to shift into
     */
    public void setGear(Gear gear) {
        shifter.set(gear != Gear.kLow);
    }

    /** @return true when in low gear (solenoid off). FIX ME!!! Verify true is really high gear */
    public boolean isLowGear() {
        return !shifter.get();
    }

    /**
     * isHighGear
     *
     * @return true when in high gear (solenoid on). FIX ME!!! Verify false is really low gear
     */
    public boolean isHighGear() {
        return shifter.get();
    }

    /** @return the current gear as a Gear constant. */
    public Gear getGear() {
        return isHighGear() ? Gear.kHigh : Gear.kLow;
    }

    /** Type-safe "enum" of the two gears (pre-enum style kept for the FRC runtime). */
    public static class Gear {
        private static final int kLowValue = 1;
        private static final int kHighValue = 2;

        private final int value;

        public static final Gear kLow = new Gear(Gear.kLowValue);
        public static final Gear kHigh = new Gear(Gear.kHighValue);

        protected Gear(int gear) {
            this.value = gear;
        }
    }
}
|
package org.projectspinoza.twitterswissarmyknife;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
import java.util.Set;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.projectspinoza.twitterswissarmyknife.command.BaseCommand;
import org.projectspinoza.twitterswissarmyknife.command.CommandStreamStatuses;
import org.projectspinoza.twitterswissarmyknife.command.TsakCommand;
import org.projectspinoza.twitterswissarmyknife.streaming.TwitterStreamingExcecutor;
import org.projectspinoza.twitterswissarmyknife.util.TsakResponse;
import org.reflections.Reflections;
import twitter4j.Twitter;
import twitter4j.TwitterException;
import twitter4j.TwitterFactory;
import twitter4j.conf.ConfigurationBuilder;
import com.beust.jcommander.JCommander;
import com.beust.jcommander.ParameterException;
/**
* TwitterSwissArmyKnife
* A simple command line utility that allows a user to interact with Twitter's public API.
* it is very simple, straight forward, easy to use and flexible API. It
* provides Method chaining, re-usability, flexibility and simplicity.
*
* @author org.projectspinoza
* @version v1.0.0
*
*/
public class TwitterSwissArmyKnife {
// Root logger shared by the whole utility.
private static Logger log = LogManager.getRootLogger();
// Package scanned (via Reflections) for command classes.
private static final String COMMANDS_SCAN_PACKAGE = "org.projectspinoza.twitterswissarmyknife.command";
// Lazily-created singleton instance (see getInstance()).
private static TwitterSwissArmyKnife tsakInstance = null;
// Parsed top-level command holding the twitter credentials/options.
private TsakCommand tsakCommand;
// JCommander used to parse and look up sub-commands.
private JCommander subCommander;
// Result of the most recently executed command; cleared after write().
private TsakResponse tsakResponse;
// True once the user has been authorized against the Twitter API.
private boolean authorize;
// twitter4j configuration built from the supplied credentials.
private ConfigurationBuilder configurationBuilder;
// Authorized twitter4j client instance.
private Twitter twitter;
/**
* Private constructor (singleton). Prepares a default TsakCommand; it can
* be replaced later via {@link #setTsakCommand(TsakCommand)}.
*/
private TwitterSwissArmyKnife() {
tsakCommand = new TsakCommand();
}
/**
* Returns the singleton TwitterSwissArmyKnife instance, creating it lazily.
* Declared synchronized so concurrent first calls cannot create two instances.
* @return the shared instance
*/
public synchronized static TwitterSwissArmyKnife getInstance() {
if (tsakInstance == null) {
tsakInstance = new TwitterSwissArmyKnife();
}
return tsakInstance;
}
/**
* Returns true if the user has been authorized to access the Twitter API,
* false otherwise (executeDumpCommand calls authorizeUser() when this is false).
* @return the authorize flag
*/
public boolean isAuthorized() {
return authorize;
}
/**
* Sets or overrides the default tsakCommand.
* NOTE(review): writes through the static tsakInstance rather than this —
* equivalent only because the class is a singleton.
* @param tsakCommands the command configuration to use
* @return the singleton instance, for chaining
*/
public TwitterSwissArmyKnife setTsakCommand(TsakCommand tsakCommands) {
log.info("setting tsakCommand");
tsakInstance.tsakCommand = tsakCommands;
return tsakInstance;
}
/**
* returns result e.g. the generated response of the executed command.
* @return tsakResponse
*/
public TsakResponse getResult() {
return tsakResponse;
}
/**
* returns twitter instance
* @return twitter
*/
public Twitter getTwitterInstance() {
return twitter;
}
/**
* writes the result (generated data of the executed command) to the output file.
* @return tsakInstance
*/
public TwitterSwissArmyKnife write(){
BufferedWriter bufferedWriter = null;
try{
BaseCommand baseCommand = getSubCommand(subCommander.getParsedCommand());
bufferedWriter = new BufferedWriter(new FileWriter(new File(baseCommand.getOutputFile())));
baseCommand.write(tsakResponse, bufferedWriter);
tsakResponse = null;
if(bufferedWriter != null){ bufferedWriter.close();}
}catch(IOException ioex){
log.debug(ioex.getMessage());
}catch(NullPointerException npex){
log.debug(npex.getMessage());
}
return tsakInstance;
}
/**
* executes twitter streaming command.
* @throws IOException
*/
public void executeStreamingCommand(String parsedCommand) throws IOException {
CommandStreamStatuses streamStatuses = (CommandStreamStatuses) getSubCommand(parsedCommand);
(new TwitterStreamingExcecutor()).execute(configurationBuilder, streamStatuses);
}
/**
* executes dump command.
* @throws IOException
* @throws TwitterException
*/
public void executeDumpCommand(BaseCommand baseCommand) throws TwitterException {
if (!isAuthorized()) {
authorizeUser();
}
if (isAuthorized()) {
tsakResponse = baseCommand.execute(getTwitterInstance());
if(tsakResponse != null){
showRateLimitStatus(tsakResponse.getRemApiLimits());
}
} else {
log.error("User not authorized!");
}
}
public TwitterSwissArmyKnife executeCommand(String[] args)
throws TwitterException, ParameterException, IOException, InstantiationException, IllegalAccessException {
if (args == null) {
log.debug("Need help?? run > tsak <commandName> --help");
return tsakInstance;
}
JCommander rootCommander = new JCommander();
rootCommander.addCommand("tsak", tsakCommand);
subCommander = rootCommander.getCommands().get("tsak");
activateSubCommands();
rootCommander.parse(args);
String parsedCommand = subCommander.getParsedCommand();
BaseCommand baseCommand = getSubCommand(parsedCommand);
if(baseCommand.needHelp()){
subCommander.usage(parsedCommand);
return tsakInstance;
}
if (!isAuthorized()) {
setConfigurationBuilder(rootCommander);
}
if (parsedCommand.equals("streamStatuses")) {
executeStreamingCommand(parsedCommand);
} else {
executeDumpCommand(baseCommand);
}
return tsakInstance;
}
/**
* authorizes user with the provided credentials.
*
* @throws TwitterException
*/
private void authorizeUser() throws TwitterException {
twitter = new TwitterFactory(getConfigurationBuilder().build()).getInstance();
twitter.verifyCredentials();
authorize = true;
}
/**
* Sets twitter configuration builder
* @param rootCommander
* @throws IOException
*/
private void setConfigurationBuilder(JCommander rootCommander) throws IOException {
if (!setCredentials(rootCommander)) {
log.error("Credentials not provided!");
authorize = false;
return;
}
configurationBuilder = new ConfigurationBuilder();
configurationBuilder.setDebugEnabled(true)
.setOAuthConsumerKey(tsakCommand.getConsumerKey())
.setOAuthConsumerSecret(tsakCommand.getConsumerSecret())
.setOAuthAccessToken(tsakCommand.getAccessToken())
.setOAuthAccessTokenSecret(tsakCommand.getAccessSecret());
}
/**
* returns twitter configuration builder.
*
* @return
*/
private ConfigurationBuilder getConfigurationBuilder() {
return configurationBuilder;
}
/**
* sets/verifies provided twitter credentials and returns True on Success and False on Failure.
*
* @param rootCommander
* @return boolean TRUE/FALSE
* @throws IOException
*/
private boolean setCredentials(JCommander rootCommander) throws IOException {
if (!rootCommander.getParsedCommand().equals("tsak")) {
log.info("Invalid Command: " + rootCommander.getParsedCommand());
return false;
}
if (tsakCommand.getConsumerKey() == null
|| tsakCommand.getConsumerSecret() == null
|| tsakCommand.getAccessToken() == null
|| tsakCommand.getAccessSecret() == null) {
String env_var = System.getenv("TSAK_CONF");
if (env_var == null || env_var.isEmpty()) {
log.error("Environment variable not set. TSAK_CONF {}");
return false;
}
File propConfFile = new File(env_var + File.separator + "tsak.properties");
if (!propConfFile.exists()) {
log.error("tsak.properties file does not exist in: " + env_var);
return false;
}
Properties prop = new Properties();
InputStream propInstream = new FileInputStream(propConfFile);
prop.load(propInstream);
propInstream.close();
tsakCommand.setConsumerKey(prop.getProperty("consumerKey").trim());
tsakCommand.setConsumerSecret(prop.getProperty("consumerSecret").trim());
tsakCommand.setAccessToken(prop.getProperty("accessToken").trim());
tsakCommand.setAccessSecret(prop.getProperty("accessSecret").trim());
}
return true;
}
/**
* activates/prepares all of the commands for execution.
*
*/
public void activateSubCommands() throws InstantiationException, IllegalAccessException{
Reflections reflections = new Reflections(COMMANDS_SCAN_PACKAGE);
Set<Class<? extends BaseCommand>> tsakCommandSet = reflections.getSubTypesOf(BaseCommand.class);
for (Class<?> commandClazz : tsakCommandSet) {
this.subCommander.addCommand(commandClazz.newInstance());
}
}
/**
* returns parsedCommand e.g. the provided command.
*
* @param parsedCommand
* @return BaseCommand
*/
public BaseCommand getSubCommand(String parsedCommand) {
return (BaseCommand) subCommander.getCommands().get(parsedCommand).getObjects().get(0);
}
/**
* prints RateLimitStatus for specific command.
* @param remApiLimits
*/
public void showRateLimitStatus(int remApiLimits) {
log.info("
log.info("DONE!!! REMAINING TWITTER API CALLS: [" + remApiLimits + "]");
log.info("
}
}
|
package com.provinggrounds.match3things.game;
import java.util.Collection;
import java.util.HashSet;
import java.util.Random;
import java.util.Set;
import com.provinggrounds.match3things.util.Direction;
/*
* Represents a game grid. Contains a rectangular grid of numbers.
* All numbers must be positive integers greater than 0, between 1 and numObjectTypes(both inclusive)
*/
public class Grid {
    /** Number of pre-seeded matching sets placed into a fresh grid. */
    private static final int NUM_MATCHING_SETS = 4;
    /** Minimum run length that counts as a match. */
    private static final int NUMBER_ELEMENTS_MATCHING_SET = 3;
    private int width;
    private int height;
    /**
     * Represents number of unique object types; cell values are in
     * [1, numObjectTypes] (0 is used by markMatchingBlocksDeleted for deleted cells).
     */
    int numObjectTypes;
    /** Row-major cell storage: index = width * y + x. */
    Integer[] gameGrid;
    static final Random randomNumberGenerator = new Random();
    Grid(int width, int height, int numObjectTypes) {
        this.width = width;
        this.height = height;
        this.numObjectTypes = numObjectTypes;
    }
    /**
     * Creates a grid of the given dimensions, seeds NUM_MATCHING_SETS matching
     * runs, and fills the remaining cells with random block types.
     */
    public static Grid createGrid(int width, int height, int numObjectTypes) {
        Grid newGrid = new Grid(width, height, numObjectTypes);
        newGrid.initGameGridArray();
        newGrid.fillGameGrid();
        return newGrid;
    }
    /*
     * Creates grid, allocates storage.
     */
    private void initGameGridArray() {
        gameGrid = new Integer[height * width];
    }
    /*
     * Initialize/fill game grid with random objects: create matching sets
     * first, then fill the remaining (still-null) cells with random numbers.
     * NOTE(review): later sets may overwrite cells of earlier ones, so fewer
     * than NUM_MATCHING_SETS distinct runs can survive — pre-existing behavior.
     */
    private void fillGameGrid() {
        Set<Coord> matchingSetStartPoints = getRandomGridPositions(NUM_MATCHING_SETS);
        for (Coord currentPoint : matchingSetStartPoints) {
            // decide match set orientation - vertical/horizontal, up/down/left/right
            Direction matchSetDirection = getRandomMatchSetDirection(currentPoint);
            // decide block type (1..numObjectTypes)
            int blockType = randomNumberGenerator.nextInt(numObjectTypes) + 1;
            // generate block coords and fill in grid
            fillMatchingSet(currentPoint, matchSetDirection, blockType);
        }
        for (int counter = 0; counter < gameGrid.length; counter++) {
            if (gameGrid[counter] == null)
                gameGrid[counter] = randomNumberGenerator.nextInt(numObjectTypes) + 1;
        }
    }
    /*
     * Writes a run of NUMBER_ELEMENTS_MATCHING_SET equal blocks starting at
     * currentPoint, extending in matchSetDirection. The four copy-pasted
     * per-direction loops were collapsed into one offset-driven loop.
     */
    private void fillMatchingSet(Coord currentPoint, Direction matchSetDirection, int blockType) {
        int dx = 0;
        int dy = 0;
        switch (matchSetDirection) {
        case UP:
            dy = -1;
            break;
        case DOWN:
            dy = 1;
            break;
        case LEFT:
            dx = -1;
            break;
        case RIGHT:
            dx = 1;
            break;
        default:
            break;
        }
        Coord[] matchingSetCoords = new Coord[NUMBER_ELEMENTS_MATCHING_SET];
        matchingSetCoords[0] = currentPoint;
        for (int counter = 1; counter < NUMBER_ELEMENTS_MATCHING_SET; counter++) {
            matchingSetCoords[counter] =
                    new Coord(currentPoint.x + dx * counter, currentPoint.y + dy * counter);
        }
        for (Coord elementCoord : matchingSetCoords) {
            gameGrid[width * elementCoord.y + elementCoord.x] = blockType;
        }
    }
    /*
     * Determine which directions are valid (match set must stay within
     * grid boundary), given a starting point and Grid dimensions,
     * and randomly pick a valid direction.
     */
    private Direction getRandomMatchSetDirection(Coord matchSetStartingPoint) {
        Direction[] validDirections = getValidMatchSetDirections(matchSetStartingPoint);
        // return random valid direction
        return validDirections[randomNumberGenerator.nextInt(validDirections.length)];
    }
    /*
     * A direction is valid when the full run (2 extra cells) stays in bounds.
     */
    private Direction[] getValidMatchSetDirections(Coord matchSetStartingPoint) {
        Set<Direction> validDirectionSet = new HashSet<Direction>();
        // check UP
        if (matchSetStartingPoint.y - 2 >= 0) validDirectionSet.add(Direction.UP);
        // check DOWN
        if (matchSetStartingPoint.y + 2 < height) validDirectionSet.add(Direction.DOWN);
        // check LEFT
        if (matchSetStartingPoint.x - 2 >= 0) validDirectionSet.add(Direction.LEFT);
        // check RIGHT
        if (matchSetStartingPoint.x + 2 < width) validDirectionSet.add(Direction.RIGHT);
        return validDirectionSet.toArray(new Direction[validDirectionSet.size()]);
    }
    /*
     * Returns numElements DISTINCT random grid positions (capped at the grid
     * capacity). Fix: the original drew exactly numElements samples into a
     * Set, so duplicate draws silently reduced the count of seeded match sets.
     * Assumes Coord implements equals/hashCode — TODO confirm (the original
     * Set usage already relied on this).
     */
    private Set<Coord> getRandomGridPositions(int numElements) {
        Set<Coord> gridPositions = new HashSet<Coord>();
        int target = Math.min(numElements, width * height);
        while (gridPositions.size() < target) {
            int x = randomNumberGenerator.nextInt(width);
            int y = randomNumberGenerator.nextInt(height);
            gridPositions.add(new Coord(x, y));
        }
        return gridPositions;
    }
    /**
     * Finds all horizontal and vertical matching runs currently on the grid.
     * Does NOT modify the grid (deletion is left to markMatchingBlocksDeleted).
     */
    public Collection<MatchingSet> findMatches() {
        Set<MatchingSet> matchingSets = new HashSet<MatchingSet>();
        // find horizontal matches
        matchingSets.addAll(findHorizontalMatches());
        // find vertical matches
        matchingSets.addAll(findVerticalMatches());
        return matchingSets;
    }
    private Collection<MatchingSet> findVerticalMatches() {
        // for each column, find matches
        Set<MatchingSet> verticalMatchingSets = new HashSet<MatchingSet>();
        for (int columnCounter = 0; columnCounter < width; columnCounter++) {
            verticalMatchingSets.addAll(findMatchesInColumn(columnCounter));
        }
        return verticalMatchingSets;
    }
    /*
     * Scans one column top-to-bottom for runs of equal values of length >=
     * NUMBER_ELEMENTS_MATCHING_SET. Relies on fillGameGrid having left no nulls.
     */
    private Collection<MatchingSet> findMatchesInColumn(final int columnNumber) {
        Set<MatchingSet> matchingSetsInColumn = new HashSet<MatchingSet>();
        int currentIndex = 0;
        while (currentIndex < height) {
            int currentSequenceNumber = gameGrid[currentIndex * width + columnNumber];
            int begIndex = currentIndex;
            int currentSequenceLength = 1;
            currentIndex++;
            while (currentIndex < height
                    && gameGrid[currentIndex * width + columnNumber].equals(currentSequenceNumber)) {
                currentIndex++;
                currentSequenceLength++;
            }
            if (currentSequenceLength >= NUMBER_ELEMENTS_MATCHING_SET) { // if matching set, save it
                int endIndex = currentIndex - 1;
                matchingSetsInColumn.add(
                        createMatchingSetInColumn(begIndex, endIndex, columnNumber, currentSequenceNumber));
            }
        }
        return matchingSetsInColumn;
    }
    /*
     * Builds a MatchingSet for column cells rows [begIndex, endIndex].
     */
    private MatchingSet createMatchingSetInColumn(int begIndex, int endIndex, int columnNumber, int blockType) {
        MatchingSet matchingSet = new MatchingSet();
        int sequenceLength = endIndex - begIndex + 1;
        Coord[] blockCoords = new Coord[sequenceLength];
        matchingSet.setBlockType(blockType);
        for (int counter = begIndex; counter <= endIndex; counter++) {
            blockCoords[counter - begIndex] = new Coord(columnNumber, counter);
        }
        matchingSet.setCoordinates(blockCoords);
        return matchingSet;
    }
    private Collection<MatchingSet> findHorizontalMatches() {
        // for each row, find matches
        Set<MatchingSet> horizontalMatchingSets = new HashSet<MatchingSet>();
        for (int rowCounter = 0; rowCounter < height; rowCounter++) {
            horizontalMatchingSets.addAll(findMatchesInRow(rowCounter));
        }
        return horizontalMatchingSets;
    }
    /*
     * Scans one row left-to-right for runs of equal values of length >=
     * NUMBER_ELEMENTS_MATCHING_SET.
     */
    private Collection<MatchingSet> findMatchesInRow(int rowNumber) {
        Set<MatchingSet> matchingSetsInRow = new HashSet<MatchingSet>();
        int beginningOfRow = rowNumber * width;
        int endOfRow = (rowNumber + 1) * width - 1;
        int currentIndex = beginningOfRow;
        while (currentIndex <= endOfRow) {
            int currentSequenceNumber = gameGrid[currentIndex];
            int begIndex = currentIndex;
            int currentSequenceLength = 1;
            currentIndex++;
            while (currentIndex <= endOfRow && gameGrid[currentIndex].equals(currentSequenceNumber)) {
                currentIndex++;
                currentSequenceLength++;
            }
            int endIndex = currentIndex - 1;
            if (currentSequenceLength >= NUMBER_ELEMENTS_MATCHING_SET) { // if matching set, save it
                matchingSetsInRow.add(createMatchingSetInRow(
                        begIndex - beginningOfRow, endIndex - beginningOfRow, rowNumber, currentSequenceNumber));
            }
        }
        return matchingSetsInRow;
    }
    /*
     * Builds a MatchingSet for row cells columns [beg, end].
     */
    private MatchingSet createMatchingSetInRow(int beg, int end, int rowNumber,
            int blockType) {
        MatchingSet matchingSet = new MatchingSet();
        int sequenceLength = end - beg + 1;
        Coord[] blockCoords = new Coord[sequenceLength];
        matchingSet.setBlockType(blockType);
        for (int counter = beg; counter <= end; counter++) {
            blockCoords[counter - beg] = new Coord(counter, rowNumber);
        }
        matchingSet.setCoordinates(blockCoords);
        return matchingSet;
    }
    public Integer[] getGameGrid() {
        return gameGrid;
    }
    public int getWidth() {
        return width;
    }
    public int getHeight() {
        return height;
    }
    /**
     * Marks every cell of the given matching sets as deleted by writing 0
     * (the reserved "empty" value outside the normal 1..numObjectTypes range).
     */
    public void markMatchingBlocksDeleted(Collection<MatchingSet> matchingSets) {
        for (MatchingSet ms : matchingSets) {
            for (Coord position : ms.getCoordinates()) {
                gameGrid[position.y * width + position.x] = 0;
            }
        }
    }
}
|
package org.ngsutils.cli.fastq;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.util.List;
import net.sf.samtools.SAMFileHeader;
import net.sf.samtools.SAMFileHeader.SortOrder;
import net.sf.samtools.SAMFileWriter;
import net.sf.samtools.SAMFileWriterFactory;
import net.sf.samtools.SAMProgramRecord;
import net.sf.samtools.SAMRecord;
import org.ngsutils.NGSUtils;
import org.ngsutils.cli.AbstractCommand;
import org.ngsutils.cli.Command;
import org.ngsutils.fastq.FastqRead;
import org.ngsutils.fastq.FastqReader;
import org.ngsutils.support.IterUtils;
import com.lexicalscope.jewel.cli.ArgumentValidationException;
import com.lexicalscope.jewel.cli.CommandLineInterface;
import com.lexicalscope.jewel.cli.Option;
import com.lexicalscope.jewel.cli.Unparsed;
@CommandLineInterface(application = "ngsutilsj fastq-bam")
@Command(name = "fastq-bam", desc = "Converts a FASTQ file (or two paired files) into an unmapped BAM file", cat="fastq")
public class FastqToBam extends AbstractCommand {
    /** One reader for single-end input, two for paired-end. */
    private FastqReader[] readers = null;
    private String outputFilename = null;
    private String tmpDir = null;
    private boolean calcMD5 = false;
    private boolean force = false;
    private boolean comments = false;
    private boolean serial = false;
    private int compressionLevel = 6; // sam.jar default is 5, but 6 is the standard default

    public FastqToBam() {
    }

    /**
     * Accepts one (single-end) or two (paired-end) FASTQ files; exits with an
     * error for any other count.
     */
    @Unparsed(name="FILE1 FILE2")
    public void setFilenames(List<File> files) throws IOException {
        if (files.size() == 2) {
            this.readers = new FastqReader[2];
            this.readers[0] = new FastqReader(files.get(0));
            this.readers[1] = new FastqReader(files.get(1));
        } else if (files.size() == 1) {
            this.readers = new FastqReader[1];
            this.readers[0] = new FastqReader(files.get(0));
        } else {
            System.err.println("You must supply one or two FASTQ files to convert!");
            System.exit(1);
        }
    }

    @Option(description = "Output filename (Default: stdout)", shortName = "o", defaultValue="-", longName = "output")
    public void setOutputFilename(String outFilename) {
        this.outputFilename = outFilename;
    }

    @Option(description = "Automatically write an MD5 file", defaultToNull=true, longName = "md5")
    public void setCalcMD5(boolean val) {
        this.calcMD5 = val;
    }

    @Option(description = "Add paired FASTQ files serially, rather than interleaved", longName = "serial")
    public void setSerial(boolean val) {
        this.serial = val;
    }

    @Option(description = "Force overwriting output file", longName = "force")
    public void setForce(boolean val) {
        this.force = val;
    }

    @Option(description = "Compression-level: fast (1)", longName = "fast")
    public void setFast(boolean val) {
        if (val) {
            compressionLevel = 1;
        }
    }

    @Option(description = "Compression-level: best (9)", longName = "best")
    public void setBest(boolean val) {
        if (val) {
            compressionLevel = 9;
        }
    }

    @Option(description = "Include comments field from FASTQ file", longName = "comments")
    public void setComments(boolean val) {
        this.comments = val;
    }

    @Option(description = "Write temporary files here", longName="tmpdir", defaultToNull=true)
    public void setTmpDir(String tmpDir) {
        this.tmpDir = tmpDir;
    }

    /**
     * Builds one unmapped SAMRecord from a FASTQ read. Extracted to remove the
     * four near-identical copies in exec()'s single/serial/interleaved branches.
     *
     * @param header      the shared SAM header
     * @param read        the FASTQ read to convert
     * @param paired      true when the read is part of a pair
     * @param firstOfPair for paired reads, true for R1 and false for R2 (ignored otherwise)
     * @return the populated, unmapped record
     */
    private SAMRecord buildUnmappedRecord(SAMFileHeader header, FastqRead read,
            boolean paired, boolean firstOfPair) {
        SAMRecord record = new SAMRecord(header);
        record.setReadPairedFlag(paired);
        record.setReadUnmappedFlag(true);
        if (paired) {
            record.setMateUnmappedFlag(true);
            record.setFirstOfPairFlag(firstOfPair);
            record.setSecondOfPairFlag(!firstOfPair);
        }
        record.setReadName(read.getName());
        record.setReadString(read.getSeq());
        record.setBaseQualityString(read.getQual());
        if (comments && read.getComment() != null) {
            record.setAttribute("CO", read.getComment());
        }
        return record;
    }

    /**
     * Converts the configured FASTQ input(s) into an unmapped BAM/SAM stream.
     * Paired files are interleaved by default, or appended serially with
     * --serial. Readers and the writer are now closed in a finally block so
     * they no longer leak when conversion throws mid-way.
     */
    @Override
    public void exec() throws IOException {
        if (readers == null) {
            throw new ArgumentValidationException("You must supply two FASTQ files to merge.");
        }
        SAMFileWriterFactory factory = new SAMFileWriterFactory();
        File outfile = null;
        OutputStream outStream = null;
        if (outputFilename.equals("-")) {
            outStream = new BufferedOutputStream(System.out);
        } else {
            outfile = new File(outputFilename);
            if (outfile.exists() && !force) {
                System.err.println("The output file: "+outputFilename+" exists!\nYou must set the --force option to overwrite the output file.");
                System.exit(1);
            }
            if (calcMD5) {
                factory.setCreateMd5File(true);
            }
        }
        if (verbose) {
            for (FastqReader reader: readers) {
                System.err.println("Input: "+reader.getFilename());
            }
            if (comments) {
                System.err.println("Including comments");
            }
            if (compressionLevel == 1) {
                System.err.println("Compression: fast");
            }
            if (compressionLevel == 9) {
                System.err.println("Compression: best");
            }
        }
        // Temp directory: explicit --tmpdir wins; otherwise colocate with the
        // output file, falling back to the current directory for stdout output.
        if (tmpDir != null) {
            factory.setTempDirectory(new File(tmpDir));
        } else if (outfile == null || outfile.getParent() == null) {
            factory.setTempDirectory(new File(".").getCanonicalFile());
        } else if (outfile!=null) {
            factory.setTempDirectory(outfile.getParentFile());
        }
        final SAMFileHeader header = new SAMFileHeader();
        header.setSortOrder(SortOrder.unsorted);
        SAMProgramRecord pg = NGSUtils.buildSAMProgramRecord("fastq-bam");
        header.addProgramRecord(pg);
        final SAMFileWriter out;
        if (outfile != null) {
            if (verbose) {
                System.err.println("Output: "+outfile);
            }
            out = factory.makeBAMWriter(header, true, outfile, compressionLevel);
        } else {
            if (verbose) {
                System.err.println("Output: stdout");
            }
            out = factory.makeSAMWriter(header, true, outStream);
        }
        try {
            long i = 0;
            if (readers.length == 1) {
                // Single-end: one unpaired record per read.
                for (FastqRead read : readers[0]) {
                    if (verbose) {
                        i++;
                        if (i % 100000 == 0) {
                            System.err.println("Read: " + i);
                        }
                    }
                    out.addAlignment(buildUnmappedRecord(header, read, false, false));
                }
            } else if (serial) {
                // Serial: all of R1 first, then all of R2.
                for (FastqRead read : readers[0]) {
                    if (verbose) {
                        i++;
                        if (i % 100000 == 0) {
                            System.err.println("Read: " + i);
                        }
                    }
                    out.addAlignment(buildUnmappedRecord(header, read, true, true));
                }
                i = 0;
                for (FastqRead read : readers[1]) {
                    if (verbose) {
                        i++;
                        if (i % 100000 == 0) {
                            System.err.println("Read: " + i);
                        }
                    }
                    out.addAlignment(buildUnmappedRecord(header, read, true, false));
                }
            } else {
                // Interleaved (default): R1/R2 pairs emitted together; names must match.
                IterUtils.zip(readers[0], readers[1], new IterUtils.Each<FastqRead, FastqRead>() {
                    long i = 0;
                    public void each(FastqRead one, FastqRead two) {
                        if (verbose) {
                            i++;
                            if (i % 100000 == 0) {
                                System.err.println("Read: " + i);
                            }
                        }
                        if (one.getName().equals(two.getName())) {
                            out.addAlignment(buildUnmappedRecord(header, one, true, true));
                            out.addAlignment(buildUnmappedRecord(header, two, true, false));
                        } else {
                            System.err.println("Error! Unpaired files! ");
                            System.exit(1);
                        }
                    }
                });
            }
        } finally {
            for (FastqReader reader: readers) {
                reader.close();
            }
            out.close();
        }
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.